Compare commits: revert-341 ... features/r (82 commits)
| SHA1 |
|---|
| 88d364a6e2 |
| 2b84985aa7 |
| ab6499ce1e |
| 412bec45aa |
| c3dcd94ebc |
| cb8f642297 |
| 92f19c8491 |
| f3f8b31be5 |
| 63cadf04ea |
| 541e75350f |
| 8806e74419 |
| 381f8161b1 |
| 884123b7ce |
| 35aa875762 |
| 9b0e79fcab |
| 8ba0faa9ee |
| d464185bba |
| 7f4d71252b |
| 7950311767 |
| 194f9a9ca9 |
| a72021fd63 |
| d910b3725b |
| 99f209019e |
| c11a4e0ad3 |
| 4a429ec315 |
| eadccfe332 |
| dfab5b5ceb |
| 862029215c |
| 559c3de213 |
| e3bf7358d7 |
| b58ec9e2b9 |
| 95b5d54129 |
| bcce9c3e9c |
| 4c05fe569c |
| e550665df7 |
| d92d34b162 |
| f27be808a4 |
| 855d3519b6 |
| 37f232e319 |
| ac1c29eac0 |
| 56072172f5 |
| 64d957dece |
| 3edc85ec21 |
| d8006a9495 |
| a2cfc07412 |
| 1295ea5d40 |
| 4664b3cd1e |
| dc7e0e3ef6 |
| 9aa615aa98 |
| 85b6bf99a4 |
| 78ec3d5662 |
| a7b5f2ef39 |
| f71701f39d |
| 54008a2342 |
| 1670c325c6 |
| 534a994b4c |
| 359efca201 |
| 65809140f3 |
| 3f1622f9e7 |
| 8332a59194 |
| 05abea3a3a |
| e7fc9ea243 |
| eea3ea7675 |
| 895ac2626d |
| 94dc86e163 |
| 729b1c9fa6 |
| 82b7fe649f |
| 76417d6ac6 |
| fe995542ab |
| 8f5209063d |
| 241a8f6be6 |
| a8a0a6916a |
| 8d10dce651 |
| a2938c9348 |
| 8017f4b55b |
| 588d2e295f |
| c10b84f08d |
| 99044bedd7 |
| 3afe6f1adc |
| fcd9038225 |
| 9d82024f1a |
| bcefe6a73e |
.github/workflows/unit_tests.yaml (2 changes, vendored)
```diff
@@ -145,7 +145,7 @@ jobs:
       shell: runuser -u spack-test -- bash {0}
       run: |
         source share/spack/setup-env.sh
-        spack -d solve zlib
+        spack -d bootstrap now --dev
         spack unit-test -k 'not cvs and not svn and not hg' -x --verbose
   # Test for the clingo based solver (using clingo-cffi)
   clingo-cffi:
```
```diff
@@ -394,7 +394,7 @@ are indicated at the start of the path with ``~`` or ``~user``.
 Spack-specific variables
 ^^^^^^^^^^^^^^^^^^^^^^^^
 
-Spack understands several special variables. These are:
+Spack understands over a dozen special variables. These are:
 
 * ``$env``: name of the currently active :ref:`environment <environments>`
 * ``$spack``: path to the prefix of this Spack installation
```
```diff
@@ -175,14 +175,11 @@ Spec-related modules
 ^^^^^^^^^^^^^^^^^^^^
 
 :mod:`spack.spec`
-  Contains :class:`~spack.spec.Spec` and :class:`~spack.spec.SpecParser`.
-  Also implements most of the logic for normalization and concretization
+  Contains :class:`~spack.spec.Spec`. Also implements most of the logic for concretization
   of specs.
 
-:mod:`spack.parse`
-  Contains some base classes for implementing simple recursive descent
-  parsers: :class:`~spack.parse.Parser` and :class:`~spack.parse.Lexer`.
-  Used by :class:`~spack.spec.SpecParser`.
+:mod:`spack.parser`
+  Contains :class:`~spack.parser.SpecParser` and functions related to parsing specs.
 
 :mod:`spack.concretize`
   Contains :class:`~spack.concretize.Concretizer` implementation,
```
```diff
@@ -23,7 +23,10 @@ def symlink(real_path, link_path):
 
     On Windows, use junctions if os.symlink fails.
     """
-    if not is_windows or _win32_can_symlink():
+    if not is_windows:
         os.symlink(real_path, link_path)
+    elif _win32_can_symlink():
+        # Windows requires target_is_directory=True when the target is a dir.
+        os.symlink(real_path, link_path, target_is_directory=os.path.isdir(real_path))
     else:
         try:
```
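The rewritten branch matters on Windows, where `os.symlink` refuses to link to a directory unless `target_is_directory=True` is passed. A minimal standalone sketch of the same dispatch (the `can_symlink` parameter stands in for the `_win32_can_symlink()` helper from the diff, and the junction fallback is elided):

```python
import os
import sys


def symlink_sketch(real_path, link_path, can_symlink=lambda: True):
    """Sketch of the new dispatch: POSIX platforms symlink directly, while
    Windows must flag directory targets explicitly."""
    if sys.platform != "win32":
        os.symlink(real_path, link_path)
    elif can_symlink():
        # Windows raises OSError for directory targets unless flagged.
        os.symlink(real_path, link_path, target_is_directory=os.path.isdir(real_path))
    else:
        # The original code falls back to creating a junction here.
        raise OSError("symlinks unavailable; a junction would be created instead")
```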
```diff
@@ -914,8 +914,6 @@ def _fetch_spec_from_mirror(spec_url):
             return Spec.from_dict(specfile_json)
         if spec_url.endswith(".json"):
             return Spec.from_json(spec_file_contents)
-        if spec_url.endswith(".yaml"):
-            return Spec.from_yaml(spec_file_contents)
 
     tp = multiprocessing.pool.ThreadPool(processes=concurrency)
     try:
@@ -990,8 +988,6 @@ def file_read_method(file_path):
             "*.spec.json.sig",
             "--include",
             "*.spec.json",
-            "--include",
-            "*.spec.yaml",
             cache_prefix,
             tmpspecsdir,
         ]
@@ -1001,7 +997,7 @@ def file_read_method(file_path):
             "Using aws s3 sync to download specs from {0} to {1}".format(cache_prefix, tmpspecsdir)
         )
         aws(*sync_command_args, output=os.devnull, error=os.devnull)
-        file_list = fsys.find(tmpspecsdir, ["*.spec.json.sig", "*.spec.json", "*.spec.yaml"])
+        file_list = fsys.find(tmpspecsdir, ["*.spec.json.sig", "*.spec.json"])
         read_fn = file_read_method
     except Exception:
         tty.warn("Failed to use aws s3 sync to retrieve specs, falling back to parallel fetch")
@@ -1037,9 +1033,7 @@ def url_read_method(url):
         file_list = [
             url_util.join(cache_prefix, entry)
             for entry in web_util.list_url(cache_prefix)
-            if entry.endswith(".yaml")
-            or entry.endswith("spec.json")
-            or entry.endswith("spec.json.sig")
+            if entry.endswith("spec.json") or entry.endswith("spec.json.sig")
         ]
         read_fn = url_read_method
     except KeyError as inst:
@@ -1101,14 +1095,6 @@ def generate_package_index(cache_prefix, concurrency=32):
         tty.error("Unabled to generate package index, {0}".format(err))
         return
 
-    if any(x.endswith(".yaml") for x in file_list):
-        msg = (
-            "The mirror in '{}' contains specs in the deprecated YAML format.\n\n\tSupport for "
-            "this format will be removed in v0.20, please regenerate the build cache with a "
-            "recent Spack\n"
-        ).format(cache_prefix)
-        warnings.warn(msg)
-
     tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix))
 
     tmpdir = tempfile.mkdtemp()
@@ -1236,15 +1222,11 @@ def _build_tarball(
     specfile_name = tarball_name(spec, ".spec.json")
     specfile_path = os.path.realpath(os.path.join(cache_prefix, specfile_name))
     signed_specfile_path = "{0}.sig".format(specfile_path)
-    deprecated_specfile_path = specfile_path.replace(".spec.json", ".spec.yaml")
 
     remote_specfile_path = url_util.join(
         outdir, os.path.relpath(specfile_path, os.path.realpath(tmpdir))
     )
     remote_signed_specfile_path = "{0}.sig".format(remote_specfile_path)
-    remote_specfile_path_deprecated = url_util.join(
-        outdir, os.path.relpath(deprecated_specfile_path, os.path.realpath(tmpdir))
-    )
 
     # If force and exists, overwrite. Otherwise raise exception on collision.
     if force:
@@ -1252,12 +1234,8 @@ def _build_tarball(
             web_util.remove_url(remote_specfile_path)
         if web_util.url_exists(remote_signed_specfile_path):
             web_util.remove_url(remote_signed_specfile_path)
-        if web_util.url_exists(remote_specfile_path_deprecated):
-            web_util.remove_url(remote_specfile_path_deprecated)
-    elif (
-        web_util.url_exists(remote_specfile_path)
-        or web_util.url_exists(remote_signed_specfile_path)
-        or web_util.url_exists(remote_specfile_path_deprecated)
+    elif web_util.url_exists(remote_specfile_path) or web_util.url_exists(
+        remote_signed_specfile_path
     ):
         raise NoOverwriteException(url_util.format(remote_specfile_path))
 
@@ -1313,12 +1291,10 @@ def _build_tarball(
 
     with open(spec_file, "r") as inputfile:
         content = inputfile.read()
-        if spec_file.endswith(".yaml"):
-            spec_dict = yaml.load(content)
-        elif spec_file.endswith(".json"):
+        if spec_file.endswith(".json"):
            spec_dict = sjson.load(content)
         else:
-            raise ValueError("{0} not a valid spec file type (json or yaml)".format(spec_file))
+            raise ValueError("{0} not a valid spec file type".format(spec_file))
     spec_dict["buildcache_layout_version"] = 1
     bchecksum = {}
     bchecksum["hash_algorithm"] = "sha256"
@@ -1539,7 +1515,7 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
     # Assumes we care more about finding a spec file by preferred ext
     # than by mirrory priority. This can be made less complicated as
     # we remove support for deprecated spec formats and buildcache layouts.
-    for ext in ["json.sig", "json", "yaml"]:
+    for ext in ["json.sig", "json"]:
         for mirror_to_try in mirrors_to_try:
             specfile_url = "{0}.{1}".format(mirror_to_try["specfile"], ext)
             spackfile_url = mirror_to_try["spackfile"]
@@ -1576,13 +1552,6 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
                 # the remaining mirrors, looking for one we can use.
                 tarball_stage = try_fetch(spackfile_url)
                 if tarball_stage:
-                    if ext == "yaml":
-                        msg = (
-                            "Reading {} from mirror.\n\n\tThe YAML format for buildcaches is "
-                            "deprecated and will be removed in v0.20\n"
-                        ).format(spackfile_url)
-                        warnings.warn(msg)
-
                     return {
                         "tarball_stage": tarball_stage,
                         "specfile_stage": local_specfile_stage,
@@ -1826,8 +1795,6 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
     spackfile_path = os.path.join(stagepath, spackfile_name)
     tarfile_name = tarball_name(spec, ".tar.gz")
     tarfile_path = os.path.join(extract_to, tarfile_name)
-    deprecated_yaml_name = tarball_name(spec, ".spec.yaml")
-    deprecated_yaml_path = os.path.join(extract_to, deprecated_yaml_name)
     json_name = tarball_name(spec, ".spec.json")
     json_path = os.path.join(extract_to, json_name)
     with closing(tarfile.open(spackfile_path, "r")) as tar:
@@ -1839,8 +1806,6 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
 
     if os.path.exists(json_path):
         specfile_path = json_path
-    elif os.path.exists(deprecated_yaml_path):
-        specfile_path = deprecated_yaml_path
     else:
         raise ValueError("Cannot find spec file for {0}.".format(extract_to))
 
@@ -1887,10 +1852,8 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False, for
         content = inputfile.read()
         if specfile_path.endswith(".json.sig"):
             spec_dict = Spec.extract_json_from_clearsig(content)
-        elif specfile_path.endswith(".json"):
-            spec_dict = sjson.load(content)
         else:
-            spec_dict = syaml.load(content)
+            spec_dict = sjson.load(content)
 
     bchecksum = spec_dict["binary_cache_checksum"]
     filename = download_result["tarball_stage"].save_filename
@@ -1902,7 +1865,7 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False, for
         or int(spec_dict["buildcache_layout_version"]) < 1
     ):
         # Handle the older buildcache layout where the .spack file
-        # contains a spec json/yaml, maybe an .asc file (signature),
+        # contains a spec json, maybe an .asc file (signature),
         # and another tarball containing the actual install tree.
         tmpdir = tempfile.mkdtemp()
         try:
@@ -2053,17 +2016,12 @@ def try_direct_fetch(spec, mirrors=None):
     """
     Try to find the spec directly on the configured mirrors
     """
-    deprecated_specfile_name = tarball_name(spec, ".spec.yaml")
     specfile_name = tarball_name(spec, ".spec.json")
     signed_specfile_name = tarball_name(spec, ".spec.json.sig")
     specfile_is_signed = False
-    specfile_is_json = True
     found_specs = []
 
     for mirror in spack.mirror.MirrorCollection(mirrors=mirrors).values():
-        buildcache_fetch_url_yaml = url_util.join(
-            mirror.fetch_url, _build_cache_relative_path, deprecated_specfile_name
-        )
         buildcache_fetch_url_json = url_util.join(
             mirror.fetch_url, _build_cache_relative_path, specfile_name
         )
@@ -2077,28 +2035,19 @@ def try_direct_fetch(spec, mirrors=None):
             try:
                 _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
             except (URLError, web_util.SpackWebError, HTTPError) as url_err_x:
-                try:
-                    _, _, fs = web_util.read_from_url(buildcache_fetch_url_yaml)
-                    specfile_is_json = False
-                except (URLError, web_util.SpackWebError, HTTPError) as url_err_y:
-                    tty.debug(
-                        "Did not find {0} on {1}".format(
-                            specfile_name, buildcache_fetch_url_signed_json
-                        ),
-                        url_err,
-                        level=2,
-                    )
-                    tty.debug(
-                        "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_json),
-                        url_err_x,
-                        level=2,
-                    )
-                    tty.debug(
-                        "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_yaml),
-                        url_err_y,
-                        level=2,
-                    )
-                    continue
+                tty.debug(
+                    "Did not find {0} on {1}".format(
+                        specfile_name, buildcache_fetch_url_signed_json
+                    ),
+                    url_err,
+                    level=2,
+                )
+                tty.debug(
+                    "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_json),
+                    url_err_x,
+                    level=2,
+                )
+                continue
         specfile_contents = codecs.getreader("utf-8")(fs).read()
 
         # read the spec from the build cache file. All specs in build caches
@@ -2107,10 +2056,8 @@ def try_direct_fetch(spec, mirrors=None):
         if specfile_is_signed:
             specfile_json = Spec.extract_json_from_clearsig(specfile_contents)
             fetched_spec = Spec.from_dict(specfile_json)
-        elif specfile_is_json:
-            fetched_spec = Spec.from_json(specfile_contents)
         else:
-            fetched_spec = Spec.from_yaml(specfile_contents)
+            fetched_spec = Spec.from_json(specfile_contents)
         fetched_spec._mark_concrete()
 
         found_specs.append(
@@ -2321,7 +2268,7 @@ def needs_rebuild(spec, mirror_url):
     specfile_path = os.path.join(cache_prefix, specfile_name)
 
     # Only check for the presence of the json version of the spec. If the
-    # mirror only has the yaml version, or doesn't have the spec at all, we
+    # mirror only has the json version, or doesn't have the spec at all, we
     # need to rebuild.
     return not web_util.url_exists(specfile_path)
 
@@ -2429,7 +2376,6 @@ def download_single_spec(concrete_spec, destination, mirror_url=None):
             "url": [
                 tarball_name(concrete_spec, ".spec.json.sig"),
                 tarball_name(concrete_spec, ".spec.json"),
-                tarball_name(concrete_spec, ".spec.yaml"),
             ],
             "path": destination,
             "required": True,
```
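Taken together, these hunks remove the deprecated `.spec.yaml` code paths: after the change only clearsigned JSON (`.spec.json.sig`) and plain JSON (`.spec.json`) spec files are read, and anything else raises an error. A hedged sketch of the resulting dispatch, where `json.loads` and the hand-rolled clearsign stripping stand in for the `Spec.extract_json_from_clearsig` and `sjson.load` helpers used in the real code:

```python
import json


def load_spec_dict(specfile_path, content):
    """Sketch of the post-change behavior: only JSON spec files survive."""
    if specfile_path.endswith(".json.sig"):
        # Strip the GPG clearsign armor before parsing (a rough stand-in for
        # Spec.extract_json_from_clearsig; real armor handling is more
        # careful, e.g. about dash-escaping).
        payload = content.split("-----BEGIN PGP SIGNATURE-----")[0]
        payload = payload.split("\n\n", 1)[-1]
        return json.loads(payload)
    if specfile_path.endswith(".json"):
        return json.loads(content)
    # .spec.yaml is no longer accepted anywhere on this path.
    raise ValueError("{0} not a valid spec file type".format(specfile_path))
```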
File diff suppressed because it is too large.
lib/spack/spack/bootstrap/__init__.py (new file, 25 lines)
```python
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Function and classes needed to bootstrap Spack itself."""

from .config import ensure_bootstrap_configuration, is_bootstrapping
from .core import (
    all_core_root_specs,
    ensure_core_dependencies,
    ensure_patchelf_in_path_or_raise,
)
from .environment import BootstrapEnvironment, ensure_environment_dependencies
from .status import status_message

__all__ = [
    "is_bootstrapping",
    "ensure_bootstrap_configuration",
    "ensure_core_dependencies",
    "ensure_patchelf_in_path_or_raise",
    "all_core_root_specs",
    "ensure_environment_dependencies",
    "BootstrapEnvironment",
    "status_message",
]
```
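A hypothetical caller would combine the re-exported names roughly as follows; the names are exactly those in `__all__`, while the call pattern is an assumption based on how the modules below use each other:

```python
import spack.bootstrap

# Swap in the isolated bootstrap configuration (the context manager is
# reference counted, so nesting is safe), then make sure the core
# dependencies (clingo, GnuPG, patchelf on Linux) are available.
with spack.bootstrap.ensure_bootstrap_configuration():
    assert spack.bootstrap.is_bootstrapping()
    spack.bootstrap.ensure_core_dependencies()
```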
lib/spack/spack/bootstrap/_common.py (new file, 218 lines)
```python
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Common basic functions used through the spack.bootstrap package"""
import fnmatch
import os.path
import re
import sys
import sysconfig
import warnings

import archspec.cpu

import llnl.util.filesystem as fs
from llnl.util import tty

import spack.store
import spack.util.environment
import spack.util.executable

from .config import spec_for_current_python


def _python_import(module):
    try:
        __import__(module)
    except ImportError:
        return False
    return True


def _try_import_from_store(module, query_spec, query_info=None):
    """Return True if the module can be imported from an already
    installed spec, False otherwise.

    Args:
        module: Python module to be imported
        query_spec: spec that may provide the module
        query_info (dict or None): if a dict is passed it is populated with the
            command found and the concrete spec providing it
    """
    # If it is a string assume it's one of the root specs by this module
    if isinstance(query_spec, str):
        # We have to run as part of this python interpreter
        query_spec += " ^" + spec_for_current_python()

    installed_specs = spack.store.db.query(query_spec, installed=True)

    for candidate_spec in installed_specs:
        pkg = candidate_spec["python"].package
        module_paths = [
            os.path.join(candidate_spec.prefix, pkg.purelib),
            os.path.join(candidate_spec.prefix, pkg.platlib),
        ]  # type: list[str]
        path_before = list(sys.path)

        # NOTE: try module_paths first and last, last allows an existing version in path
        # to be picked up and used, possibly depending on something in the store, first
        # allows the bootstrap version to work when an incompatible version is in
        # sys.path
        orders = [
            module_paths + sys.path,
            sys.path + module_paths,
        ]
        for path in orders:
            sys.path = path
            try:
                _fix_ext_suffix(candidate_spec)
                if _python_import(module):
                    msg = (
                        f"[BOOTSTRAP MODULE {module}] The installed spec "
                        f'"{query_spec}/{candidate_spec.dag_hash()}" '
                        f'provides the "{module}" Python module'
                    )
                    tty.debug(msg)
                    if query_info is not None:
                        query_info["spec"] = candidate_spec
                    return True
            except Exception as exc:  # pylint: disable=broad-except
                msg = (
                    "unexpected error while trying to import module "
                    f'"{module}" from spec "{candidate_spec}" [error="{str(exc)}"]'
                )
                warnings.warn(msg)
            else:
                msg = "Spec {0} did not provide module {1}"
                warnings.warn(msg.format(candidate_spec, module))

        sys.path = path_before

    return False


def _fix_ext_suffix(candidate_spec):
    """Fix the external suffixes of Python extensions on the fly for
    platforms that may need it

    Args:
        candidate_spec (Spec): installed spec with a Python module
            to be checked.
    """
    # Here we map target families to the patterns expected
    # by pristine CPython. Only architectures with known issues
    # are included. Known issues:
    #
    # [RHEL + ppc64le]: https://github.com/spack/spack/issues/25734
    #
    _suffix_to_be_checked = {
        "ppc64le": {
            "glob": "*.cpython-*-powerpc64le-linux-gnu.so",
            "re": r".cpython-[\w]*-powerpc64le-linux-gnu.so",
            "fmt": r"{module}.cpython-{major}{minor}m-powerpc64le-linux-gnu.so",
        }
    }

    # If the current architecture is not problematic return
    generic_target = archspec.cpu.host().family
    if str(generic_target) not in _suffix_to_be_checked:
        return

    # If there's no EXT_SUFFIX (Python < 3.5) or the suffix matches
    # the expectations, return since the package is surely good
    ext_suffix = sysconfig.get_config_var("EXT_SUFFIX")
    if ext_suffix is None:
        return

    expected = _suffix_to_be_checked[str(generic_target)]
    if fnmatch.fnmatch(ext_suffix, expected["glob"]):
        return

    # If we are here it means the current interpreter expects different names
    # than pristine CPython. So:
    # 1. Find what we have installed
    # 2. Create symbolic links for the other names, if they're not there already

    # Check if standard names are installed and if we have to create
    # link for this interpreter
    standard_extensions = fs.find(candidate_spec.prefix, expected["glob"])
    link_names = [re.sub(expected["re"], ext_suffix, s) for s in standard_extensions]
    for file_name, link_name in zip(standard_extensions, link_names):
        if os.path.exists(link_name):
            continue
        os.symlink(file_name, link_name)

    # Check if this interpreter installed something and we have to create
    # links for a standard CPython interpreter
    non_standard_extensions = fs.find(candidate_spec.prefix, "*" + ext_suffix)
    for abs_path in non_standard_extensions:
        directory, filename = os.path.split(abs_path)
        module = filename.split(".")[0]
        link_name = os.path.join(
            directory,
            expected["fmt"].format(
                module=module, major=sys.version_info[0], minor=sys.version_info[1]
            ),
        )
        if os.path.exists(link_name):
            continue
        os.symlink(abs_path, link_name)


def _executables_in_store(executables, query_spec, query_info=None):
    """Return True if at least one of the executables can be retrieved from
    a spec in store, False otherwise.

    The different executables must provide the same functionality and are
    "alternate" to each other, i.e. the function will exit True on the first
    executable found.

    Args:
        executables: list of executables to be searched
        query_spec: spec that may provide the executable
        query_info (dict or None): if a dict is passed it is populated with the
            command found and the concrete spec providing it
    """
    executables_str = ", ".join(executables)
    msg = "[BOOTSTRAP EXECUTABLES {0}] Try installed specs with query '{1}'"
    tty.debug(msg.format(executables_str, query_spec))
    installed_specs = spack.store.db.query(query_spec, installed=True)
    if installed_specs:
        for concrete_spec in installed_specs:
            bin_dir = concrete_spec.prefix.bin
            # IF we have a "bin" directory and it contains
            # the executables we are looking for
            if (
                os.path.exists(bin_dir)
                and os.path.isdir(bin_dir)
                and spack.util.executable.which_string(*executables, path=bin_dir)
            ):
                spack.util.environment.path_put_first("PATH", [bin_dir])
                if query_info is not None:
                    query_info["command"] = spack.util.executable.which(*executables, path=bin_dir)
                    query_info["spec"] = concrete_spec
                return True
    return False


def _root_spec(spec_str):
    """Add a proper compiler and target to a spec used during bootstrapping.

    Args:
        spec_str (str): spec to be bootstrapped. Must be without compiler and target.
    """
    # Add a proper compiler hint to the root spec. We use GCC for
    # everything but MacOS and Windows.
    if str(spack.platforms.host()) == "darwin":
        spec_str += " %apple-clang"
    elif str(spack.platforms.host()) == "windows":
        spec_str += " %msvc"
    else:
        spec_str += " %gcc"

    target = archspec.cpu.host().family
    spec_str += f" target={target}"

    tty.debug(f"[BOOTSTRAP ROOT SPEC] {spec_str}")
    return spec_str
```
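The heart of `_try_import_from_store` is the two-order probe of `sys.path`: the candidate store paths are tried ahead of `sys.path` first (so the bootstrapped module wins over an incompatible one already on the path), then behind it (so a compatible pre-existing module can still be reused). The probe in isolation, as a sketch:

```python
import importlib
import sys


def probe_import(module, module_paths):
    """Try importing `module` with store paths first, then last. On success
    the modified sys.path is deliberately left in place, mirroring the code
    above; on failure the original sys.path is restored."""
    path_before = list(sys.path)
    for path in (module_paths + sys.path, sys.path + module_paths):
        sys.path = path
        try:
            importlib.import_module(module)
            return True
        except ImportError:
            continue
    sys.path = path_before
    return False
```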
lib/spack/spack/bootstrap/config.py (new file, 169 lines)
```python
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Manage configuration swapping for bootstrapping purposes"""

import contextlib
import os.path
import sys

from llnl.util import tty

import spack.compilers
import spack.config
import spack.environment
import spack.paths
import spack.platforms
import spack.repo
import spack.spec
import spack.store
import spack.util.path

#: Reference counter for the bootstrapping configuration context manager
_REF_COUNT = 0


def is_bootstrapping():
    """Return True if we are in a bootstrapping context, False otherwise."""
    return _REF_COUNT > 0


def spec_for_current_python():
    """For bootstrapping purposes we are just interested in the Python
    minor version (all patches are ABI compatible with the same minor).

    See:
      https://www.python.org/dev/peps/pep-0513/
      https://stackoverflow.com/a/35801395/771663
    """
    version_str = ".".join(str(x) for x in sys.version_info[:2])
    return f"python@{version_str}"


def root_path():
    """Root of all the bootstrap related folders"""
    return spack.util.path.canonicalize_path(
        spack.config.get("bootstrap:root", spack.paths.default_user_bootstrap_path)
    )


def store_path():
    """Path to the store used for bootstrapped software"""
    enabled = spack.config.get("bootstrap:enable", True)
    if not enabled:
        msg = 'bootstrapping is currently disabled. Use "spack bootstrap enable" to enable it'
        raise RuntimeError(msg)

    return _store_path()


@contextlib.contextmanager
def spack_python_interpreter():
    """Override the current configuration to set the interpreter under
    which Spack is currently running as the only Python external spec
    available.
    """
    python_prefix = sys.exec_prefix
    external_python = spec_for_current_python()

    entry = {
        "buildable": False,
        "externals": [{"prefix": python_prefix, "spec": str(external_python)}],
    }

    with spack.config.override("packages:python::", entry):
        yield


def _store_path():
    bootstrap_root_path = root_path()
    return spack.util.path.canonicalize_path(os.path.join(bootstrap_root_path, "store"))


def _config_path():
    bootstrap_root_path = root_path()
    return spack.util.path.canonicalize_path(os.path.join(bootstrap_root_path, "config"))


@contextlib.contextmanager
def ensure_bootstrap_configuration():
    """Swap the current configuration for the one used to bootstrap Spack.

    The context manager is reference counted to ensure we don't swap multiple
    times if there's nested use of it in the stack. One compelling use case
    is bootstrapping patchelf during the bootstrap of clingo.
    """
    global _REF_COUNT  # pylint: disable=global-statement
    already_swapped = bool(_REF_COUNT)
    _REF_COUNT += 1
    try:
        if already_swapped:
            yield
        else:
            with _ensure_bootstrap_configuration():
                yield
    finally:
        _REF_COUNT -= 1


def _read_and_sanitize_configuration():
    """Read the user configuration that needs to be reused for bootstrapping
    and remove the entries that should not be copied over.
    """
    # Read the "config" section but pop the install tree (the entry will not be
    # considered due to the use_store context manager, so it will be confusing
    # to have it in the configuration).
    config_yaml = spack.config.get("config")
    config_yaml.pop("install_tree", None)
    user_configuration = {"bootstrap": spack.config.get("bootstrap"), "config": config_yaml}
    return user_configuration


def _bootstrap_config_scopes():
    tty.debug("[BOOTSTRAP CONFIG SCOPE] name=_builtin")
    config_scopes = [spack.config.InternalConfigScope("_builtin", spack.config.config_defaults)]
    configuration_paths = (spack.config.configuration_defaults_path, ("bootstrap", _config_path()))
    for name, path in configuration_paths:
        platform = spack.platforms.host().name
        platform_scope = spack.config.ConfigScope(
            "/".join([name, platform]), os.path.join(path, platform)
        )
        generic_scope = spack.config.ConfigScope(name, path)
        config_scopes.extend([generic_scope, platform_scope])
        msg = "[BOOTSTRAP CONFIG SCOPE] name={0}, path={1}"
        tty.debug(msg.format(generic_scope.name, generic_scope.path))
        tty.debug(msg.format(platform_scope.name, platform_scope.path))
    return config_scopes


def _add_compilers_if_missing():
    arch = spack.spec.ArchSpec.frontend_arch()
    if not spack.compilers.compilers_for_arch(arch):
        new_compilers = spack.compilers.find_new_compilers()
        if new_compilers:
            spack.compilers.add_compilers_to_config(new_compilers, init_config=False)


@contextlib.contextmanager
def _ensure_bootstrap_configuration():
    bootstrap_store_path = store_path()
    user_configuration = _read_and_sanitize_configuration()
    with spack.environment.no_active_environment():
        with spack.platforms.prevent_cray_detection(), spack.platforms.use_platform(
            spack.platforms.real_host()
        ), spack.repo.use_repositories(spack.paths.packages_path), spack.store.use_store(
            bootstrap_store_path
        ):
            # Default configuration scopes excluding command line
            # and builtin but accounting for platform specific scopes
            config_scopes = _bootstrap_config_scopes()
            with spack.config.use_configuration(*config_scopes):
                # We may need to compile code from sources, so ensure we
                # have compilers for the current platform
                _add_compilers_if_missing()
                spack.config.set("bootstrap", user_configuration["bootstrap"])
                spack.config.set("config", user_configuration["config"])
                with spack.modules.disable_modules():
                    with spack_python_interpreter():
                        yield
```
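`ensure_bootstrap_configuration` relies on a module-level reference counter so that nested uses (the docstring's example is bootstrapping patchelf during the bootstrap of clingo) swap the configuration only once. The idiom in isolation, as a generic sketch with illustrative names:

```python
import contextlib

_DEPTH = 0  # plays the role of _REF_COUNT above


@contextlib.contextmanager
def reentrant_swap(do_swap):
    """Only the outermost entry performs the expensive swap; nested entries
    pass through, and the counter always unwinds in the finally block."""
    global _DEPTH
    outermost = _DEPTH == 0
    _DEPTH += 1
    try:
        if outermost:
            with do_swap():
                yield
        else:
            yield
    finally:
        _DEPTH -= 1
```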
lib/spack/spack/bootstrap/core.py (new file, 574 lines)
```python
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Bootstrap Spack core dependencies from binaries.

This module contains logic to bootstrap software required by Spack from binaries served in the
bootstrapping mirrors. The logic is quite different from an installation done from a Spack user,
because of the following reasons:

1. The binaries are all compiled on the same OS for a given platform (e.g. they are compiled on
   ``centos7`` on ``linux``), but they will be installed and used on the host OS. They are also
   targeted at the most generic architecture possible. That makes the binaries difficult to reuse
   with other specs in an environment without ad-hoc logic.
2. Bootstrapping has a fallback procedure where we try to install software by default from the
   most recent binaries, and proceed to older versions of the mirror, until we try building from
   sources as a last resort. This allows us not to be blocked on architectures where we don't
   have binaries readily available, but is also not compatible with the working of environments
   (they don't have fallback procedures).
3. Among the binaries we have clingo, so we can't concretize that with clingo :-)
4. clingo, GnuPG and patchelf binaries need to be verified by sha256 sum (all the other binaries
   we might add on top of that in principle can be verified with GPG signatures).
"""

import copy
import functools
import json
import os
import os.path
import sys
import uuid

from llnl.util import tty
from llnl.util.lang import GroupedExceptionHandler

import spack.binary_distribution
import spack.config
import spack.detection
import spack.environment
import spack.modules
import spack.paths
import spack.platforms
import spack.platforms.linux
import spack.repo
import spack.spec
import spack.store
import spack.user_environment
import spack.util.environment
import spack.util.executable
import spack.util.path
import spack.util.spack_yaml
import spack.util.url
import spack.version

from ._common import (
    _executables_in_store,
    _python_import,
    _root_spec,
    _try_import_from_store,
)
from .config import spack_python_interpreter, spec_for_current_python

#: Name of the file containing metadata about the bootstrapping source
METADATA_YAML_FILENAME = "metadata.yaml"

#: Whether the current platform is Windows
IS_WINDOWS = sys.platform == "win32"

#: Map a bootstrapper type to the corresponding class
_bootstrap_methods = {}


def bootstrapper(bootstrapper_type):
    """Decorator to register classes implementing bootstrapping
    methods.

    Args:
        bootstrapper_type (str): string identifying the class
    """

    def _register(cls):
        _bootstrap_methods[bootstrapper_type] = cls
        return cls

    return _register


class Bootstrapper:
    """Interface for "core" software bootstrappers"""

    config_scope_name = ""

    def __init__(self, conf):
        self.conf = conf
        self.name = conf["name"]
        self.url = conf["info"]["url"]
        self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])

    @property
    def mirror_url(self):
        """Mirror url associated with this bootstrapper"""
        # Absolute paths
        if os.path.isabs(self.url):
            return spack.util.url.format(self.url)

        # Check for :// and assume it's an url if we find it
        if "://" in self.url:
            return self.url

        # Otherwise, it's a relative path
        return spack.util.url.format(os.path.join(self.metadata_dir, self.url))

    @property
    def mirror_scope(self):
        """Mirror scope to be pushed onto the bootstrapping configuration when using
        this bootstrapper.
        """
        return spack.config.InternalConfigScope(
            self.config_scope_name, {"mirrors:": {self.name: self.mirror_url}}
        )

    def try_import(self, module: str, abstract_spec_str: str):  # pylint: disable=unused-argument
        """Try to import a Python module from a spec satisfying the abstract spec
        passed as argument.

        Args:
            module (str): Python module name to try importing
            abstract_spec_str (str): abstract spec that can provide the Python module

        Return:
            True if the Python module could be imported, False otherwise
        """
        return False

    def try_search_path(self, executables, abstract_spec_str):  # pylint: disable=unused-argument
        """Try to search some executables in the prefix of specs satisfying the abstract
        spec passed as argument.

        Args:
            executables (list of str): executables to be found
            abstract_spec_str (str): abstract spec that can provide the Python module

        Return:
            True if the executables are found, False otherwise
        """
        return False


@bootstrapper(bootstrapper_type="buildcache")
class BuildcacheBootstrapper(Bootstrapper):
    """Install the software needed during bootstrapping from a buildcache."""

    def __init__(self, conf):
        super().__init__(conf)
        self.last_search = None
        self.config_scope_name = f"bootstrap_buildcache-{uuid.uuid4()}"

    @staticmethod
    def _spec_and_platform(abstract_spec_str):
        """Return the spec object and platform we need to use when
        querying the buildcache.

        Args:
            abstract_spec_str: abstract spec string we are looking for
        """
        # Try to install from an unsigned binary cache
        abstract_spec = spack.spec.Spec(abstract_spec_str)
        # On Cray we want to use Linux binaries if available from mirrors
        bincache_platform = spack.platforms.real_host()
        return abstract_spec, bincache_platform

    def _read_metadata(self, package_name):
        """Return metadata about the given package."""
        json_filename = f"{package_name}.json"
        json_dir = self.metadata_dir
        json_path = os.path.join(json_dir, json_filename)
        with open(json_path, encoding="utf-8") as stream:
            data = json.load(stream)
        return data

    def _install_by_hash(self, pkg_hash, pkg_sha256, index, bincache_platform):
        index_spec = next(x for x in index if x.dag_hash() == pkg_hash)
        # Reconstruct the compiler that we need to use for bootstrapping
        compiler_entry = {
            "modules": [],
            "operating_system": str(index_spec.os),
            "paths": {
                "cc": "/dev/null",
                "cxx": "/dev/null",
                "f77": "/dev/null",
                "fc": "/dev/null",
            },
            "spec": str(index_spec.compiler),
            "target": str(index_spec.target.family),
        }
        with spack.platforms.use_platform(bincache_platform):
            with spack.config.override("compilers", [{"compiler": compiler_entry}]):
                spec_str = "/" + pkg_hash
                query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
                matches = spack.store.find([spec_str], multiple=False, query_fn=query)
                for match in matches:
                    spack.binary_distribution.install_root_node(
                        match, allow_root=True, unsigned=True, force=True, sha256=pkg_sha256
                    )

    def _install_and_test(self, abstract_spec, bincache_platform, bincache_data, test_fn):
        # Ensure we see only the buildcache being used to bootstrap
        with spack.config.override(self.mirror_scope):
            # This index is currently needed to get the compiler used to build some
            # specs that we know by dag hash.
            spack.binary_distribution.binary_index.regenerate_spec_cache()
            index = spack.binary_distribution.update_cache_and_get_specs()

            if not index:
                raise RuntimeError("The binary index is empty")

            for item in bincache_data["verified"]:
                candidate_spec = item["spec"]
                # This will be None for things that don't depend on python
                python_spec = item.get("python", None)
                # Skip specs which are not compatible
                if not abstract_spec.satisfies(candidate_spec):
                    continue

                if python_spec is not None and python_spec not in abstract_spec:
                    continue

                for _, pkg_hash, pkg_sha256 in item["binaries"]:
                    self._install_by_hash(pkg_hash, pkg_sha256, index, bincache_platform)

                info = {}
                if test_fn(query_spec=abstract_spec, query_info=info):
                    self.last_search = info
                    return True
        return False

    def try_import(self, module, abstract_spec_str):
        test_fn, info = functools.partial(_try_import_from_store, module), {}
        if test_fn(query_spec=abstract_spec_str, query_info=info):
            return True

        tty.debug(f"Bootstrapping {module} from pre-built binaries")
        abstract_spec, bincache_platform = self._spec_and_platform(
            abstract_spec_str + " ^" + spec_for_current_python()
        )
        data = self._read_metadata(module)
        return self._install_and_test(abstract_spec, bincache_platform, data, test_fn)

    def try_search_path(self, executables, abstract_spec_str):
        test_fn, info = functools.partial(_executables_in_store, executables), {}
        if test_fn(query_spec=abstract_spec_str, query_info=info):
            self.last_search = info
            return True

        abstract_spec, bincache_platform = self._spec_and_platform(abstract_spec_str)
        tty.debug(f"Bootstrapping {abstract_spec.name} from pre-built binaries")
        data = self._read_metadata(abstract_spec.name)
        return self._install_and_test(abstract_spec, bincache_platform, data, test_fn)


@bootstrapper(bootstrapper_type="install")
class SourceBootstrapper(Bootstrapper):
    """Install the software needed during bootstrapping from sources."""

    def __init__(self, conf):
        super().__init__(conf)
        self.last_search = None
        self.config_scope_name = f"bootstrap_source-{uuid.uuid4()}"

    def try_import(self, module, abstract_spec_str):
        info = {}
        if _try_import_from_store(module, abstract_spec_str, query_info=info):
            self.last_search = info
            return True

        tty.debug(f"Bootstrapping {module} from sources")

        # If we compile code from sources detecting a few build tools
        # might reduce compilation time by a fair amount
        _add_externals_if_missing()

        # Try to build and install from sources
        with spack_python_interpreter():
            # Add hint to use frontend operating system on Cray
            concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())

            if module == "clingo":
                # TODO: remove when the old concretizer is deprecated  # pylint: disable=fixme
                concrete_spec._old_concretize(  # pylint: disable=protected-access
                    deprecation_warning=False
                )
            else:
                concrete_spec.concretize()

        msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
        tty.debug(msg.format(module, abstract_spec_str))

        # Install the spec that should make the module importable
        with spack.config.override(self.mirror_scope):
            concrete_spec.package.do_install(fail_fast=True)

        if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
            self.last_search = info
            return True
        return False

    def try_search_path(self, executables, abstract_spec_str):
        info = {}
        if _executables_in_store(executables, abstract_spec_str, query_info=info):
            self.last_search = info
            return True

        tty.debug(f"Bootstrapping {abstract_spec_str} from sources")

        # If we compile code from sources detecting a few build tools
        # might reduce compilation time by a fair amount
        _add_externals_if_missing()

        concrete_spec = spack.spec.Spec(abstract_spec_str)
        if concrete_spec.name == "patchelf":
            concrete_spec._old_concretize(  # pylint: disable=protected-access
                deprecation_warning=False
            )
        else:
            concrete_spec.concretize()

        msg = "[BOOTSTRAP] Try installing '{0}' from sources"
        tty.debug(msg.format(abstract_spec_str))
        with spack.config.override(self.mirror_scope):
            concrete_spec.package.do_install()
        if _executables_in_store(executables, concrete_spec, query_info=info):
            self.last_search = info
            return True
        return False


def create_bootstrapper(conf):
    """Return a bootstrap object built according to the configuration argument"""
    btype = conf["type"]
    return _bootstrap_methods[btype](conf)


def source_is_enabled_or_raise(conf):
    """Raise ValueError if the source is not enabled for bootstrapping"""
    trusted, name = spack.config.get("bootstrap:trusted"), conf["name"]
    if not trusted.get(name, False):
        raise ValueError("source is not trusted")


def ensure_module_importable_or_raise(module, abstract_spec=None):
    """Make the requested module available for import, or raise.

    This function tries to import a Python module in the current interpreter
    using, in order, the methods configured in bootstrap.yaml.

    If none of the methods succeed, an exception is raised. The function exits
    on first success.

    Args:
        module (str): module to be imported in the current interpreter
        abstract_spec (str): abstract spec that might provide the module. If not
            given it defaults to "module"

    Raises:
        ImportError: if the module couldn't be imported
    """
    # If we can import it already, that's great
    tty.debug(f"[BOOTSTRAP MODULE {module}] Try importing from Python")
    if _python_import(module):
        return

    abstract_spec = abstract_spec or module

    exception_handler = GroupedExceptionHandler()

    for current_config in bootstrapping_sources():
        with exception_handler.forward(current_config["name"]):
            source_is_enabled_or_raise(current_config)
            current_bootstrapper = create_bootstrapper(current_config)
            if current_bootstrapper.try_import(module, abstract_spec):
                return

    assert exception_handler, (
        f"expected at least one exception to have been raised at this point: "
        f"while bootstrapping {module}"
    )
    msg = f'cannot bootstrap the "{module}" Python module '
    if abstract_spec:
        msg += f'from spec "{abstract_spec}" '
    if tty.is_debug():
        msg += exception_handler.grouped_message(with_tracebacks=True)
    else:
        msg += exception_handler.grouped_message(with_tracebacks=False)
        msg += "\nRun `spack --debug ...` for more detailed errors"
    raise ImportError(msg)


def ensure_executables_in_path_or_raise(executables, abstract_spec, cmd_check=None):
    """Ensure that some executables are in path or raise.

    Args:
        executables (list): list of executables to be searched in the PATH,
            in order. The function exits on the first one found.
        abstract_spec (str): abstract spec that provides the executables
        cmd_check (object): callable predicate that takes a
            ``spack.util.executable.Executable`` command and validate it. Should return
            ``True`` if the executable is acceptable, ``False`` otherwise.
            Can be used to, e.g., ensure a suitable version of the command before
            accepting for bootstrapping.

    Raises:
        RuntimeError: if the executables cannot be ensured to be in PATH

    Return:
        Executable object

    """
    cmd = spack.util.executable.which(*executables)
    if cmd:
        if not cmd_check or cmd_check(cmd):
            return cmd

    executables_str = ", ".join(executables)

    exception_handler = GroupedExceptionHandler()

    for current_config in bootstrapping_sources():
        with exception_handler.forward(current_config["name"]):
            source_is_enabled_or_raise(current_config)
            current_bootstrapper = create_bootstrapper(current_config)
            if current_bootstrapper.try_search_path(executables, abstract_spec):
                # Additional environment variables needed
                concrete_spec, cmd = (
                    current_bootstrapper.last_search["spec"],
                    current_bootstrapper.last_search["command"],
                )
                env_mods = spack.util.environment.EnvironmentModifications()
                for dep in concrete_spec.traverse(
                    root=True, order="post", deptype=("link", "run")
                ):
                    env_mods.extend(
                        spack.user_environment.environment_modifications_for_spec(
                            dep, set_package_py_globals=False
                        )
                    )
                cmd.add_default_envmod(env_mods)
                return cmd

    assert exception_handler, (
        f"expected at least one exception to have been raised at this point: "
        f"while bootstrapping {executables_str}"
    )
    msg = f"cannot bootstrap any of the {executables_str} executables "
    if abstract_spec:
        msg += f'from spec "{abstract_spec}" '
    if tty.is_debug():
        msg += exception_handler.grouped_message(with_tracebacks=True)
    else:
        msg += exception_handler.grouped_message(with_tracebacks=False)
        msg += "\nRun `spack --debug ...` for more detailed errors"
    raise RuntimeError(msg)


def _add_externals_if_missing():
    search_list = [
        # clingo
        spack.repo.path.get_pkg_class("cmake"),
        spack.repo.path.get_pkg_class("bison"),
        # GnuPG
        spack.repo.path.get_pkg_class("gawk"),
    ]
    if IS_WINDOWS:
        search_list.append(spack.repo.path.get_pkg_class("winbison"))
    detected_packages = spack.detection.by_executable(search_list)
    spack.detection.update_configuration(detected_packages, scope="bootstrap")


def clingo_root_spec():
    """Return the root spec used to bootstrap clingo"""
    return _root_spec("clingo-bootstrap@spack+python")


def ensure_clingo_importable_or_raise():
    """Ensure that the clingo module is available for import."""
    ensure_module_importable_or_raise(module="clingo", abstract_spec=clingo_root_spec())


def gnupg_root_spec():
    """Return the root spec used to bootstrap GnuPG"""
    return _root_spec("gnupg@2.3:")


def ensure_gpg_in_path_or_raise():
    """Ensure gpg or gpg2 are in the PATH or raise."""
    return ensure_executables_in_path_or_raise(
        executables=["gpg2", "gpg"], abstract_spec=gnupg_root_spec()
    )


def patchelf_root_spec():
    """Return the root spec used to bootstrap patchelf"""
    # 0.13.1 is the last version not to require C++17.
    return _root_spec("patchelf@0.13.1:")


def verify_patchelf(patchelf):
    """Older patchelf versions can produce broken binaries, so we
    verify the version here.

    Arguments:

        patchelf (spack.util.executable.Executable): patchelf executable
    """
    out = patchelf("--version", output=str, error=os.devnull, fail_on_error=False).strip()
    if patchelf.returncode != 0:
        return False
    parts = out.split(" ")
    if len(parts) < 2:
        return False
    try:
        version = spack.version.Version(parts[1])
    except ValueError:
        return False
    return version >= spack.version.Version("0.13.1")


def ensure_patchelf_in_path_or_raise():
    """Ensure patchelf is in the PATH or raise."""
    # The old concretizer is not smart and we're doing its job: if the latest patchelf
    # does not concretize because the compiler doesn't support C++17, we try to
    # concretize again with an upperbound @:13.
    try:
        return ensure_executables_in_path_or_raise(
            executables=["patchelf"], abstract_spec=patchelf_root_spec(), cmd_check=verify_patchelf
        )
    except RuntimeError:
        return ensure_executables_in_path_or_raise(
            executables=["patchelf"],
            abstract_spec=_root_spec("patchelf@0.13.1:0.13"),
            cmd_check=verify_patchelf,
        )


def ensure_core_dependencies():
    """Ensure the presence of all the core dependencies."""
    if sys.platform.lower() == "linux":
        ensure_patchelf_in_path_or_raise()
    ensure_clingo_importable_or_raise()
    ensure_gpg_in_path_or_raise()


def all_core_root_specs():
    """Return a list of all the core root specs that may be used to bootstrap Spack"""
    return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]


def bootstrapping_sources(scope=None):
    """Return the list of configured sources of software for bootstrapping Spack

    Args:
        scope (str or None): if a valid configuration scope is given, return the
            list only from that scope
    """
    source_configs = spack.config.get("bootstrap:sources", default=None, scope=scope)
    source_configs = source_configs or []
    list_of_sources = []
    for entry in source_configs:
        current = copy.copy(entry)
        metadata_dir = spack.util.path.canonicalize_path(entry["metadata"])
        metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
        with open(metadata_yaml, encoding="utf-8") as stream:
            current.update(spack.util.spack_yaml.load(stream))
        list_of_sources.append(current)
    return list_of_sources
```
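The `@bootstrapper` decorator and `create_bootstrapper` form a small registry: each configuration entry carries a `type` key, and instantiation is a dictionary dispatch on it. The pattern in isolation, with illustrative names rather than Spack's:

```python
_registry = {}  # plays the role of _bootstrap_methods above


def register(kind):
    """Map a configuration 'type' string to the class implementing it."""

    def _wrap(cls):
        _registry[kind] = cls
        return cls

    return _wrap


@register("buildcache")
class FromBinaries:
    def __init__(self, conf):
        self.conf = conf


conf = {"type": "buildcache", "name": "example"}
instance = _registry[conf["type"]](conf)  # what create_bootstrapper() does
```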
lib/spack/spack/bootstrap/environment.py (new file, 191 lines; listing truncated below)
@@ -0,0 +1,191 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Bootstrap non-core Spack dependencies from an environment."""
import glob
import hashlib
import os
import pathlib
import sys
import warnings

import archspec.cpu

from llnl.util import tty

import spack.build_environment
import spack.environment
import spack.tengine
import spack.util.executable

from ._common import _root_spec
from .config import root_path, spec_for_current_python, store_path


class BootstrapEnvironment(spack.environment.Environment):
    """Environment to install dependencies of Spack for a given interpreter and architecture"""

    @classmethod
    def spack_dev_requirements(cls):
        """Spack development requirements"""
        return [
            isort_root_spec(),
            mypy_root_spec(),
            black_root_spec(),
            flake8_root_spec(),
            pytest_root_spec(),
        ]

    @classmethod
    def environment_root(cls):
        """Environment root directory"""
        bootstrap_root_path = root_path()
        python_part = spec_for_current_python().replace("@", "")
        arch_part = archspec.cpu.host().family
        interpreter_part = hashlib.md5(sys.exec_prefix.encode()).hexdigest()[:5]
        environment_dir = f"{python_part}-{arch_part}-{interpreter_part}"
        return pathlib.Path(
            spack.util.path.canonicalize_path(
                os.path.join(bootstrap_root_path, "environments", environment_dir)
            )
        )

    @classmethod
    def view_root(cls):
        """Location of the view"""
        return cls.environment_root().joinpath("view")

    @classmethod
    def pythonpaths(cls):
        """Paths to be added to sys.path or PYTHONPATH"""
        python_dir_part = f"python{'.'.join(str(x) for x in sys.version_info[:2])}"
        glob_expr = str(cls.view_root().joinpath("**", python_dir_part, "**"))
        result = glob.glob(glob_expr)
        if not result:
            msg = f"Cannot find any Python path in {cls.view_root()}"
            warnings.warn(msg)
        return result

    @classmethod
    def bin_dirs(cls):
        """Paths to be added to PATH"""
        return [cls.view_root().joinpath("bin")]

    @classmethod
    def spack_yaml(cls):
        """Environment spack.yaml file"""
        return cls.environment_root().joinpath("spack.yaml")

    def __init__(self):
        if not self.spack_yaml().exists():
            self._write_spack_yaml_file()
        super().__init__(self.environment_root())

    def update_installations(self):
        """Update the installations of this environment.

        The update is done using a depfile on Linux and macOS, and using the ``install_all``
        method of environments on Windows.
        """
        with tty.SuppressOutput(msg_enabled=False, warn_enabled=False):
            specs = self.concretize()
        if specs:
            colorized_specs = [
                spack.spec.Spec(x).cformat("{name}{@version}")
                for x in self.spack_dev_requirements()
            ]
            tty.msg(f"[BOOTSTRAPPING] Installing dependencies ({', '.join(colorized_specs)})")
            self.write(regenerate=False)
            if sys.platform == "win32":
                self.install_all()
            else:
                self._install_with_depfile()
            self.write(regenerate=True)

    def update_syspath_and_environ(self):
        """Update ``sys.path`` and the PATH, PYTHONPATH environment variables to point to
        the environment view.
        """
        # Do minimal modifications to sys.path and environment variables. In particular, pay
        # attention to have the smallest PYTHONPATH / sys.path possible, since that may impact
        # the performance of the current interpreter
        sys.path.extend(self.pythonpaths())
        os.environ["PATH"] = os.pathsep.join(
            [str(x) for x in self.bin_dirs()] + os.environ.get("PATH", "").split(os.pathsep)
        )
        os.environ["PYTHONPATH"] = os.pathsep.join(
            os.environ.get("PYTHONPATH", "").split(os.pathsep)
            + [str(x) for x in self.pythonpaths()]
        )

    def _install_with_depfile(self):
        spackcmd = spack.util.executable.which("spack")
        spackcmd(
            "-e",
            str(self.environment_root()),
            "env",
            "depfile",
            "-o",
            str(self.environment_root().joinpath("Makefile")),
        )
        make = spack.util.executable.which("make")
        kwargs = {}
        if not tty.is_debug():
            kwargs = {"output": os.devnull, "error": os.devnull}
        make(
            "-C",
            str(self.environment_root()),
            "-j",
            str(spack.build_environment.determine_number_of_jobs(parallel=True)),
            **kwargs,
        )

    def _write_spack_yaml_file(self):
        tty.msg(
            "[BOOTSTRAPPING] Spack has missing dependencies, creating a bootstrapping environment"
        )
        env = spack.tengine.make_environment()
        template = env.get_template("bootstrap/spack.yaml")
        context = {
            "python_spec": spec_for_current_python(),
            "python_prefix": sys.exec_prefix,
            "architecture": archspec.cpu.host().family,
            "environment_path": self.environment_root(),
            "environment_specs": self.spack_dev_requirements(),
            "store_path": store_path(),
        }
        self.environment_root().mkdir(parents=True, exist_ok=True)
        self.spack_yaml().write_text(template.render(context), encoding="utf-8")


def isort_root_spec():
    """Return the root spec used to bootstrap isort"""
    return _root_spec("py-isort@4.3.5:")


def mypy_root_spec():
    """Return the root spec used to bootstrap mypy"""
    return _root_spec("py-mypy@0.900:")


def black_root_spec():
    """Return the root spec used to bootstrap black"""
    return _root_spec("py-black")


def flake8_root_spec():
    """Return the root spec used to bootstrap flake8"""
    return _root_spec("py-flake8")

def pytest_root_spec():
    """Return the root spec used to bootstrap pytest"""
    return _root_spec("py-pytest")


def ensure_environment_dependencies():
    """Ensure Spack dependencies from the bootstrap environment are installed and ready to use"""
    with BootstrapEnvironment() as env:
        env.update_installations()
        env.update_syspath_and_environ()
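
The new module is driven entirely through its last function. A minimal usage sketch, assuming the file lands at lib/spack/spack/bootstrap/environment.py as the header above indicates (so the import path is spack.bootstrap.environment):

# Hedged sketch: set up the dev-tool bootstrap environment for the current
# interpreter and architecture, then make its view usable from this process.
# This is the same entry point the `spack style` and `spack unit-test`
# changes further below call.
from spack.bootstrap.environment import ensure_environment_dependencies

ensure_environment_dependencies()
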
169 lib/spack/spack/bootstrap/status.py Normal file
@@ -0,0 +1,169 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Query the status of bootstrapping on this machine"""
import platform

import spack.util.executable

from ._common import _executables_in_store, _python_import, _try_import_from_store
from .config import ensure_bootstrap_configuration
from .core import clingo_root_spec, patchelf_root_spec
from .environment import (
    BootstrapEnvironment,
    black_root_spec,
    flake8_root_spec,
    isort_root_spec,
    mypy_root_spec,
    pytest_root_spec,
)

def _required_system_executable(exes, msg):
    """Search for an executable in the system path only."""
    if isinstance(exes, str):
        exes = (exes,)
    if spack.util.executable.which_string(*exes):
        return True, None
    return False, msg

def _required_executable(exes, query_spec, msg):
    """Search for an executable in the system path or in the bootstrap store."""
    if isinstance(exes, str):
        exes = (exes,)
    if spack.util.executable.which_string(*exes) or _executables_in_store(exes, query_spec):
        return True, None
    return False, msg


def _required_python_module(module, query_spec, msg):
    """Check if a Python module is available in the current interpreter or
    if it can be loaded from the bootstrap store
    """
    if _python_import(module) or _try_import_from_store(module, query_spec):
        return True, None
    return False, msg


def _missing(name, purpose, system_only=True):
    """Message to be printed if an executable is not found"""
    msg = '[{2}] MISSING "{0}": {1}'
    if not system_only:
        return msg.format(name, purpose, "@*y{{B}}")
    return msg.format(name, purpose, "@*y{{-}}")


def _core_requirements():
    _core_system_exes = {
        "make": _missing("make", "required to build software from sources"),
        "patch": _missing("patch", "required to patch source code before building"),
        "bash": _missing("bash", "required for Spack compiler wrapper"),
        "tar": _missing("tar", "required to manage code archives"),
        "gzip": _missing("gzip", "required to compress/decompress code archives"),
        "unzip": _missing("unzip", "required to compress/decompress code archives"),
        "bzip2": _missing("bzip2", "required to compress/decompress code archives"),
        "git": _missing("git", "required to fetch/manage git repositories"),
    }
    if platform.system().lower() == "linux":
        _core_system_exes["xz"] = _missing("xz", "required to compress/decompress code archives")

    # Executables that are not bootstrapped yet
    result = [_required_system_executable(exe, msg) for exe, msg in _core_system_exes.items()]
    # Python modules
    result.append(
        _required_python_module(
            "clingo", clingo_root_spec(), _missing("clingo", "required to concretize specs", False)
        )
    )
    return result


def _buildcache_requirements():
    _buildcache_exes = {
        "file": _missing("file", "required to analyze files for buildcaches"),
        ("gpg2", "gpg"): _missing("gpg2", "required to sign/verify buildcaches", False),
    }
    if platform.system().lower() == "darwin":
        _buildcache_exes["otool"] = _missing("otool", "required to relocate binaries")

    # Executables that are not bootstrapped yet
    result = [_required_system_executable(exe, msg) for exe, msg in _buildcache_exes.items()]

    if platform.system().lower() == "linux":
        result.append(
            _required_executable(
                "patchelf",
                patchelf_root_spec(),
                _missing("patchelf", "required to relocate binaries", False),
            )
        )

    return result


def _optional_requirements():
    _optional_exes = {
        "zstd": _missing("zstd", "required to compress/decompress code archives"),
        "svn": _missing("svn", "required to manage subversion repositories"),
        "hg": _missing("hg", "required to manage mercurial repositories"),
    }
    # Executables that are not bootstrapped yet
    result = [_required_system_executable(exe, msg) for exe, msg in _optional_exes.items()]
    return result


def _development_requirements():
    # Ensure we trigger environment modifications if we have an environment
    if BootstrapEnvironment.spack_yaml().exists():
        with BootstrapEnvironment() as env:
            env.update_syspath_and_environ()

    return [
        _required_executable(
            "isort", isort_root_spec(), _missing("isort", "required for style checks", False)
        ),
        _required_executable(
            "mypy", mypy_root_spec(), _missing("mypy", "required for style checks", False)
        ),
        _required_executable(
            "flake8", flake8_root_spec(), _missing("flake8", "required for style checks", False)
        ),
        _required_executable(
            "black", black_root_spec(), _missing("black", "required for code formatting", False)
        ),
        _required_python_module(
            "pytest", pytest_root_spec(), _missing("pytest", "required to run unit-test", False)
        ),
    ]


def status_message(section):
    """Return a status message to be printed to screen that refers to the
    section passed as argument and a bool which is True if there are missing
    dependencies.

    Args:
        section (str): either 'core' or 'buildcache' or 'optional' or 'develop'
    """
    pass_token, fail_token = "@*g{[PASS]}", "@*r{[FAIL]}"

    # Contains the header of the section and a list of requirements
    spack_sections = {
        "core": ("{0} @*{{Core Functionalities}}", _core_requirements),
        "buildcache": ("{0} @*{{Binary packages}}", _buildcache_requirements),
        "optional": ("{0} @*{{Optional Features}}", _optional_requirements),
        "develop": ("{0} @*{{Development Dependencies}}", _development_requirements),
    }
    msg, required_software = spack_sections[section]

    with ensure_bootstrap_configuration():
        missing_software = False
        for found, err_msg in required_software():
            if not found:
                missing_software = True
                msg += "\n " + err_msg
        msg += "\n"
        msg = msg.format(pass_token if not missing_software else fail_token)

    return msg, missing_software
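
For orientation, here is a short, hedged sketch of how the new status API composes; it mirrors, but is not copied from, the `spack bootstrap status` command that consumes it further below:

# Sketch: render the status of every section, tracking whether anything
# required is missing. status_message() returns (text, missing_bool), and
# the text carries the color escapes consumed by llnl.util.tty.color.colorize.
import llnl.util.tty.color

from spack.bootstrap.status import status_message

any_missing = False
for section in ("core", "buildcache", "optional", "develop"):
    text, missing = status_message(section)
    any_missing = any_missing or missing
    print(llnl.util.tty.color.colorize(text))
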
@@ -41,6 +41,7 @@
 import sys
 import traceback
 import types
+from typing import List, Tuple

 import llnl.util.tty as tty
 from llnl.util.filesystem import install, install_tree, mkdirp
@@ -287,7 +288,7 @@ def clean_environment():
 def _add_werror_handling(keep_werror, env):
     keep_flags = set()
     # set of pairs
-    replace_flags = []  # type: List[Tuple[str, str]]
+    replace_flags: List[Tuple[str, str]] = []
     if keep_werror == "all":
         keep_flags.add("-Werror*")
     else:
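
This hunk is the first of many below that follow the same mechanical pattern: comment-style type hints (`# type: ...`), needed while Spack still supported Python 2, become PEP 526 variable annotations, and the `# novm` markers on `typing` imports are dropped. A minimal side-by-side illustration, reusing the variable name from the hunk above:

# Before: the annotation lives in a comment, invisible at runtime and
# understood only by type checkers.
from typing import List, Tuple

replace_flags = []  # type: List[Tuple[str, str]]

# After (PEP 526): the annotation is part of the syntax and is recorded in
# the module's __annotations__ at runtime.
replace_flags: List[Tuple[str, str]] = []
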
@@ -7,7 +7,7 @@
 import os.path
 import stat
 import subprocess
-from typing import List  # novm
+from typing import List

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
@@ -138,7 +138,7 @@ class AutotoolsBuilder(BaseBuilder):
     patch_libtool = True

     #: Targets for ``make`` during the :py:meth:`~.AutotoolsBuilder.build` phase
-    build_targets = []  # type: List[str]
+    build_targets: List[str] = []
     #: Targets for ``make`` during the :py:meth:`~.AutotoolsBuilder.install` phase
     install_targets = ["install"]

@@ -152,7 +152,7 @@ class AutotoolsBuilder(BaseBuilder):
     force_autoreconf = False

     #: Options to be passed to autoreconf when using the default implementation
-    autoreconf_extra_args = []  # type: List[str]
+    autoreconf_extra_args: List[str] = []

     #: If False deletes all the .la files in the prefix folder after the installation.
     #: If True instead it installs them.

@@ -34,22 +34,22 @@ class CachedCMakeBuilder(CMakeBuilder):

     #: Phases of a Cached CMake package
     #: Note: the initconfig phase is used for developer builds as a final phase to stop on
-    phases = ("initconfig", "cmake", "build", "install")  # type: Tuple[str, ...]
+    phases: Tuple[str, ...] = ("initconfig", "cmake", "build", "install")

     #: Names associated with package methods in the old build-system format
-    legacy_methods = CMakeBuilder.legacy_methods + (
+    legacy_methods: Tuple[str, ...] = CMakeBuilder.legacy_methods + (
         "initconfig_compiler_entries",
         "initconfig_mpi_entries",
         "initconfig_hardware_entries",
         "std_initconfig_entries",
         "initconfig_package_entries",
-    )  # type: Tuple[str, ...]
+    )

     #: Names associated with package attributes in the old build-system format
-    legacy_attributes = CMakeBuilder.legacy_attributes + (
+    legacy_attributes: Tuple[str, ...] = CMakeBuilder.legacy_attributes + (
         "cache_name",
         "cache_path",
-    )  # type: Tuple[str, ...]
+    )

     @property
     def cache_name(self):

@@ -153,13 +153,13 @@ class CMakeBuilder(BaseBuilder):
     """

     #: Phases of a CMake package
-    phases = ("cmake", "build", "install")  # type: Tuple[str, ...]
+    phases: Tuple[str, ...] = ("cmake", "build", "install")

     #: Names associated with package methods in the old build-system format
-    legacy_methods = ("cmake_args", "check")  # type: Tuple[str, ...]
+    legacy_methods: Tuple[str, ...] = ("cmake_args", "check")

     #: Names associated with package attributes in the old build-system format
-    legacy_attributes = (
+    legacy_attributes: Tuple[str, ...] = (
         "generator",
         "build_targets",
         "install_targets",
@@ -169,7 +169,7 @@ class CMakeBuilder(BaseBuilder):
         "std_cmake_args",
         "build_dirname",
         "build_directory",
-    )  # type: Tuple[str, ...]
+    )

     #: The build system generator to use.
     #:
@@ -182,7 +182,7 @@ class CMakeBuilder(BaseBuilder):
     generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"

     #: Targets to be used during the build phase
-    build_targets = []  # type: List[str]
+    build_targets: List[str] = []
     #: Targets to be used during the install phase
     install_targets = ["install"]
     #: Callback names for build-time test

@@ -35,10 +35,10 @@ class GenericBuilder(BaseBuilder):
     phases = ("install",)

     #: Names associated with package methods in the old build-system format
-    legacy_methods = ()  # type: Tuple[str, ...]
+    legacy_methods: Tuple[str, ...] = ()

     #: Names associated with package attributes in the old build-system format
-    legacy_attributes = ("archive_files",)  # type: Tuple[str, ...]
+    legacy_attributes: Tuple[str, ...] = ("archive_files",)

     # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
     spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)

@@ -13,7 +13,7 @@ class GNUMirrorPackage(spack.package_base.PackageBase):
     """Mixin that takes care of setting url and mirrors for GNU packages."""

     #: Path of the package in a GNU mirror
-    gnu_mirror_path = None  # type: Optional[str]
+    gnu_mirror_path: Optional[str] = None

     #: List of GNU mirrors used by Spack
     base_mirrors = [

@@ -3,7 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import inspect
-from typing import List  # novm
+from typing import List

 import llnl.util.filesystem as fs

@@ -77,7 +77,7 @@ class MakefileBuilder(BaseBuilder):
     )

     #: Targets for ``make`` during the :py:meth:`~.MakefileBuilder.build` phase
-    build_targets = []  # type: List[str]
+    build_targets: List[str] = []
     #: Targets for ``make`` during the :py:meth:`~.MakefileBuilder.install` phase
     install_targets = ["install"]

@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import inspect
 import os
-from typing import List  # novm
+from typing import List

 import llnl.util.filesystem as fs

@@ -95,7 +95,7 @@ class MesonBuilder(BaseBuilder):
         "build_directory",
     )

-    build_targets = []  # type: List[str]
+    build_targets: List[str] = []
     install_targets = ["install"]

     build_time_test_callbacks = ["check"]

@@ -3,7 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import inspect
-from typing import List  # novm
+from typing import List

 import llnl.util.filesystem as fs

@@ -72,7 +72,7 @@ class NMakeBuilder(BaseBuilder):
     )

     #: Targets for ``make`` during the :py:meth:`~.NMakeBuilder.build` phase
-    build_targets = []  # type: List[str]
+    build_targets: List[str] = []
     #: Targets for ``make`` during the :py:meth:`~.NMakeBuilder.install` phase
     install_targets = ["install"]

@@ -177,7 +177,7 @@ class PythonPackage(PythonExtension):
     """Specialized class for packages that are built using pip."""

     #: Package name, version, and extension on PyPI
-    pypi = None  # type: Optional[str]
+    pypi: Optional[str] = None

     maintainers = ["adamjstewart", "pradyunsg"]

@@ -200,7 +200,7 @@ class PythonPackage(PythonExtension):
     # package manually
     depends_on("py-wheel", type="build")

-    py_namespace = None  # type: Optional[str]
+    py_namespace: Optional[str] = None

     @lang.classproperty
     def homepage(cls):

@@ -22,10 +22,10 @@ class RBuilder(GenericBuilder):
     """

     #: Names associated with package methods in the old build-system format
-    legacy_methods = (
+    legacy_methods: Tuple[str, ...] = (
         "configure_args",
         "configure_vars",
-    ) + GenericBuilder.legacy_methods  # type: Tuple[str, ...]
+    ) + GenericBuilder.legacy_methods

     def configure_args(self):
         """Arguments to pass to install via ``--configure-args``."""
@@ -64,10 +64,10 @@ class RPackage(Package):
     # package attributes that can be expanded to set the homepage, url,
     # list_url, and git values
     # For CRAN packages
-    cran = None  # type: Optional[str]
+    cran: Optional[str] = None

     # For Bioconductor packages
-    bioc = None  # type: Optional[str]
+    bioc: Optional[str] = None

     GenericBuilder = RBuilder

@@ -34,7 +34,7 @@ class RacketPackage(PackageBase):

     extends("racket", when="build_system=racket")

-    racket_name = None  # type: Optional[str]
+    racket_name: Optional[str] = None
     parallel = True

     @lang.classproperty
@@ -51,7 +51,7 @@ class RacketBuilder(spack.builder.Builder):
     phases = ("install",)

     #: Names associated with package methods in the old build-system format
-    legacy_methods = tuple()  # type: Tuple[str, ...]
+    legacy_methods: Tuple[str, ...] = tuple()

     #: Names associated with package attributes in the old build-system format
     legacy_attributes = ("build_directory", "build_time_test_callbacks", "subdirectory")
@@ -59,7 +59,7 @@ class RacketBuilder(spack.builder.Builder):
     #: Callback names for build-time test
     build_time_test_callbacks = ["check"]

-    racket_name = None  # type: Optional[str]
+    racket_name: Optional[str] = None

     @property
     def subdirectory(self):

@@ -14,7 +14,7 @@ class SourceforgePackage(spack.package_base.PackageBase):
     packages."""

     #: Path of the package in a Sourceforge mirror
-    sourceforge_mirror_path = None  # type: Optional[str]
+    sourceforge_mirror_path: Optional[str] = None

     #: List of Sourceforge mirrors used by Spack
     base_mirrors = [

@@ -13,7 +13,7 @@ class SourcewarePackage(spack.package_base.PackageBase):
     packages."""

     #: Path of the package in a Sourceware mirror
-    sourceware_mirror_path = None  # type: Optional[str]
+    sourceware_mirror_path: Optional[str] = None

     #: List of Sourceware mirrors used by Spack
     base_mirrors = [

@@ -14,7 +14,7 @@ class XorgPackage(spack.package_base.PackageBase):
     packages."""

     #: Path of the package in a x.org mirror
-    xorg_mirror_path = None  # type: Optional[str]
+    xorg_mirror_path: Optional[str] = None

     #: List of x.org mirrors used by Spack
     # Note: x.org mirrors are a bit tricky, since many are out-of-sync or off.

@@ -466,19 +466,19 @@ class Builder(collections.abc.Sequence, metaclass=BuilderMeta):
     """

     #: Sequence of phases. Must be defined in derived classes
-    phases = ()  # type: Tuple[str, ...]
+    phases: Tuple[str, ...] = ()
     #: Build system name. Must also be defined in derived classes.
-    build_system = None  # type: Optional[str]
+    build_system: Optional[str] = None

-    legacy_methods = ()  # type: Tuple[str, ...]
-    legacy_attributes = ()  # type: Tuple[str, ...]
+    legacy_methods: Tuple[str, ...] = ()
+    legacy_attributes: Tuple[str, ...] = ()

     #: List of glob expressions. Each expression must either be
     #: absolute or relative to the package source path.
     #: Matching artifacts found at the end of the build process will be
     #: copied in the same directory tree as _spack_build_logfile and
     #: _spack_build_envfile.
-    archive_files = []  # type: List[str]
+    archive_files: List[str] = []

     def __init__(self, pkg):
         self.pkg = pkg
@@ -11,7 +11,7 @@
 import shlex
 import sys
 from textwrap import dedent
-from typing import List, Tuple
+from typing import List, Match, Tuple

 import ruamel.yaml as yaml
 from ruamel.yaml.error import MarkedYAMLError
@@ -26,6 +26,7 @@
 import spack.environment as ev
 import spack.error
 import spack.extensions
+import spack.parser
 import spack.paths
 import spack.spec
 import spack.store
@@ -165,18 +166,15 @@ class _UnquotedFlags(object):
         )
     )

-    def __init__(self, all_unquoted_flag_pairs):
-        # type: (List[Tuple[re.Match, str]]) -> None
+    def __init__(self, all_unquoted_flag_pairs: List[Tuple[Match[str], str]]):
         self._flag_pairs = all_unquoted_flag_pairs

-    def __bool__(self):
-        # type: () -> bool
+    def __bool__(self) -> bool:
         return bool(self._flag_pairs)

     @classmethod
-    def extract(cls, sargs):
-        # type: (str) -> _UnquotedFlags
-        all_unquoted_flag_pairs = []  # type: List[Tuple[re.Match, str]]
+    def extract(cls, sargs: str) -> "_UnquotedFlags":
+        all_unquoted_flag_pairs: List[Tuple[Match[str], str]] = []
         prev_flags_arg = None
         for arg in shlex.split(sargs):
             if prev_flags_arg is not None:
@@ -184,8 +182,7 @@ def extract(cls, sargs):
             prev_flags_arg = cls.flags_arg_pattern.match(arg)
         return cls(all_unquoted_flag_pairs)

-    def report(self):
-        # type: () -> str
+    def report(self) -> str:
         single_errors = [
             "({0}) {1} {2} => {3}".format(
                 i + 1,
@@ -221,7 +218,7 @@ def parse_specs(args, **kwargs):
     unquoted_flags = _UnquotedFlags.extract(sargs)

     try:
-        specs = spack.spec.parse(sargs)
+        specs = spack.parser.parse(sargs)
         for spec in specs:
             if concretize:
                 spec.concretize(tests=tests)  # implies normalize
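
The switch from `spack.spec.parse` to `spack.parser.parse` points at the new tokenizer-based parser added at the end of this changeset. A hedged sketch of the new entry point exactly as this hunk uses it (the signature is inferred from the call site; the spec string is illustrative):

# Sketch: parse a command-line spec string into Spec objects with the new
# module. parse_specs() above calls it like this and then optionally
# concretizes each result.
import spack.parser

specs = spack.parser.parse("zlib@1.2.12: %gcc@9")
for spec in specs:
    print(spec.name, spec.versions)
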
@@ -5,7 +5,6 @@
 from __future__ import print_function

 import os.path
-import platform
 import shutil
 import tempfile

@@ -15,6 +14,8 @@

 import spack
 import spack.bootstrap
+import spack.bootstrap.config
+import spack.bootstrap.core
 import spack.cmd.common.arguments
 import spack.config
 import spack.main
@@ -75,7 +76,8 @@ def _add_scope_option(parser):
 def setup_parser(subparser):
     sp = subparser.add_subparsers(dest="subcommand")

-    sp.add_parser("now", help="Spack ready, right now!")
+    now = sp.add_parser("now", help="Spack ready, right now!")
+    now.add_argument("--dev", action="store_true", help="bootstrap dev dependencies too")

     status = sp.add_parser("status", help="get the status of Spack")
     status.add_argument(
@@ -194,7 +196,7 @@ def _root(args):


 def _list(args):
-    sources = spack.bootstrap.bootstrapping_sources(scope=args.scope)
+    sources = spack.bootstrap.core.bootstrapping_sources(scope=args.scope)
     if not sources:
         llnl.util.tty.msg("No method available for bootstrapping Spack's dependencies")
         return
@@ -298,7 +300,7 @@ def _status(args):
         sections.append("develop")

     header = "@*b{{Spack v{0} - {1}}}".format(
-        spack.spack_version, spack.bootstrap.spec_for_current_python()
+        spack.spack_version, spack.bootstrap.config.spec_for_current_python()
     )
     print(llnl.util.tty.color.colorize(header))
     print()
@@ -323,7 +325,7 @@ def _status(args):


 def _add(args):
-    initial_sources = spack.bootstrap.bootstrapping_sources()
+    initial_sources = spack.bootstrap.core.bootstrapping_sources()
     names = [s["name"] for s in initial_sources]

     # If the name is already used error out
@@ -353,7 +355,7 @@ def _add(args):


 def _remove(args):
-    initial_sources = spack.bootstrap.bootstrapping_sources()
+    initial_sources = spack.bootstrap.core.bootstrapping_sources()
     names = [s["name"] for s in initial_sources]
     if args.name not in names:
         msg = (
@@ -386,7 +388,10 @@ def _mirror(args):
     # TODO: Here we are adding gnuconfig manually, but this can be fixed
     # TODO: as soon as we have an option to add to a mirror all the possible
     # TODO: dependencies of a spec
-    root_specs = spack.bootstrap.all_root_specs(development=args.dev) + ["gnuconfig"]
+    root_specs = spack.bootstrap.all_core_root_specs() + ["gnuconfig"]
+    if args.dev:
+        root_specs += spack.bootstrap.BootstrapEnvironment.spack_dev_requirements()
+
     for spec_str in root_specs:
         msg = 'Adding "{0}" and dependencies to the mirror at {1}'
         llnl.util.tty.msg(msg.format(spec_str, mirror_dir))
@@ -436,10 +441,9 @@ def write_metadata(subdir, metadata):

 def _now(args):
     with spack.bootstrap.ensure_bootstrap_configuration():
-        if platform.system().lower() == "linux":
-            spack.bootstrap.ensure_patchelf_in_path_or_raise()
-        spack.bootstrap.ensure_clingo_importable_or_raise()
-        spack.bootstrap.ensure_gpg_in_path_or_raise()
+        spack.bootstrap.ensure_core_dependencies()
+        if args.dev:
+            spack.bootstrap.ensure_environment_dependencies()


 def bootstrap(parser, args):
@@ -3,7 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from typing import Callable, Dict  # novm
+from typing import Callable, Dict

 import spack.cmd.modules.lmod
 import spack.cmd.modules.tcl
@@ -13,7 +13,7 @@
 level = "short"


-_subcommands = {}  # type: Dict[str, Callable]
+_subcommands: Dict[str, Callable] = {}


 def setup_parser(subparser):

@@ -16,6 +16,7 @@

 def setup_parser(subparser):
     arguments.add_common_arguments(subparser, ["no_checksum", "deprecated", "specs"])
+    arguments.add_concretizer_args(subparser)


 def patch(parser, args):
@@ -2,9 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-from __future__ import print_function
-
 import argparse
 import os
 import re
@@ -15,7 +12,6 @@
 import llnl.util.tty.color as color
 from llnl.util.filesystem import working_dir

-import spack.bootstrap
 import spack.paths
 from spack.util.executable import which

@@ -25,7 +21,7 @@


 def grouper(iterable, n, fillvalue=None):
-    "Collect data into fixed-length chunks or blocks"
+    """Collect data into fixed-length chunks or blocks"""
     # grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
     args = [iter(iterable)] * n
     for group in zip_longest(*args, fillvalue=fillvalue):
@@ -41,16 +37,13 @@ def grouper(iterable, n, fillvalue=None):
 #: double-check the results of other tools (if, e.g., --fix was provided)
 #: The list maps an executable name to a method to ensure the tool is
 #: bootstrapped or present in the environment.
-tool_order = [
-    ("isort", spack.bootstrap.ensure_isort_in_path_or_raise),
-    ("mypy", spack.bootstrap.ensure_mypy_in_path_or_raise),
-    ("black", spack.bootstrap.ensure_black_in_path_or_raise),
-    ("flake8", spack.bootstrap.ensure_flake8_in_path_or_raise),
+tool_names = [
+    "isort",
+    "mypy",
+    "black",
+    "flake8",
 ]

-#: list of just the tool names -- for argparse
-tool_names = [k for k, _ in tool_order]
-
 #: tools we run in spack style
 tools = {}

@@ -222,10 +215,8 @@ def translate(match):
     print(line)


-def print_style_header(file_list, args, selected):
-    tools = [tool for tool in tool_names if tool in selected]
-    tty.msg("Running style checks on spack", "selected: " + ", ".join(tools))
-
+def print_style_header(file_list, args, tools_to_run):
+    tty.msg("Running style checks on spack", "selected: " + ", ".join(tools_to_run))
     # translate modified paths to cwd_relative if needed
     paths = [filename.strip() for filename in file_list]
     if not args.root_relative:
@@ -384,6 +375,17 @@ def validate_toolset(arg_value):
     return tools


+def missing_tools(tools_to_run):
+    return [t for t in tools_to_run if which(t) is None]
+
+
+def _bootstrap_dev_dependencies():
+    import spack.bootstrap
+
+    with spack.bootstrap.ensure_bootstrap_configuration():
+        spack.bootstrap.ensure_environment_dependencies()
+
+
 def style(parser, args):
     # save initial working directory for relativizing paths later
     args.initial_working_dir = os.getcwd()
@@ -418,25 +420,20 @@ def prefix_relative(path):
         tty.msg("Nothing to run.")
         return

+    tools_to_run = [t for t in tool_names if t in selected]
+    if missing_tools(tools_to_run):
+        _bootstrap_dev_dependencies()
+
     return_code = 0
     with working_dir(args.root):
         if not file_list:
             file_list = changed_files(args.base, args.untracked, args.all)
-
-        print_style_header(file_list, args, selected)
-
-        tools_to_run = [(tool, fn) for tool, fn in tool_order if tool in selected]
-        commands = {}
-        with spack.bootstrap.ensure_bootstrap_configuration():
-            # bootstrap everything first to get commands
-            for tool_name, bootstrap_fn in tools_to_run:
-                commands[tool_name] = bootstrap_fn()
-
-            # run tools once bootstrapping is done
-            for tool_name, bootstrap_fn in tools_to_run:
-                run_function, required = tools[tool_name]
-                print_tool_header(tool_name)
-                return_code |= run_function(commands[tool_name], file_list, args)
+        print_style_header(file_list, args, tools_to_run)
+        for tool_name in tools_to_run:
+            run_function, required = tools[tool_name]
+            print_tool_header(tool_name)
+            return_code |= run_function(which(tool_name), file_list, args)

     if return_code == 0:
         tty.msg(color.colorize("@*{spack style checks were clean}"))
@@ -21,7 +21,6 @@
 import llnl.util.tty.color as color
 from llnl.util.tty.colify import colify

-import spack.bootstrap
 import spack.paths

 description = "run spack's unit tests (wrapper around pytest)"
@@ -207,6 +206,7 @@ def add_back_pytest_args(args, unknown_args):

 def unit_test(parser, args, unknown_args):
     global pytest
+    import spack.bootstrap

     # Ensure clingo is available before switching to the
     # mock configuration used by unit tests
@@ -214,12 +214,10 @@ def unit_test(parser, args, unknown_args):
     # clingo is wholly unsupported from bootstrap
     if not is_windows:
         with spack.bootstrap.ensure_bootstrap_configuration():
-            spack.bootstrap.ensure_clingo_importable_or_raise()
-
-    if pytest is None:
-        vendored_pytest_dir = os.path.join(spack.paths.external_path, "pytest-fallback")
-        sys.path.append(vendored_pytest_dir)
-        import pytest
+            spack.bootstrap.ensure_core_dependencies()
+            if pytest is None:
+                spack.bootstrap.ensure_environment_dependencies()
+                import pytest

     if args.pytest_help:
         # make the pytest.main help output more accurate
@@ -10,7 +10,7 @@
 import re
 import shutil
 import tempfile
-from typing import List, Sequence  # novm
+from typing import List, Optional, Sequence

 import llnl.util.lang
 import llnl.util.tty as tty
@@ -195,20 +195,20 @@ class Compiler(object):
     and how to identify the particular type of compiler."""

     # Subclasses use possible names of C compiler
-    cc_names = []  # type: List[str]
+    cc_names: List[str] = []

     # Subclasses use possible names of C++ compiler
-    cxx_names = []  # type: List[str]
+    cxx_names: List[str] = []

     # Subclasses use possible names of Fortran 77 compiler
-    f77_names = []  # type: List[str]
+    f77_names: List[str] = []

     # Subclasses use possible names of Fortran 90 compiler
-    fc_names = []  # type: List[str]
+    fc_names: List[str] = []

     # Optional prefix regexes for searching for this type of compiler.
     # Prefixes are sometimes used for toolchains
-    prefixes = []  # type: List[str]
+    prefixes: List[str] = []

     # Optional suffix regexes for searching for this type of compiler.
     # Suffixes are used by some frameworks, e.g. macports uses an '-mp-X.Y'
@@ -219,7 +219,7 @@ class Compiler(object):
     version_argument = "-dumpversion"

     #: Return values to ignore when invoking the compiler to get its version
-    ignore_version_errors = ()  # type: Sequence[int]
+    ignore_version_errors: Sequence[int] = ()

     #: Regex used to extract version from compiler's output
     version_regex = "(.*)"
@@ -271,9 +271,9 @@ def opt_flags(self):
         return ["-O", "-O0", "-O1", "-O2", "-O3"]

     # Cray PrgEnv name that can be used to load this compiler
-    PrgEnv = None  # type: str
+    PrgEnv: Optional[str] = None
     # Name of module used to switch versions of this compiler
-    PrgEnv_compiler = None  # type: str
+    PrgEnv_compiler: Optional[str] = None

     def __init__(
         self,
@@ -286,7 +286,7 @@ def __init__(
         environment=None,
         extra_rpaths=None,
         enable_implicit_rpaths=None,
-        **kwargs
+        **kwargs,
     ):
         self.spec = cspec
         self.operating_system = str(operating_system)
@@ -10,7 +10,7 @@
 import itertools
 import multiprocessing.pool
 import os
-from typing import Dict  # novm
+from typing import Dict

 import archspec.cpu

@@ -41,7 +41,7 @@
 # TODO: Caches at module level make it difficult to mock configurations in
 # TODO: unit tests. It might be worth reworking their implementation.
 #: cache of compilers constructed from config data, keyed by config entry id.
-_compiler_cache = {}  # type: Dict[str, spack.compiler.Compiler]
+_compiler_cache: Dict[str, "spack.compiler.Compiler"] = {}

 _compiler_to_pkg = {
     "clang": "llvm+clang",

@@ -8,7 +8,7 @@
 import subprocess
 import sys
 from distutils.version import StrictVersion
-from typing import Dict, List, Set  # novm
+from typing import Dict, List, Set

 import spack.compiler
 import spack.operating_systems.windows_os
@@ -18,8 +18,8 @@
 from spack.error import SpackError
 from spack.version import Version

-avail_fc_version = set()  # type: Set[str]
-fc_path = dict()  # type: Dict[str, str]
+avail_fc_version: Set[str] = set()
+fc_path: Dict[str, str] = dict()

 fortran_mapping = {
     "2021.3.0": "19.29.30133",
@@ -42,16 +42,16 @@ def get_valid_fortran_pth(comp_ver):

 class Msvc(Compiler):
     # Subclasses use possible names of C compiler
-    cc_names = ["cl.exe"]  # type: List[str]
+    cc_names: List[str] = ["cl.exe"]

     # Subclasses use possible names of C++ compiler
-    cxx_names = ["cl.exe"]  # type: List[str]
+    cxx_names: List[str] = ["cl.exe"]

     # Subclasses use possible names of Fortran 77 compiler
-    f77_names = ["ifx.exe"]  # type: List[str]
+    f77_names: List[str] = ["ifx.exe"]

     # Subclasses use possible names of Fortran 90 compiler
-    fc_names = ["ifx.exe"]  # type: List[str]
+    fc_names: List[str] = ["ifx.exe"]

     # Named wrapper links within build_env_path
     # Due to the challenges of supporting compiler wrappers
@@ -4,17 +4,17 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import os
-from typing import List  # novm
+from typing import List

 import spack.compiler


 class Nag(spack.compiler.Compiler):
     # Subclasses use possible names of C compiler
-    cc_names = []  # type: List[str]
+    cc_names: List[str] = []

     # Subclasses use possible names of C++ compiler
-    cxx_names = []  # type: List[str]
+    cxx_names: List[str] = []

     # Subclasses use possible names of Fortran 77 compiler
     f77_names = ["nagfor"]

@@ -36,7 +36,7 @@
 import re
 import sys
 from contextlib import contextmanager
-from typing import List  # novm
+from typing import List

 import ruamel.yaml as yaml
 from ruamel.yaml.error import MarkedYAMLError
@@ -26,7 +26,7 @@
 import socket
 import sys
 import time
-from typing import Dict  # novm
+from typing import Dict

 try:
     import uuid
@@ -304,10 +304,10 @@ class Database(object):

     """Per-process lock objects for each install prefix."""

-    _prefix_locks = {}  # type: Dict[str, lk.Lock]
+    _prefix_locks: Dict[str, lk.Lock] = {}

     """Per-process failure (lock) objects for each install prefix."""
-    _prefix_failures = {}  # type: Dict[str, lk.Lock]
+    _prefix_failures: Dict[str, lk.Lock] = {}

     def __init__(
         self,
@@ -32,7 +32,7 @@ class OpenMpi(Package):
 import functools
 import os.path
 import re
-from typing import List, Set  # novm
+from typing import List, Set

 import llnl.util.lang
 import llnl.util.tty.color
@@ -122,9 +122,9 @@ class DirectiveMeta(type):
     """

     # Set of all known directives
-    _directive_dict_names = set()  # type: Set[str]
-    _directives_to_be_executed = []  # type: List[str]
-    _when_constraints_from_context = []  # type: List[str]
+    _directive_dict_names: Set[str] = set()
+    _directives_to_be_executed: List[str] = []
+    _when_constraints_from_context: List[str] = []

     def __new__(cls, name, bases, attr_dict):
         # Initialize the attribute containing the list of directives
@@ -495,6 +495,8 @@ def provides(*specs, **kwargs):
     """

     def _execute_provides(pkg):
+        import spack.parser  # Avoid circular dependency
+
         when = kwargs.get("when")
         when_spec = make_when_spec(when)
         if not when_spec:
@@ -505,7 +507,7 @@ def _execute_provides(pkg):
         when_spec.name = pkg.name

         for string in specs:
-            for provided_spec in spack.spec.parse(string):
+            for provided_spec in spack.parser.parse(string):
                 if pkg.name == provided_spec.name:
                     raise CircularReferenceError("Package '%s' cannot provide itself." % pkg.name)
@@ -19,7 +19,6 @@
 from llnl.util.lang import dedupe
 from llnl.util.symlink import symlink

-import spack.bootstrap
 import spack.compilers
 import spack.concretize
 import spack.config
@@ -1344,6 +1343,8 @@ def _concretize_separately(self, tests=False):
         """Concretization strategy that concretizes separately one
         user spec after the other.
         """
+        import spack.bootstrap
+
         # keep any concretized specs whose user specs are still in the manifest
         old_concretized_user_specs = self.concretized_user_specs
         old_concretized_order = self.concretized_order
@@ -1368,7 +1369,7 @@ def _concretize_separately(self, tests=False):
         # Ensure we don't try to bootstrap clingo in parallel
         if spack.config.get("config:concretizer", "clingo") == "clingo":
             with spack.bootstrap.ensure_bootstrap_configuration():
-                spack.bootstrap.ensure_clingo_importable_or_raise()
+                spack.bootstrap.ensure_core_dependencies()

         # Ensure all the indexes have been built or updated, since
         # otherwise the processes in the pool may timeout on waiting
@@ -30,7 +30,7 @@
 import shutil
 import sys
 import urllib.parse
-from typing import List, Optional  # novm
+from typing import List, Optional

 import llnl.util
 import llnl.util.filesystem as fs

@@ -34,7 +34,7 @@
 import inspect
 import os.path
 import re
-from typing import Optional  # novm
+from typing import Optional

 import llnl.util.filesystem
 import llnl.util.tty as tty
@@ -588,7 +588,7 @@ class BaseFileLayout(object):
     """

     #: This needs to be redefined
-    extension = None  # type: Optional[str]
+    extension: Optional[str] = None

     def __init__(self, configuration):
         self.conf = configuration
@@ -7,7 +7,7 @@
 import itertools
 import os.path
 import posixpath
-from typing import Any, Dict  # novm
+from typing import Any, Dict

 import llnl.util.lang as lang

@@ -30,7 +30,7 @@ def configuration(module_set_name):


 # Caches the configuration {spec_hash: configuration}
-configuration_registry = {}  # type: Dict[str, Any]
+configuration_registry: Dict[str, Any] = {}


 def make_configuration(spec, module_set_name):

@@ -8,7 +8,7 @@
 """
 import posixpath
 import string
-from typing import Any, Dict  # novm
+from typing import Any, Dict

 import llnl.util.tty as tty

@@ -27,7 +27,7 @@ def configuration(module_set_name):


 # Caches the configuration {spec_hash: configuration}
-configuration_registry = {}  # type: Dict[str, Any]
+configuration_registry: Dict[str, Any] = {}


 def make_configuration(spec, module_set_name):
@@ -27,7 +27,7 @@
 import traceback
 import types
 import warnings
-from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Type  # novm
+from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Type

 import llnl.util.filesystem as fsys
 import llnl.util.tty as tty
@@ -548,7 +548,7 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):

     #: Keep -Werror flags, matches config:flags:keep_werror to override config
     # NOTE: should be type Optional[Literal['all', 'specific', 'none']] in 3.8+
-    keep_werror = None  # type: Optional[str]
+    keep_werror: Optional[str] = None

     #: Most packages are NOT extendable. Set to True if you want extensions.
     extendable = False
@@ -564,17 +564,17 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
     #: for it. Note: accepts both file names and directory names, for example
     #: ``["libcuda.so", "stubs"]`` will ensure libcuda.so and all libraries in the
     #: stubs directory are not bound by path."""
-    non_bindable_shared_objects = []  # type: List[str]
+    non_bindable_shared_objects: List[str] = []

     #: List of prefix-relative file paths (or a single path). If these do
     #: not exist after install, or if they exist but are not files,
     #: sanity checks fail.
-    sanity_check_is_file = []  # type: List[str]
+    sanity_check_is_file: List[str] = []

     #: List of prefix-relative directory paths (or a single path). If
     #: these do not exist after install, or if they exist but are not
     #: directories, sanity checks will fail.
-    sanity_check_is_dir = []  # type: List[str]
+    sanity_check_is_dir: List[str] = []

     #: Boolean. Set to ``True`` for packages that require a manual download.
     #: This is currently used by package sanity tests and generation of a
@@ -582,7 +582,7 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
     manual_download = False

     #: Set of additional options used when fetching package versions.
-    fetch_options = {}  # type: Dict[str, Any]
+    fetch_options: Dict[str, Any] = {}

     #
     # Set default licensing information
@@ -600,12 +600,12 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
     #: looking for a license. All file paths must be relative to the
     #: installation directory. More complex packages like Intel may require
     #: multiple licenses for individual components. Defaults to the empty list.
-    license_files = []  # type: List[str]
+    license_files: List[str] = []

     #: List of strings. Environment variables that can be set to tell the
     #: software where to look for a license if it is not in the usual location.
     #: Defaults to the empty list.
-    license_vars = []  # type: List[str]
+    license_vars: List[str] = []

     #: String. A URL pointing to license setup instructions for the software.
     #: Defaults to the empty string.
@@ -618,17 +618,17 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
     _patches_by_hash = None

     #: Package homepage where users can find more information about the package
-    homepage = None  # type: str
+    homepage: Optional[str] = None

     #: Default list URL (place to find available versions)
-    list_url = None  # type: str
+    list_url: Optional[str] = None

     #: Link depth to which list_url should be searched for new versions
     list_depth = 0

     #: List of strings which contains GitHub usernames of package maintainers.
     #: Do not include @ here in order not to unnecessarily ping the users.
-    maintainers = []  # type: List[str]
+    maintainers: List[str] = []

     #: List of attributes to be excluded from a package's hash.
     metadata_attrs = [
@@ -2073,24 +2073,21 @@ def build_log_path(self):
         return self.install_log_path if self.spec.installed else self.log_path

     @classmethod
-    def inject_flags(cls, name, flags):
-        # type: (Type, str, Iterable[str]) -> FLAG_HANDLER_RETURN_TYPE
+    def inject_flags(cls: Type, name: str, flags: Iterable[str]) -> FLAG_HANDLER_RETURN_TYPE:
         """
         flag_handler that injects all flags through the compiler wrapper.
         """
         return flags, None, None

     @classmethod
-    def env_flags(cls, name, flags):
-        # type: (Type, str, Iterable[str]) -> FLAG_HANDLER_RETURN_TYPE
+    def env_flags(cls: Type, name: str, flags: Iterable[str]):
         """
         flag_handler that adds all flags to canonical environment variables.
         """
         return None, flags, None

     @classmethod
-    def build_system_flags(cls, name, flags):
-        # type: (Type, str, Iterable[str]) -> FLAG_HANDLER_RETURN_TYPE
+    def build_system_flags(cls: Type, name: str, flags: Iterable[str]) -> FLAG_HANDLER_RETURN_TYPE:
         """
         flag_handler that passes flags to the build system arguments. Any
         package using `build_system_flags` must also implement
@@ -2169,18 +2166,16 @@ def setup_dependent_package(self, module, dependent_spec):
         """
         pass

-    _flag_handler = None  # type: Optional[FLAG_HANDLER_TYPE]
+    _flag_handler: Optional[FLAG_HANDLER_TYPE] = None

     @property
-    def flag_handler(self):
-        # type: () -> FLAG_HANDLER_TYPE
+    def flag_handler(self) -> FLAG_HANDLER_TYPE:
         if self._flag_handler is None:
             self._flag_handler = PackageBase.inject_flags
         return self._flag_handler

     @flag_handler.setter
-    def flag_handler(self, var):
-        # type: (FLAG_HANDLER_TYPE) -> None
+    def flag_handler(self, var: FLAG_HANDLER_TYPE):
         self._flag_handler = var

     # The flag handler method is called for each of the allowed compiler flags.
@@ -1,174 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import itertools
import re
import shlex
import sys

import spack.error
import spack.util.path as sp


class Token(object):
    """Represents tokens; generated from input by lexer and fed to parse()."""

    __slots__ = "type", "value", "start", "end"

    def __init__(self, type, value="", start=0, end=0):
        self.type = type
        self.value = value
        self.start = start
        self.end = end

    def __repr__(self):
        return str(self)

    def __str__(self):
        return "<%d: '%s'>" % (self.type, self.value)

    def is_a(self, type):
        return self.type == type

    def __eq__(self, other):
        return (self.type == other.type) and (self.value == other.value)


class Lexer(object):
    """Base class for Lexers that keep track of line numbers."""

    __slots__ = "scanner0", "scanner1", "mode", "mode_switches_01", "mode_switches_10"

    def __init__(self, lexicon0, mode_switches_01=[], lexicon1=[], mode_switches_10=[]):
        self.scanner0 = re.Scanner(lexicon0)
        self.mode_switches_01 = mode_switches_01
        self.scanner1 = re.Scanner(lexicon1)
        self.mode_switches_10 = mode_switches_10
        self.mode = 0

    def token(self, type, value=""):
        if self.mode == 0:
            return Token(type, value, self.scanner0.match.start(0), self.scanner0.match.end(0))
        else:
            return Token(type, value, self.scanner1.match.start(0), self.scanner1.match.end(0))

    def lex_word(self, word):
        scanner = self.scanner0
        mode_switches = self.mode_switches_01
        if self.mode == 1:
            scanner = self.scanner1
            mode_switches = self.mode_switches_10

        tokens, remainder = scanner.scan(word)
        remainder_used = 0

        for i, t in enumerate(tokens):
            if t.type in mode_switches:
                # Combine post-switch tokens with remainder and
                # scan in other mode
                self.mode = 1 - self.mode  # swap 0/1
                remainder_used = 1
                tokens = tokens[: i + 1] + self.lex_word(
                    word[word.index(t.value) + len(t.value) :]
                )
                break

        if remainder and not remainder_used:
            msg = "Invalid character, '{0}',".format(remainder[0])
            msg += " in '{0}' at index {1}".format(word, word.index(remainder))
            raise LexError(msg, word, word.index(remainder))

        return tokens

    def lex(self, text):
        lexed = []
        for word in text:
            tokens = self.lex_word(word)
            lexed.extend(tokens)
        return lexed


class Parser(object):
    """Base class for simple recursive descent parsers."""

    __slots__ = "tokens", "token", "next", "lexer", "text"

    def __init__(self, lexer):
        self.tokens = iter([])  # iterators over tokens, handled in order.
        self.token = Token(None)  # last accepted token
        self.next = None  # next token
        self.lexer = lexer
        self.text = None

    def gettok(self):
        """Puts the next token in the input stream into self.next."""
        try:
            self.next = next(self.tokens)
        except StopIteration:
            self.next = None

    def push_tokens(self, iterable):
        """Adds all tokens in some iterable to the token stream."""
        self.tokens = itertools.chain(iter(iterable), iter([self.next]), self.tokens)
        self.gettok()

    def accept(self, id):
        """Put the next symbol in self.token if accepted, then call gettok()"""
        if self.next and self.next.is_a(id):
            self.token = self.next
            self.gettok()
            return True
        return False

    def next_token_error(self, message):
        """Raise an error about the next token in the stream."""
        raise ParseError(message, self.text[0], self.token.end)

    def last_token_error(self, message):
        """Raise an error about the previous token in the stream."""
        raise ParseError(message, self.text[0], self.token.start)

    def unexpected_token(self):
        self.next_token_error("Unexpected token: '%s'" % self.next.value)

    def expect(self, id):
        """Like accept(), but fails if we don't like the next token."""
        if self.accept(id):
            return True
        else:
            if self.next:
                self.unexpected_token()
            else:
                self.next_token_error("Unexpected end of input")
            sys.exit(1)

    def setup(self, text):
        if isinstance(text, str):
            # shlex does not handle Windows path
            # separators, so we must normalize to posix
            text = sp.convert_to_posix_path(text)
            text = shlex.split(str(text))
        self.text = text
        self.push_tokens(self.lexer.lex(text))

    def parse(self, text):
        self.setup(text)
        return self.do_parse()


class ParseError(spack.error.SpackError):
    """Raised when we don't hit an error while parsing."""

    def __init__(self, message, string, pos):
        super(ParseError, self).__init__(message)
        self.string = string
        self.pos = pos


class LexError(ParseError):
    """Raised when we don't know how to lex something."""

    def __init__(self, message, string, pos):
        super(LexError, self).__init__(message, string, pos)
522
lib/spack/spack/parser.py
Normal file
522
lib/spack/spack/parser.py
Normal file
@@ -0,0 +1,522 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Parser for spec literals

Here is the EBNF grammar for a spec::

    spec          = [name] [node_options] { ^ node } |
                    [name] [node_options] hash |
                    filename

    node          = name [node_options] |
                    [name] [node_options] hash |
                    filename

    node_options  = [@(version_list|version_pair)] [%compiler] { variant }

    hash          = / id
    filename      = (.|/|[a-zA-Z0-9-_]*/)([a-zA-Z0-9-_./]*)(.json|.yaml)

    name          = id | namespace id
    namespace     = { id . }

    variant       = bool_variant | key_value | propagated_bv | propagated_kv
    bool_variant  = +id | ~id | -id
    propagated_bv = ++id | ~~id | --id
    key_value     = id=id | id=quoted_id
    propagated_kv = id==id | id==quoted_id

    compiler      = id [@version_list]

    version_pair  = git_version=vid
    version_list  = (version|version_range) [ { , (version|version_range)} ]
    version_range = vid:vid | vid: | :vid | :
    version       = vid

    git_version   = git.(vid) | git_hash
    git_hash      = [A-Fa-f0-9]{40}

    quoted_id     = " id_with_ws " | ' id_with_ws '
    id_with_ws    = [a-zA-Z0-9_][a-zA-Z_0-9-.\\s]*
    vid           = [a-zA-Z0-9_][a-zA-Z_0-9-.]*
    id            = [a-zA-Z0-9_][a-zA-Z_0-9-]*

Identifiers using the <name>=<value> syntax, such as architectures and
compiler flags, require a space before the name.

There is one context-sensitive part: ids in versions may contain '.', while
other ids may not.

There is one ambiguity: since '-' is allowed in an id, you need to put
whitespace before -variant for it to be tokenized properly. You can either
use whitespace, or you can just use ~variant, since it means the same thing.
Spack uses ~variant in directory names and in the canonical form of specs to
avoid ambiguity. Both are provided because ~ can cause shell expansion when
it is the first character in an id typed on the command line.
"""
import enum
import pathlib
import re
from typing import Iterator, List, Match, Optional

from llnl.util.tty import color

import spack.error
import spack.spec
import spack.variant
import spack.version

#: Valid name for specs and variants. Here we are not using
#: the previous "\w[\w.-]*" since that would match most
#: characters that can be part of a word in any language
IDENTIFIER = r"([a-zA-Z_0-9][a-zA-Z_0-9\-]*)"
DOTTED_IDENTIFIER = rf"({IDENTIFIER}(\.{IDENTIFIER})+)"
GIT_HASH = r"([A-Fa-f0-9]{40})"
GIT_VERSION = rf"((git\.({DOTTED_IDENTIFIER}|{IDENTIFIER}))|({GIT_HASH}))"

NAME = r"[a-zA-Z_0-9][a-zA-Z_0-9\-.]*"

HASH = r"[a-zA-Z_0-9]+"

#: A filename starts either with a "." or a "/" or a "{name}/"
FILENAME = r"(\.|\/|[a-zA-Z0-9-_]*\/)([a-zA-Z0-9-_\.\/]*)(\.json|\.yaml)"

VALUE = r"([a-zA-Z_0-9\-+\*.,:=\~\/\\]+)"
QUOTED_VALUE = r"[\"']+([a-zA-Z_0-9\-+\*.,:=\~\/\\\s]+)[\"']+"

VERSION = r"([a-zA-Z0-9_][a-zA-Z_0-9\-\.]*\b)"
VERSION_RANGE = rf"({VERSION}\s*:\s*{VERSION}(?!\s*=)|:\s*{VERSION}(?!\s*=)|{VERSION}\s*:|:)"
VERSION_LIST = rf"({VERSION_RANGE}|{VERSION})(\s*[,]\s*({VERSION_RANGE}|{VERSION}))*"

class TokenBase(enum.Enum):
    """Base class for an enum type with a regex value"""

    def __new__(cls, *args, **kwargs):
        # See
        value = len(cls.__members__) + 1
        obj = object.__new__(cls)
        obj._value_ = value
        return obj

    def __init__(self, regex):
        self.regex = regex

    def __str__(self):
        return f"{self._name_}"


class TokenType(TokenBase):
    """Enumeration of the different token kinds in the spec grammar.

    Order of declaration is extremely important, since text containing specs is parsed with a
    single regex obtained by ``"|".join(...)`` of all the regex in the order of declaration.
    """

    # Dependency
    DEPENDENCY = r"(\^)"
    # Version
    VERSION_HASH_PAIR = rf"(@({GIT_VERSION})=({VERSION}))"
    VERSION = rf"(@\s*({VERSION_LIST}))"
    # Variants
    PROPAGATED_BOOL_VARIANT = rf"((\+\+|~~|--)\s*{NAME})"
    BOOL_VARIANT = rf"([~+-]\s*{NAME})"
    PROPAGATED_KEY_VALUE_PAIR = rf"({NAME}\s*==\s*({VALUE}|{QUOTED_VALUE}))"
    KEY_VALUE_PAIR = rf"({NAME}\s*=\s*({VALUE}|{QUOTED_VALUE}))"
    # Compilers
    COMPILER_AND_VERSION = rf"(%\s*({NAME})([\s]*)@\s*({VERSION_LIST}))"
    COMPILER = rf"(%\s*({NAME}))"
    # FILENAME
    FILENAME = rf"({FILENAME})"
    # Package name
    FULLY_QUALIFIED_PACKAGE_NAME = rf"({DOTTED_IDENTIFIER})"
    UNQUALIFIED_PACKAGE_NAME = rf"({IDENTIFIER})"
    # DAG hash
    DAG_HASH = rf"(/({HASH}))"
    # White spaces
    WS = r"(\s+)"


class ErrorTokenType(TokenBase):
    """Enum with regexes for error analysis"""

    # Unexpected character
    UNEXPECTED = r"(.[\s]*)"


class Token:
    """Represents tokens; generated from input by lexer and fed to parse()."""

    __slots__ = "kind", "value", "start", "end"

    def __init__(
        self, kind: TokenType, value: str, start: Optional[int] = None, end: Optional[int] = None
    ):
        self.kind = kind
        self.value = value
        self.start = start
        self.end = end

    def __repr__(self):
        return str(self)

    def __str__(self):
        return f"({self.kind}, {self.value})"

    def __eq__(self, other):
        return (self.kind == other.kind) and (self.value == other.value)


#: List of all the regexes used to match spec parts, in order of precedence
TOKEN_REGEXES = [rf"(?P<{token}>{token.regex})" for token in TokenType]
#: List of all valid regexes followed by error analysis regexes
ERROR_HANDLING_REGEXES = TOKEN_REGEXES + [
    rf"(?P<{token}>{token.regex})" for token in ErrorTokenType
]
#: Regex to scan a valid text
ALL_TOKENS = re.compile("|".join(TOKEN_REGEXES))
#: Regex to analyze an invalid text
ANALYSIS_REGEX = re.compile("|".join(ERROR_HANDLING_REGEXES))

def tokenize(text: str) -> Iterator[Token]:
    """Return a token generator from the text passed as input.

    Raises:
        SpecTokenizationError: if we can't tokenize anymore, but didn't reach the
            end of the input text.
    """
    scanner = ALL_TOKENS.scanner(text)  # type: ignore[attr-defined]
    match: Optional[Match] = None
    for match in iter(scanner.match, None):
        yield Token(
            TokenType.__members__[match.lastgroup],  # type: ignore[attr-defined]
            match.group(),  # type: ignore[attr-defined]
            match.start(),  # type: ignore[attr-defined]
            match.end(),  # type: ignore[attr-defined]
        )

    if match is None and not text:
        # We just got an empty string
        return

    if match is None or match.end() != len(text):
        scanner = ANALYSIS_REGEX.scanner(text)  # type: ignore[attr-defined]
        matches = [m for m in iter(scanner.match, None)]  # type: ignore[var-annotated]
        raise SpecTokenizationError(matches, text)

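# A standalone sketch of the named-group scanner technique used by tokenize()
# above, on a toy token set (illustration only, not part of this module;
# declaration order resolves overlaps exactly as in TokenType):
import re as _re

_TOY_TOKENS = {
    "VERSION": r"@\s*[a-zA-Z0-9_][a-zA-Z0-9_.\-]*",
    "BOOL_VARIANT": r"[~+-]\s*[a-zA-Z0-9_][a-zA-Z0-9_.\-]*",
    "COMPILER": r"%\s*[a-zA-Z0-9_][a-zA-Z0-9_.\-]*",
    "NAME": r"[a-zA-Z0-9_][a-zA-Z0-9_\-]*",
}
_TOY_UNION = _re.compile("|".join(rf"(?P<{kind}>{rx})" for kind, rx in _TOY_TOKENS.items()))

_scanner = _TOY_UNION.scanner("zlib@1.2.11+shared%gcc")
print([(m.lastgroup, m.group()) for m in iter(_scanner.match, None)])
# [('NAME', 'zlib'), ('VERSION', '@1.2.11'), ('BOOL_VARIANT', '+shared'), ('COMPILER', '%gcc')]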
class TokenContext:
    """Token context passed around by parsers"""

    __slots__ = "token_stream", "current_token", "next_token"

    def __init__(self, token_stream: Iterator[Token]):
        self.token_stream = token_stream
        self.current_token = None
        self.next_token = None
        self.advance()

    def advance(self):
        """Advance one token"""
        self.current_token, self.next_token = self.next_token, next(self.token_stream, None)

    def accept(self, kind: TokenType):
        """If the next token is of the specified kind, advance the stream and return True.
        Otherwise return False.
        """
        if self.next_token and self.next_token.kind == kind:
            self.advance()
            return True
        return False

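# The class above is a generic one-token-lookahead cursor; a minimal,
# self-contained sketch of the same idea (illustration only):
class _Cursor:
    def __init__(self, iterable):
        self._it = iter(iterable)
        self.current, self.next = None, None
        self.advance()

    def advance(self):
        # Commit the lookahead and pull the next item (None at end of stream)
        self.current, self.next = self.next, next(self._it, None)

    def accept(self, predicate):
        # Advance only when the lookahead matches, mirroring TokenContext.accept
        if self.next is not None and predicate(self.next):
            self.advance()
            return True
        return False


_c = _Cursor([1, 2, 3])
assert _c.accept(lambda x: x == 1) and _c.current == 1
assert not _c.accept(lambda x: x == 9) and _c.next == 2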
class SpecParser:
    """Parse text into specs"""

    __slots__ = "literal_str", "ctx"

    def __init__(self, literal_str: str):
        self.literal_str = literal_str
        self.ctx = TokenContext(filter(lambda x: x.kind != TokenType.WS, tokenize(literal_str)))

    def tokens(self) -> List[Token]:
        """Return the entire list of tokens from the initial text. Whitespace is
        filtered out.
        """
        return list(filter(lambda x: x.kind != TokenType.WS, tokenize(self.literal_str)))

    def next_spec(self, initial_spec: Optional[spack.spec.Spec] = None) -> spack.spec.Spec:
        """Return the next spec parsed from text.

        Args:
            initial_spec: object into which to parse the spec. If None a new one
                will be created.

        Returns:
            The spec that was parsed
        """
        initial_spec = initial_spec or spack.spec.Spec()
        root_spec = SpecNodeParser(self.ctx).parse(initial_spec)
        while True:
            if self.ctx.accept(TokenType.DEPENDENCY):
                dependency = SpecNodeParser(self.ctx).parse(spack.spec.Spec())

                if dependency == spack.spec.Spec():
                    msg = (
                        "this dependency sigil needs to be followed by a package name "
                        "or a node attribute (version, variant, etc.)"
                    )
                    raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)

                if root_spec.concrete:
                    raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))

                root_spec._add_dependency(dependency, ())

            else:
                break

        return root_spec

    def all_specs(self) -> List[spack.spec.Spec]:
        """Return all the specs that remain to be parsed"""
        return list(iter(self.next_spec, spack.spec.Spec()))

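# Illustrative driving of the class above (hypothetical spec strings; assumes
# an importable Spack):
#
#     parser = SpecParser("zlib@1.2.11 ^cmake hdf5+mpi")
#     first = parser.next_spec()   # zlib@1.2.11 with a cmake dependency
#     rest = parser.all_specs()    # the remaining [hdf5+mpi]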
class SpecNodeParser:
    """Parse a single spec node from a stream of tokens"""

    __slots__ = "ctx", "has_compiler", "has_version", "has_hash"

    def __init__(self, ctx):
        self.ctx = ctx
        self.has_compiler = False
        self.has_version = False
        self.has_hash = False

    def parse(self, initial_spec: spack.spec.Spec) -> spack.spec.Spec:
        """Parse a single spec node from a stream of tokens

        Args:
            initial_spec: object to be constructed

        Returns:
            The object passed as argument
        """
        import spack.environment  # Needed to retrieve specs by hash

        # If we start with a package name we have a named spec, and we cannot
        # accept another package name afterwards in the same node
        if self.ctx.accept(TokenType.UNQUALIFIED_PACKAGE_NAME):
            initial_spec.name = self.ctx.current_token.value
        elif self.ctx.accept(TokenType.FULLY_QUALIFIED_PACKAGE_NAME):
            parts = self.ctx.current_token.value.split(".")
            name = parts[-1]
            namespace = ".".join(parts[:-1])
            initial_spec.name = name
            initial_spec.namespace = namespace
        elif self.ctx.accept(TokenType.FILENAME):
            return FileParser(self.ctx).parse(initial_spec)

        while True:
            if self.ctx.accept(TokenType.COMPILER):
                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                if self.has_compiler:
                    raise spack.spec.DuplicateCompilerSpecError(
                        f"{initial_spec} cannot have multiple compilers"
                    )

                compiler_name = self.ctx.current_token.value[1:]
                initial_spec.compiler = spack.spec.CompilerSpec(compiler_name.strip(), ":")
                self.has_compiler = True
            elif self.ctx.accept(TokenType.COMPILER_AND_VERSION):
                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                if self.has_compiler:
                    raise spack.spec.DuplicateCompilerSpecError(
                        f"{initial_spec} cannot have multiple compilers"
                    )

                compiler_name, compiler_version = self.ctx.current_token.value[1:].split("@")
                initial_spec.compiler = spack.spec.CompilerSpec(
                    compiler_name.strip(), compiler_version
                )
                self.has_compiler = True
            elif self.ctx.accept(TokenType.VERSION) or self.ctx.accept(
                TokenType.VERSION_HASH_PAIR
            ):
                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                if self.has_version:
                    raise spack.spec.MultipleVersionError(
                        f"{initial_spec} cannot have multiple versions"
                    )

                version_list = spack.version.VersionList()
                version_list.add(spack.version.from_string(self.ctx.current_token.value[1:]))
                initial_spec.versions = version_list

                # Add a git lookup method for GitVersions
                if (
                    initial_spec.name
                    and initial_spec.versions.concrete
                    and isinstance(initial_spec.version, spack.version.GitVersion)
                ):
                    initial_spec.version.generate_git_lookup(initial_spec.fullname)

                self.has_version = True
            elif self.ctx.accept(TokenType.BOOL_VARIANT):
                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                variant_value = self.ctx.current_token.value[0] == "+"
                initial_spec._add_flag(
                    self.ctx.current_token.value[1:].strip(), variant_value, propagate=False
                )
            elif self.ctx.accept(TokenType.PROPAGATED_BOOL_VARIANT):
                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                variant_value = self.ctx.current_token.value[0:2] == "++"
                initial_spec._add_flag(
                    self.ctx.current_token.value[2:].strip(), variant_value, propagate=True
                )
            elif self.ctx.accept(TokenType.KEY_VALUE_PAIR):
                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                name, value = self.ctx.current_token.value.split("=", maxsplit=1)
                name = name.strip("'\" ")
                value = value.strip("'\" ")
                initial_spec._add_flag(name, value, propagate=False)
            elif self.ctx.accept(TokenType.PROPAGATED_KEY_VALUE_PAIR):
                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                name, value = self.ctx.current_token.value.split("==", maxsplit=1)
                name = name.strip("'\" ")
                value = value.strip("'\" ")
                initial_spec._add_flag(name, value, propagate=True)
            elif not self.has_hash and self.ctx.accept(TokenType.DAG_HASH):
                dag_hash = self.ctx.current_token.value[1:]
                matches = []
                if spack.environment.active_environment():
                    matches = spack.environment.active_environment().get_by_hash(dag_hash)
                if not matches:
                    matches = spack.store.db.get_by_hash(dag_hash)
                if not matches:
                    raise spack.spec.NoSuchHashError(dag_hash)

                if len(matches) != 1:
                    raise spack.spec.AmbiguousHashError(
                        f"Multiple packages specify hash beginning '{dag_hash}'.", *matches
                    )
                spec_by_hash = matches[0]
                if not spec_by_hash.satisfies(initial_spec):
                    raise spack.spec.InvalidHashError(initial_spec, spec_by_hash.dag_hash())
                initial_spec._dup(spec_by_hash)

                self.has_hash = True
            else:
                break

        return initial_spec

    def hash_not_parsed_or_raise(self, spec, addition):
        if not self.has_hash:
            return

        raise spack.spec.RedundantSpecError(spec, addition)

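# What the node parser attaches to a single node, illustratively (hypothetical
# spec string and attribute values; assumes an importable Spack):
#
#     spec = parse_one_or_raise("mpileaks@2.3 +debug~shared cflags=-O3 %gcc@9.4")
#     spec.versions            -> [2.3]
#     spec.variants["debug"]   -> +debug (and ~shared, both with propagate=False)
#     spec.compiler            -> gcc@9.4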
class FileParser:
    """Parse a single spec from a JSON or YAML file"""

    __slots__ = ("ctx",)

    def __init__(self, ctx):
        self.ctx = ctx

    def parse(self, initial_spec: spack.spec.Spec) -> spack.spec.Spec:
        """Parse a spec tree from a specfile.

        Args:
            initial_spec: object into which to parse the spec

        Returns:
            The initial_spec passed as argument, once constructed
        """
        file = pathlib.Path(self.ctx.current_token.value)

        if not file.exists():
            raise spack.spec.NoSuchSpecFileError(f"No such spec file: '{file}'")

        with file.open("r", encoding="utf-8") as stream:
            if str(file).endswith(".json"):
                spec_from_file = spack.spec.Spec.from_json(stream)
            else:
                spec_from_file = spack.spec.Spec.from_yaml(stream)
        initial_spec._dup(spec_from_file)
        return initial_spec

def parse(text: str) -> List[spack.spec.Spec]:
    """Parse text into a list of specs

    Args:
        text (str): text to be parsed

    Returns:
        List of specs
    """
    return SpecParser(text).all_specs()


def parse_one_or_raise(
    text: str, initial_spec: Optional[spack.spec.Spec] = None
) -> spack.spec.Spec:
    """Parse exactly one spec from text and return it, or raise

    Args:
        text (str): text to be parsed
        initial_spec: buffer into which to parse the spec. If None a new one will be created.
    """
    stripped_text = text.strip()
    parser = SpecParser(stripped_text)
    result = parser.next_spec(initial_spec)
    last_token = parser.ctx.current_token

    if last_token is not None and last_token.end != len(stripped_text):
        message = "a single spec was requested, but parsed more than one:"
        message += f"\n{text}"
        if last_token is not None:
            underline = f"\n{' ' * last_token.end}{'^' * (len(text) - last_token.end)}"
            message += color.colorize(f"@*r{{{underline}}}")
        raise ValueError(message)

    return result

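# The two public entry points, illustratively (hypothetical inputs; assumes an
# importable Spack):
#
#     parse("zlib@1.2.11 hdf5+mpi ^mpich")  -> [zlib@1.2.11, hdf5+mpi ^mpich]
#     parse_one_or_raise("zlib hdf5")       -> raises ValueError (two specs parsed)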
class SpecSyntaxError(Exception):
    """Base class for Spec syntax errors"""


class SpecTokenizationError(SpecSyntaxError):
    """Syntax error in a spec string"""

    def __init__(self, matches, text):
        message = "unexpected tokens in the spec string\n"
        message += f"{text}"

        underline = "\n"
        for match in matches:
            if match.lastgroup == str(ErrorTokenType.UNEXPECTED):
                underline += f"{'^' * (match.end() - match.start())}"
                continue
            underline += f"{' ' * (match.end() - match.start())}"

        message += color.colorize(f"@*r{{{underline}}}")
        super().__init__(message)


class SpecParsingError(SpecSyntaxError):
    """Error when parsing tokens"""

    def __init__(self, message, token, text):
        message += f"\n{text}"
        underline = f"\n{' ' * token.start}{'^' * (token.end - token.start)}"
        message += color.colorize(f"@*r{{{underline}}}")
        super().__init__(message)
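Both error classes above render a caret underline beneath the offending span;
a minimal standalone sketch of that rendering, without the color markup, is:

    def underline_span(text: str, start: int, end: int) -> str:
        """Return ``text`` with carets under the half-open span [start, end)."""
        return f"{text}\n{' ' * start}{'^' * (end - start)}"

    print(underline_span("zlib@1.2.11 ^^cmake", 12, 14))
    # zlib@1.2.11 ^^cmake
    #             ^^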
@@ -2,6 +2,8 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from typing import Optional
+
 import llnl.util.lang

 import spack.error
@@ -37,18 +39,18 @@ class attributes such as priority, front_target, back_target, front_os, back_os.
     """

     # Subclass sets number. Controls detection order
-    priority = None  # type: int
+    priority: Optional[int] = None

     #: binary formats used on this platform; used by relocation logic
     binary_formats = ["elf"]

-    front_end = None  # type: str
-    back_end = None  # type: str
-    default = None  # type: str  # The default back end target.
+    front_end: Optional[str] = None
+    back_end: Optional[str] = None
+    default: Optional[str] = None  # The default back end target.

-    front_os = None  # type: str
-    back_os = None  # type: str
-    default_os = None  # type: str
+    front_os: Optional[str] = None
+    back_os: Optional[str] = None
+    default_os: Optional[str] = None

     reserved_targets = ["default_target", "frontend", "fe", "backend", "be"]
     reserved_oss = ["default_os", "frontend", "fe", "backend", "be"]
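This hunk and several below apply the same mechanical typing migration; a
self-contained sketch of the before/after shapes (toy names):

    from typing import Dict, Optional

    legacy_cache = {}  # type: Dict[str, int]   # before: comment-style hint

    cache: Dict[str, int] = {}                  # after: PEP 526 annotation

    def lookup(key: str) -> Optional[int]:      # after: PEP 3107 signature
        return cache.get(key)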
@@ -18,7 +18,6 @@
 from llnl.util.lang import memoized
 from llnl.util.symlink import symlink

-import spack.bootstrap
 import spack.paths
 import spack.platforms
 import spack.repo
@@ -92,6 +91,8 @@ def __init__(self, old, new, full_old_string):
 @memoized
 def _patchelf():
     """Return the full path to the patchelf binary, if available, else None."""
+    import spack.bootstrap
+
     if is_macos:
         return None
@@ -9,7 +9,7 @@
 import errno
 import functools
 import importlib
-import importlib.machinery  # novm
+import importlib.machinery
 import importlib.util
 import inspect
 import itertools
@@ -24,7 +24,7 @@
 import traceback
 import types
 import uuid
-from typing import Dict  # novm
+from typing import Dict

 import ruamel.yaml as yaml

@@ -79,7 +79,7 @@ def namespace_from_fullname(fullname):
     return namespace


-class _PrependFileLoader(importlib.machinery.SourceFileLoader):  # novm
+class _PrependFileLoader(importlib.machinery.SourceFileLoader):
     def __init__(self, fullname, path, prepend=None):
         super(_PrependFileLoader, self).__init__(fullname, path)
         self.prepend = prepend
@@ -144,7 +144,7 @@ def find_spec(self, fullname, python_path, target=None):
         loader = self.compute_loader(fullname)
         if loader is None:
             return None
-        return importlib.util.spec_from_loader(fullname, loader)  # novm
+        return importlib.util.spec_from_loader(fullname, loader)

     def compute_loader(self, fullname):
         # namespaces are added to repo, and package modules are leaves.
@@ -366,7 +366,7 @@ class FastPackageChecker(collections.abc.Mapping):
     """

     #: Global cache, reused by every instance
-    _paths_cache = {}  # type: Dict[str, Dict[str, os.stat_result]]
+    _paths_cache: Dict[str, Dict[str, os.stat_result]] = {}

     def __init__(self, packages_path):
         # The path of the repository managed by this instance
@@ -384,7 +384,7 @@ def invalidate(self):
         self._paths_cache[self.packages_path] = self._create_new_cache()
         self._packages_to_stats = self._paths_cache[self.packages_path]

-    def _create_new_cache(self):  # type: () -> Dict[str, os.stat_result]
+    def _create_new_cache(self) -> Dict[str, os.stat_result]:
         """Create a new cache for packages in a repo.

         The implementation here should try to minimize filesystem
@@ -394,7 +394,7 @@ def _create_new_cache(self):  # type: () -> Dict[str, os.stat_result]
         """
         # Create a dictionary that will store the mapping between a
         # package name and its stat info
-        cache = {}  # type: Dict[str, os.stat_result]
+        cache: Dict[str, os.stat_result] = {}
         for pkg_name in os.listdir(self.packages_path):
             # Skip non-directories in the package root.
             pkg_dir = os.path.join(self.packages_path, pkg_name)
@@ -70,20 +70,6 @@ def s3_open(self, req):
         url, headers, stream = _s3_open(orig_url)
         return urllib.response.addinfourl(stream, headers, url)
     except ClientError as err:
-        # if no such [KEY], but [KEY]/index.html exists,
-        # return that, instead.
-        if err.response["Error"]["Code"] == "NoSuchKey":
-            try:
-                _, headers, stream = _s3_open(url_util.join(orig_url, "index.html"))
-                return urllib.response.addinfourl(stream, headers, orig_url)
-
-            except ClientError as err2:
-                if err.response["Error"]["Code"] == "NoSuchKey":
-                    # raise original error
-                    raise urllib.error.URLError(err) from err
-
-                raise urllib.error.URLError(err2) from err2
-
         raise urllib.error.URLError(err) from err
@@ -8,14 +8,14 @@
 import llnl.util.lang
 import llnl.util.tty

-import spack.spec
-

 # jsonschema is imported lazily as it is heavy to import
 # and increases the start-up time
 def _make_validator():
     import jsonschema

+    import spack.parser
+
     def _validate_spec(validator, is_spec, instance, schema):
         """Check if the attributes on instance are valid specs."""
         import jsonschema
@@ -25,11 +25,9 @@ def _validate_spec(validator, is_spec, instance, schema):

         for spec_str in instance:
             try:
-                spack.spec.parse(spec_str)
-            except spack.spec.SpecParseError as e:
-                yield jsonschema.ValidationError(
-                    '"{0}" is an invalid spec [{1}]'.format(spec_str, str(e))
-                )
+                spack.parser.parse(spec_str)
+            except spack.parser.SpecSyntaxError as e:
+                yield jsonschema.ValidationError(str(e))

     def _deprecated_properties(validator, deprecated, instance, schema):
         if not (validator.is_type(instance, "object") or validator.is_type(instance, "array")):
@@ -30,7 +30,6 @@
 import spack
 import spack.binary_distribution
-import spack.bootstrap
 import spack.cmd
 import spack.compilers
 import spack.config
@@ -541,8 +540,10 @@ def bootstrap_clingo():
     global clingo, ASTType, parse_files

     if not clingo:
+        import spack.bootstrap
+
         with spack.bootstrap.ensure_bootstrap_configuration():
-            spack.bootstrap.ensure_clingo_importable_or_raise()
+            spack.bootstrap.ensure_core_dependencies()
             import clingo

     from clingo.ast import ASTType
@@ -1974,11 +1975,6 @@ def _facts_from_concrete_spec(self, spec, possible):
         h = spec.dag_hash()
         if spec.name in possible and h not in self.seen_hashes:
             self.reusable_and_possible[h] = spec
-            try:
-                # Only consider installed packages for repo we know
-                spack.repo.path.get(spec)
-            except (spack.repo.UnknownNamespaceError, spack.repo.UnknownPackageError):
-                return
-
             # this indicates that there is a spec like this installed
             self.gen.fact(fn.installed_hash(spec.name, h))
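# The recurring move in the two bootstrap hunks above: defer a heavy import
# into the one function that needs it, so importing the module stays cheap.
# Self-contained sketch with a stand-in dependency:
def _bootstrap_sketch():
    import json  # stand-in for a heavy dependency such as spack.bootstrap

    return json.dumps({"bootstrapped": True})

print(_bootstrap_sketch())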
@@ -2268,48 +2264,41 @@ def reorder_flags(self):
         The solver determines which flags are on nodes; this routine
         imposes order afterwards.
         """
-        # nodes with no flags get flag order from compiler
         compilers = dict((c.spec, c) for c in all_compilers_in_config())
-        for pkg in self._flag_compiler_defaults:
-            spec = self._specs[pkg]
-            compiler_flags = compilers[spec.compiler].flags
-            for key in spec.compiler_flags:
-                spec_compiler_flags_set = set(spec.compiler_flags.get(key, []))
-                compiler_flags_set = set(compiler_flags.get(key, []))
-
-                assert spec_compiler_flags_set == compiler_flags_set, "%s does not equal %s" % (
-                    spec_compiler_flags_set,
-                    compiler_flags_set,
-                )
-
-                spec.compiler_flags[key] = compiler_flags.get(key, [])
         # index of all specs (and deps) from the command line by name
         cmd_specs = dict((s.name, s) for spec in self._command_line_specs for s in spec.traverse())

-        # iterate through specs with specified flags
-        for key, sources in self._flag_sources.items():
-            pkg, flag_type = key
-            spec = self._specs[pkg]
-            compiler_flags = spec.compiler_flags.get(flag_type, [])
+        for spec in self._specs.values():
+            # if bootstrapping, compiler is not in config and has no flags
+            flagmap_from_compiler = {}
+            if spec.compiler in compilers:
+                flagmap_from_compiler = compilers[spec.compiler].flags

-            # order is determined by the DAG. A spec's flags come after
-            # any from its ancestors on the compile line.
-            order = [s.name for s in spec.traverse(order="post", direction="parents")]
+            for flag_type in spec.compiler_flags.valid_compiler_flags():
+                from_compiler = flagmap_from_compiler.get(flag_type, [])
+                from_sources = []

-            # sort the sources in our DAG order
-            sorted_sources = sorted(sources, key=lambda s: order.index(s))
+                # order is determined by the DAG. A spec's flags come after any of its ancestors
+                # on the compile line
+                source_key = (spec.name, flag_type)
+                if source_key in self._flag_sources:
+                    order = [s.name for s in spec.traverse(order="post", direction="parents")]
+                    sorted_sources = sorted(
+                        self._flag_sources[source_key], key=lambda s: order.index(s)
+                    )

-            # add flags from each source, lowest to highest precedence
-            flags = []
-            for source_name in sorted_sources:
-                source = cmd_specs[source_name]
-                extend_flag_list(flags, source.compiler_flags.get(flag_type, []))
+                    # add flags from each source, lowest to highest precedence
+                    for source_name in sorted_sources:
+                        source = cmd_specs[source_name]
+                        extend_flag_list(from_sources, source.compiler_flags.get(flag_type, []))

-            assert set(compiler_flags) == set(flags), "%s does not equal %s" % (
-                set(compiler_flags),
-                set(flags),
-            )
-            spec.compiler_flags.update({flag_type: source.compiler_flags[flag_type]})
+                # compiler flags from compilers config are lowest precedence
+                ordered_compiler_flags = from_compiler + from_sources
+                compiler_flags = spec.compiler_flags.get(flag_type, [])
+
+                msg = "%s does not equal %s" % (set(compiler_flags), set(ordered_compiler_flags))
+                assert set(compiler_flags) == set(ordered_compiler_flags), msg
+
+                spec.compiler_flags.update({flag_type: ordered_compiler_flags})

     def deprecated(self, pkg, version):
         msg = 'using "{0}@{1}" which is a deprecated version'
@@ -16,12 +16,12 @@
 literal_not_solved(ID) :- not literal_solved(ID), literal(ID).

 % If concretize_everything() is a fact, then we cannot have unsolved specs
-:- literal_not_solved(ID), concretize_everything.
+:- literal_not_solved(ID), concretize_everything, internal_error("spec unsolved").

 % Make a problem with "zero literals solved" unsat. This is to trigger
 % looking for solutions to the ASP problem with "errors", which results
 % in better reporting for users. See #30669 for details.
-1 { literal_solved(ID) : literal(ID) }.
+1 { literal_solved(ID) : literal(ID) } :- internal_error("nothing installed").

 opt_criterion(300, "number of input specs not concretized").
 #minimize{ 0@300: #true }.
@@ -126,7 +126,8 @@ possible_version_weight(Package, Weight)

 1 { version_weight(Package, Weight) : version_declared(Package, Version, Weight) } 1
   :- attr("version", Package, Version),
-     attr("node", Package).
+     attr("node", Package),
+     internal_error("cannot find unique version weight").

 % node_version_satisfies implies that exactly one of the satisfying versions
 % is the package's version, and vice versa.
@@ -140,7 +141,8 @@ possible_version_weight(Package, Weight)
 % bound on the choice rule to avoid false positives with the error below
 1 { attr("version", Package, Version) : version_satisfies(Package, Constraint, Version) }
   :- attr("node_version_satisfies", Package, Constraint),
-     version_satisfies(Package, Constraint, _).
+     version_satisfies(Package, Constraint, _),
+     internal_error("cannot find a satisfying version").

 % More specific error message if the version cannot satisfy some constraint
 % Otherwise covered by `no_version_error` and `versions_conflict_error`.
@@ -996,7 +998,8 @@ node_flag_inherited(Dependency, FlagType, Flag)
 % Ensure propagation
 :- node_flag_inherited(Package, FlagType, Flag),
    can_inherit_flags(Package, Dependency, FlagType),
-   attr("node_flag_propagate", Package, FlagType).
+   attr("node_flag_propagate", Package, FlagType),
+   internal_error("failed flag propagation").

 error(2, "{0} and {1} cannot both propagate compiler flags '{2}' to {3}", Source1, Source2, Package, FlagType) :-
   depends_on(Source1, Package),
@@ -1049,12 +1052,12 @@ attr("no_flags", Package, FlagType)
   :- attr("node", Package), internal_error("Package must resolve to at most one hash").

 % you can't choose an installed hash for a dev spec
-:- attr("hash", Package, Hash), attr("variant_value", Package, "dev_path", _).
+:- attr("hash", Package, Hash), attr("variant_value", Package, "dev_path", _), internal_error("reused dev package").

 % You can't install a hash, if it is not installed
-:- attr("hash", Package, Hash), not installed_hash(Package, Hash).
+:- attr("hash", Package, Hash), not installed_hash(Package, Hash), internal_error("Package resolved to invalid hash").
 % This should be redundant given the constraint above
-:- attr("hash", Package, Hash1), attr("hash", Package, Hash2), Hash1 != Hash2.
+:- attr("hash", Package, Hash1), attr("hash", Package, Hash2), Hash1 != Hash2, internal_error("Package resolved to multiple hashes").

 % if a hash is selected, we impose all the constraints that implies
 impose(Hash) :- attr("hash", Package, Hash).
@@ -1097,7 +1100,8 @@ build_priority(Package, 0) :- attr("node", Package), not optimize_for_reuse().
 :- attr("version", Package, Version),
    version_weight(Package, Weight),
    version_declared(Package, Version, Weight, "installed"),
-   not optimize_for_reuse().
+   not optimize_for_reuse(),
+   internal_error("Chose reuse weight for package while not reusing packages").

 #defined installed_hash/2.
@@ -47,37 +47,6 @@
 6. The architecture to build with. This is needed on machines where
    cross-compilation is required
-
-Here is the EBNF grammar for a spec::
-
-    spec-list    = { spec [ dep-list ] }
-    dep_list     = { ^ spec }
-    spec         = id [ options ]
-    options      = { @version-list | ++variant | +variant |
-                     --variant | -variant | ~~variant | ~variant |
-                     variant=value | variant==value | %compiler |
-                     arch=architecture | [ flag ]==value | [ flag ]=value}
-    flag         = { cflags | cxxflags | fcflags | fflags | cppflags |
-                     ldflags | ldlibs }
-    variant      = id
-    architecture = id
-    compiler     = id [ version-list ]
-    version-list = version [ { , version } ]
-    version      = id | id: | :id | id:id
-    id           = [A-Za-z0-9_][A-Za-z0-9_.-]*
-
-Identifiers using the <name>=<value> command, such as architectures and
-compiler flags, require a space before the name.
-
-There is one context-sensitive part: ids in versions may contain '.', while
-other ids may not.
-
-There is one ambiguity: since '-' is allowed in an id, you need to put
-whitespace before -variant for it to be tokenized properly. You can
-either use whitespace, or you can just use ~variant since it means the same
-thing. Spack uses ~variant in directory names and in the canonical form of
-specs to avoid ambiguity. Both are provided because ~ can cause shell
-expansion when it is the first character in an id typed on the command line.
 """
 import collections
 import collections.abc
@@ -101,7 +70,6 @@
 import spack.dependency as dp
 import spack.error
 import spack.hash_types as ht
-import spack.parse
 import spack.paths
 import spack.platforms
 import spack.provider_index
@@ -125,8 +93,6 @@
 __all__ = [
     "CompilerSpec",
     "Spec",
-    "SpecParser",
-    "parse",
     "SpecParseError",
     "ArchitecturePropagationError",
     "DuplicateDependencyError",
@@ -584,9 +550,9 @@ def __init__(self, *args):
             # If there is one argument, it's either another CompilerSpec
             # to copy or a string to parse
             if isinstance(arg, str):
-                c = SpecParser().parse_compiler(arg)
-                self.name = c.name
-                self.versions = c.versions
+                spec = spack.parser.parse_one_or_raise(f"%{arg}")
+                self.name = spec.compiler.name
+                self.versions = spec.compiler.versions

             elif isinstance(arg, CompilerSpec):
                 self.name = arg.name
@@ -602,7 +568,8 @@ def __init__(self, *args):
                 name, version = args
                 self.name = name
                 self.versions = vn.VersionList()
-                self.versions.add(vn.ver(version))
+                versions = vn.ver(version)
+                self.versions.add(versions)

             else:
                 raise TypeError("__init__ takes 1 or 2 arguments. (%d given)" % nargs)
@@ -1285,6 +1252,7 @@ def __init__(
         self.external_path = external_path
         self.external_module = external_module
         """
+        import spack.parser

         # Copy if spec_like is a Spec.
         if isinstance(spec_like, Spec):
@@ -1335,11 +1303,7 @@ def __init__(
         self._build_spec = None

         if isinstance(spec_like, str):
-            spec_list = SpecParser(self).parse(spec_like)
-            if len(spec_list) > 1:
-                raise ValueError("More than one spec in string: " + spec_like)
-            if len(spec_list) < 1:
-                raise ValueError("String contains no specs: " + spec_like)
+            spack.parser.parse_one_or_raise(spec_like, self)

         elif spec_like is not None:
             raise TypeError("Can't make spec out of %s" % type(spec_like))
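# Net effect of the two hunks above, illustratively (hypothetical strings;
# assumes an importable Spack):
#
#     Spec("zlib@1.2.11 +shared")  -> parsed in place via parse_one_or_raise
#     Spec("zlib hdf5")            -> ValueError ("a single spec was requested,
#                                     but parsed more than one")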
@@ -4974,421 +4938,6 @@ def __missing__(self, key):
 spec_id_re = r"\w[\w.-]*"


-class SpecLexer(spack.parse.Lexer):
-
-    """Parses tokens that make up spack specs."""
-
-    def __init__(self):
-        # Spec strings require posix-style paths on Windows
-        # because the result is later passed to shlex
-        filename_reg = (
-            r"[/\w.-]*/[/\w/-]+\.(yaml|json)[^\b]*"
-            if not is_windows
-            else r"([A-Za-z]:)*?[/\w.-]*/[/\w/-]+\.(yaml|json)[^\b]*"
-        )
-        super(SpecLexer, self).__init__(
-            [
-                (
-                    r"\@([\w.\-]*\s*)*(\s*\=\s*\w[\w.\-]*)?",
-                    lambda scanner, val: self.token(VER, val),
-                ),
-                (r"\:", lambda scanner, val: self.token(COLON, val)),
-                (r"\,", lambda scanner, val: self.token(COMMA, val)),
-                (r"\^", lambda scanner, val: self.token(DEP, val)),
-                (r"\+\+", lambda scanner, val: self.token(D_ON, val)),
-                (r"\+", lambda scanner, val: self.token(ON, val)),
-                (r"\-\-", lambda scanner, val: self.token(D_OFF, val)),
-                (r"\-", lambda scanner, val: self.token(OFF, val)),
-                (r"\~\~", lambda scanner, val: self.token(D_OFF, val)),
-                (r"\~", lambda scanner, val: self.token(OFF, val)),
-                (r"\%", lambda scanner, val: self.token(PCT, val)),
-                (r"\=\=", lambda scanner, val: self.token(D_EQ, val)),
-                (r"\=", lambda scanner, val: self.token(EQ, val)),
-                # Filenames match before identifiers, so no initial filename
-                # component is parsed as a spec (e.g., in subdir/spec.yaml/json)
-                (filename_reg, lambda scanner, v: self.token(FILE, v)),
-                # Hash match after filename. No valid filename can be a hash
-                # (files end w/.yaml), but a hash can match a filename prefix.
-                (r"/", lambda scanner, val: self.token(HASH, val)),
-                # Identifiers match after filenames and hashes.
-                (spec_id_re, lambda scanner, val: self.token(ID, val)),
-                (r"\s+", lambda scanner, val: None),
-            ],
-            [D_EQ, EQ],
-            [
-                (r"[\S].*", lambda scanner, val: self.token(VAL, val)),
-                (r"\s+", lambda scanner, val: None),
-            ],
-            [VAL],
-        )
-
-
-# Lexer is always the same for every parser.
-_lexer = SpecLexer()
-
-
-class SpecParser(spack.parse.Parser):
-    """Parses specs."""
-
-    __slots__ = "previous", "_initial"
-
-    def __init__(self, initial_spec=None):
-        """Construct a new SpecParser.
-
-        Args:
-            initial_spec (Spec, optional): provide a Spec that we'll parse
-                directly into. This is used to avoid construction of a
-                superfluous Spec object in the Spec constructor.
-        """
-        super(SpecParser, self).__init__(_lexer)
-        self.previous = None
-        self._initial = initial_spec
-
-    def do_parse(self):
-        specs = []
-
-        try:
-            while self.next:
-                # Try a file first, but if it doesn't succeed, keep parsing
-                # as from_file may backtrack and try an id.
-                if self.accept(FILE):
-                    spec = self.spec_from_file()
-                    if spec:
-                        specs.append(spec)
-                        continue
-
-                if self.accept(ID):
-                    self.previous = self.token
-                    if self.accept(EQ) or self.accept(D_EQ):
-                        # We're parsing an anonymous spec beginning with a
-                        # key-value pair.
-                        if not specs:
-                            self.push_tokens([self.previous, self.token])
-                            self.previous = None
-                            specs.append(self.spec(None))
-                        else:
-                            if specs[-1].concrete:
-                                # Trying to add k-v pair to spec from hash
-                                raise RedundantSpecError(specs[-1], "key-value pair")
-                            # We should never end up here.
-                            # This requires starting a new spec with ID, EQ
-                            # After another spec that is not concrete
-                            # If the previous spec is not concrete, this is
-                            # handled in the spec parsing loop
-                            # If it is concrete, see the if statement above
-                            # If there is no previous spec, we don't land in
-                            # this else case.
-                            self.unexpected_token()
-                    else:
-                        # We're parsing a new spec by name
-                        self.previous = None
-                        specs.append(self.spec(self.token.value))
-                elif self.accept(HASH):
-                    # We're finding a spec by hash
-                    specs.append(self.spec_by_hash())
-
-                elif self.accept(DEP):
-                    if not specs:
-                        # We're parsing an anonymous spec beginning with a
-                        # dependency. Push the token to recover after creating
-                        # anonymous spec
-                        self.push_tokens([self.token])
-                        specs.append(self.spec(None))
-                    else:
-                        dep = None
-                        if self.accept(FILE):
-                            # this may return None, in which case we backtrack
-                            dep = self.spec_from_file()
-
-                        if not dep and self.accept(HASH):
-                            # We're finding a dependency by hash for an
-                            # anonymous spec
-                            dep = self.spec_by_hash()
-                            dep = dep.copy(deps=("link", "run"))
-
-                        if not dep:
-                            # We're adding a dependency to the last spec
-                            if self.accept(ID):
-                                self.previous = self.token
-                                if self.accept(EQ):
-                                    # This is an anonymous dep with a key=value
-                                    # push tokens to be parsed as part of the
-                                    # dep spec
-                                    self.push_tokens([self.previous, self.token])
-                                    dep_name = None
-                                else:
-                                    # named dep (standard)
-                                    dep_name = self.token.value
-                                    self.previous = None
-                            else:
-                                # anonymous dep
-                                dep_name = None
-                            dep = self.spec(dep_name)
-
-                        # Raise an error if the previous spec is already
-                        # concrete (assigned by hash)
-                        if specs[-1].concrete:
-                            raise RedundantSpecError(specs[-1], "dependency")
-                        # command line deps get empty deptypes now.
-                        # Real deptypes are assigned later per packages.
-                        specs[-1]._add_dependency(dep, ())
-
-                else:
-                    # If the next token can be part of a valid anonymous spec,
-                    # create the anonymous spec
-                    if self.next.type in (VER, ON, D_ON, OFF, D_OFF, PCT):
-                        # Raise an error if the previous spec is already
-                        # concrete (assigned by hash)
-                        if specs and specs[-1]._hash:
-                            raise RedundantSpecError(specs[-1], "compiler, version, or variant")
-                        specs.append(self.spec(None))
-                    else:
-                        self.unexpected_token()
-
-        except spack.parse.ParseError as e:
-            raise SpecParseError(e) from e
-
-        # Generate lookups for git-commit-based versions
-        for spec in specs:
-            # Cannot do lookups for versions in anonymous specs
-            # Only allow Version objects to use git for now
-            # Note: VersionRange(x, x) is currently concrete, hence isinstance(...).
-            if spec.name and spec.versions.concrete and isinstance(spec.version, vn.GitVersion):
-                spec.version.generate_git_lookup(spec.fullname)
-
-        return specs
-
-    def spec_from_file(self):
-        """Read a spec from a filename parsed on the input stream.
-
-        There is some care taken here to ensure that filenames are a last
-        resort, and that any valid package name is parsed as a name
-        before we consider it as a file. Specs are used in lots of places;
-        we don't want the parser touching the filesystem unnecessarily.
-
-        The parse logic is as follows:
-
-        1. We require that filenames end in .yaml, which means that no valid
-           filename can be interpreted as a hash (hashes can't have '.')
-
-        2. We avoid treating paths like /path/to/spec.json as hashes, or paths
-           like subdir/spec.json as ids by lexing filenames before hashes.
-
-        3. For spec names that match file and id regexes, like 'builtin.yaml',
-           we backtrack from spec_from_file() and treat them as spec names.
-        """
-        path = self.token.value
-
-        # Special case where someone omits a space after a filename. Consider:
-        #
-        #     libdwarf^/some/path/to/libelf.yamllibdwarf ^../../libelf.yaml
-        #
-        # The error is clearly an omitted space. To handle this, the FILE
-        # regex admits text *beyond* .yaml, and we raise a nice error for
-        # file names that don't end in .yaml.
-        if not (path.endswith(".yaml") or path.endswith(".json")):
-            raise SpecFilenameError("Spec filename must end in .yaml or .json: '{0}'".format(path))
-
-        if not os.path.exists(path):
-            raise NoSuchSpecFileError("No such spec file: '{0}'".format(path))
-
-        with open(path) as f:
-            if path.endswith(".json"):
-                return Spec.from_json(f)
-            return Spec.from_yaml(f)
-
-    def parse_compiler(self, text):
-        self.setup(text)
-        return self.compiler()
-
-    def spec_by_hash(self):
-        # TODO: Remove parser dependency on active environment and database.
-        import spack.environment
-
-        self.expect(ID)
-        dag_hash = self.token.value
-        matches = []
-        if spack.environment.active_environment():
-            matches = spack.environment.active_environment().get_by_hash(dag_hash)
-        if not matches:
-            matches = spack.store.db.get_by_hash(dag_hash)
-        if not matches:
-            raise NoSuchHashError(dag_hash)
-
-        if len(matches) != 1:
-            raise AmbiguousHashError(
-                "Multiple packages specify hash beginning '%s'." % dag_hash, *matches
-            )
-
-        return matches[0]
-
-    def spec(self, name):
-        """Parse a spec out of the input. If a spec is supplied, initialize
-        and return it instead of creating a new one."""
-        spec_namespace = None
-        spec_name = None
-        if name:
-            spec_namespace, dot, spec_name = name.rpartition(".")
-            if not spec_namespace:
-                spec_namespace = None
-            self.check_identifier(spec_name)
-
-        if self._initial is None:
-            spec = Spec()
-        else:
-            # this is used by Spec.__init__
-            spec = self._initial
-            self._initial = None
-
-        spec.namespace = spec_namespace
-        spec.name = spec_name
-
-        while self.next:
-            if self.accept(VER):
-                vlist = self.version_list()
-                spec._add_versions(vlist)
-
-            elif self.accept(D_ON):
-                name = self.variant()
-                spec.variants[name] = vt.BoolValuedVariant(name, True, propagate=True)
-
-            elif self.accept(ON):
-                name = self.variant()
-                spec.variants[name] = vt.BoolValuedVariant(name, True, propagate=False)
-
-            elif self.accept(D_OFF):
-                name = self.variant()
-                spec.variants[name] = vt.BoolValuedVariant(name, False, propagate=True)
-
-            elif self.accept(OFF):
-                name = self.variant()
-                spec.variants[name] = vt.BoolValuedVariant(name, False, propagate=False)
-
-            elif self.accept(PCT):
-                spec._set_compiler(self.compiler())
-
-            elif self.accept(ID):
-                self.previous = self.token
-                if self.accept(D_EQ):
-                    # We're adding a key-value pair to the spec
-                    self.expect(VAL)
-                    spec._add_flag(self.previous.value, self.token.value, propagate=True)
-                    self.previous = None
-                elif self.accept(EQ):
-                    # We're adding a key-value pair to the spec
-                    self.expect(VAL)
-                    spec._add_flag(self.previous.value, self.token.value, propagate=False)
-                    self.previous = None
-                else:
-                    # We've found the start of a new spec. Go back to do_parse
-                    # and read this token again.
-                    self.push_tokens([self.token])
-                    self.previous = None
-                    break
-
-            elif self.accept(HASH):
-                # Get spec by hash and confirm it matches any constraints we
-                # already read in
-                hash_spec = self.spec_by_hash()
-                if hash_spec.satisfies(spec):
-                    spec._dup(hash_spec)
-                    break
-                else:
-                    raise InvalidHashError(spec, hash_spec.dag_hash())
-
-            else:
-                break
-
-        return spec
-
-    def variant(self, name=None):
-        if name:
-            return name
-        else:
-            self.expect(ID)
-            self.check_identifier()
-            return self.token.value
-
-    def version(self):
-
-        start = None
-        end = None
-
-        def str_translate(value):
-            # return None for empty strings since we can end up with `'@'.strip('@')`
-            if not (value and value.strip()):
-                return None
-            else:
-                return value
-
-        if self.token.type is COMMA:
-            # need to increment commas, could be ID or COLON
-            self.accept(ID)
-
-        if self.token.type in (VER, ID):
-            version_spec = self.token.value.lstrip("@")
-            start = str_translate(version_spec)
-
-        if self.accept(COLON):
-            if self.accept(ID):
-                if self.next and self.next.type is EQ:
-                    # This is a start: range followed by a key=value pair
-                    self.push_tokens([self.token])
-                else:
-                    end = self.token.value
-        elif start:
-            # No colon, but there was a version
-            return vn.Version(start)
-        else:
-            # No colon and no id: invalid version
-            self.next_token_error("Invalid version specifier")
-
-        if start:
-            start = vn.Version(start)
-        if end:
-            end = vn.Version(end)
-        return vn.VersionRange(start, end)
-
-    def version_list(self):
-        vlist = []
-        vlist.append(self.version())
-        while self.accept(COMMA):
-            vlist.append(self.version())
-        return vlist
-
-    def compiler(self):
-        self.expect(ID)
-        self.check_identifier()
-
-        compiler = CompilerSpec.__new__(CompilerSpec)
-        compiler.name = self.token.value
-        compiler.versions = vn.VersionList()
-        if self.accept(VER):
-            vlist = self.version_list()
-            compiler._add_versions(vlist)
-        else:
-            compiler.versions = vn.VersionList(":")
-        return compiler
-
-    def check_identifier(self, id=None):
-        """The only identifiers that can contain '.' are versions, but version
-        ids are context-sensitive so we have to check on a case-by-case
-        basis. Call this if we detect a version id where it shouldn't be.
-        """
-        if not id:
-            id = self.token.value
-        if "." in id:
-            self.last_token_error("{0}: Identifier cannot contain '.'".format(id))
-
-
-def parse(string):
-    """Returns a list of specs from an input string.
-    For creating one spec, see Spec() constructor.
-    """
-    return SpecParser().parse(string)
-
-
 def save_dependency_specfiles(
     root_spec_info, output_directory, dependencies=None, spec_format="json"
 ):
@@ -14,7 +14,7 @@
 import stat
 import sys
 import tempfile
-from typing import Dict  # novm
+from typing import Dict

 import llnl.util.lang
 import llnl.util.tty as tty
@@ -49,9 +49,7 @@
 stage_prefix = "spack-stage-"


-def create_stage_root(path):
-    # type: (str) -> None
-
+def create_stage_root(path: str) -> None:
     """Create the stage root directory and ensure appropriate access perms."""
     assert os.path.isabs(path) and len(path.strip()) > 1

@@ -235,7 +233,7 @@ class Stage(object):
     """

     """Shared dict of all stage locks."""
-    stage_locks = {}  # type: Dict[str, spack.util.lock.Lock]
+    stage_locks: Dict[str, spack.util.lock.Lock] = {}

     """Most staging is managed by Spack. DIYStage is one exception."""
     managed_by_spack = True
@@ -191,18 +191,6 @@ def _store():
     root, unpadded_root, projections = parse_install_tree(config_dict)
     hash_length = spack.config.get("config:install_hash_length")

-    # Check that the user is not trying to install software into the store
-    # reserved by Spack to bootstrap its own dependencies, since this would
-    # lead to bizarre behaviors (e.g. cleaning the bootstrap area would wipe
-    # user installed software)
-    enable_bootstrap = spack.config.get("bootstrap:enable", True)
-    if enable_bootstrap and spack.bootstrap.store_path() == root:
-        msg = (
-            'please change the install tree root "{0}" in your '
-            "configuration [path reserved for Spack internal use]"
-        )
-        raise ValueError(msg.format(root))
-
     return Store(
         root=root, unpadded_root=unpadded_root, projections=projections, hash_length=hash_length
     )
@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import itertools
 import textwrap
-from typing import List  # novm
+from typing import List

 import llnl.util.lang

@@ -20,7 +20,7 @@ class ContextMeta(type):

     #: Keeps track of the context properties that have been added
     #: by the class that is being defined
-    _new_context_properties = []  # type: List[str]
+    _new_context_properties: List[str] = []

     def __new__(cls, name, bases, attr_dict):
         # Merge all the context properties that are coming from base classes
@@ -5,7 +5,6 @@
 import glob
 import os
 import platform
-import shutil
 import sys

 import py
@@ -65,16 +64,6 @@ def test_mirror(mirror_dir):
     mirror_cmd("rm", "--scope=site", "test-mirror-func")


-@pytest.fixture(scope="function")
-def test_legacy_mirror(mutable_config, tmpdir):
-    mirror_dir = tmpdir.join("legacy_yaml_mirror")
-    shutil.copytree(legacy_mirror_dir, mirror_dir.strpath)
-    mirror_url = "file://%s" % mirror_dir
-    mirror_cmd("add", "--scope", "site", "test-legacy-yaml", mirror_url)
-    yield mirror_dir
-    mirror_cmd("rm", "--scope=site", "test-legacy-yaml")
-
-
 @pytest.fixture(scope="module")
 def config_directory(tmpdir_factory):
     tmpdir = tmpdir_factory.mktemp("test_configs")
@@ -581,19 +570,6 @@ def test_update_sbang(tmpdir, test_mirror):
     uninstall_cmd("-y", "/%s" % new_spec.dag_hash())


-# Need one where the platform has been changed to the test platform.
-def test_install_legacy_yaml(test_legacy_mirror, install_mockery_mutable_config, mock_packages):
-    install_cmd(
-        "--no-check-signature",
-        "--cache-only",
-        "-f",
-        legacy_mirror_dir
-        + "/build_cache/test-debian6-core2-gcc-4.5.0-zlib-"
-        + "1.2.11-t5mczux3tfqpxwmg7egp7axy2jvyulqk.spec.yaml",
-    )
-    uninstall_cmd("-y", "/t5mczux3tfqpxwmg7egp7axy2jvyulqk")
-
-
 def test_install_legacy_buildcache_layout(install_mockery_mutable_config):
     """Legacy buildcache layout involved a nested archive structure
     where the .spack file contained a repeated spec.json and another
@@ -7,6 +7,8 @@
import pytest

import spack.bootstrap
import spack.bootstrap.config
import spack.bootstrap.core
import spack.compilers
import spack.environment
import spack.store

@@ -33,7 +35,7 @@ def test_store_is_restored_correctly_after_bootstrap(mutable_config, tmpdir):
    # Test that within the context manager we use the bootstrap store
    # and that outside we restore the correct location
    with spack.bootstrap.ensure_bootstrap_configuration():
        assert spack.store.root == spack.bootstrap.store_path()
        assert spack.store.root == spack.bootstrap.config.store_path()
    assert spack.store.root == user_path


@@ -51,7 +53,7 @@ def test_store_path_customization(config_value, expected, mutable_config):
    spack.config.set("bootstrap:root", config_value)

    # Check the store path
    current = spack.bootstrap.store_path()
    current = spack.bootstrap.config.store_path()
    assert current == spack.util.path.canonicalize_path(expected)


@@ -61,7 +63,7 @@ def test_raising_exception_if_bootstrap_disabled(mutable_config):

    # Check the correct exception is raised
    with pytest.raises(RuntimeError, match="bootstrapping is currently disabled"):
        spack.bootstrap.store_path()
        spack.bootstrap.config.store_path()


def test_raising_exception_module_importable():
@@ -69,7 +71,7 @@ def test_raising_exception_module_importable():
        ImportError,
        match='cannot bootstrap the "asdf" Python module',
    ):
        spack.bootstrap.ensure_module_importable_or_raise("asdf")
        spack.bootstrap.core.ensure_module_importable_or_raise("asdf")


def test_raising_exception_executables_in_path():
@@ -77,7 +79,7 @@ def test_raising_exception_executables_in_path():
        RuntimeError,
        match="cannot bootstrap any of the asdf, fdsa executables",
    ):
        spack.bootstrap.ensure_executables_in_path_or_raise(["asdf", "fdsa"], "python")
        spack.bootstrap.core.ensure_executables_in_path_or_raise(["asdf", "fdsa"], "python")


@pytest.mark.regression("25603")
@@ -175,13 +177,15 @@ def test_nested_use_of_context_manager(mutable_config):
def test_status_function_find_files(
    mutable_config, mock_executable, tmpdir, monkeypatch, expected_missing
):
    import spack.bootstrap.status

    if not expected_missing:
        mock_executable("foo", "echo Hello WWorld!")

    monkeypatch.setattr(
        spack.bootstrap,
        spack.bootstrap.status,
        "_optional_requirements",
        lambda: [spack.bootstrap._required_system_executable("foo", "NOT FOUND")],
        lambda: [spack.bootstrap.status._required_system_executable("foo", "NOT FOUND")],
    )
    monkeypatch.setenv("PATH", str(tmpdir.join("bin")))

@@ -192,15 +196,15 @@ def test_status_function_find_files(
@pytest.mark.regression("31042")
def test_source_is_disabled(mutable_config):
    # Get the configuration dictionary of the current bootstrapping source
    conf = next(iter(spack.bootstrap.bootstrapping_sources()))
    conf = next(iter(spack.bootstrap.core.bootstrapping_sources()))

    # The source is not explicitly enabled or disabled, so the following
    # call should raise to skip using it for bootstrapping
    with pytest.raises(ValueError):
        spack.bootstrap.source_is_enabled_or_raise(conf)
        spack.bootstrap.core.source_is_enabled_or_raise(conf)

    # Try to explicitly disable the source and verify that the behavior
    # is the same as above
    spack.config.add("bootstrap:trusted:{0}:{1}".format(conf["name"], False))
    with pytest.raises(ValueError):
        spack.bootstrap.source_is_enabled_or_raise(conf)
        spack.bootstrap.core.source_is_enabled_or_raise(conf)

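The test updates above all follow one rule: now that the bootstrap package is split into spack.bootstrap.config, .core, and .status, monkeypatch must target the submodule that actually defines an attribute, or the patch silently misses the code under test. A self-contained sketch of that rule, using throwaway stand-in modules (pytest assumed as the runner):

    import types


    def test_patch_the_defining_module(monkeypatch):
        # Hypothetical stand-ins: a defining module and an aggregate that re-exports it
        core = types.ModuleType("pkg.core")
        core.helper = lambda: "real"
        aggregate = types.ModuleType("pkg")
        aggregate.helper = core.helper  # re-export, as pkg/__init__.py would

        # Patching the aggregate does not change the defining module...
        monkeypatch.setattr(aggregate, "helper", lambda: "stub")
        assert core.helper() == "real"

        # ...so patch where the attribute is defined instead.
        monkeypatch.setattr(core, "helper", lambda: "stub")
        assert core.helper() == "stub"
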
@@ -149,7 +149,7 @@


@pytest.fixture(scope="function")
def wrapper_environment():
def wrapper_environment(working_env):
    with set_env(
        SPACK_CC=real_cc,
        SPACK_CXX=real_cc,

@@ -7,6 +7,8 @@

import pytest

import spack.bootstrap
import spack.bootstrap.core
import spack.config
import spack.environment as ev
import spack.main

@@ -157,17 +159,17 @@ def test_remove_failure_for_non_existing_names(mutable_config):

def test_remove_and_add_a_source(mutable_config):
    # Check we start with a single bootstrapping source
    sources = spack.bootstrap.bootstrapping_sources()
    sources = spack.bootstrap.core.bootstrapping_sources()
    assert len(sources) == 1

    # Remove it and check the result
    _bootstrap("remove", "github-actions")
    sources = spack.bootstrap.bootstrapping_sources()
    sources = spack.bootstrap.core.bootstrapping_sources()
    assert not sources

    # Add it back and check we restored the initial state
    _bootstrap("add", "github-actions", "$spack/share/spack/bootstrap/github-actions-v0.3")
    sources = spack.bootstrap.bootstrapping_sources()
    sources = spack.bootstrap.core.bootstrapping_sources()
    assert len(sources) == 1


@@ -206,4 +208,4 @@ def test_bootstrap_mirror_metadata(mutable_config, linux_os, monkeypatch, tmpdir
    _bootstrap("add", "--trust", "test-mirror", str(metadata_dir))

    assert _bootstrap.returncode == 0
    assert any(m["name"] == "test-mirror" for m in spack.bootstrap.bootstrapping_sources())
    assert any(m["name"] == "test-mirror" for m in spack.bootstrap.core.bootstrapping_sources())

@@ -26,6 +26,7 @@
import spack.util.executable
from spack.error import SpackError
from spack.main import SpackCommand
from spack.parser import SpecSyntaxError
from spack.spec import CompilerSpec, Spec

install = SpackCommand("install")

@@ -362,7 +363,7 @@ def test_install_conflicts(conflict_spec):
)
def test_install_invalid_spec(invalid_spec):
    # Make sure that invalid specs raise a SpackError
    with pytest.raises(SpackError, match="Unexpected token"):
    with pytest.raises(SpecSyntaxError, match="unexpected tokens"):
        install(invalid_spec)

@@ -10,6 +10,7 @@

import spack.environment as ev
import spack.error
import spack.parser
import spack.spec
import spack.store
from spack.main import SpackCommand, SpackCommandError

@@ -181,13 +182,11 @@ def test_spec_returncode():


def test_spec_parse_error():
    with pytest.raises(spack.error.SpackError) as e:
    with pytest.raises(spack.parser.SpecSyntaxError) as e:
        spec("1.15:")

    # make sure the error is formatted properly
    error_msg = """\
1.15:
^"""
    error_msg = "unexpected tokens in the spec string\n1.15:\n ^"
    assert error_msg in str(e.value)

@@ -325,6 +325,13 @@ def test_different_compilers_get_different_flags(self):
        assert set(client.compiler_flags["fflags"]) == set(["-O0", "-g"])
        assert not set(cmake.compiler_flags["fflags"])

    def test_compiler_flags_from_compiler_and_dependent(self):
        client = Spec("cmake-client %clang@12.2.0 platform=test os=fe target=fe cflags==-g")
        client.concretize()
        cmake = client["cmake"]
        for spec in [client, cmake]:
            assert spec.compiler_flags["cflags"] == ["-O3", "-g"]

    def test_concretize_compiler_flag_propagate(self):
        spec = Spec("hypre cflags=='-g' ^openblas")
        spec.concretize()

@@ -11,10 +11,8 @@
import signal
import sys
import time
from typing import TYPE_CHECKING, Optional  # novm

if TYPE_CHECKING:
    from types import ModuleType  # novm
from types import ModuleType
from typing import Optional

import pytest

@@ -24,7 +22,7 @@

from spack.util.executable import which

termios = None  # type: Optional[ModuleType]
termios: Optional[ModuleType] = None
try:
    import termios as term_mod

@@ -68,22 +68,18 @@ def test_validate_spec(validate_spec_schema):

    # Check that invalid data throws
    data["^python@3.7@"] = "baz"
    with pytest.raises(jsonschema.ValidationError) as exc_err:
    with pytest.raises(jsonschema.ValidationError, match="unexpected tokens"):
        v.validate(data)

    assert "is an invalid spec" in str(exc_err.value)


@pytest.mark.regression("9857")
def test_module_suffixes(module_suffixes_schema):
    v = spack.schema.Validator(module_suffixes_schema)
    data = {"tcl": {"all": {"suffixes": {"^python@2.7@": "py2.7"}}}}

    with pytest.raises(jsonschema.ValidationError) as exc_err:
    with pytest.raises(jsonschema.ValidationError, match="unexpected tokens"):
        v.validate(data)

    assert "is an invalid spec" in str(exc_err.value)


@pytest.mark.regression("10246")
@pytest.mark.parametrize(
@@ -91,7 +87,7 @@ def test_module_suffixes(module_suffixes_schema):
    ["compilers", "config", "env", "merged", "mirrors", "modules", "packages", "repos"],
)
def test_schema_validation(meta_schema, config_name):
    import importlib  # novm
    import importlib

    module_name = "spack.schema.{0}".format(config_name)
    module = importlib.import_module(module_name)

@@ -9,6 +9,7 @@

import spack.error
import spack.package_base
import spack.parser
import spack.repo
import spack.util.hash as hashutil
from spack.dependency import Dependency, all_deptypes, canonical_deptype

@@ -961,7 +962,7 @@ def test_canonical_deptype(self):

    def test_invalid_literal_spec(self):
        # Can't give type 'build' to a top-level spec
        with pytest.raises(spack.spec.SpecParseError):
        with pytest.raises(spack.parser.SpecSyntaxError):
            Spec.from_literal({"foo:build": None})

        # Can't use more than one ':' separator

@@ -707,13 +707,9 @@ def test_constrain_dependency_not_changed(self):
        )

    def test_exceptional_paths_for_constructor(self):

        with pytest.raises(TypeError):
            Spec((1, 2))

        with pytest.raises(ValueError):
            Spec("")

        with pytest.raises(ValueError):
            Spec("libelf foo")


File diff suppressed because it is too large
@@ -22,7 +22,7 @@


#: cache of hash functions generated
_hash_functions = {}  # type: Dict[str, Callable[[], Any]]
_hash_functions: Dict[str, Callable[[], Any]] = {}


class DeprecatedHash(object):

@@ -8,7 +8,6 @@
import os
import re

import spack.bootstrap
import spack.error
import spack.paths
import spack.util.executable

@@ -47,6 +46,8 @@ def init(gnupghome=None, force=False):
    global objects are set already
    """
    global GPG, GPGCONF, SOCKET_DIR, GNUPGHOME
    import spack.bootstrap

    if force:
        clear()

@@ -59,7 +60,7 @@ def init(gnupghome=None, force=False):

    # Set the executable objects for "gpg" and "gpgconf"
    with spack.bootstrap.ensure_bootstrap_configuration():
        spack.bootstrap.ensure_gpg_in_path_or_raise()
        spack.bootstrap.ensure_core_dependencies()
        GPG, GPGCONF = _gpg(), _gpgconf()

    GPG.add_default_env("GNUPGHOME", GNUPGHOME)

@@ -5,7 +5,7 @@

"""Simple wrapper around JSON to guarantee consistent use of load/dump. """
import json
from typing import Any, Dict, Optional  # novm
from typing import Any, Dict, Optional

import spack.error

@@ -14,8 +14,7 @@
_json_dump_args = {"indent": 2, "separators": (",", ": ")}


def load(stream):
    # type: (Any) -> Dict
def load(stream: Any) -> Dict:
    """Spack JSON needs to be ordered to support specs."""
    if isinstance(stream, str):
        load = json.loads  # type: ignore[assignment]
@@ -25,14 +24,12 @@ def load(stream):
    return _strify(load(stream, object_hook=_strify), ignore_dicts=True)


def encode_json_dict(data):
    # type: (Dict) -> Dict
def encode_json_dict(data: Dict) -> Dict:
    """Converts python 2 unicodes to str in JSON data."""
    return _strify(data)


def dump(data, stream=None):
    # type: (Dict, Optional[Any]) -> Optional[str]
def dump(data: Dict, stream: Optional[Any] = None) -> Optional[str]:
    """Dump JSON with a reasonable amount of indentation and separation."""
    data = _strify(data)
    if stream is None:
@@ -41,14 +38,12 @@ def dump(data, stream=None):
        return None


def decode_json_dict(data):
    # type: (Dict) -> Dict
def decode_json_dict(data: Dict) -> Dict:
    """Converts str to python 2 unicodes in JSON data."""
    return _strify(data)


def _strify(data, ignore_dicts=False):
    # type: (Dict, bool) -> Dict
def _strify(data: Dict, ignore_dicts: bool = False) -> Dict:
    """Helper method for ``encode_json_dict()`` and ``decode_json_dict()``.

    Converts python 2 unicodes to str in JSON data, or the other way around."""
@@ -59,6 +54,5 @@ def _strify(data, ignore_dicts=False):
class SpackJSONError(spack.error.SpackError):
    """Raised when there are issues with JSON parsing."""

    def __init__(self, msg, json_error):
        # type: (str, BaseException) -> None
    def __init__(self, msg: str, json_error: BaseException):
        super(SpackJSONError, self).__init__(msg, str(json_error))

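Behavior is unchanged by the annotation rewrite above; a short round-trip sketch of the wrapper's contract (assuming the module is importable as spack.util.spack_json from a Spack checkout):

    import spack.util.spack_json as sjson

    data = {"spec": {"name": "zlib", "version": "1.2.13"}}
    text = sjson.dump(data)          # with stream=None the JSON is returned as a str
    assert sjson.load(text) == data  # load() accepts a str or a file-like stream
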
@@ -17,7 +17,7 @@
import ctypes
import io
import re
from typing import List  # novm
from typing import List

import ruamel.yaml as yaml
from ruamel.yaml import RoundTripDumper, RoundTripLoader

@@ -225,7 +225,7 @@ def file_line(mark):
#: This is nasty but YAML doesn't give us many ways to pass arguments --
#: yaml.dump() takes a class (not an instance) and instantiates the dumper
#: itself, so we can't just pass an instance
_annotations = []  # type: List[str]
_annotations: List[str] = []


class LineAnnotationDumper(OrderedLineDumper):

@@ -65,7 +65,7 @@ def __init__(self, now=time.time):
        now: function that gives the seconds since e.g. epoch
        """
        self._now = now
        self._timers = OrderedDict()  # type: OrderedDict[str,Interval]
        self._timers: OrderedDict[str, Interval] = OrderedDict()

        # _global is the overall timer since the instance was created
        self._timers[global_timer_name] = Interval(self._now(), end=None)

@@ -78,35 +78,23 @@ def uses_ssl(parsed_url):
    return False


__UNABLE_TO_VERIFY_SSL = (lambda pyver: ((pyver < (2, 7, 9)) or ((3,) < pyver < (3, 4, 3))))(
    sys.version_info
)


def read_from_url(url, accept_content_type=None):
    url = url_util.parse(url)
    context = None

    verify_ssl = spack.config.get("config:verify_ssl")

    # Timeout in seconds for web requests
    timeout = spack.config.get("config:connect_timeout", 10)

    # Don't even bother with a context unless the URL scheme is one that uses
    # SSL certs.
    if uses_ssl(url):
        if verify_ssl:
            if __UNABLE_TO_VERIFY_SSL:
                # User wants SSL verification, but it cannot be provided.
                warn_no_ssl_cert_checking()
            else:
                # User wants SSL verification, and it *can* be provided.
                context = ssl.create_default_context()  # novm
        if spack.config.get("config:verify_ssl"):
            # User wants SSL verification, and it *can* be provided.
            context = ssl.create_default_context()
        else:
            # User has explicitly indicated that they do not want SSL
            # verification.
            if not __UNABLE_TO_VERIFY_SSL:
                context = ssl._create_unverified_context()
            context = ssl._create_unverified_context()

    url_scheme = url.scheme
    url = url_util.format(url)
@@ -154,22 +142,11 @@ def read_from_url(url, accept_content_type=None):
    return response.geturl(), response.headers, response


def warn_no_ssl_cert_checking():
    tty.warn(
        "Spack will not check SSL certificates. You need to update "
        "your Python to enable certificate verification."
    )


def push_to_url(local_file_path, remote_path, keep_original=True, extra_args=None):
    if sys.platform == "win32":
        if remote_path[1] == ":":
            remote_path = "file://" + remote_path
    remote_url = url_util.parse(remote_path)
    verify_ssl = spack.config.get("config:verify_ssl")

    if __UNABLE_TO_VERIFY_SSL and verify_ssl and uses_ssl(remote_url):
        warn_no_ssl_cert_checking()

    remote_file_path = url_util.local_file_path(remote_url)
    if remote_file_path is not None:
@@ -728,10 +705,7 @@ def _urlopen(req, *args, **kwargs):
    except AttributeError:
        pass

    # Note: 'context' parameter was only introduced starting
    # with versions 2.7.9 and 3.4.3 of Python.
    if __UNABLE_TO_VERIFY_SSL:
        del kwargs["context"]
    del kwargs["context"]

    opener = urlopen
    if url_util.parse(url).scheme == "s3":

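With interpreters older than 2.7.9/3.4.3 out of scope, the SSL handling above collapses to a single question: does the user want verification? A standalone sketch of the remaining logic, where the verify_ssl flag stands in for Spack's config:verify_ssl lookup:

    import ssl


    def make_context(verify_ssl: bool) -> ssl.SSLContext:
        if verify_ssl:
            # Modern Python can always verify certificates
            return ssl.create_default_context()
        # The user explicitly opted out of verification
        return ssl._create_unverified_context()


    # urllib.request.urlopen(url, context=make_context(True)) would then use it
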
@@ -235,8 +235,7 @@ class VersionBase(object):
        "string",
    ]

    def __init__(self, string):
        # type: (str) -> None
    def __init__(self, string: str) -> None:
        if not isinstance(string, str):
            string = str(string)

@@ -938,7 +937,7 @@ def __init__(self, vlist=None):
        self.versions = []
        if vlist is not None:
            if isinstance(vlist, str):
                vlist = _string_to_version(vlist)
                vlist = from_string(vlist)
                if type(vlist) == VersionList:
                    self.versions = vlist.versions
                else:
@@ -1166,7 +1165,7 @@ def __repr__(self):
    return str(self.versions)


def _string_to_version(string):
def from_string(string):
    """Converts a string to a Version, VersionList, or VersionRange.
    This is private. Client code should use ver().
    """
@@ -1192,9 +1191,9 @@ def ver(obj):
    if isinstance(obj, (list, tuple)):
        return VersionList(obj)
    elif isinstance(obj, str):
        return _string_to_version(obj)
        return from_string(obj)
    elif isinstance(obj, (int, float)):
        return _string_to_version(str(obj))
        return from_string(str(obj))
    elif type(obj) in (VersionBase, GitVersion, VersionRange, VersionList):
        return obj
    else:

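ver() stays the single public entry point in front of the renamed from_string(); a sketch of how its branches dispatch (assumes spack.version is importable from a Spack checkout):

    from spack.version import ver

    ver("1.2.3")         # str with one version  -> Version, via from_string()
    ver("1.2:1.4")       # str with a range      -> VersionRange, via from_string()
    ver(["1.2", "1.3"])  # list/tuple            -> VersionList
    ver(2.7)             # int/float is stringified, then goes through from_string()
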
@@ -28,9 +28,8 @@ spack:
  - py-transformers

  # JAX
  # https://github.com/google/jax/issues/12614
  # - py-jax
  # - py-jaxlib
  - py-jax
  - py-jaxlib

  # Keras
  - py-keras

@@ -31,9 +31,8 @@ spack:
  - py-transformers

  # JAX
  # https://github.com/google/jax/issues/12614
  # - py-jax
  # - py-jaxlib
  - py-jax
  - py-jaxlib

  # Keras
  - py-keras

@@ -33,9 +33,8 @@ spack:
  - py-transformers

  # JAX
  # https://github.com/google/jax/issues/12614
  # - py-jax
  # - py-jaxlib
  - py-jax
  - py-jaxlib

  # Keras
  - py-keras

@@ -408,7 +408,7 @@ _spack_bootstrap() {
}

_spack_bootstrap_now() {
    SPACK_COMPREPLY="-h --help"
    SPACK_COMPREPLY="-h --help --dev"
}

_spack_bootstrap_status() {
@@ -1439,7 +1439,7 @@ _spack_module_tcl_setdefault() {
_spack_patch() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help -n --no-checksum --deprecated"
        SPACK_COMPREPLY="-h --help -n --no-checksum --deprecated -U --fresh --reuse"
    else
        _all_packages
    fi

34
share/spack/templates/bootstrap/spack.yaml
Normal file
@@ -0,0 +1,34 @@
# This environment contains Spack non-core dependencies for the
# following configuration
#
# Python spec: {{ python_spec }}
# Python interpreter: {{ python_prefix }}
# Architecture: {{ architecture }}
#
spack:
  specs:
  {% for spec in environment_specs %}
  - "{{ spec }}"
  {% endfor %}
  view: {{ environment_path }}/view

  config:
    install_tree:
      root: {{ store_path }}

  packages:
    python:
      buildable: false
      externals:
      - spec: "{{ python_spec }}"
        prefix: "{{ python_prefix }}"

    py-typed-ast:
      require: "+wheel"

    py-platformdirs:
      require: "+wheel"

  concretizer:
    reuse: false
    unify: true

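A sketch of how this Jinja2 template might be rendered: the variable names come from the template itself, the values are hypothetical, and Spack's real bootstrap code may drive the rendering differently:

    import jinja2

    env = jinja2.Environment(loader=jinja2.FileSystemLoader("share/spack/templates"))
    rendered = env.get_template("bootstrap/spack.yaml").render(
        python_spec="python@3.10.8",
        python_prefix="/usr",
        architecture="linux-ubuntu22.04-x86_64",
        environment_specs=["clingo-bootstrap@spack", "black"],
        environment_path="/tmp/bootstrap-env",
        store_path="/tmp/bootstrap-store",
    )
    print(rendered)
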
@@ -27,6 +27,8 @@ class Apptainer(SingularityBase):
    git = "https://github.com/apptainer/apptainer.git"

    version("main", branch="main")

    version("1.1.3", sha256="c7bf7f4d5955e1868739627928238d02f94ca9fd0caf110b0243d65548427899")
    version("1.0.2", sha256="2d7a9d0a76d5574459d249c3415e21423980d9154ce85e8c34b0600782a7dfd3")

    singularity_org = "apptainer"
@@ -35,3 +37,16 @@ class Apptainer(SingularityBase):
        "https://apptainer.org/docs/admin/main/security.html",
        "https://apptainer.org/docs/admin/main/admin_quickstart.html#apptainer-security",
    )

    # This overrides SingularityBase (found in ../singularityce/package.py)
    # because Apptainer's mconfig has no option `--without-conmon`
    # https://github.com/apptainer/apptainer/blob/v1.0.2/mconfig
    def edit(self, spec, prefix):
        with working_dir(self.build_directory):
            confstring = "./mconfig --prefix=%s" % prefix
            if "~suid" in spec:
                confstring += " --without-suid"
            if "~network" in spec:
                confstring += " --without-network"
            configure = Executable(confstring)
            configure()

@@ -25,10 +25,26 @@ class Atmi(CMakePackage):
    version("5.2.0", sha256="33e77905a607734157d46c736c924c7c50b6b13f2b2ddbf711cb08e37f2efa4f")
    version("5.1.3", sha256="a43448d77705b2b07e1758ffe8035aa6ba146abc2167984e8cb0f1615797b341")
    version("5.1.0", sha256="6a758f5a8332e6774cd8e14a4e5ce05e43b1e05298d817b4068c35fa1793d333")
    version("5.0.2", sha256="3aea040f5a246539ab118f2183cf3e802a21e0e6215a53025eda77f382341747")
    version("5.0.0", sha256="208c1773170722b60b74357e264e698df5871e9d9d490d64011e6ea76750d9cf")
    version("4.5.2", sha256="c235cfb8bdd89deafecf9123264217b8cc5577a5469e3e1f24587fa820d0792e")
    version("4.5.0", sha256="64eeb0244cedae99db7dfdb365e0ad624106cc1090a531f94885ae81e254aabf")
    version(
        "5.0.2",
        sha256="3aea040f5a246539ab118f2183cf3e802a21e0e6215a53025eda77f382341747",
        deprecated=True,
    )
    version(
        "5.0.0",
        sha256="208c1773170722b60b74357e264e698df5871e9d9d490d64011e6ea76750d9cf",
        deprecated=True,
    )
    version(
        "4.5.2",
        sha256="c235cfb8bdd89deafecf9123264217b8cc5577a5469e3e1f24587fa820d0792e",
        deprecated=True,
    )
    version(
        "4.5.0",
        sha256="64eeb0244cedae99db7dfdb365e0ad624106cc1090a531f94885ae81e254aabf",
        deprecated=True,
    )
    version(
        "4.3.1",
        sha256="4497fa6d33547b946e2a51619f2777ec36e9cff1b07fd534eb8a5ef0d8e30650",

@@ -13,7 +13,7 @@ class AwsParallelcluster(PythonPackage):
    tool to deploy and manage HPC clusters in the AWS cloud."""

    homepage = "https://github.com/aws/aws-parallelcluster"
    pypi = "aws-parallelcluster/aws-parallelcluster-2.11.8.tar.gz"
    pypi = "aws-parallelcluster/aws-parallelcluster-2.11.9.tar.gz"

    maintainers = [
        "charlesg3",
@@ -26,6 +26,7 @@ class AwsParallelcluster(PythonPackage):
        "lukeseawalker",
    ]

    version("2.11.9", sha256="615de4d59d9fd56a31d4feb3aeefe685346538a8dd0c1c35b660029f891d4dfd")
    version("2.11.8", sha256="acf33f48f8e48b0bc7be20f539d61baa1e27248765ba355df753bdfca4abd3cb")
    version("2.11.7", sha256="f7c51cf1c94787f56e0661e39860ecc9275efeacc88716b7c9f14053ec7fbd35")
    version("2.11.6", sha256="4df4bcf966f523bcdf5b4f68ed0ef347eebae70a074cd098b15bc8a6be27217c")

@@ -25,7 +25,9 @@ class Bcache(MakefilePackage):
    depends_on("pkgconfig", type="build")

    def setup_build_environment(self, env):
        env.append_flags("LDFLAGS", "-lintl")
        # Add -lintl if provided by gettext, otherwise libintl is provided by the system's glibc:
        if any("libintl" in filename for filename in self.libs):
            env.append_flags("LDFLAGS", "-lintl")

    patch(
        "func_crc64.patch",

@@ -40,6 +40,7 @@ class Berkeleygw(MakefilePackage):
    # https://github.com/spack/spack/pull/33948#issuecomment-1323805817
    variant("mpi", default=True, description="Build with MPI and ScaLAPACK support")
    variant("elpa", default=True, description="Build with ELPA support")
    variant("python", default=True, description="Build with Python support")
    variant("openmp", default=True, description="Build with OpenMP support")
    variant("hdf5", default=True, description="Builds with HDF5 support")
    variant("debug", default=False, description="Builds with DEBUG flag")
@@ -56,6 +57,12 @@ class Berkeleygw(MakefilePackage):
    depends_on("fftw-api@3+openmp", when="+openmp")
    depends_on("fftw-api@3~openmp", when="~openmp")

    # in order to run the installed python scripts
    depends_on("python", type=("build", "run"), when="+python")
    depends_on("py-numpy", type=("build", "run"), when="+python")
    depends_on("py-setuptools", type=("build", "run"), when="+python")
    depends_on("py-h5py", type=("build", "run"), when="+hdf5+python")

    depends_on("perl", type="test")

    conflicts("+elpa", when="~mpi", msg="elpa is a parallel library and needs MPI support")
@@ -91,7 +98,10 @@ def edit(self, spec, prefix):
        # use parallelization in tests
        filter_file(
            r"cd testsuite \&\& \$\(MAKE\) check$",
            "cd testsuite && $(MAKE) check-parallel",
            "cd testsuite && export BGW_TEST_MPI_NPROCS=2 OMP_NUM_THREADS=2 \
            SAVETESTDIRS=yes TEMPDIRPATH=%s && \
            $(MAKE) check-parallel"
            % join_path(self.build_directory, "tmp"),
            "Makefile",
        )

@@ -102,10 +112,23 @@ def edit(self, spec, prefix):
            "testsuite/run_testsuite.sh",
        )

    def setup_build_environment(self, env):
        if self.run_tests:
            env.set("OMP_NUM_THREADS", "2")
            env.set("BGW_TEST_MPI_NPROCS", "2")
        # slightly raise tolerance of some tests
        si_epm_tests = ["Si", "Si_cplx_spin"]
        if self.version >= Version("3.0"):
            si_epm_tests.append("Si_hdf5")
        for test in si_epm_tests:
            filter_file(
                "Precision : 6e-15",
                "Precision : 7e-15",
                join_path("testsuite", "Si-EPM", test + ".test"),
            )
        for test in ["Si_subspace", "Si_subspace_cplx", "Si_subspace_cplx_spin"]:
            filter_file(
                "Precision : 6e-15",
                "Precision : 7e-15",
                join_path("testsuite", "Si-EPM_subspace", test + ".test"),
            )
        filter_file("Precision : 8e-15", "Precision : 9e-15", "testsuite/GaAs-EPM/GaAs.test")

    def build(self, spec, prefix):

28
var/spack/repos/builtin/packages/cernlib/package.py
Normal file
@@ -0,0 +1,28 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


from spack.package import *


class Cernlib(CMakePackage):
    """CERN Library"""

    homepage = "https://cernlib.web.cern.ch"
    url = "https://cernlib.web.cern.ch/cernlib/download/2022_source/tar/cernlib-2022.11.08.0-free.tar.gz"

    maintainers = ["andriish"]

    version(
        "2022.11.08.0-free",
        sha256="733d148415ef78012ff81f21922d3bf641be7514b0242348dd0200cf1b003e46",
    )

    depends_on("motif")
    depends_on("libx11")

    def cmake_args(self):
        args = ["-DCERNLIB_BUILD_SHARED:BOOL=ON"]
        return args

@@ -14,6 +14,7 @@ class Cfitsio(AutotoolsPackage):
    homepage = "https://heasarc.gsfc.nasa.gov/fitsio/"
    url = "https://heasarc.gsfc.nasa.gov/FTP/software/fitsio/c/cfitsio-3.49.tar.gz"

    version("4.2.0", sha256="eba53d1b3f6e345632bb09a7b752ec7ced3d63ec5153a848380f3880c5d61889")
    version("4.1.0", sha256="b367c695d2831958e7166921c3b356d5dfa51b1ecee505b97416ba39d1b6c17a")
    version("4.0.0", sha256="b2a8efba0b9f86d3e1bd619f662a476ec18112b4f27cc441cc680a4e3777425e")
    version("3.49", sha256="5b65a20d5c53494ec8f638267fca4a629836b7ac8dd0ef0266834eab270ed4b3")

@@ -27,7 +27,7 @@ class Clingo(CMakePackage):

    version("master", branch="master", submodules=True)
    version("spack", commit="2a025667090d71b2c9dce60fe924feb6bde8f667", submodules=True)

    version("5.6.2", sha256="81eb7b14977ac57c97c905bd570f30be2859eabc7fe534da3cdc65eaca44f5be")
    version("5.5.2", sha256="a2a0a590485e26dce18860ac002576232d70accc5bfcb11c0c22e66beb23baa6")
    version("5.5.1", sha256="b9cf2ba2001f8241b8b1d369b6f353e628582e2a00f13566e51c03c4dd61f67e")
    version("5.5.0", sha256="c9d7004a0caec61b636ad1c1960fbf339ef8fdee9719321fc1b6b210613a8499")

@@ -27,10 +27,26 @@ class Comgr(CMakePackage):
    version("5.2.0", sha256="5f63fa93739ee9230756ef93c53019474b6cdddea3b588492d785dae1b08c087")
    version("5.1.3", sha256="3078c10e9a852fe8357712a263ad775b15944e083f93a879935c877511066ac9")
    version("5.1.0", sha256="1cdcfe5acb768ef50fb0026d4ee7ba01e615251ad3c27bb2593cdcf8c070a894")
    version("5.0.2", sha256="20d733f70d8edb573d8c92707f663d7d46dcaff08026cd6addbb83266679f92a")
    version("5.0.0", sha256="da1bbc694bd930a504406eb0a0018c2e317d8b2c136fb2cab8de426870efe9a8")
    version("4.5.2", sha256="e45f387fb6635fc1713714d09364204cd28fea97655b313c857beb1f8524e593")
    version("4.5.0", sha256="03c5880e0922fcff31306f7da2eb9d3a3709d9b5b75b3524dcfae85f4b181678")
    version(
        "5.0.2",
        sha256="20d733f70d8edb573d8c92707f663d7d46dcaff08026cd6addbb83266679f92a",
        deprecated=True,
    )
    version(
        "5.0.0",
        sha256="da1bbc694bd930a504406eb0a0018c2e317d8b2c136fb2cab8de426870efe9a8",
        deprecated=True,
    )
    version(
        "4.5.2",
        sha256="e45f387fb6635fc1713714d09364204cd28fea97655b313c857beb1f8524e593",
        deprecated=True,
    )
    version(
        "4.5.0",
        sha256="03c5880e0922fcff31306f7da2eb9d3a3709d9b5b75b3524dcfae85f4b181678",
        deprecated=True,
    )
    version(
        "4.3.1",
        sha256="f1d99550383ed7b3a01d304eedc3d86a8e45b271aa5a80b1dd099c22fda3f745",

@@ -29,6 +29,9 @@ class CrayMpich(Package):
    version("7.7.14")
    version("7.7.13")

    depends_on("cray-pmi")
    depends_on("libfabric")

    # cray-mpich 8.1.7: features MPI compiler wrappers
    variant("wrappers", default=True, when="@8.1.7:", description="enable MPI wrappers")

@@ -93,20 +93,26 @@ def configure_args(self):
        return extra_args

    @property
    def basepath(self):
        return join_path("darshan-test", "example-output")
    def test_log_path(self):
        if self.version < Version("3.4.1"):
            return join_path(
                "darshan-test",
                "example-output",
                "mpi-io-test-x86_64-{0}.darshan".format(self.version),
            )
        else:
            return join_path(
                "darshan-util", "pydarshan", "darshan", "tests", "input", "sample.darshan"
            )

    @run_after("install")
    def _copy_test_inputs(self):
        # add darshan-test/example-output/mpi-io-test-spack-expected.txt"
        test_inputs = [
            join_path(self.basepath, "mpi-io-test-x86_64-{0}.darshan".format(self.spec.version))
        ]
        test_inputs = [self.test_log_path]
        self.cache_extra_test_sources(test_inputs)

    def _test_parser(self):
        purpose = "Verify darshan-parser can parse an example log \
            from the current version and check some expected counter values"
            and check some expected counter values"
        # Switch to loading the expected strings from the darshan source in future
        # filename = self.test_suite.current_test_cache_dir.
        # join(join_path(self.basepath, "mpi-io-test-spack-expected.txt"))
@@ -116,9 +122,7 @@ def _test_parser(self):
            r"MPI-IO\s+-1\s+\w+\s+MPIIO_INDEP_OPENS\s+\d+",
            r"STDIO\s+0\s+\w+\s+STDIO_OPENS\s+\d+",
        ]
        logname = self.test_suite.current_test_cache_dir.join(
            join_path(self.basepath, "mpi-io-test-x86_64-{0}.darshan".format(self.spec.version))
        )
        logname = self.test_suite.current_test_cache_dir.join(self.test_log_path)
        exe = "darshan-parser"
        options = [logname]
        status = [0]

@@ -39,8 +39,12 @@ class Doxygen(CMakePackage):

    variant("mscgen", default=False, description="Build with support for code graphs from mscgen.")

    tags = ["build-tools"]

    executables = ["doxygen"]

    maintainers = ["sethrj"]

    @classmethod
    def determine_version(cls, exe):
        output = Executable(exe)("-v", output=str, error=str)

27
var/spack/repos/builtin/packages/epsic/package.py
Normal file
@@ -0,0 +1,27 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


from spack.package import *


class Epsic(AutotoolsPackage):
    """Electromagnetic Polarization Simulation in C++."""

    homepage = "https://github.com/straten/epsic"
    git = "https://github.com/straten/epsic.git"

    version("develop", default=True)

    # Version to match
    # https://github.com/lwa-project/pulsar/blob/master/SoftwareStack.md
    # last updated 10/17/2020
    version("LWA-10-17-2020", commit="5315cc634f6539ea0a34e403e492472b97e0f086")
    depends_on("autoconf", type="build")
    depends_on("automake", type="build")
    depends_on("yacc", when="@develop", type="build")
    depends_on("libtool", type="build")
    depends_on("m4", type="build")
    configure_directory = "src"

@@ -30,6 +30,7 @@ class Freeglut(CMakePackage, SourceforgePackage):
    depends_on("glx")
    depends_on("libx11")
    depends_on("libxi")
    depends_on("libxxf86vm")
    with when("platform=cray"):
        depends_on("glx")
        depends_on("libx11")

@@ -12,7 +12,7 @@ class Ftk(CMakePackage):

    # Add a proper url for your package's homepage here.
    homepage = "https://github.com/hguo/ftk"
    url = "https://github.com/hguo/ftk/archive/0.0.5.tar.gz"
    url = "https://github.com/hguo/ftk/archive/0.0.7.1.tar.gz"
    git = "https://github.com/hguo/ftk.git"

    # Add a list of GitHub accounts to
@@ -21,14 +21,18 @@ class Ftk(CMakePackage):

    version("master", branch="master")
    version("dev", branch="dev")
    version("0.0.7.1", sha256="6090fd436f971316062dbb4fcbf2c44603ed5c29341df8f2b80b85770a3bcda2")
    version("0.0.6", sha256="876839c62c78dddf48ee1f15681401db266e9537b76075e23fea31874e65935c")
    version("0.0.5.1", sha256="5f7d1931d902a3f48d5c15a3b3e6dc6038b880ac869930b44ca9e196148804e8")
    version("0.0.5", sha256="9d5c84a73b7761b9fc7dac62d4296df9f3052b722ec1b06518b2b8f51a8d3440")
    version("0.0.4", sha256="1674904da8d88dbd4c7d2b6a2629883f0444e70aefc99b48d285735d394897fa")

    # variants
    variant("adios2", default=False)
    variant("cuda", default=False)
    variant("gmp", default=True)
    variant("gmp", default=False)
    variant("hdf5", default=False)
    variant("metis", default=False)
    variant("mpi", default=False)
    variant("netcdf", default=False)
    variant("vtk", default=False)
@@ -38,6 +42,8 @@ class Ftk(CMakePackage):
    depends_on("cuda", when="+cuda")
    depends_on("hdf5", when="+hdf5")
    depends_on("gmp", when="+gmp")
    depends_on("metis", when="+metis")
    depends_on("metis", when="+mpi")
    depends_on("mpi", when="+mpi")
    depends_on("netcdf-c", when="+netcdf")
    depends_on("vtk", when="+vtk")
@@ -55,6 +61,7 @@ def cmake_args(self):
        self.add_cmake_option(args, "+cuda", "FTK_USE_CUDA")
        self.add_cmake_option(args, "+gmp", "FTK_USE_GMP")
        self.add_cmake_option(args, "+hdf5", "FTK_USE_HDF5")
        self.add_cmake_option(args, "+metis", "FTK_USE_METIS")
        self.add_cmake_option(args, "+mpi", "FTK_USE_MPI")
        self.add_cmake_option(args, "+netcdf", "FTK_USE_NETCDF")
        self.add_cmake_option(args, "+vtk", "FTK_USE_VTK")

@@ -21,6 +21,7 @@ class Graphite2(CMakePackage):
    version("1.3.13", sha256="dd63e169b0d3cf954b397c122551ab9343e0696fb2045e1b326db0202d875f06")

    depends_on("python@3.6:", type="test")
    depends_on("freetype")

    patch("regparm.patch")

Some files were not shown because too many files have changed in this diff.