Compare commits
118 commits: `develop-20` ... `hs/fix/qua`
SHA1 (118 commits; author and date columns did not survive extraction):

10acffc92e, f95e246355, a77f903f4d, 92260b179d, 196c912b8a, 0f54995e53, 9d1332f1a1, 40a1da4a73,
82e091e2c2, c86112b0e8, bb25c04845, d69d26d9ce, 06d660b9ba, 40b3196412, 7e893da4a6, 13aa8b6867,
b0afb619de, 7a82c703c7, 0d3667175a, a754341f6c, a50c45f00c, 87e65e5377, 50fe96aaf6, 56495a8cd8,
c054cb818d, bc28ec35d1, e47a6059a7, 0d170b9ef3, 5174cb9180, 22ba366e85, 13558269b5, 615b7a6ddb,
0415b21d3d, 053c9d2846, 1e763629f6, 7568687f1e, 3b81c0e6b7, c764400338, 4e8a6eec1a, ebc9f03dda,
8ac0bd2825, cc9e0137df, b8e448afa0, 209d670bf3, c6202842ed, b2a75db030, 0ec00a9c9a, 5e3020ad02,
a0d0e6321f, 0afac0beaa, 6155be8548, 611cb98b02, ea5742853f, 25a3e8ba59, 7fbb3df6b0, a728db95de,
7bc4069b9e, 51fc195d14, 27a0593104, f95e27a159, effe433c96, 21988fbb18, 2db654bf5a, 9992b563db,
daba1a805e, 832bf95aa4, 81e6dcd95c, 518572e710, 6f4ac31a67, e291daaa17, 58f1e791a0, aba0a740c2,
0fe8e763c3, 0e2d261b7e, 85cb234861, 87a83db623, e1e17786c5, 68af5cc4c0, 70df460fa7, 31a1b2fd6c,
f8fd51e12f, 12784594aa, e0eb0aba37, f47bf5f6b8, 9296527775, 08c53fa405, 0c6f0c090d, c623448f81,
df71341972, 75862c456d, e680a0c153, 9ad36080ca, ecd14f0ad9, c44edf1e8d, 1eacdca5aa, 4a8f5efb38,
2e753571bd, da16336550, 1818e70e74, 1dde785e9a, a7af32c23b, 6c92ad439b, 93f555eb14, fa3725e9de,
870dd6206f, b1d411ab06, 783eccfbd5, a842332b1b, 7e41288ca6, 3bb375a47f, 478855728f, 5e3baeabfa,
58b9b54066, 3918deab74, ceb2ce352f, 7dc6bff7b1, 05fbbd7164, 58421866c2
```diff
@@ -25,7 +25,6 @@ exit 1
 # The code above runs this file with our preferred python interpreter.
 
 import os
-import os.path
 import sys
 
 min_python3 = (3, 6)
```
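The `min_python3` constant feeds the interpreter guard in this launcher script; a minimal sketch of that guard follows (assumed logic, not the literal file contents):

```python
# Hedged sketch of the version check implied by min_python3 above.
import sys

min_python3 = (3, 6)

if sys.version_info[:2] < min_python3:
    sys.exit("Python {0}.{1} or newer is required".format(*min_python3))
```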
```diff
@@ -36,7 +36,7 @@ packages:
       go-or-gccgo-bootstrap: [go-bootstrap, gcc]
       iconv: [libiconv]
       ipp: [intel-oneapi-ipp]
-      java: [openjdk, jdk, ibm-java]
+      java: [openjdk, jdk]
       jpeg: [libjpeg-turbo, libjpeg]
       lapack: [openblas, amdlibflame]
       libc: [glibc, musl]
@@ -73,15 +73,27 @@ packages:
     permissions:
       read: world
       write: user
+  cray-fftw:
+    buildable: false
+  cray-libsci:
+    buildable: false
   cray-mpich:
     buildable: false
   cray-mvapich2:
     buildable: false
+  cray-pmi:
+    buildable: false
   egl:
     buildable: false
+  essl:
+    buildable: false
   fujitsu-mpi:
     buildable: false
+  fujitsu-ssl2:
+    buildable: false
   hpcx-mpi:
     buildable: false
+  mpt:
+    buildable: false
   spectrum-mpi:
     buildable: false
```
```diff
@@ -170,7 +170,7 @@ bootstrapping.
 To register the mirror on the platform where it's supposed to be used run the following command(s):
 
 % spack bootstrap add --trust local-sources /opt/bootstrap/metadata/sources
 % spack bootstrap add --trust local-binaries /opt/bootstrap/metadata/binaries
 
 % spack buildcache update-index /opt/bootstrap/bootstrap_cache
 
 This command needs to be run on a machine with internet access and the resulting folder
 has to be moved over to the air-gapped system. Once the local sources are added using the
```
```diff
@@ -56,13 +56,13 @@ If you look at the ``perl`` package, you'll see:
 
 .. code-block:: python
 
-   phases = ["configure", "build", "install"]
+   phases = ("configure", "build", "install")
 
 Similarly, ``cmake`` defines:
 
 .. code-block:: python
 
-   phases = ["bootstrap", "build", "install"]
+   phases = ("bootstrap", "build", "install")
 
 If we look at the ``cmake`` example, this tells Spack's ``PackageBase``
 class to run the ``bootstrap``, ``build``, and ``install`` functions
```
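For context, each name in `phases` must exist as a method and the methods run in the declared order; a minimal sketch in the single-class style these docs describe (the `Mytool` package is hypothetical):

```python
# Hypothetical package illustrating how the phases tuple maps to methods.
from spack.package import *


class Mytool(Package):
    """Illustrative only: no real URL or checksum."""

    phases = ("configure", "build", "install")

    def configure(self, spec, prefix):
        configure(f"--prefix={prefix}")

    def build(self, spec, prefix):
        make()

    def install(self, spec, prefix):
        make("install")
```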
```diff
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """URL primitives that just require Python standard library."""
 import itertools
-import os.path
+import os
 import re
 from typing import Optional, Set, Tuple
 from urllib.parse import urlsplit, urlunsplit
```
```diff
@@ -75,7 +75,6 @@
     "install_tree",
     "is_exe",
     "join_path",
-    "last_modification_time_recursive",
     "library_extensions",
     "mkdirp",
     "partition_path",
@@ -1470,15 +1469,36 @@ def set_executable(path):
 
 
-@system_path_filter
-def last_modification_time_recursive(path):
-    path = os.path.abspath(path)
-    times = [os.stat(path).st_mtime]
-    times.extend(
-        os.lstat(os.path.join(root, name)).st_mtime
-        for root, dirs, files in os.walk(path)
-        for name in dirs + files
-    )
-    return max(times)
+def recursive_mtime_greater_than(path: str, time: float) -> bool:
+    """Returns true if any file or dir recursively under `path` has mtime greater than `time`."""
+    # use bfs order to increase likelihood of early return
+    queue: Deque[str] = collections.deque([path])
+
+    if os.stat(path).st_mtime > time:
+        return True
+
+    while queue:
+        current = queue.popleft()
+
+        try:
+            entries = os.scandir(current)
+        except OSError:
+            continue
+
+        with entries:
+            for entry in entries:
+                try:
+                    st = entry.stat(follow_symlinks=False)
+                except OSError:
+                    continue
+
+                if st.st_mtime > time:
+                    return True
+
+                if entry.is_dir(follow_symlinks=False):
+                    queue.append(entry.path)
+
+    return False
 
 
 @system_path_filter
```
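Unlike the old max-over-all-mtimes helper, the replacement can return as soon as it finds one newer entry. A small usage sketch (module location assumed from the surrounding hunk; path and timestamp are illustrative):

```python
import time

from llnl.util.filesystem import recursive_mtime_greater_than  # assumed location

# Rebuild only if something under the source tree changed in the last hour.
one_hour_ago = time.time() - 3600
if recursive_mtime_greater_than("/tmp/example-src", one_hour_ago):
    print("source tree changed, rebuilding")
```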
```diff
@@ -5,6 +5,7 @@
 import codecs
 import collections
 import concurrent.futures
+import contextlib
 import copy
 import hashlib
 import io
@@ -91,6 +92,9 @@
 CURRENT_BUILD_CACHE_LAYOUT_VERSION = 2
 
 
+INDEX_HASH_FILE = "index.json.hash"
+
+
 class BuildCacheDatabase(spack_db.Database):
     """A database for binary buildcaches.
 
@@ -502,7 +506,7 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}):
         scheme = urllib.parse.urlparse(mirror_url).scheme
 
         if scheme != "oci" and not web_util.url_exists(
-            url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, "index.json")
+            url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
         ):
             return False
 
@@ -704,7 +708,7 @@ def _read_specs_and_push_index(
 
     # Now generate the index, compute its hash, and push the two files to
     # the mirror.
-    index_json_path = os.path.join(temp_dir, "index.json")
+    index_json_path = os.path.join(temp_dir, spack_db.INDEX_JSON_FILE)
    with open(index_json_path, "w", encoding="utf-8") as f:
         db._write_to_file(f)
 
@@ -714,14 +718,14 @@ def _read_specs_and_push_index(
     index_hash = compute_hash(index_string)
 
     # Write the hash out to a local file
-    index_hash_path = os.path.join(temp_dir, "index.json.hash")
+    index_hash_path = os.path.join(temp_dir, INDEX_HASH_FILE)
     with open(index_hash_path, "w", encoding="utf-8") as f:
         f.write(index_hash)
 
     # Push the index itself
     web_util.push_to_url(
         index_json_path,
-        url_util.join(cache_prefix, "index.json"),
+        url_util.join(cache_prefix, spack_db.INDEX_JSON_FILE),
         keep_original=False,
         extra_args={"ContentType": "application/json", "CacheControl": "no-cache"},
     )
@@ -729,7 +733,7 @@ def _read_specs_and_push_index(
 
     # Push the hash
     web_util.push_to_url(
         index_hash_path,
-        url_util.join(cache_prefix, "index.json.hash"),
+        url_util.join(cache_prefix, INDEX_HASH_FILE),
         keep_original=False,
         extra_args={"ContentType": "text/plain", "CacheControl": "no-cache"},
     )
@@ -798,7 +802,7 @@ def url_read_method(url):
         try:
             _, _, spec_file = web_util.read_from_url(url)
             contents = codecs.getreader("utf-8")(spec_file).read()
-        except web_util.SpackWebError as e:
+        except (web_util.SpackWebError, OSError) as e:
             tty.error(f"Error reading specfile: {url}: {e}")
         return contents
 
@@ -1785,7 +1789,7 @@ def _oci_update_index(
         db.mark(spec, "in_buildcache", True)
 
     # Create the index.json file
-    index_json_path = os.path.join(tmpdir, "index.json")
+    index_json_path = os.path.join(tmpdir, spack_db.INDEX_JSON_FILE)
     with open(index_json_path, "w", encoding="utf-8") as f:
         db._write_to_file(f)
 
@@ -2006,7 +2010,7 @@ def fetch_url_to_mirror(url):
 
             # Download the config = spec.json and the relevant tarball
             try:
-                manifest = json.loads(response.read())
+                manifest = json.load(response)
                 spec_digest = spack.oci.image.Digest.from_string(manifest["config"]["digest"])
                 tarball_digest = spack.oci.image.Digest.from_string(
                     manifest["layers"][-1]["digest"]
```
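`json.load(response)` parses directly from the file-like object instead of materializing the whole body with `response.read()` first. Both calls are standard library; the `BytesIO` below just stands in for an HTTP response:

```python
import io
import json

response = io.BytesIO(b'{"config": {"digest": "sha256:abc"}}')  # stand-in response

manifest = json.load(response)  # equivalent to json.loads(response.read()),
                                # without the extra bytes copy
print(manifest["config"]["digest"])
```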
```diff
@@ -2166,7 +2170,8 @@ def dedupe_hardlinks_if_necessary(root, buildinfo):
 
 def relocate_package(spec: spack.spec.Spec) -> None:
     """Relocate binaries and text files in the given spec prefix, based on its buildinfo file."""
-    buildinfo = read_buildinfo_file(spec.prefix)
+    spec_prefix = str(spec.prefix)
+    buildinfo = read_buildinfo_file(spec_prefix)
     old_layout_root = str(buildinfo["buildpath"])
 
     # Warn about old style tarballs created with the --rel flag (removed in Spack v0.20)
@@ -2187,7 +2192,7 @@ def relocate_package(spec: spack.spec.Spec) -> None:
             "and an older buildcache create implementation. It cannot be relocated."
         )
 
-    prefix_to_prefix = {}
+    prefix_to_prefix: Dict[str, str] = {}
 
     if "sbang_install_path" in buildinfo:
         old_sbang_install_path = str(buildinfo["sbang_install_path"])
@@ -2239,12 +2244,12 @@ def relocate_package(spec: spack.spec.Spec) -> None:
         tty.debug(f"Relocating: {old} => {new}.")
 
     # Old archives may have hardlinks repeated.
-    dedupe_hardlinks_if_necessary(spec.prefix, buildinfo)
+    dedupe_hardlinks_if_necessary(spec_prefix, buildinfo)
 
     # Text files containing the prefix text
-    textfiles = [os.path.join(spec.prefix, f) for f in buildinfo["relocate_textfiles"]]
-    binaries = [os.path.join(spec.prefix, f) for f in buildinfo.get("relocate_binaries")]
-    links = [os.path.join(spec.prefix, f) for f in buildinfo.get("relocate_links", [])]
+    textfiles = [os.path.join(spec_prefix, f) for f in buildinfo["relocate_textfiles"]]
+    binaries = [os.path.join(spec_prefix, f) for f in buildinfo.get("relocate_binaries")]
+    links = [os.path.join(spec_prefix, f) for f in buildinfo.get("relocate_links", [])]
 
     platform = spack.platforms.by_name(spec.platform)
     if "macho" in platform.binary_formats:
@@ -2266,6 +2271,24 @@ def relocate_package(spec: spack.spec.Spec) -> None:
             with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
                 codesign("-fs-", tmp_binary)
 
+    install_manifest = os.path.join(
+        spec.prefix,
+        spack.store.STORE.layout.metadata_dir,
+        spack.store.STORE.layout.manifest_file_name,
+    )
+    if not os.path.exists(install_manifest):
+        spec_id = spec.format("{name}/{hash:7}")
+        tty.warn("No manifest file in tarball for spec %s" % spec_id)
+
+    # overwrite old metadata with new
+    if spec.spliced:
+        # rewrite spec on disk
+        spack.store.STORE.layout.write_spec(spec, spack.store.STORE.layout.spec_file_path(spec))
+
+    # de-cache the install manifest
+    with contextlib.suppress(FileNotFoundError):
+        os.unlink(install_manifest)
+
 
 def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool, remote_checksum):
     stagepath = os.path.dirname(filename)
```
```diff
@@ -2432,15 +2455,6 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
         except Exception as e:
             shutil.rmtree(spec.prefix, ignore_errors=True)
             raise e
-        else:
-            manifest_file = os.path.join(
-                spec.prefix,
-                spack.store.STORE.layout.metadata_dir,
-                spack.store.STORE.layout.manifest_file_name,
-            )
-            if not os.path.exists(manifest_file):
-                spec_id = spec.format("{name}/{hash:7}")
-                tty.warn("No manifest file in tarball for spec %s" % spec_id)
     finally:
         if tmpdir:
             shutil.rmtree(tmpdir, ignore_errors=True)
@@ -2545,10 +2559,6 @@ def install_root_node(
     tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
     extract_tarball(spec, download_result, force)
     spec.package.windows_establish_runtime_linkage()
-    if spec.spliced:  # overwrite old metadata with new
-        spack.store.STORE.layout.write_spec(
-            spec, spack.store.STORE.layout.spec_file_path(spec)
-        )
     spack.hooks.post_install(spec, False)
     spack.store.STORE.db.add(spec, allow_missing=allow_missing)
 
@@ -2586,11 +2596,14 @@ def try_direct_fetch(spec, mirrors=None):
         )
         try:
             _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
+            specfile_contents = codecs.getreader("utf-8")(fs).read()
             specfile_is_signed = True
-        except web_util.SpackWebError as e1:
+        except (web_util.SpackWebError, OSError) as e1:
             try:
                 _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
-            except web_util.SpackWebError as e2:
+                specfile_contents = codecs.getreader("utf-8")(fs).read()
+                specfile_is_signed = False
+            except (web_util.SpackWebError, OSError) as e2:
                 tty.debug(
                     f"Did not find {specfile_name} on {buildcache_fetch_url_signed_json}",
                     e1,
@@ -2600,7 +2613,6 @@ def try_direct_fetch(spec, mirrors=None):
                     f"Did not find {specfile_name} on {buildcache_fetch_url_json}", e2, level=2
                 )
                 continue
-        specfile_contents = codecs.getreader("utf-8")(fs).read()
 
         # read the spec from the build cache file. All specs in build caches
         # are concrete (as they are built) so we need to mark this spec
@@ -2694,8 +2706,9 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
 
         try:
             _, _, json_file = web_util.read_from_url(keys_index)
-            json_index = sjson.load(codecs.getreader("utf-8")(json_file))
-        except web_util.SpackWebError as url_err:
+            json_index = sjson.load(json_file)
+        except (web_util.SpackWebError, OSError, ValueError) as url_err:
             # TODO: avoid repeated request
             if web_util.url_exists(keys_index):
                 tty.error(
                     f"Unable to find public keys in {url_util.format(fetch_url)},"
@@ -2942,14 +2955,14 @@ def __init__(self, url, local_hash, urlopen=web_util.urlopen):
 
     def get_remote_hash(self):
        # Failure to fetch index.json.hash is not fatal
-        url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
+        url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, INDEX_HASH_FILE)
         try:
             response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
-        except (TimeoutError, urllib.error.URLError):
+            remote_hash = response.read(64)
+        except OSError:
             return None
 
         # Validate the hash
-        remote_hash = response.read(64)
         if not re.match(rb"[a-f\d]{64}$", remote_hash):
             return None
         return remote_hash.decode("utf-8")
@@ -2963,17 +2976,17 @@ def conditional_fetch(self) -> FetchIndexResult:
             return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
 
         # Otherwise, download index.json
-        url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
+        url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
 
         try:
             response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
-        except (TimeoutError, urllib.error.URLError) as e:
-            raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e
+        except OSError as e:
+            raise FetchIndexError(f"Could not fetch index from {url_index}", e) from e
 
         try:
             result = codecs.getreader("utf-8")(response).read()
-        except ValueError as e:
-            raise FetchIndexError("Remote index {} is invalid".format(url_index), e) from e
+        except (ValueError, OSError) as e:
+            raise FetchIndexError(f"Remote index {url_index} is invalid") from e
 
         computed_hash = compute_hash(result)
 
@@ -3007,7 +3020,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):
 
     def conditional_fetch(self) -> FetchIndexResult:
         # Just do a conditional fetch immediately
-        url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
+        url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
         headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'}
 
         try:
@@ -3017,12 +3030,12 @@ def conditional_fetch(self) -> FetchIndexResult:
                 # Not modified; that means fresh.
                 return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
             raise FetchIndexError(f"Could not fetch index {url}", e) from e
-        except (TimeoutError, urllib.error.URLError) as e:
+        except OSError as e:  # URLError, socket.timeout, etc.
            raise FetchIndexError(f"Could not fetch index {url}", e) from e
 
         try:
             result = codecs.getreader("utf-8")(response).read()
-        except ValueError as e:
+        except (ValueError, OSError) as e:
             raise FetchIndexError(f"Remote index {url} is invalid", e) from e
 
         headers = response.headers
@@ -3054,11 +3067,11 @@ def conditional_fetch(self) -> FetchIndexResult:
                     headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
                 )
             )
-        except (TimeoutError, urllib.error.URLError) as e:
+        except OSError as e:
             raise FetchIndexError(f"Could not fetch manifest from {url_manifest}", e) from e
 
         try:
-            manifest = json.loads(response.read())
+            manifest = json.load(response)
         except Exception as e:
             raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
 
@@ -3073,14 +3086,16 @@ def conditional_fetch(self) -> FetchIndexResult:
             return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
 
         # Otherwise fetch the blob / index.json
-        response = self.urlopen(
-            urllib.request.Request(
-                url=self.ref.blob_url(index_digest),
-                headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
+        try:
+            response = self.urlopen(
+                urllib.request.Request(
+                    url=self.ref.blob_url(index_digest),
+                    headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
+                )
             )
-        )
-
-        result = codecs.getreader("utf-8")(response).read()
+            result = codecs.getreader("utf-8")(response).read()
+        except (OSError, ValueError) as e:
+            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
 
         # Make sure the blob we download has the advertised hash
         if compute_hash(result) != index_digest.digest:
```
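A recurring change above widens `except` clauses from `(TimeoutError, urllib.error.URLError)` to plain `OSError`. Since Python 3.3, `URLError` subclasses `OSError` and `TimeoutError` is an `OSError` subclass as well, so the single handler covers DNS failures, refused connections, and timeouts alike:

```python
import urllib.error

# Both of these hold on Python >= 3.3, which is what makes the
# `except OSError` consolidation in the hunks above safe.
assert issubclass(urllib.error.URLError, OSError)
assert issubclass(TimeoutError, OSError)
```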
```diff
@@ -5,7 +5,7 @@
 import fnmatch
 import glob
 import importlib
-import os.path
+import os
 import re
 import sys
 import sysconfig
@@ -27,9 +27,9 @@
 class ClingoBootstrapConcretizer:
     def __init__(self, configuration):
         self.host_platform = spack.platforms.host()
-        self.host_os = self.host_platform.operating_system("frontend")
+        self.host_os = self.host_platform.default_operating_system()
         self.host_target = archspec.cpu.host().family
-        self.host_architecture = spack.spec.ArchSpec.frontend_arch()
+        self.host_architecture = spack.spec.ArchSpec.default_arch()
         self.host_architecture.target = str(self.host_target)
         self.host_compiler = self._valid_compiler_or_raise()
         self.host_python = self.python_external_spec()
```
```diff
@@ -4,7 +4,7 @@
 """Manage configuration swapping for bootstrapping purposes"""
 
 import contextlib
-import os.path
+import os
 import sys
 from typing import Any, Dict, Generator, MutableSequence, Sequence
 
@@ -141,7 +141,7 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
 
 
 def _add_compilers_if_missing() -> None:
-    arch = spack.spec.ArchSpec.frontend_arch()
+    arch = spack.spec.ArchSpec.default_arch()
     if not spack.compilers.compilers_for_arch(arch):
         spack.compilers.find_compilers()
 
```
```diff
@@ -25,7 +25,6 @@
 import functools
 import json
 import os
-import os.path
 import sys
 import uuid
 from typing import Any, Callable, Dict, List, Optional, Tuple
@@ -37,6 +36,7 @@
 import spack.concretize
 import spack.config
 import spack.detection
+import spack.error
 import spack.mirrors.mirror
 import spack.platforms
 import spack.spec
@@ -45,6 +45,7 @@
 import spack.util.executable
 import spack.util.path
 import spack.util.spack_yaml
+import spack.util.url
 import spack.version
 from spack.installer import PackageInstaller
 
@@ -96,8 +97,12 @@ def __init__(self, conf: ConfigDictionary) -> None:
         self.name = conf["name"]
         self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])
 
-        # Promote (relative) paths to file urls
-        self.url = spack.mirrors.mirror.Mirror(conf["info"]["url"]).fetch_url
+        # Check for relative paths, and turn them into absolute paths
+        # root is the metadata_dir
+        maybe_url = conf["info"]["url"]
+        if spack.util.url.is_path_instead_of_url(maybe_url) and not os.path.isabs(maybe_url):
+            maybe_url = os.path.join(self.metadata_dir, maybe_url)
+        self.url = spack.mirrors.mirror.Mirror(maybe_url).fetch_url
 
     @property
     def mirror_scope(self) -> spack.config.InternalConfigScope:
@@ -323,11 +328,9 @@ def create_bootstrapper(conf: ConfigDictionary):
     return _bootstrap_methods[btype](conf)
 
 
-def source_is_enabled_or_raise(conf: ConfigDictionary):
-    """Raise ValueError if the source is not enabled for bootstrapping"""
-    trusted, name = spack.config.get("bootstrap:trusted"), conf["name"]
-    if not trusted.get(name, False):
-        raise ValueError("source is not trusted")
+def source_is_enabled(conf: ConfigDictionary) -> bool:
+    """Returns true if the source is not enabled for bootstrapping"""
+    return spack.config.get("bootstrap:trusted").get(conf["name"], False)
 
 
 def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str] = None):
@@ -357,24 +360,23 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
     exception_handler = GroupedExceptionHandler()
 
     for current_config in bootstrapping_sources():
+        if not source_is_enabled(current_config):
+            continue
         with exception_handler.forward(current_config["name"], Exception):
-            source_is_enabled_or_raise(current_config)
-            current_bootstrapper = create_bootstrapper(current_config)
-            if current_bootstrapper.try_import(module, abstract_spec):
+            if create_bootstrapper(current_config).try_import(module, abstract_spec):
                 return
 
-    assert exception_handler, (
-        f"expected at least one exception to have been raised at this point: "
-        f"while bootstrapping {module}"
-    )
     msg = f'cannot bootstrap the "{module}" Python module '
     if abstract_spec:
         msg += f'from spec "{abstract_spec}" '
-    if tty.is_debug():
+
+    if not exception_handler:
+        msg += ": no bootstrapping sources are enabled"
+    elif spack.error.debug or spack.error.SHOW_BACKTRACE:
         msg += exception_handler.grouped_message(with_tracebacks=True)
     else:
         msg += exception_handler.grouped_message(with_tracebacks=False)
-        msg += "\nRun `spack --debug ...` for more detailed errors"
+        msg += "\nRun `spack --backtrace ...` for more detailed errors"
     raise ImportError(msg)
 
 
@@ -412,8 +414,9 @@ def ensure_executables_in_path_or_raise(
     exception_handler = GroupedExceptionHandler()
 
     for current_config in bootstrapping_sources():
+        if not source_is_enabled(current_config):
+            continue
         with exception_handler.forward(current_config["name"], Exception):
-            source_is_enabled_or_raise(current_config)
             current_bootstrapper = create_bootstrapper(current_config)
             if current_bootstrapper.try_search_path(executables, abstract_spec):
                 # Additional environment variables needed
@@ -429,18 +432,17 @@ def ensure_executables_in_path_or_raise(
                 )
                 return cmd
 
-    assert exception_handler, (
-        f"expected at least one exception to have been raised at this point: "
-        f"while bootstrapping {executables_str}"
-    )
     msg = f"cannot bootstrap any of the {executables_str} executables "
     if abstract_spec:
         msg += f'from spec "{abstract_spec}" '
-    if tty.is_debug():
+
+    if not exception_handler:
+        msg += ": no bootstrapping sources are enabled"
+    elif spack.error.debug or spack.error.SHOW_BACKTRACE:
         msg += exception_handler.grouped_message(with_tracebacks=True)
     else:
         msg += exception_handler.grouped_message(with_tracebacks=False)
-    msg += "\nRun `spack --debug ...` for more detailed errors"
+    msg += "\nRun `spack --backtrace ...` for more detailed errors"
     raise RuntimeError(msg)
 
 
```
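The refactor turns a raising check into a boolean predicate so disabled sources are skipped before any work (or exception bookkeeping) happens. The shape of the new control flow, reduced to its essentials (names here are illustrative, not spack's API):

```python
# Illustrative reduction: `sources` stands in for bootstrapping_sources(),
# `try_import` for the bootstrapper method, `trusted` for bootstrap:trusted.
def first_working_source(sources, try_import):
    for conf in sources:
        if not conf.get("trusted", False):  # source_is_enabled analogue
            continue  # skip without recording an exception
        if try_import(conf):
            return conf
    return None  # caller decides how to report the grouped failures
```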
```diff
@@ -63,7 +63,6 @@ def _missing(name: str, purpose: str, system_only: bool = True) -> str:
 
 def _core_requirements() -> List[RequiredResponseType]:
     _core_system_exes = {
         "make": _missing("make", "required to build software from sources"),
         "patch": _missing("patch", "required to patch source code before building"),
         "tar": _missing("tar", "required to manage code archives"),
         "gzip": _missing("gzip", "required to compress/decompress code archives"),
```
```diff
@@ -301,11 +301,13 @@ def clean_environment():
     env.unset("CPLUS_INCLUDE_PATH")
     env.unset("OBJC_INCLUDE_PATH")
 
+    # prevent configure scripts from sourcing variables from config site file (AC_SITE_LOAD).
+    env.set("CONFIG_SITE", os.devnull)
     env.unset("CMAKE_PREFIX_PATH")
 
     env.unset("PYTHONPATH")
     env.unset("R_HOME")
     env.unset("R_ENVIRON")
 
     env.unset("LUA_PATH")
     env.unset("LUA_CPATH")
 
```
```diff
@@ -6,7 +6,9 @@
 import llnl.util.filesystem as fs
 
 import spack.directives
+import spack.spec
 import spack.util.executable
+import spack.util.prefix
 
 from .autotools import AutotoolsBuilder, AutotoolsPackage
 
@@ -17,19 +19,18 @@ class AspellBuilder(AutotoolsBuilder):
     to the Aspell extensions.
     """
 
-    def configure(self, pkg, spec, prefix):
+    def configure(
+        self,
+        pkg: "AspellDictPackage",  # type: ignore[override]
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
+    ):
         aspell = spec["aspell"].prefix.bin.aspell
         prezip = spec["aspell"].prefix.bin.prezip
         destdir = prefix
 
-        sh = spack.util.executable.which("sh")
-        sh(
-            "./configure",
-            "--vars",
-            "ASPELL={0}".format(aspell),
-            "PREZIP={0}".format(prezip),
-            "DESTDIR={0}".format(destdir),
-        )
+        sh = spack.util.executable.Executable("/bin/sh")
+        sh("./configure", "--vars", f"ASPELL={aspell}", f"PREZIP={prezip}", f"DESTDIR={destdir}")
 
 
 # Aspell dictionaries install their bits into their prefix.lib
```
```diff
@@ -2,7 +2,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
-import os.path
 import stat
 import subprocess
 from typing import Callable, List, Optional, Set, Tuple, Union
@@ -356,6 +355,13 @@ def _do_patch_libtool_configure(self) -> None:
         )
         # Support Libtool 2.4.2 and older:
         x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2')
+        # Configure scripts generated with libtool < 2.5.4 have a faulty test for the
+        # -single_module linker flag. A deprecation warning makes it think the default is
+        # -multi_module, triggering it to use problematic linker flags (such as ld -r). The
+        # linker default is `-single_module` from (ancient) macOS 10.4, so override by setting
+        # `lt_cv_apple_cc_single_mod=yes`. See the fix in libtool commit
+        # 82f7f52123e4e7e50721049f7fa6f9b870e09c9d.
+        x.filter("lt_cv_apple_cc_single_mod=no", "lt_cv_apple_cc_single_mod=yes", string=True)
 
     @spack.phase_callbacks.run_after("configure")
     def _do_patch_libtool(self) -> None:
@@ -527,7 +533,7 @@ def build_directory(self) -> str:
         return build_dir
 
     @spack.phase_callbacks.run_before("autoreconf")
-    def delete_configure_to_force_update(self) -> None:
+    def _delete_configure_to_force_update(self) -> None:
         if self.force_autoreconf:
             fs.force_remove(self.configure_abs_path)
 
@@ -540,7 +546,7 @@ def autoreconf_search_path_args(self) -> List[str]:
         return _autoreconf_search_path_args(self.spec)
 
     @spack.phase_callbacks.run_after("autoreconf")
-    def set_configure_or_die(self) -> None:
+    def _set_configure_or_die(self) -> None:
         """Ensure the presence of a "configure" script, or raise. If the "configure"
         is found, a module level attribute is set.
 
@@ -564,10 +570,7 @@ def configure_args(self) -> List[str]:
         return []
 
     def autoreconf(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Not needed usually, configure should be already there"""
 
@@ -596,10 +599,7 @@ def autoreconf(
         self.pkg.module.autoreconf(*autoreconf_args)
 
     def configure(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Run "configure", with the arguments specified by the builder and an
         appropriately set prefix.
@@ -612,10 +612,7 @@ def configure(
         pkg.module.configure(*options)
 
     def build(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
         """Run "make" on the build targets specified by the builder."""
         # See https://autotools.io/automake/silent.html
@@ -625,10 +622,7 @@ def build(
         pkg.module.make(*params)
 
     def install(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Run "make" on the install targets specified by the builder."""
         with fs.working_dir(self.build_directory):
@@ -825,7 +819,7 @@ def installcheck(self) -> None:
         self.pkg._if_make_target_execute("installcheck")
 
     @spack.phase_callbacks.run_after("install")
-    def remove_libtool_archives(self) -> None:
+    def _remove_libtool_archives(self) -> None:
         """Remove all .la files in prefix sub-folders if the package sets
         ``install_libtool_archives`` to be False.
         """
```
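The `x.filter(...)` calls above are spack's in-place file substitution. The same libtool workaround can be expressed with the standalone `llnl.util.filesystem.filter_file` helper; a hedged sketch, run against a `configure` script in the current directory:

```python
from llnl.util.filesystem import filter_file

# Apply the libtool < 2.5.4 workaround; string=True disables regex
# interpretation, matching the string=True filter in the diff above.
filter_file(
    "lt_cv_apple_cc_single_mod=no",
    "lt_cv_apple_cc_single_mod=yes",
    "configure",
    string=True,
)
```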
```diff
@@ -10,6 +10,8 @@
 import llnl.util.tty as tty
 
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 
 from .cmake import CMakeBuilder, CMakePackage
 
@@ -293,6 +295,13 @@ def initconfig_hardware_entries(self):
             entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
             entries.append(cmake_cache_string("GPU_TARGETS", arch_str))
 
+            if spec.satisfies("%gcc"):
+                entries.append(
+                    cmake_cache_string(
+                        "CMAKE_HIP_FLAGS", f"--gcc-toolchain={self.pkg.compiler.prefix}"
+                    )
+                )
+
         return entries
 
     def std_initconfig_entries(self):
@@ -323,7 +332,9 @@ def initconfig_package_entries(self):
         """This method is to be overwritten by the package"""
         return []
 
-    def initconfig(self, pkg, spec, prefix):
+    def initconfig(
+        self, pkg: "CachedCMakePackage", spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         cache_entries = (
             self.std_initconfig_entries()
             + self.initconfig_compiler_entries()
```
```diff
@@ -7,6 +7,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on
 from spack.multimethod import when
 
@@ -81,12 +83,16 @@ def check_args(self):
     def setup_build_environment(self, env):
         env.set("CARGO_HOME", self.stage.path)
 
-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Runs ``cargo install`` in the source directory"""
         with fs.working_dir(self.build_directory):
             pkg.module.cargo("install", "--root", "out", "--path", ".", *self.build_args)
 
-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Copy build files into package prefix."""
         with fs.working_dir(self.build_directory):
             fs.install_tree("out", prefix)
```
```diff
@@ -454,10 +454,7 @@ def cmake_args(self) -> List[str]:
         return []
 
     def cmake(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Runs ``cmake`` in the build directory"""
 
@@ -474,10 +471,7 @@ def cmake(
         pkg.module.cmake(*options)
 
     def build(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Make the build targets"""
         with fs.working_dir(self.build_directory):
@@ -488,10 +482,7 @@ def build(
             pkg.module.ninja(*self.build_targets)
 
     def install(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Make the install targets"""
         with fs.working_dir(self.build_directory):
```
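These tightened builder signatures do not change anything for package authors; a minimal CMake package still looks like this (hypothetical package: name, URL, and checksum are placeholders):

```python
# Hypothetical package illustrating the unchanged package-side API.
from spack.package import *


class Mylib(CMakePackage):
    """Example CMake-based package."""

    homepage = "https://example.com/mylib"
    url = "https://example.com/mylib-1.0.tar.gz"

    version("1.0", sha256="0" * 64)  # placeholder checksum

    def cmake_args(self):
        return [self.define("BUILD_TESTING", self.run_tests)]
```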
```diff
@@ -7,6 +7,8 @@
 import spack.directives
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 
 from ._checks import BuilderWithDefaults, apply_macos_rpath_fixups, execute_install_time_tests
 
@@ -48,3 +50,8 @@ class GenericBuilder(BuilderWithDefaults):
 
     # unconditionally perform any post-install phase tests
     spack.phase_callbacks.run_after("install")(execute_install_time_tests)
+
+    def install(
+        self, pkg: Package, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
+        raise NotImplementedError
```
```diff
@@ -7,7 +7,9 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
-from spack.directives import build_system, extends
+import spack.spec
+import spack.util.prefix
+from spack.directives import build_system, depends_on
 from spack.multimethod import when
 
 from ._checks import BuilderWithDefaults, execute_install_time_tests
@@ -26,9 +28,7 @@ class GoPackage(spack.package_base.PackageBase):
     build_system("go")
 
     with when("build_system=go"):
-        # TODO: this seems like it should be depends_on, see
-        # setup_dependent_build_environment in go for why I kept it like this
-        extends("go@1.14:", type="build")
+        depends_on("go", type="build")
 
 
 @spack.builder.builder("go")
@@ -71,6 +71,7 @@ class GoBuilder(BuilderWithDefaults):
     def setup_build_environment(self, env):
         env.set("GO111MODULE", "on")
+        env.set("GOTOOLCHAIN", "local")
         env.set("GOPATH", fs.join_path(self.pkg.stage.path, "go"))
 
     @property
     def build_directory(self):
@@ -81,19 +82,31 @@ def build_directory(self):
     def build_args(self):
         """Arguments for ``go build``."""
         # Pass ldflags -s = --strip-all and -w = --no-warnings by default
-        return ["-modcacherw", "-ldflags", "-s -w", "-o", f"{self.pkg.name}"]
+        return [
+            "-p",
+            str(self.pkg.module.make_jobs),
+            "-modcacherw",
+            "-ldflags",
+            "-s -w",
+            "-o",
+            f"{self.pkg.name}",
+        ]
 
     @property
     def check_args(self):
         """Argument for ``go test`` during check phase"""
         return []
 
-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: GoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Runs ``go build`` in the source directory"""
         with fs.working_dir(self.build_directory):
             pkg.module.go("build", *self.build_args)
 
-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: GoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install built binaries into prefix bin."""
         with fs.working_dir(self.build_directory):
             fs.mkdirp(prefix.bin)
```
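With the expanded `build_args`, the effective invocation now caps compile parallelism at spack's job count via `-p`. Roughly, for a package named `hugo` with `make_jobs == 8` (both values illustrative):

```python
# Illustrative expansion of GoBuilder.build_args; the builder then runs
# `go build` with exactly these arguments.
args = ["-p", "8", "-modcacherw", "-ldflags", "-s -w", "-o", "hugo"]
print("go build " + " ".join(args))
```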
```diff
@@ -7,7 +7,9 @@
 
 import spack.builder
 import spack.package_base
+import spack.spec
 import spack.util.executable
+import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.multimethod import when
 
@@ -55,7 +57,9 @@ class LuaBuilder(spack.builder.Builder):
     #: Names associated with package attributes in the old build-system format
     legacy_attributes = ()
 
-    def unpack(self, pkg, spec, prefix):
+    def unpack(
+        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         if os.path.splitext(pkg.stage.archive_file)[1] == ".rock":
             directory = pkg.luarocks("unpack", pkg.stage.archive_file, output=str)
             dirlines = directory.split("\n")
@@ -66,15 +70,16 @@ def unpack(self, pkg, spec, prefix):
     def _generate_tree_line(name, prefix):
         return """{{ name = "{name}", root = "{prefix}" }};""".format(name=name, prefix=prefix)
 
-    def generate_luarocks_config(self, pkg, spec, prefix):
+    def generate_luarocks_config(
+        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         spec = self.pkg.spec
         table_entries = []
         for d in spec.traverse(deptype=("build", "run")):
             if d.package.extends(self.pkg.extendee_spec):
                 table_entries.append(self._generate_tree_line(d.name, d.prefix))
 
-        path = self._luarocks_config_path()
-        with open(path, "w", encoding="utf-8") as config:
+        with open(self._luarocks_config_path(), "w", encoding="utf-8") as config:
             config.write(
                 """
                 deps_mode="all"
@@ -85,23 +90,26 @@ def generate_luarocks_config(self, pkg, spec, prefix):
                 "\n".join(table_entries)
                 )
             )
-        return path
 
-    def preprocess(self, pkg, spec, prefix):
+    def preprocess(
+        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Override this to preprocess source before building with luarocks"""
         pass
 
     def luarocks_args(self):
         return []
 
-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         rock = "."
         specs = find(".", "*.rockspec", recursive=False)
         if specs:
             rock = specs[0]
         rocks_args = self.luarocks_args()
         rocks_args.append(rock)
-        self.pkg.luarocks("--tree=" + prefix, "make", *rocks_args)
+        pkg.luarocks("--tree=" + prefix, "make", *rocks_args)
 
     def _luarocks_config_path(self):
         return os.path.join(self.pkg.stage.source_path, "spack_luarocks.lua")
```
```diff
@@ -98,29 +98,20 @@ def build_directory(self) -> str:
         return self.pkg.stage.source_path
 
     def edit(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Edit the Makefile before calling make. The default is a no-op."""
         pass
 
     def build(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Run "make" on the build targets specified by the builder."""
         with fs.working_dir(self.build_directory):
             pkg.module.make(*self.build_targets)
 
     def install(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Run "make" on the install targets specified by the builder."""
         with fs.working_dir(self.build_directory):
```
```diff
@@ -5,6 +5,8 @@
 
 import spack.builder
 import spack.package_base
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on
 from spack.multimethod import when
 from spack.util.executable import which
@@ -58,16 +60,20 @@ def build_args(self):
         """List of args to pass to build phase."""
         return []
 
-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: MavenPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Compile code and package into a JAR file."""
         with fs.working_dir(self.build_directory):
-            mvn = which("mvn")
+            mvn = which("mvn", required=True)
             if self.pkg.run_tests:
                 mvn("verify", *self.build_args())
             else:
                 mvn("package", "-DskipTests", *self.build_args())
 
-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: MavenPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Copy to installation prefix."""
         with fs.working_dir(self.build_directory):
             fs.install_tree(".", prefix)
```
```diff
@@ -188,10 +188,7 @@ def meson_args(self) -> List[str]:
         return []
 
     def meson(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Run ``meson`` in the build directory"""
         options = []
@@ -204,10 +201,7 @@ def meson(
         pkg.module.meson(*options)
 
     def build(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Make the build targets"""
         options = ["-v"]
@@ -216,10 +210,7 @@ def build(
         pkg.module.ninja(*options)
 
     def install(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Make the install targets"""
         with fs.working_dir(self.build_directory):
```
```diff
@@ -7,6 +7,8 @@
 
 import spack.builder
 import spack.package_base
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, conflicts
 
 from ._checks import BuilderWithDefaults
@@ -99,7 +101,9 @@ def msbuild_install_args(self):
         as `msbuild_args` by default."""
         return self.msbuild_args()
 
-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: MSBuildPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Run "msbuild" on the build targets specified by the builder."""
         with fs.working_dir(self.build_directory):
             pkg.module.msbuild(
@@ -108,7 +112,9 @@ def build(self, pkg, spec, prefix):
                 self.define_targets(*self.build_targets),
             )
 
-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: MSBuildPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Run "msbuild" on the install targets specified by the builder.
         This is INSTALL by default"""
         with fs.working_dir(self.build_directory):
```
```diff
@@ -7,6 +7,8 @@
 
 import spack.builder
 import spack.package_base
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, conflicts
 
 from ._checks import BuilderWithDefaults
@@ -123,7 +125,9 @@ def nmake_install_args(self):
         Individual packages should override to specify NMake args to command line"""
         return []
 
-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: NMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Run "nmake" on the build targets specified by the builder."""
         opts = self.std_nmake_args
         opts += self.nmake_args()
@@ -132,7 +136,9 @@ def build(self, pkg, spec, prefix):
         with fs.working_dir(self.build_directory):
             pkg.module.nmake(*opts, *self.build_targets, ignore_quotes=self.ignore_quotes)
 
-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: NMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Run "nmake" on the install targets specified by the builder.
         This is INSTALL by default"""
         opts = self.std_nmake_args
```
```diff
@@ -3,6 +3,8 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import spack.builder
 import spack.package_base
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, extends
 from spack.multimethod import when
 
@@ -42,7 +44,9 @@ class OctaveBuilder(BuilderWithDefaults):
     #: Names associated with package attributes in the old build-system format
     legacy_attributes = ()
 
-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: OctavePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install the package from the archive file"""
         pkg.module.octave(
             "--quiet",
```
```diff
@@ -10,6 +10,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.install_test import SkipTest, test_part
 from spack.multimethod import when
@@ -149,7 +151,9 @@ def configure_args(self):
         """
         return []
 
-    def configure(self, pkg, spec, prefix):
+    def configure(
+        self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Run Makefile.PL or Build.PL with arguments consisting of
         an appropriate installation base directory followed by the
         list returned by :py:meth:`~.PerlBuilder.configure_args`.
@@ -173,7 +177,9 @@ def fix_shebang(self):
         repl = "#!/usr/bin/env perl"
         filter_file(pattern, repl, "Build", backup=False)
 
-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Builds a Perl package."""
         self.build_executable()
 
@@ -184,6 +190,8 @@ def check(self):
         """Runs built-in tests of a Perl package."""
         self.build_executable("test")
 
-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Installs a Perl package."""
         self.build_executable("install")
```
```diff
@@ -28,6 +28,7 @@
 import spack.repo
 import spack.spec
 import spack.store
+import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.error import NoHeadersError, NoLibrariesError
 from spack.install_test import test_part
@@ -263,16 +264,17 @@ def update_external_dependencies(self, extendee_spec=None):
         # Ensure architecture information is present
         if not python.architecture:
             host_platform = spack.platforms.host()
-            host_os = host_platform.operating_system("default_os")
-            host_target = host_platform.target("default_target")
+            host_os = host_platform.default_operating_system()
+            host_target = host_platform.default_target()
             python.architecture = spack.spec.ArchSpec(
                 (str(host_platform), str(host_os), str(host_target))
             )
         else:
             if not python.architecture.platform:
                 python.architecture.platform = spack.platforms.host()
+            platform = spack.platforms.by_name(python.architecture.platform)
             if not python.architecture.os:
-                python.architecture.os = "default_os"
+                python.architecture.os = platform.default_operating_system()
             if not python.architecture.target:
                 python.architecture.target = archspec.cpu.host().family.name
```
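The replacement API builds a concrete arch tuple directly from the host platform instead of the string placeholders `"default_os"`/`"default_target"`. A sketch using only the calls that appear in the hunk above:

```python
# Constructing a default ArchSpec from the host platform, as the new code does.
import spack.platforms
import spack.spec

host_platform = spack.platforms.host()
arch = spack.spec.ArchSpec(
    (
        str(host_platform),
        str(host_platform.default_operating_system()),
        str(host_platform.default_target()),
    )
)
```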
```diff
@@ -6,6 +6,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on
 
 from ._checks import BuilderWithDefaults, execute_build_time_tests
@@ -62,17 +64,23 @@ def qmake_args(self):
         """List of arguments passed to qmake."""
         return []
 
-    def qmake(self, pkg, spec, prefix):
+    def qmake(
+        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Run ``qmake`` to configure the project and generate a Makefile."""
         with working_dir(self.build_directory):
             pkg.module.qmake(*self.qmake_args())
 
-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Make the build targets"""
         with working_dir(self.build_directory):
             pkg.module.make()
 
-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Make the install targets"""
         with working_dir(self.build_directory):
             pkg.module.make("install")
```
```diff
@@ -9,6 +9,8 @@
 import llnl.util.tty as tty
 
 import spack.builder
+import spack.spec
+import spack.util.prefix
 from spack.build_environment import SPACK_NO_PARALLEL_MAKE
 from spack.config import determine_number_of_jobs
 from spack.directives import build_system, extends, maintainers
@@ -74,18 +76,22 @@ def build_directory(self):
             ret = os.path.join(ret, self.subdirectory)
         return ret
 
-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: RacketPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install everything from build directory."""
         raco = Executable("raco")
         with fs.working_dir(self.build_directory):
-            parallel = self.pkg.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
+            parallel = pkg.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
+            name = pkg.racket_name
+            assert name is not None, "Racket package name is not set"
             args = [
                 "pkg",
                 "install",
                 "-t",
                 "dir",
                 "-n",
-                self.pkg.racket_name,
+                name,
                 "--deps",
                 "fail",
                 "--ignore-implies",
@@ -101,8 +107,7 @@ def install(self, pkg, spec, prefix):
             except ProcessError:
                 args.insert(-2, "--skip-installed")
                 raco(*args)
-                msg = (
-                    "Racket package {0} was already installed, uninstalling via "
+                tty.warn(
+                    f"Racket package {name} was already installed, uninstalling via "
                     "Spack may make someone unhappy!"
                 )
-                tty.warn(msg.format(self.pkg.racket_name))
```
```diff
@@ -140,7 +140,7 @@ class ROCmPackage(PackageBase):
         when="+rocm",
     )
 
-    depends_on("llvm-amdgpu", when="+rocm")
+    depends_on("llvm-amdgpu", type="build", when="+rocm")
     depends_on("hsa-rocr-dev", when="+rocm")
     depends_on("hip +rocm", when="+rocm")
 
```
```diff
@@ -5,6 +5,8 @@
 
 import spack.builder
 import spack.package_base
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, extends, maintainers
 
 from ._checks import BuilderWithDefaults
@@ -42,7 +44,9 @@ class RubyBuilder(BuilderWithDefaults):
     #: Names associated with package attributes in the old build-system format
     legacy_attributes = ()
 
-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: RubyPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Build a Ruby gem."""
 
         # ruby-rake provides both rake.gemspec and Rakefile, but only
@@ -58,7 +62,9 @@ def build(self, pkg, spec, prefix):
         # Some Ruby packages only ship `*.gem` files, so nothing to build
         pass
 
-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: RubyPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install a Ruby gem.
 
         The ruby package sets ``GEM_HOME`` to tell gem where to install to."""
```
@@ -4,6 +4,8 @@

 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on

 from ._checks import BuilderWithDefaults, execute_build_time_tests

@@ -59,7 +61,9 @@ def build_args(self, spec, prefix):
         """Arguments to pass to build."""
         return []

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: SConsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Build the package."""
         pkg.module.scons(*self.build_args(spec, prefix))

@@ -67,7 +71,9 @@ def install_args(self, spec, prefix):
         """Arguments to pass to install."""
         return []

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: SConsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install the package."""
         pkg.module.scons("install", *self.install_args(spec, prefix))
@@ -11,6 +11,8 @@
 import spack.install_test
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.multimethod import when
 from spack.util.executable import Executable

@@ -41,6 +43,7 @@ class SIPPackage(spack.package_base.PackageBase):
     with when("build_system=sip"):
         extends("python", type=("build", "link", "run"))
         depends_on("py-sip", type="build")
+        depends_on("gmake", type="build")

     @property
     def import_modules(self):

@@ -130,7 +133,9 @@ class SIPBuilder(BuilderWithDefaults):

     build_directory = "build"

-    def configure(self, pkg, spec, prefix):
+    def configure(
+        self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Configure the package."""

         # https://www.riverbankcomputing.com/static/Docs/sip/command_line_tools.html

@@ -148,7 +153,9 @@ def configure_args(self):
         """Arguments to pass to configure."""
         return []

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Build the package."""
         args = self.build_args()

@@ -159,7 +166,9 @@ def build_args(self):
         """Arguments to pass to build."""
         return []

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install the package."""
         args = self.install_args()
@@ -6,6 +6,8 @@

 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on

 from ._checks import BuilderWithDefaults, execute_build_time_tests, execute_install_time_tests

@@ -97,7 +99,9 @@ def waf(self, *args, **kwargs):
         with working_dir(self.build_directory):
             self.python("waf", "-j{0}".format(jobs), *args, **kwargs)

-    def configure(self, pkg, spec, prefix):
+    def configure(
+        self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Configures the project."""
         args = ["--prefix={0}".format(self.pkg.prefix)]
         args += self.configure_args()

@@ -108,7 +112,9 @@ def configure_args(self):
         """Arguments to pass to configure."""
         return []

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Executes the build."""
         args = self.build_args()

@@ -118,7 +124,9 @@ def build_args(self):
         """Arguments to pass to build."""
         return []

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Installs the targets on the system."""
         args = self.install_args()
@@ -14,7 +14,6 @@
 import zipfile
 from collections import namedtuple
 from typing import Callable, Dict, List, Set
-from urllib.error import HTTPError, URLError
 from urllib.request import HTTPHandler, Request, build_opener

 import llnl.util.filesystem as fs

@@ -472,12 +471,9 @@ def generate_pipeline(env: ev.Environment, args) -> None:
         # Use all unpruned specs to populate the build group for this set
         cdash_config = cfg.get("cdash")
         if options.cdash_handler and options.cdash_handler.auth_token:
-            try:
-                options.cdash_handler.populate_buildgroup(
-                    [options.cdash_handler.build_name(s) for s in pipeline_specs]
-                )
-            except (SpackError, HTTPError, URLError, TimeoutError) as err:
-                tty.warn(f"Problem populating buildgroup: {err}")
+            options.cdash_handler.populate_buildgroup(
+                [options.cdash_handler.build_name(s) for s in pipeline_specs]
+            )
         elif cdash_config:
             # warn only if there was actually a CDash configuration.
             tty.warn("Unable to populate buildgroup without CDash credentials")
@@ -1,23 +1,21 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import codecs
 import copy
 import json
 import os
 import re
-import ssl
 import sys
 import time
 from collections import deque
 from enum import Enum
 from typing import Dict, Generator, List, Optional, Set, Tuple
 from urllib.parse import quote, urlencode, urlparse
-from urllib.request import HTTPHandler, HTTPSHandler, Request, build_opener
+from urllib.request import Request

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
-from llnl.util.lang import Singleton, memoized
+from llnl.util.lang import memoized

 import spack.binary_distribution as bindist
 import spack.config as cfg

@@ -35,32 +33,11 @@
 from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
 from spack.reporters.cdash import build_stamp as cdash_build_stamp


-def _urlopen():
-    error_handler = web_util.SpackHTTPDefaultErrorHandler()
-
-    # One opener with HTTPS ssl enabled
-    with_ssl = build_opener(
-        HTTPHandler(), HTTPSHandler(context=web_util.ssl_create_default_context()), error_handler
-    )
-
-    # One opener with HTTPS ssl disabled
-    without_ssl = build_opener(
-        HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context()), error_handler
-    )
-
-    # And dynamically dispatch based on the config:verify_ssl.
-    def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
-        opener = with_ssl if verify_ssl else without_ssl
-        timeout = timeout or cfg.get("config:connect_timeout", 1)
-        return opener.open(fullurl, data, timeout)
-
-    return dispatch_open
-
-
 IS_WINDOWS = sys.platform == "win32"
 SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
-_dyn_mapping_urlopener = Singleton(_urlopen)
+
+# this exists purely for testing purposes
+_urlopen = web_util.urlopen


 def copy_files_to_artifacts(src, artifacts_dir):

@@ -279,26 +256,25 @@ def copy_test_results(self, source, dest):
         reports = fs.join_path(source, "*_Test*.xml")
         copy_files_to_artifacts(reports, dest)

-    def create_buildgroup(self, opener, headers, url, group_name, group_type):
+    def create_buildgroup(self, headers, url, group_name, group_type):
         data = {"newbuildgroup": group_name, "project": self.project, "type": group_type}

         enc_data = json.dumps(data).encode("utf-8")

         request = Request(url, data=enc_data, headers=headers)

-        response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
-        response_code = response.getcode()
-
-        if response_code not in [200, 201]:
-            msg = f"Creating buildgroup failed (response code = {response_code})"
-            tty.warn(msg)
+        try:
+            response_text = _urlopen(request, timeout=SPACK_CDASH_TIMEOUT).read()
+        except OSError as e:
+            tty.warn(f"Failed to create CDash buildgroup: {e}")
             return None

-        response_text = response.read()
-        response_json = json.loads(response_text)
-        build_group_id = response_json["id"]
-
-        return build_group_id
+        try:
+            response_json = json.loads(response_text)
+            return response_json["id"]
+        except (json.JSONDecodeError, KeyError) as e:
+            tty.warn(f"Failed to parse CDash response: {e}")
+            return None

     def populate_buildgroup(self, job_names):
         url = f"{self.url}/api/v1/buildgroup.php"

@@ -308,16 +284,11 @@ def populate_buildgroup(self, job_names):
             "Content-Type": "application/json",
         }

-        opener = build_opener(HTTPHandler)
-
-        parent_group_id = self.create_buildgroup(opener, headers, url, self.build_group, "Daily")
-        group_id = self.create_buildgroup(
-            opener, headers, url, f"Latest {self.build_group}", "Latest"
-        )
+        parent_group_id = self.create_buildgroup(headers, url, self.build_group, "Daily")
+        group_id = self.create_buildgroup(headers, url, f"Latest {self.build_group}", "Latest")

         if not parent_group_id or not group_id:
-            msg = f"Failed to create or retrieve buildgroups for {self.build_group}"
-            tty.warn(msg)
+            tty.warn(f"Failed to create or retrieve buildgroups for {self.build_group}")
             return

         data = {

@@ -329,15 +300,12 @@ def populate_buildgroup(self, job_names):

         enc_data = json.dumps(data).encode("utf-8")

-        request = Request(url, data=enc_data, headers=headers)
-        request.get_method = lambda: "PUT"
+        request = Request(url, data=enc_data, headers=headers, method="PUT")

-        response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
-        response_code = response.getcode()
-
-        if response_code != 200:
-            msg = f"Error response code ({response_code}) in populate_buildgroup"
-            tty.warn(msg)
+        try:
+            _urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
+        except OSError as e:
+            tty.warn(f"Failed to populate CDash buildgroup: {e}")

     def report_skipped(self, spec: spack.spec.Spec, report_dir: str, reason: Optional[str]):
         """Explicitly report skipping testing of a spec (e.g., it's CI

@@ -735,9 +703,6 @@ def _apply_section(dest, src):
             for value in header.values():
                 value = os.path.expandvars(value)

-            verify_ssl = mapping.get("verify_ssl", spack.config.get("config:verify_ssl", True))
-            timeout = mapping.get("timeout", spack.config.get("config:connect_timeout", 1))
-
             required = mapping.get("require", [])
             allowed = mapping.get("allow", [])
             ignored = mapping.get("ignore", [])

@@ -771,19 +736,15 @@ def job_query(job):
                 endpoint_url._replace(query=query).geturl(), headers=header, method="GET"
             )
             try:
-                response = _dyn_mapping_urlopener(
-                    request, verify_ssl=verify_ssl, timeout=timeout
-                )
+                response = _urlopen(request)
+                config = json.load(response)
             except Exception as e:
                 # For now just ignore any errors from dynamic mapping and continue
                 # This is still experimental, and failures should not stop CI
                 # from running normally
-                tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}")
-                tty.warn(f"{e}")
+                tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}: {e}")
                 continue

-            config = json.load(codecs.getreader("utf-8")(response))
-
             # Strip ignore keys
             if ignored:
                 for key in ignored:
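Note: two stdlib patterns adopted above, shown in isolation (URL and payload hypothetical): `Request(..., method="PUT")` replaces the old `request.get_method = lambda: "PUT"` workaround, and a single `except OSError` covers `URLError`, `HTTPError`, and timeouts, since all of them derive from `OSError`.

```python
# Stdlib-only sketch of the request pattern used by the CDash handler above.
import json
from urllib.request import Request, urlopen

payload = json.dumps({"project": "demo"}).encode("utf-8")  # hypothetical payload
request = Request(
    "https://cdash.example.com/api/v1/buildgroup.php",  # hypothetical URL
    data=payload,
    headers={"Content-Type": "application/json"},
    method="PUT",  # supported directly since Python 3.3
)
try:
    response = urlopen(request, timeout=10)
except OSError as e:  # URLError, HTTPError and TimeoutError are all OSErrors
    print(f"request failed: {e}")
```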
@@ -3,6 +3,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import collections
+import warnings

 import archspec.cpu

@@ -51,10 +52,10 @@ def setup_parser(subparser):
         "-t", "--target", action="store_true", default=False, help="print only the target"
     )
     parts2.add_argument(
-        "-f", "--frontend", action="store_true", default=False, help="print frontend"
+        "-f", "--frontend", action="store_true", default=False, help="print frontend (DEPRECATED)"
     )
     parts2.add_argument(
-        "-b", "--backend", action="store_true", default=False, help="print backend"
+        "-b", "--backend", action="store_true", default=False, help="print backend (DEPRECATED)"
     )

@@ -98,15 +99,14 @@ def arch(parser, args):
         display_targets(archspec.cpu.TARGETS)
         return

-    os_args, target_args = "default_os", "default_target"
     if args.frontend:
-        os_args, target_args = "frontend", "frontend"
+        warnings.warn("the argument --frontend is deprecated, and will be removed in Spack v1.0")
     elif args.backend:
-        os_args, target_args = "backend", "backend"
+        warnings.warn("the argument --backend is deprecated, and will be removed in Spack v1.0")

     host_platform = spack.platforms.host()
-    host_os = host_platform.operating_system(os_args)
-    host_target = host_platform.target(target_args)
+    host_os = host_platform.default_operating_system()
+    host_target = host_platform.default_target()
     if args.family:
         host_target = host_target.family
     elif args.generic:
@@ -1,7 +1,7 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os.path
+import os
 import shutil
 import sys
 import tempfile

@@ -436,6 +436,7 @@ def write_metadata(subdir, metadata):
     shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory)
     shutil.copy(spack.util.path.canonicalize_path(PATCHELF_JSON), abs_directory)
     instructions += cmd.format("local-binaries", rel_directory)
+    instructions += " % spack buildcache update-index <final-path>/bootstrap_cache\n"
     print(instructions)
@@ -4,7 +4,7 @@

 import argparse
-import os.path
+import os
 import textwrap

 from llnl.util.lang import stable_partition
@@ -2,7 +2,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
-import os.path

 import llnl.util.tty
@@ -86,8 +86,8 @@ def create_db_tarball(args):

 def report(args):
     host_platform = spack.platforms.host()
-    host_os = host_platform.operating_system("frontend")
-    host_target = host_platform.target("frontend")
+    host_os = host_platform.default_operating_system()
+    host_target = host_platform.default_target()
     architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))
     print("* **Spack:**", spack.get_version())
     print("* **Python:**", platform.python_version())
@@ -114,7 +114,7 @@ def dev_build(self, args):
     source_path = os.path.abspath(source_path)

     # Forces the build to run out of the source directory.
-    spec.constrain("dev_path=%s" % source_path)
+    spec.constrain(f'dev_path="{source_path}"')
     spec = spack.concretize.concretize_one(spec)

     if spec.installed:
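Note: the quoting change matters for paths that contain spaces; a small sketch (path hypothetical):

```python
# Sketch: why dev_path is now quoted when constraining the spec.
source_path = "/home/user/my project/src"      # hypothetical path with a space
old_constraint = "dev_path=%s" % source_path   # breaks at the space
new_constraint = f'dev_path="{source_path}"'   # survives spec parsing intact
print(old_constraint)
print(new_constraint)
```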
@@ -54,10 +54,6 @@
   @m{target=target}              specific <target> processor
   @m{arch=platform-os-target}    shortcut for all three above

-cross-compiling:
-  @m{os=backend} or @m{os=be}    build for compute node (backend)
-  @m{os=frontend} or @m{os=fe}   build for login node (frontend)
-
 dependencies:
   ^dependency [constraints]      specify constraints on dependencies
   ^@K{/hash}                     build with a specific installed
@@ -5,7 +5,7 @@
 """Implementation details of the ``spack module`` command."""

 import collections
-import os.path
+import os
 import shutil
 import sys
@@ -2,7 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import os.path
+import os
 import shutil

 import llnl.util.tty as tty
@@ -5,7 +5,7 @@
 import argparse
 import collections
 import io
-import os.path
+import os
 import re
 import sys
@@ -801,17 +801,17 @@ def _extract_compiler_paths(spec: "spack.spec.Spec") -> Optional[Dict[str, str]]
 def _extract_os_and_target(spec: "spack.spec.Spec"):
     if not spec.architecture:
         host_platform = spack.platforms.host()
-        operating_system = host_platform.operating_system("default_os")
-        target = host_platform.target("default_target")
+        operating_system = host_platform.default_operating_system()
+        target = host_platform.default_target()
     else:
         target = spec.architecture.target
         if not target:
-            target = spack.platforms.host().target("default_target")
+            target = spack.platforms.host().default_target()

         operating_system = spec.os
         if not operating_system:
             host_platform = spack.platforms.host()
-            operating_system = host_platform.operating_system("default_os")
+            operating_system = host_platform.default_operating_system()
     return operating_system, target
@@ -36,6 +36,8 @@
 import sys
 from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union

+import jsonschema
+
 from llnl.util import filesystem, lang, tty

 import spack.error

@@ -51,6 +53,7 @@
 import spack.schema.definitions
 import spack.schema.develop
 import spack.schema.env
+import spack.schema.env_vars
 import spack.schema.mirrors
 import spack.schema.modules
 import spack.schema.packages

@@ -68,6 +71,7 @@
     "compilers": spack.schema.compilers.schema,
     "concretizer": spack.schema.concretizer.schema,
     "definitions": spack.schema.definitions.schema,
+    "env_vars": spack.schema.env_vars.schema,
     "view": spack.schema.view.schema,
     "develop": spack.schema.develop.schema,
     "mirrors": spack.schema.mirrors.schema,

@@ -1048,8 +1052,6 @@ def validate(
     This leverages the line information (start_mark, end_mark) stored
     on Spack YAML structures.
     """
-    import jsonschema
-
     try:
         spack.schema.Validator(schema).validate(data)
     except jsonschema.ValidationError as e:
@@ -6,6 +6,8 @@
 """
 import warnings

+import jsonschema
+
 import spack.environment as ev
 import spack.schema.env as env
 import spack.util.spack_yaml as syaml

@@ -30,8 +32,6 @@ def validate(configuration_file):
     Returns:
         A sanitized copy of the configuration stored in the input file
     """
-    import jsonschema
-
     with open(configuration_file, encoding="utf-8") as f:
         config = syaml.load(f)
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Manages the details on the images used in the various stages."""
 import json
-import os.path
+import os
 import shlex
 import sys
@@ -9,6 +9,8 @@
 from collections import namedtuple
 from typing import Optional

+import jsonschema
+
 import spack.environment as ev
 import spack.error
 import spack.schema.env

@@ -188,8 +190,6 @@ def paths(self):
     @tengine.context_property
     def manifest(self):
         """The spack.yaml file that should be used in the image"""
-        import jsonschema
-
         # Copy in the part of spack.yaml prescribed in the configuration file
         manifest = copy.deepcopy(self.config)
         manifest.pop("container")
@@ -123,6 +123,15 @@
     "deprecated_for",
 )

+#: File where the database is written
+INDEX_JSON_FILE = "index.json"
+
+# Verifier file to check last modification of the DB
+_INDEX_VERIFIER_FILE = "index_verifier"
+
+# Lockfile for the database
+_LOCK_FILE = "lock"
+

 @llnl.util.lang.memoized
 def _getfqdn():

@@ -260,7 +269,7 @@ class ForbiddenLockError(SpackError):

 class ForbiddenLock:
     def __getattr__(self, name):
-        raise ForbiddenLockError("Cannot access attribute '{0}' of lock".format(name))
+        raise ForbiddenLockError(f"Cannot access attribute '{name}' of lock")

     def __reduce__(self):
         return ForbiddenLock, tuple()

@@ -589,9 +598,9 @@ def __init__(
         self.layout = layout

         # Set up layout of database files within the db dir
-        self._index_path = self.database_directory / "index.json"
-        self._verifier_path = self.database_directory / "index_verifier"
-        self._lock_path = self.database_directory / "lock"
+        self._index_path = self.database_directory / INDEX_JSON_FILE
+        self._verifier_path = self.database_directory / _INDEX_VERIFIER_FILE
+        self._lock_path = self.database_directory / _LOCK_FILE

         self.is_upstream = is_upstream
         self.last_seen_verifier = ""

@@ -606,7 +615,7 @@ def __init__(

         # initialize rest of state.
         self.db_lock_timeout = lock_cfg.database_timeout
-        tty.debug("DATABASE LOCK TIMEOUT: {0}s".format(str(self.db_lock_timeout)))
+        tty.debug(f"DATABASE LOCK TIMEOUT: {str(self.db_lock_timeout)}s")

         self.lock: Union[ForbiddenLock, lk.Lock]
         if self.is_upstream:

@@ -1090,7 +1099,7 @@ def _read(self):
             self._state_is_inconsistent = False
             return
         elif self.is_upstream:
-            tty.warn("upstream not found: {0}".format(self._index_path))
+            tty.warn(f"upstream not found: {self._index_path}")

     def _add(
         self,
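Note: the three database file names are now defined once at module level; a sketch of the resulting layout (directory hypothetical):

```python
# Sketch: module-level constants replace string literals repeated in __init__.
import pathlib

INDEX_JSON_FILE = "index.json"
_INDEX_VERIFIER_FILE = "index_verifier"
_LOCK_FILE = "lock"

db_dir = pathlib.Path("/opt/spack/.spack-db")  # hypothetical database directory
paths = [db_dir / INDEX_JSON_FILE, db_dir / _INDEX_VERIFIER_FILE, db_dir / _LOCK_FILE]
print([str(p) for p in paths])
```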
@@ -15,7 +15,6 @@
 import glob
 import itertools
 import os
-import os.path
 import pathlib
 import re
 import sys
@@ -7,7 +7,6 @@
 import collections
 import concurrent.futures
 import os
-import os.path
 import re
 import sys
 import traceback
@@ -32,7 +32,7 @@ class OpenMpi(Package):
 """
 import collections
 import collections.abc
-import os.path
+import os
 import re
 from typing import Any, Callable, List, Optional, Tuple, Type, Union
@@ -10,6 +10,7 @@

 import spack.environment as ev
 import spack.repo
+import spack.schema.environment
 import spack.store
 from spack.util.environment import EnvironmentModifications

@@ -156,6 +157,11 @@ def activate(
     # MANPATH, PYTHONPATH, etc. All variables that end in PATH (case-sensitive)
     # become PATH variables.
     #
+
+    env_vars_yaml = env.manifest.configuration.get("env_vars", None)
+    if env_vars_yaml:
+        env_mods.extend(spack.schema.environment.parse(env_vars_yaml))
+
     try:
         if view and env.has_view(view):
             with spack.store.STORE.db.read_transaction():

@@ -189,6 +195,10 @@ def deactivate() -> EnvironmentModifications:
     if active is None:
         return env_mods

+    env_vars_yaml = active.manifest.configuration.get("env_vars", None)
+    if env_vars_yaml:
+        env_mods.extend(spack.schema.environment.parse(env_vars_yaml).reversed())
+
     active_view = os.getenv(ev.spack_env_view_var)

     if active_view and active.has_view(active_view):
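Note: the new `env_vars` handling parses a config section into an `EnvironmentModifications` object that is applied on activation and reversed on deactivation. A minimal sketch, assuming the same data shape `spack.schema.environment.parse` accepts for modules configuration (values hypothetical):

```python
# Sketch (assumed data shape): parse env_vars into environment modifications.
import spack.schema.environment

env_vars_yaml = {
    "set": {"MY_PROJECT_ROOT": "/opt/project"},      # hypothetical values
    "prepend_path": {"PATH": "/opt/project/bin"},
}
mods = spack.schema.environment.parse(env_vars_yaml)  # EnvironmentModifications
undo = mods.reversed()                                # applied on deactivate
```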
@@ -25,7 +25,6 @@
 import functools
 import http.client
 import os
-import os.path
 import re
 import shutil
 import urllib.error

@@ -321,9 +320,15 @@ def _fetch_urllib(self, url):

         request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT})

+        if os.path.lexists(save_file):
+            os.remove(save_file)
+
         try:
             response = web_util.urlopen(request)
-        except (TimeoutError, urllib.error.URLError) as e:
+            tty.msg(f"Fetching {url}")
+            with open(save_file, "wb") as f:
+                shutil.copyfileobj(response, f)
+        except OSError as e:
             # clean up archive on failure.
             if self.archive_file:
                 os.remove(self.archive_file)

@@ -331,14 +336,6 @@ def _fetch_urllib(self, url):
                 os.remove(save_file)
             raise FailedDownloadError(e) from e

-        tty.msg(f"Fetching {url}")
-
-        if os.path.lexists(save_file):
-            os.remove(save_file)
-
-        with open(save_file, "wb") as f:
-            shutil.copyfileobj(response, f)
-
         # Save the redirected URL for error messages. Sometimes we're redirected to an arbitrary
         # mirror that is broken, leading to spurious download failures. In that case it's helpful
         # for users to know which URL was actually fetched.

@@ -535,11 +532,16 @@ def __init__(self, *, url: str, checksum: Optional[str] = None, **kwargs):
     @_needs_stage
     def fetch(self):
         file = self.stage.save_filename
-        tty.msg(f"Fetching {self.url}")
+
+        if os.path.lexists(file):
+            os.remove(file)

         try:
             response = self._urlopen(self.url)
-        except (TimeoutError, urllib.error.URLError) as e:
+            tty.msg(f"Fetching {self.url}")
+            with open(file, "wb") as f:
+                shutil.copyfileobj(response, f)
+        except OSError as e:
             # clean up archive on failure.
             if self.archive_file:
                 os.remove(self.archive_file)

@@ -547,12 +549,6 @@ def fetch(self):
                 os.remove(file)
             raise FailedDownloadError(e) from e

-        if os.path.lexists(file):
-            os.remove(file)
-
-        with open(file, "wb") as f:
-            shutil.copyfileobj(response, f)
-

 class VCSFetchStrategy(FetchStrategy):
     """Superclass for version control system fetch strategies.
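Note: both fetchers now delete any stale target first and keep the open, log, and write steps inside one `try`, so every I/O failure reaches the same cleanup path. A stdlib sketch of the shape (function and file names hypothetical):

```python
# Stdlib sketch of the restructured download flow above.
import os
import shutil
from urllib.request import urlopen


def download(url: str, save_file: str) -> None:
    if os.path.lexists(save_file):
        os.remove(save_file)
    try:
        response = urlopen(url, timeout=10)
        print(f"Fetching {url}")
        with open(save_file, "wb") as f:
            shutil.copyfileobj(response, f)
    except OSError as e:
        if os.path.lexists(save_file):
            os.remove(save_file)  # clean up partial downloads
        raise RuntimeError(f"download failed: {e}") from e
```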
@@ -89,10 +89,10 @@ def view_copy(
     if stat.S_ISLNK(src_stat.st_mode):
         spack.relocate.relocate_links(links=[dst], prefix_to_prefix=prefix_to_projection)
     elif spack.relocate.is_binary(dst):
-        spack.relocate.relocate_text_bin(binaries=[dst], prefixes=prefix_to_projection)
+        spack.relocate.relocate_text_bin(binaries=[dst], prefix_to_prefix=prefix_to_projection)
     else:
         prefix_to_projection[spack.store.STORE.layout.root] = view._root
-        spack.relocate.relocate_text(files=[dst], prefixes=prefix_to_projection)
+        spack.relocate.relocate_text(files=[dst], prefix_to_prefix=prefix_to_projection)

     # The os module on Windows does not have a chown function.
     if sys.platform != "win32":
@@ -14,7 +14,6 @@
 import io
 import operator
 import os
-import os.path
 import pstats
 import re
 import shlex

@@ -729,7 +728,7 @@ def _compatible_sys_types():
     with the current host.
     """
     host_platform = spack.platforms.host()
-    host_os = str(host_platform.operating_system("default_os"))
+    host_os = str(host_platform.default_operating_system())
     host_target = archspec.cpu.host()
     compatible_targets = [host_target] + host_target.ancestors
|
@@ -2,7 +2,6 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import os
|
||||
import os.path
|
||||
from typing import Optional
|
||||
|
||||
import llnl.url
|
||||
|
@@ -2,7 +2,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
-import os.path
 import traceback

 import llnl.util.tty as tty
@@ -31,7 +31,7 @@
 import copy
 import datetime
 import inspect
-import os.path
+import os
 import re
 import string
 from typing import List, Optional
@@ -4,7 +4,7 @@

 import collections
 import itertools
-import os.path
+import os
 from typing import Dict, List, Optional, Tuple

 import llnl.util.filesystem as fs
@@ -5,7 +5,7 @@
 """This module implements the classes necessary to generate Tcl
 non-hierarchical modules.
 """
-import os.path
+import os
 from typing import Dict, Optional, Tuple

 import spack.config
@@ -7,6 +7,7 @@
 import base64
 import json
 import re
+import socket
 import time
 import urllib.error
 import urllib.parse

@@ -410,7 +411,7 @@ def wrapper(*args, **kwargs):
         for i in range(retries):
             try:
                 return f(*args, **kwargs)
-            except (urllib.error.URLError, TimeoutError) as e:
+            except OSError as e:
                 # Retry on internal server errors, and rate limit errors
                 # Potentially this could take into account the Retry-After header
                 # if registries support it

@@ -420,9 +421,10 @@ def wrapper(*args, **kwargs):
                         and (500 <= e.code < 600 or e.code == 429)
                     )
                     or (
-                        isinstance(e, urllib.error.URLError) and isinstance(e.reason, TimeoutError)
+                        isinstance(e, urllib.error.URLError)
+                        and isinstance(e.reason, socket.timeout)
                     )
-                    or isinstance(e, TimeoutError)
+                    or isinstance(e, socket.timeout)
                 ):
                     # Exponential backoff
                     sleep(2**i)
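Note: a condensed, self-contained sketch of the retry logic above, with the broadened `except OSError` and the `socket.timeout` checks:

```python
# Condensed sketch of the retry-with-exponential-backoff wrapper above.
import socket
import urllib.error
from time import sleep


def with_retries(f, retries: int = 3):
    def wrapper(*args, **kwargs):
        for i in range(retries):
            try:
                return f(*args, **kwargs)
            except OSError as e:
                retryable = (
                    (isinstance(e, urllib.error.HTTPError) and (500 <= e.code < 600 or e.code == 429))
                    or (isinstance(e, urllib.error.URLError) and isinstance(e.reason, socket.timeout))
                    or isinstance(e, socket.timeout)
                )
                if not retryable or i == retries - 1:
                    raise
                sleep(2**i)  # exponential backoff: 1s, 2s, 4s, ...

    return wrapper
```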
@@ -3,8 +3,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import llnl.util.lang

-import spack.util.spack_yaml as syaml
-

 @llnl.util.lang.lazy_lexicographic_ordering
 class OperatingSystem:

@@ -42,4 +40,4 @@ def _cmp_iter(self):
         yield self.version

     def to_dict(self):
-        return syaml.syaml_dict([("name", self.name), ("version", self.version)])
+        return {"name": self.name, "version": self.version}
@@ -3,29 +3,52 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 # flake8: noqa: F401
-"""spack.util.package is a set of useful build tools and directives for packages.
-
-Everything in this module is automatically imported into Spack package files.
-"""
+"""spack.package defines the public API for Spack packages, by re-exporting useful symbols from
+other modules. Packages should import this module, instead of importing from spack.* directly
+to ensure forward compatibility with future versions of Spack."""
 from os import chdir, environ, getcwd, makedirs, mkdir, remove, removedirs
 from shutil import move, rmtree
-
-from spack.error import InstallError, NoHeadersError, NoLibrariesError
-
-# Emulate some shell commands for convenience
-env = environ
-cd = chdir
-pwd = getcwd
-
-# import most common types used in packages
-from typing import Dict, List, Optional
+from typing import Dict, List, Optional

 import llnl.util.filesystem
-from llnl.util.filesystem import *
+from llnl.util.filesystem import (
+    FileFilter,
+    FileList,
+    HeaderList,
+    LibraryList,
+    ancestor,
+    can_access,
+    change_sed_delimiter,
+    copy,
+    copy_tree,
+    filter_file,
+    find,
+    find_all_headers,
+    find_first,
+    find_headers,
+    find_libraries,
+    find_system_libraries,
+    force_remove,
+    force_symlink,
+    install,
+    install_tree,
+    is_exe,
+    join_path,
+    keep_modification_time,
+    library_extensions,
+    mkdirp,
+    remove_directory_contents,
+    remove_linked_tree,
+    rename,
+    set_executable,
+    set_install_permissions,
+    touch,
+    working_dir,
+)
 from llnl.util.symlink import symlink

 import spack.util.executable

 # These props will be overridden when the build env is set up.
 from spack.build_environment import MakeExecutable
 from spack.build_systems.aspell_dict import AspellDictPackage

@@ -76,7 +99,24 @@
 from spack.builder import BaseBuilder
 from spack.config import determine_number_of_jobs
 from spack.deptypes import ALL_TYPES as all_deptypes
-from spack.directives import *
+from spack.directives import (
+    build_system,
+    can_splice,
+    conditional,
+    conflicts,
+    depends_on,
+    extends,
+    license,
+    maintainers,
+    patch,
+    provides,
+    redistribute,
+    requires,
+    resource,
+    variant,
+    version,
+)
+from spack.error import InstallError, NoHeadersError, NoLibrariesError
 from spack.install_test import (
     SkipTest,
     cache_extra_test_sources,

@@ -86,26 +126,26 @@
     install_test_root,
     test_part,
 )
-from spack.installer import ExternalPackageError, InstallLockError, UpstreamPackageError
 from spack.mixins import filter_compiler_wrappers
 from spack.multimethod import default_args, when
-from spack.package_base import (
-    DependencyConflictError,
-    build_system_flags,
-    env_flags,
-    flatten_dependencies,
-    inject_flags,
-    install_dependency_symlinks,
-    on_package_attributes,
-)
-from spack.package_completions import (
-    bash_completion_path,
-    fish_completion_path,
-    zsh_completion_path,
-)
+from spack.package_base import build_system_flags, env_flags, inject_flags, on_package_attributes
+from spack.package_completions import *
 from spack.phase_callbacks import run_after, run_before
-from spack.spec import InvalidSpecDetected, Spec
-from spack.util.executable import *
+from spack.spec import Spec
+from spack.util.executable import Executable, ProcessError, which, which_string
 from spack.util.filesystem import fix_darwin_install_name
 from spack.variant import any_combination_of, auto_or_any_combination_of, disjoint_sets
 from spack.version import Version, ver

+# Emulate some shell commands for convenience
+env = environ
+cd = chdir
+pwd = getcwd
+
+# These are just here for editor support; they may be set when the build env is set up.
+configure: Executable
+make_jobs: int
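Note: after this rewrite, recipes keep importing star from `spack.package`, which is now an explicit re-export surface. A hypothetical recipe using only re-exported names:

```python
# Hypothetical recipe sketch: every name below resolves via spack.package.
from spack.package import *


class Hello(Package):
    """Toy package; names like mkdirp/install come from the explicit re-exports."""

    homepage = "https://example.com"
    url = "https://example.com/hello-1.0.tar.gz"

    version("1.0", sha256="0" * 64)  # placeholder checksum

    def install(self, spec, prefix):
        mkdirp(prefix.bin)            # re-exported from llnl.util.filesystem
        install("hello", prefix.bin)  # ditto
```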
@@ -30,7 +30,6 @@
 import llnl.util.filesystem as fsys
 import llnl.util.tty as tty
 from llnl.util.lang import classproperty, memoized
-from llnl.util.link_tree import LinkTree

 import spack.compilers
 import spack.config

@@ -1099,14 +1098,14 @@ def update_external_dependencies(self, extendee_spec=None):
         """
         pass

-    def detect_dev_src_change(self):
+    def detect_dev_src_change(self) -> bool:
         """
         Method for checking for source code changes to trigger rebuild/reinstall
         """
         dev_path_var = self.spec.variants.get("dev_path", None)
         _, record = spack.store.STORE.db.query_by_spec_hash(self.spec.dag_hash())
-        mtime = fsys.last_modification_time_recursive(dev_path_var.value)
-        return mtime > record.installation_time
+        assert dev_path_var and record, "dev_path variant and record must be present"
+        return fsys.recursive_mtime_greater_than(dev_path_var.value, record.installation_time)

     def all_urls_for_version(self, version: StandardVersion) -> List[str]:
         """Return all URLs derived from version_urls(), url, urls, and

@@ -2292,19 +2291,6 @@ def rpath_args(self):
 build_system_flags = PackageBase.build_system_flags


-def install_dependency_symlinks(pkg, spec, prefix):
-    """
-    Execute a dummy install and flatten dependencies.
-
-    This routine can be used in a ``package.py`` definition by setting
-    ``install = install_dependency_symlinks``.
-
-    This feature comes in handy for creating a common location for the
-    the installation of third-party libraries.
-    """
-    flatten_dependencies(spec, prefix)
-
-
 def use_cray_compiler_names():
     """Compiler names for builds that rely on cray compiler names."""
     os.environ["CC"] = "cc"

@@ -2313,23 +2299,6 @@ def use_cray_compiler_names():
     os.environ["F77"] = "ftn"


-def flatten_dependencies(spec, flat_dir):
-    """Make each dependency of spec present in dir via symlink."""
-    for dep in spec.traverse(root=False):
-        name = dep.name
-
-        dep_path = spack.store.STORE.layout.path_for_spec(dep)
-        dep_files = LinkTree(dep_path)
-
-        os.mkdir(flat_dir + "/" + name)
-
-        conflict = dep_files.find_conflict(flat_dir + "/" + name)
-        if conflict:
-            raise DependencyConflictError(conflict)
-
-        dep_files.merge(flat_dir + "/" + name)
-
-
 def possible_dependencies(
     *pkg_or_spec: Union[str, spack.spec.Spec, typing.Type[PackageBase]],
     transitive: bool = True,
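Note: `detect_dev_src_change` now short-circuits instead of computing the maximum mtime of the whole tree. A pure-stdlib illustration of the new helper's contract (the real `fsys.recursive_mtime_greater_than` lives in `llnl.util.filesystem`; this is only a sketch):

```python
# Illustration only: stop at the first path newer than the reference time.
import os


def recursive_mtime_greater_than(path: str, ref_time: float) -> bool:
    for root, _, files in os.walk(path):
        if os.lstat(root).st_mtime > ref_time:
            return True
        for name in files:
            if os.lstat(os.path.join(root, name)).st_mtime > ref_time:
                return True
    return False
```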
@@ -4,10 +4,9 @@

 import hashlib
 import os
-import os.path
 import pathlib
 import sys
-from typing import Any, Dict, Optional, Tuple, Type, Union
+from typing import Any, Dict, Optional, Set, Tuple, Type, Union

 import llnl.util.filesystem
 from llnl.url import allowed_archive

@@ -504,36 +503,38 @@ def patch_for_package(self, sha256: str, pkg: "spack.package_base.PackageBase")
         patch_dict["sha256"] = sha256
         return from_dict(patch_dict, repository=self.repository)

-    def update_package(self, pkg_fullname: str) -> None:
+    def update_packages(self, pkgs_fullname: Set[str]) -> None:
         """Update the patch cache.

         Args:
             pkg_fullname: package to update.
         """
         # remove this package from any patch entries that reference it.
-        empty = []
-        for sha256, package_to_patch in self.index.items():
-            remove = []
-            for fullname, patch_dict in package_to_patch.items():
-                if patch_dict["owner"] == pkg_fullname:
-                    remove.append(fullname)
+        if self.index:
+            empty = []
+            for sha256, package_to_patch in self.index.items():
+                remove = []
+                for fullname, patch_dict in package_to_patch.items():
+                    if patch_dict["owner"] in pkgs_fullname:
+                        remove.append(fullname)

-            for fullname in remove:
-                package_to_patch.pop(fullname)
+                for fullname in remove:
+                    package_to_patch.pop(fullname)

-            if not package_to_patch:
-                empty.append(sha256)
+                if not package_to_patch:
+                    empty.append(sha256)

-        # remove any entries that are now empty
-        for sha256 in empty:
-            del self.index[sha256]
+            # remove any entries that are now empty
+            for sha256 in empty:
+                del self.index[sha256]

         # update the index with per-package patch indexes
-        pkg_cls = self.repository.get_pkg_class(pkg_fullname)
-        partial_index = self._index_patches(pkg_cls, self.repository)
-        for sha256, package_to_patch in partial_index.items():
-            p2p = self.index.setdefault(sha256, {})
-            p2p.update(package_to_patch)
+        for pkg_fullname in pkgs_fullname:
+            pkg_cls = self.repository.get_pkg_class(pkg_fullname)
+            partial_index = self._index_patches(pkg_cls, self.repository)
+            for sha256, package_to_patch in partial_index.items():
+                p2p = self.index.setdefault(sha256, {})
+                p2p.update(package_to_patch)

     def update(self, other: "PatchCache") -> None:
         """Update this cache with the contents of another.
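Note: the patch cache API goes from one package per call to a batch. A runnable stand-in showing the new call shape (the class here is a fake, not `spack.patch.PatchCache`):

```python
# Stand-in sketch of the plural update_packages() call shape.
from typing import Set


class FakePatchCache:
    def update_packages(self, pkgs_fullname: Set[str]) -> None:
        for name in sorted(pkgs_fullname):
            print(f"re-indexing patches for {name}")


cache = FakePatchCache()
cache.update_packages({"builtin.zlib", "builtin.openssl"})  # one pass for the batch
```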
@@ -52,8 +52,7 @@ def use_platform(new_platform):

     import spack.config

-    msg = '"{0}" must be an instance of Platform'
-    assert isinstance(new_platform, Platform), msg.format(new_platform)
+    assert isinstance(new_platform, Platform), f'"{new_platform}" must be an instance of Platform'

     original_host_fn = host
@@ -1,42 +1,22 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import warnings
 from typing import Optional

 import archspec.cpu

 import llnl.util.lang

 import spack.error


-class NoPlatformError(spack.error.SpackError):
-    def __init__(self):
-        msg = "Could not determine a platform for this machine"
-        super().__init__(msg)
-
-
 @llnl.util.lang.lazy_lexicographic_ordering
 class Platform:
     """Platform is an abstract class extended by subclasses.

-    To add a new type of platform (such as cray_xe), create a subclass and set all the
-    class attributes such as priority, front_target, back_target, front_os, back_os.
-
-    Platform also contain a priority class attribute. A lower number signifies higher
-    priority. These numbers are arbitrarily set and can be changed though often there
-    isn't much need unless a new platform is added and the user wants that to be
-    detected first.
-
-    Targets are created inside the platform subclasses. Most architecture (like linux,
-    and darwin) will have only one target family (x86_64) but in the case of Cray
-    machines, there is both a frontend and backend processor. The user can specify
-    which targets are present on front-end and back-end architecture.
-
-    Depending on the platform, operating systems are either autodetected or are
-    set. The user can set the frontend and backend operating setting by the class
-    attributes front_os and back_os. The operating system will be responsible for
-    compiler detection.
     """

     # Subclass sets number. Controls detection order

@@ -45,82 +25,72 @@ class attributes such as priority, front_target, back_target, front_os, back_os.
     #: binary formats used on this platform; used by relocation logic
     binary_formats = ["elf"]

-    front_end: Optional[str] = None
-    back_end: Optional[str] = None
-    default: Optional[str] = None  # The default back end target.
-
-    front_os: Optional[str] = None
-    back_os: Optional[str] = None
-    default_os: Optional[str] = None
+    default: str
+    default_os: str

     reserved_targets = ["default_target", "frontend", "fe", "backend", "be"]
     reserved_oss = ["default_os", "frontend", "fe", "backend", "be"]
+    deprecated_names = ["frontend", "fe", "backend", "be"]

     def __init__(self, name):
         self.targets = {}
         self.operating_sys = {}
         self.name = name
+        self._init_targets()

     def add_target(self, name: str, target: archspec.cpu.Microarchitecture) -> None:
         """Used by the platform specific subclass to list available targets.
         Raises an error if the platform specifies a name
         that is reserved by spack as an alias.
         """
         if name in Platform.reserved_targets:
-            msg = "{0} is a spack reserved alias and cannot be the name of a target"
-            raise ValueError(msg.format(name))
+            msg = f"{name} is a spack reserved alias and cannot be the name of a target"
+            raise ValueError(msg)
         self.targets[name] = target

-    def _add_archspec_targets(self):
+    def _init_targets(self):
+        self.default = archspec.cpu.host().name
         for name, microarchitecture in archspec.cpu.TARGETS.items():
             self.add_target(name, microarchitecture)

     def target(self, name):
-        """This is a getter method for the target dictionary
-        that handles defaulting based on the values provided by default,
-        front-end, and back-end. This can be overwritten
-        by a subclass for which we want to provide further aliasing options.
-        """
         # TODO: Check if we can avoid using strings here
         name = str(name)
-        if name == "default_target":
+        if name in Platform.deprecated_names:
+            warnings.warn(f"target={name} is deprecated, use target={self.default} instead")
+
+        if name in Platform.reserved_targets:
             name = self.default
-        elif name == "frontend" or name == "fe":
-            name = self.front_end
-        elif name == "backend" or name == "be":
-            name = self.back_end

         return self.targets.get(name, None)

     def add_operating_system(self, name, os_class):
         """Add the operating_system class object into the
         platform.operating_sys dictionary.
         """
-        if name in Platform.reserved_oss:
-            msg = "{0} is a spack reserved alias and cannot be the name of an OS"
-            raise ValueError(msg.format(name))
+        if name in Platform.reserved_oss + Platform.deprecated_names:
+            msg = f"{name} is a spack reserved alias and cannot be the name of an OS"
+            raise ValueError(msg)
         self.operating_sys[name] = os_class

+    def default_target(self):
+        return self.target(self.default)
+
+    def default_operating_system(self):
+        return self.operating_system(self.default_os)
+
     def operating_system(self, name):
-        if name == "default_os":
+        if name in Platform.deprecated_names:
+            warnings.warn(f"os={name} is deprecated, use os={self.default_os} instead")
+
+        if name in Platform.reserved_oss:
             name = self.default_os
-        if name == "frontend" or name == "fe":
-            name = self.front_os
-        if name == "backend" or name == "be":
-            name = self.back_os

         return self.operating_sys.get(name, None)

     def setup_platform_environment(self, pkg, env):
-        """Subclass can override this method if it requires any
-        platform-specific build environment modifications.
+        """Platform-specific build environment modifications.
+
+        This method is meant to be overridden by subclasses, when needed.
         """
         pass

     @classmethod
     def detect(cls):
-        """Return True if the the host platform is detected to be the current
-        Platform class, False otherwise.
+        """Returns True if the host platform is detected to be the current Platform class,
+        False otherwise.

         Derived classes are responsible for implementing this method.
         """

@@ -135,11 +105,7 @@ def __str__(self):
     def _cmp_iter(self):
         yield self.name
         yield self.default
-        yield self.front_end
-        yield self.back_end
         yield self.default_os
-        yield self.front_os
-        yield self.back_os

         def targets():
             for t in sorted(self.targets.values()):
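Note: with the front-end/back-end split gone, callers use the two explicit accessors introduced above. A short usage sketch:

```python
# Sketch: explicit default accessors replace string-keyed lookups such as
# platform.target("default_target") and platform.operating_system("default_os").
import spack.platforms

host = spack.platforms.host()
host_target = host.default_target()          # archspec microarchitecture
host_os = host.default_operating_system()
print(host, host_os, host_target)
```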
@@ -1,7 +1,7 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os.path
+import os


 def slingshot_network():
@@ -4,8 +4,6 @@

 import platform as py_platform

-import archspec.cpu
-
 from spack.operating_systems.mac_os import MacOs
 from spack.version import Version

@@ -19,18 +17,8 @@ class Darwin(Platform):

     def __init__(self):
         super().__init__("darwin")
-        self._add_archspec_targets()
-
-        self.default = archspec.cpu.host().name
-        self.front_end = self.default
-        self.back_end = self.default
-
         mac_os = MacOs()
-
         self.default_os = str(mac_os)
-        self.front_os = str(mac_os)
-        self.back_os = str(mac_os)
-
         self.add_operating_system(str(mac_os), mac_os)

     @classmethod
@@ -3,8 +3,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import platform

-import archspec.cpu
-
 from spack.operating_systems.freebsd import FreeBSDOs

 from ._platform import Platform

@@ -15,18 +13,8 @@ class FreeBSD(Platform):

     def __init__(self):
         super().__init__("freebsd")
-
-        self._add_archspec_targets()
-
-        # Get specific default
-        self.default = archspec.cpu.host().name
-        self.front_end = self.default
-        self.back_end = self.default
-
         os = FreeBSDOs()
         self.default_os = str(os)
-        self.front_os = self.default_os
-        self.back_os = self.default_os
         self.add_operating_system(str(os), os)

     @classmethod
@@ -3,8 +3,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import platform

-import archspec.cpu
-
 from spack.operating_systems.linux_distro import LinuxDistro

 from ._platform import Platform

@@ -15,18 +13,8 @@ class Linux(Platform):

     def __init__(self):
         super().__init__("linux")
-
-        self._add_archspec_targets()
-
-        # Get specific default
-        self.default = archspec.cpu.host().name
-        self.front_end = self.default
-        self.back_end = self.default
-
         linux_dist = LinuxDistro()
         self.default_os = str(linux_dist)
-        self.front_os = self.default_os
-        self.back_os = self.default_os
         self.add_operating_system(str(linux_dist), linux_dist)

     @classmethod
@@ -16,31 +16,19 @@ class Test(Platform):
     if platform.system().lower() == "darwin":
         binary_formats = ["macho"]

-    if platform.machine() == "arm64":
-        front_end = "aarch64"
-        back_end = "m1"
-        default = "m1"
-    else:
-        front_end = "x86_64"
-        back_end = "core2"
-        default = "core2"
-
-    front_os = "redhat6"
-    back_os = "debian6"
     default_os = "debian6"
+    default = "m1" if platform.machine() == "arm64" else "core2"

     def __init__(self, name=None):
         name = name or "test"
         super().__init__(name)
-        self.add_target(self.default, archspec.cpu.TARGETS[self.default])
-        self.add_target(self.front_end, archspec.cpu.TARGETS[self.front_end])
+        self.add_operating_system("debian6", spack.operating_systems.OperatingSystem("debian", 6))
+        self.add_operating_system("redhat6", spack.operating_systems.OperatingSystem("redhat", 6))

-        self.add_operating_system(
-            self.default_os, spack.operating_systems.OperatingSystem("debian", 6)
-        )
-        self.add_operating_system(
-            self.front_os, spack.operating_systems.OperatingSystem("redhat", 6)
-        )
+    def _init_targets(self):
+        targets = ("aarch64", "m1") if platform.machine() == "arm64" else ("x86_64", "core2")
+        for t in targets:
+            self.add_target(t, archspec.cpu.TARGETS[t])

     @classmethod
     def detect(cls):
@@ -4,8 +4,6 @@

 import platform

-import archspec.cpu
-
 from spack.operating_systems.windows_os import WindowsOs

 from ._platform import Platform

@@ -16,18 +14,8 @@ class Windows(Platform):

     def __init__(self):
         super().__init__("windows")
-        self._add_archspec_targets()
-
-        self.default = archspec.cpu.host().name
-        self.front_end = self.default
-        self.back_end = self.default
-
         windows_os = WindowsOs()
-
         self.default_os = str(windows_os)
-        self.front_os = str(windows_os)
-        self.back_os = str(windows_os)
-
         self.add_operating_system(str(windows_os), windows_os)

     @classmethod
@@ -2,7 +2,7 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""Classes and functions to manage providers of virtual dependencies"""
|
||||
from typing import Dict, List, Optional, Set
from typing import Dict, Iterable, List, Optional, Set, Union

import spack.error
import spack.spec
@@ -26,7 +26,7 @@ class _IndexBase:
#: Calling providers_for(spec) will find specs that provide a
#: matching implementation of MPI. Derived classes need to construct
#: this attribute according to the semantics above.
providers: Dict[str, Dict[str, Set[str]]]
providers: Dict[str, Dict["spack.spec.Spec", Set["spack.spec.Spec"]]]

def providers_for(self, virtual_spec):
"""Return a list of specs of all packages that provide virtual
@@ -99,66 +99,56 @@ def __init__(
self.repository = repository
self.restrict = restrict
self.providers = {}
if specs:
self.update_packages(specs)

specs = specs or []
for spec in specs:
if not isinstance(spec, spack.spec.Spec):
spec = spack.spec.Spec(spec)

if self.repository.is_virtual_safe(spec.name):
continue

self.update(spec)

def update(self, spec):
def update_packages(self, specs: Iterable[Union[str, "spack.spec.Spec"]]):
"""Update the provider index with additional virtual specs.

Args:
specs: specs potentially providing additional virtual specs
"""
if not isinstance(spec, spack.spec.Spec):
spec = spack.spec.Spec(spec)
for spec in specs:
if not isinstance(spec, spack.spec.Spec):
spec = spack.spec.Spec(spec)

if not spec.name:
# Empty specs do not have a package
return
if not spec.name or self.repository.is_virtual_safe(spec.name):
# Only non-virtual packages with name can provide virtual specs.
continue

msg = "cannot update an index passing the virtual spec '{}'".format(spec.name)
assert not self.repository.is_virtual_safe(spec.name), msg
pkg_provided = self.repository.get_pkg_class(spec.name).provided
for provider_spec_readonly, provided_specs in pkg_provided.items():
for provided_spec in provided_specs:
# TODO: fix this comment.
# We want satisfaction other than flags
provider_spec = provider_spec_readonly.copy()
provider_spec.compiler_flags = spec.compiler_flags.copy()

pkg_provided = self.repository.get_pkg_class(spec.name).provided
for provider_spec_readonly, provided_specs in pkg_provided.items():
for provided_spec in provided_specs:
# TODO: fix this comment.
# We want satisfaction other than flags
provider_spec = provider_spec_readonly.copy()
provider_spec.compiler_flags = spec.compiler_flags.copy()
if spec.intersects(provider_spec, deps=False):
provided_name = provided_spec.name

if spec.intersects(provider_spec, deps=False):
provided_name = provided_spec.name
provider_map = self.providers.setdefault(provided_name, {})
if provided_spec not in provider_map:
provider_map[provided_spec] = set()

provider_map = self.providers.setdefault(provided_name, {})
if provided_spec not in provider_map:
provider_map[provided_spec] = set()
if self.restrict:
provider_set = provider_map[provided_spec]

if self.restrict:
provider_set = provider_map[provided_spec]
# If this package existed in the index before,
# need to take the old versions out, as they're
# now more constrained.
old = {s for s in provider_set if s.name == spec.name}
provider_set.difference_update(old)

# If this package existed in the index before,
# need to take the old versions out, as they're
# now more constrained.
old = set([s for s in provider_set if s.name == spec.name])
provider_set.difference_update(old)
# Now add the new version.
provider_set.add(spec)

# Now add the new version.
provider_set.add(spec)

else:
# Before putting the spec in the map, constrain
# it so that it provides what was asked for.
constrained = spec.copy()
constrained.constrain(provider_spec)
provider_map[provided_spec].add(constrained)
else:
# Before putting the spec in the map, constrain
# it so that it provides what was asked for.
constrained = spec.copy()
constrained.constrain(provider_spec)
provider_map[provided_spec].add(constrained)

def to_json(self, stream=None):
"""Dump a JSON representation of this object.
@@ -193,14 +183,13 @@ def merge(self, other):

spdict[provided_spec] = spdict[provided_spec].union(opdict[provided_spec])

def remove_provider(self, pkg_name):
def remove_providers(self, pkgs_fullname: Set[str]):
"""Remove a provider from the ProviderIndex."""
empty_pkg_dict = []
for pkg, pkg_dict in self.providers.items():
empty_pset = []
for provided, pset in pkg_dict.items():
same_name = set(p for p in pset if p.fullname == pkg_name)
pset.difference_update(same_name)
pset.difference_update(pkgs_fullname)

if not pset:
empty_pset.append(provided)
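For orientation, a minimal usage sketch of the batched `ProviderIndex` API introduced above. Nothing below is part of the diff: the spec strings, the `builtin.openmpi` fullname, and the `spack.repo.PATH` repository handle are illustrative assumptions.

```python
import spack.provider_index
import spack.repo

# Build an index over a couple of MPI providers in one pass (specs invented).
index = spack.provider_index.ProviderIndex(repository=spack.repo.PATH)
index.update_packages(["openmpi@4.1", "mpich@4.2"])

# providers_for("mpi") should now report both packages.
print(index.providers_for("mpi"))

# Removal is likewise batched, keyed by package *fullname*:
index.remove_providers({"builtin.openmpi"})
```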
@@ -6,8 +6,7 @@
import os
import re
import sys
from collections import OrderedDict
from typing import List, Optional
from typing import Dict, Iterable, List, Optional

import macholib.mach_o
import macholib.MachO
@@ -18,28 +17,11 @@
from llnl.util.lang import memoized
from llnl.util.symlink import readlink, symlink

import spack.error
import spack.store
import spack.util.elf as elf
import spack.util.executable as executable

from .relocate_text import BinaryFilePrefixReplacer, TextFilePrefixReplacer


class InstallRootStringError(spack.error.SpackError):
def __init__(self, file_path, root_path):
"""Signal that the relocated binary still has the original
Spack's store root string

Args:
file_path (str): path of the binary
root_path (str): original Spack's store root string
"""
super().__init__(
"\n %s \ncontains string\n %s \n"
"after replacing it in rpaths.\n"
"Package should not be relocated.\n Use -a to override." % (file_path, root_path)
)
from .relocate_text import BinaryFilePrefixReplacer, PrefixToPrefix, TextFilePrefixReplacer


@memoized
@@ -58,7 +40,7 @@ def _decode_macho_data(bytestring):
return bytestring.rstrip(b"\x00").decode("ascii")


def macho_find_paths(orig_rpaths, deps, idpath, prefix_to_prefix):
def _macho_find_paths(orig_rpaths, deps, idpath, prefix_to_prefix):
"""
Inputs
original rpaths from mach-o binaries
@@ -103,7 +85,7 @@ def macho_find_paths(orig_rpaths, deps, idpath, prefix_to_prefix):
return paths_to_paths


def modify_macho_object(cur_path, rpaths, deps, idpath, paths_to_paths):
def _modify_macho_object(cur_path, rpaths, deps, idpath, paths_to_paths):
"""
This function is used to make machO buildcaches on macOS by
replacing old paths with new paths using install_name_tool
@@ -146,7 +128,7 @@ def modify_macho_object(cur_path, rpaths, deps, idpath, paths_to_paths):
install_name_tool(*args, temp_path)


def macholib_get_paths(cur_path):
def _macholib_get_paths(cur_path):
"""Get rpaths, dependent libraries, and library id of mach-o objects."""
headers = []
try:
@@ -228,25 +210,25 @@ def relocate_macho_binaries(path_names, prefix_to_prefix):
if path_name.endswith(".o"):
continue
# get the paths in the old prefix
rpaths, deps, idpath = macholib_get_paths(path_name)
rpaths, deps, idpath = _macholib_get_paths(path_name)
# get the mapping of paths in the old prefix to the new prefix
paths_to_paths = macho_find_paths(rpaths, deps, idpath, prefix_to_prefix)
paths_to_paths = _macho_find_paths(rpaths, deps, idpath, prefix_to_prefix)
# replace the old paths with new paths
modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
_modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)


def relocate_elf_binaries(binaries, prefix_to_prefix):
"""Take a list of binaries, and an ordered dictionary of
prefix to prefix mapping, and update the rpaths accordingly."""
def relocate_elf_binaries(binaries: Iterable[str], prefix_to_prefix: Dict[str, str]) -> None:
"""Take a list of binaries, and an ordered prefix to prefix mapping, and update the rpaths
accordingly."""

# Transform to binary string
prefix_to_prefix = OrderedDict(
(k.encode("utf-8"), v.encode("utf-8")) for (k, v) in prefix_to_prefix.items()
)
prefix_to_prefix_bin = {
k.encode("utf-8"): v.encode("utf-8") for k, v in prefix_to_prefix.items()
}

for path in binaries:
try:
elf.substitute_rpath_and_pt_interp_in_place_or_raise(path, prefix_to_prefix)
elf.substitute_rpath_and_pt_interp_in_place_or_raise(path, prefix_to_prefix_bin)
except elf.ElfCStringUpdatesFailed as e:
# Fall back to `patchelf --set-rpath ... --set-interpreter ...`
rpaths = e.rpath.new_value.decode("utf-8").split(":") if e.rpath else []
@@ -254,13 +236,13 @@ def relocate_elf_binaries(binaries, prefix_to_prefix):
_set_elf_rpaths_and_interpreter(path, rpaths=rpaths, interpreter=interpreter)


def warn_if_link_cant_be_relocated(link, target):
def _warn_if_link_cant_be_relocated(link: str, target: str):
if not os.path.isabs(target):
return
tty.warn('Symbolic link at "{}" to "{}" cannot be relocated'.format(link, target))
tty.warn(f'Symbolic link at "{link}" to "{target}" cannot be relocated')


def relocate_links(links, prefix_to_prefix):
def relocate_links(links: Iterable[str], prefix_to_prefix: Dict[str, str]) -> None:
"""Relocate links to a new install prefix."""
regex = re.compile("|".join(re.escape(p) for p in prefix_to_prefix.keys()))
for link in links:
@@ -269,7 +251,7 @@ def relocate_links(links, prefix_to_prefix):

# No match.
if match is None:
warn_if_link_cant_be_relocated(link, old_target)
_warn_if_link_cant_be_relocated(link, old_target)
continue

new_target = prefix_to_prefix[match.group()] + old_target[match.end() :]
@@ -277,32 +259,32 @@ def relocate_links(links, prefix_to_prefix):
symlink(new_target, link)


def relocate_text(files, prefixes):
def relocate_text(files: Iterable[str], prefix_to_prefix: PrefixToPrefix) -> None:
"""Relocate text files from the original installation prefix to the
new prefix.

Relocation also affects the path in Spack's sbang script.

Args:
files (list): Text files to be relocated
prefixes (OrderedDict): String prefixes which need to be changed
files: Text files to be relocated
prefix_to_prefix: ordered prefix to prefix mapping
"""
TextFilePrefixReplacer.from_strings_or_bytes(prefixes).apply(files)
TextFilePrefixReplacer.from_strings_or_bytes(prefix_to_prefix).apply(files)


def relocate_text_bin(binaries, prefixes):
def relocate_text_bin(binaries: Iterable[str], prefix_to_prefix: PrefixToPrefix) -> List[str]:
"""Replace null terminated path strings hard-coded into binaries.

The new install prefix must be shorter than the original one.

Args:
binaries (list): binaries to be relocated
prefixes (OrderedDict): String prefixes which need to be changed.
binaries: paths to binaries to be relocated
prefix_to_prefix: ordered prefix to prefix mapping

Raises:
spack.relocate_text.BinaryTextReplaceError: when the new path is longer than the old path
"""
return BinaryFilePrefixReplacer.from_strings_or_bytes(prefixes).apply(binaries)
return BinaryFilePrefixReplacer.from_strings_or_bytes(prefix_to_prefix).apply(binaries)


def is_macho_magic(magic: bytes) -> bool:
@@ -339,7 +321,7 @@ def _exists_dir(dirname):
return os.path.isdir(dirname)


def is_macho_binary(path):
def is_macho_binary(path: str) -> bool:
try:
with open(path, "rb") as f:
return is_macho_magic(f.read(4))
@@ -363,7 +345,7 @@ def fixup_macos_rpath(root, filename):
return False

# Get Mach-O header commands
(rpath_list, deps, id_dylib) = macholib_get_paths(abspath)
(rpath_list, deps, id_dylib) = _macholib_get_paths(abspath)

# Convert rpaths list to (name -> number of occurrences)
add_rpaths = set()
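A small, hypothetical call sketch for the retyped relocation entry points above (all paths invented). Note the keyword is now `prefix_to_prefix`, and a plain insertion-ordered `dict` suffices where an `OrderedDict` was previously expected.

```python
import spack.relocate as relocate

# Longer prefixes should come first: the mapping is applied in order, with
# later entries acting as substring fallbacks.
prefix_to_prefix = {
    "/old/store/zlib-1.3": "/new/store/zlib-1.3",
    "/old/store": "/new/store",
}

relocate.relocate_text(
    files=["/new/store/zlib-1.3/bin/script"], prefix_to_prefix=prefix_to_prefix
)
relocate.relocate_links(["/new/store/zlib-1.3/lib/libz.so"], prefix_to_prefix)
relocate.relocate_text_bin(["/new/store/zlib-1.3/lib/libz.so.1"], prefix_to_prefix)
```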
@@ -6,64 +6,61 @@
paths inside text files and binaries."""

import re
from collections import OrderedDict
from typing import Dict, Union
from typing import IO, Dict, Iterable, List, Union

from llnl.util.lang import PatternBytes

import spack.error

Prefix = Union[str, bytes]
PrefixToPrefix = Union[Dict[str, str], Dict[bytes, bytes]]


def encode_path(p: Prefix) -> bytes:
return p if isinstance(p, bytes) else p.encode("utf-8")


def _prefix_to_prefix_as_bytes(prefix_to_prefix) -> Dict[bytes, bytes]:
return OrderedDict((encode_path(k), encode_path(v)) for (k, v) in prefix_to_prefix.items())
def _prefix_to_prefix_as_bytes(prefix_to_prefix: PrefixToPrefix) -> Dict[bytes, bytes]:
return {encode_path(k): encode_path(v) for (k, v) in prefix_to_prefix.items()}


def utf8_path_to_binary_regex(prefix: str):
def utf8_path_to_binary_regex(prefix: str) -> PatternBytes:
"""Create a binary regex that matches the input path in utf8"""
prefix_bytes = re.escape(prefix).encode("utf-8")
return re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)%s([\\w\\-_/]*)" % prefix_bytes)


def _byte_strings_to_single_binary_regex(prefixes):
def _byte_strings_to_single_binary_regex(prefixes: Iterable[bytes]) -> PatternBytes:
all_prefixes = b"|".join(re.escape(p) for p in prefixes)
return re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)(%s)([\\w\\-_/]*)" % all_prefixes)


def utf8_paths_to_single_binary_regex(prefixes):
def utf8_paths_to_single_binary_regex(prefixes: Iterable[str]) -> PatternBytes:
"""Create a (binary) regex that matches any input path in utf8"""
return _byte_strings_to_single_binary_regex(p.encode("utf-8") for p in prefixes)


def filter_identity_mappings(prefix_to_prefix):
def filter_identity_mappings(prefix_to_prefix: Dict[bytes, bytes]) -> Dict[bytes, bytes]:
"""Drop mappings that are not changed."""
# NOTE: we don't guard against the following case:
# [/abc/def -> /abc/def, /abc -> /x] *will* be simplified to
# [/abc -> /x], meaning that after this simplification /abc/def will be
# mapped to /x/def instead of /abc/def. This should not be a problem.
return OrderedDict((k, v) for (k, v) in prefix_to_prefix.items() if k != v)
return {k: v for k, v in prefix_to_prefix.items() if k != v}


class PrefixReplacer:
"""Base class for applying a prefix to prefix map
to a list of binaries or text files.
Child classes implement _apply_to_file to do the
actual work, which is different when it comes to
"""Base class for applying a prefix to prefix map to a list of binaries or text files. Derived
classes implement _apply_to_file to do the actual work, which is different when it comes to
binaries and text files."""

def __init__(self, prefix_to_prefix: Dict[bytes, bytes]):
def __init__(self, prefix_to_prefix: Dict[bytes, bytes]) -> None:
"""
Arguments:

prefix_to_prefix (OrderedDict):

An ordered mapping from prefix to prefix. The order is
relevant to support substring fallbacks, for example
[("/first/sub", "/x"), ("/first", "/y")] will ensure
/first/sub is matched and replaced before /first.
prefix_to_prefix: An ordered mapping from prefix to prefix. The order is relevant to
support substring fallbacks, for example
``[("/first/sub", "/x"), ("/first", "/y")]`` will ensure /first/sub is matched and
replaced before /first.
"""
self.prefix_to_prefix = filter_identity_mappings(prefix_to_prefix)

@@ -74,7 +71,7 @@ def is_noop(self) -> bool:
or there are no prefixes to replace."""
return not self.prefix_to_prefix

def apply(self, filenames: list):
def apply(self, filenames: Iterable[str]) -> List[str]:
"""Returns a list of files that were modified"""
changed_files = []
if self.is_noop:
@@ -84,17 +81,20 @@ def apply(self, filenames: list):
changed_files.append(filename)
return changed_files

def apply_to_filename(self, filename):
def apply_to_filename(self, filename: str) -> bool:
if self.is_noop:
return False
with open(filename, "rb+") as f:
return self.apply_to_file(f)

def apply_to_file(self, f):
def apply_to_file(self, f: IO[bytes]) -> bool:
if self.is_noop:
return False
return self._apply_to_file(f)

def _apply_to_file(self, f: IO) -> bool:
raise NotImplementedError("Derived classes must implement this method")


class TextFilePrefixReplacer(PrefixReplacer):
"""This class applies prefix to prefix mappings for relocation
@@ -112,13 +112,11 @@ def __init__(self, prefix_to_prefix: Dict[bytes, bytes]):
self.regex = _byte_strings_to_single_binary_regex(self.prefix_to_prefix.keys())

@classmethod
def from_strings_or_bytes(
cls, prefix_to_prefix: Dict[Prefix, Prefix]
) -> "TextFilePrefixReplacer":
def from_strings_or_bytes(cls, prefix_to_prefix: PrefixToPrefix) -> "TextFilePrefixReplacer":
"""Create a TextFilePrefixReplacer from an ordered prefix to prefix map."""
return cls(_prefix_to_prefix_as_bytes(prefix_to_prefix))

def _apply_to_file(self, f):
def _apply_to_file(self, f: IO) -> bool:
"""Text replacement implementation simply reads the entire file
in memory and applies the combined regex."""
replacement = lambda m: m.group(1) + self.prefix_to_prefix[m.group(2)] + m.group(3)
@@ -133,12 +131,12 @@ def _apply_to_file(self, f):


class BinaryFilePrefixReplacer(PrefixReplacer):
def __init__(self, prefix_to_prefix, suffix_safety_size=7):
def __init__(self, prefix_to_prefix: Dict[bytes, bytes], suffix_safety_size: int = 7) -> None:
"""
prefix_to_prefix (OrderedDict): OrderedDictionary where the keys are
bytes representing the old prefixes and the values are the new
suffix_safety_size (int): in case of null terminated strings, what size
of the suffix should remain to avoid aliasing issues?
prefix_to_prefix: Ordered dictionary where the keys are bytes representing the old prefixes
and the values are the new prefixes
suffix_safety_size: in case of null terminated strings, what size of the suffix should
remain to avoid aliasing issues?
"""
assert suffix_safety_size >= 0
super().__init__(prefix_to_prefix)
@@ -146,17 +144,18 @@ def __init__(self, prefix_to_prefix, suffix_safety_size=7):
self.regex = self.binary_text_regex(self.prefix_to_prefix.keys(), suffix_safety_size)

@classmethod
def binary_text_regex(cls, binary_prefixes, suffix_safety_size=7):
"""
Create a regex that looks for exact matches of prefixes, and also tries to
match a C-string type null terminator in a small lookahead window.
def binary_text_regex(
cls, binary_prefixes: Iterable[bytes], suffix_safety_size: int = 7
) -> PatternBytes:
"""Create a regex that looks for exact matches of prefixes, and also tries to match a
C-string type null terminator in a small lookahead window.

Arguments:
binary_prefixes (list): List of byte strings of prefixes to match
suffix_safety_size (int): Size of the lookahead for null-terminated string.

Returns: compiled regex
binary_prefixes: Iterable of byte strings of prefixes to match
suffix_safety_size: Size of the lookahead for null-terminated string.
"""
# Note: it's important not to use capture groups for the prefix, since it destroys
# performance due to common prefix optimization.
return re.compile(
b"("
+ b"|".join(re.escape(p) for p in binary_prefixes)
@@ -165,36 +164,34 @@ def binary_text_regex(cls, binary_prefixes, suffix_safety_size=7):

@classmethod
def from_strings_or_bytes(
cls, prefix_to_prefix: Dict[Prefix, Prefix], suffix_safety_size: int = 7
cls, prefix_to_prefix: PrefixToPrefix, suffix_safety_size: int = 7
) -> "BinaryFilePrefixReplacer":
"""Create a BinaryFilePrefixReplacer from an ordered prefix to prefix map.

Arguments:
prefix_to_prefix (OrderedDict): Ordered mapping of prefix to prefix.
suffix_safety_size (int): Number of bytes to retain at the end of a C-string
to avoid binary string-aliasing issues.
prefix_to_prefix: Ordered mapping of prefix to prefix.
suffix_safety_size: Number of bytes to retain at the end of a C-string to avoid binary
string-aliasing issues.
"""
return cls(_prefix_to_prefix_as_bytes(prefix_to_prefix), suffix_safety_size)

def _apply_to_file(self, f):
def _apply_to_file(self, f: IO[bytes]) -> bool:
"""
Given a file opened in rb+ mode, apply the string replacements as
specified by an ordered dictionary of prefix to prefix mappings. This
method takes special care of null-terminated C-strings. C-string constants
are problematic because compilers and linkers optimize readonly strings for
space by aliasing those that share a common suffix (only suffix since all
of them are null terminated). See https://github.com/spack/spack/pull/31739
and https://github.com/spack/spack/pull/32253 for details. Our logic matches
the original prefix with a ``suffix_safety_size + 1`` lookahead for null bytes.
If no null terminator is found, we simply pad with leading /, assuming that
it's a long C-string; the full C-string after replacement has a large suffix
in common with its original value.
If there *is* a null terminator we can do the same as long as the replacement
has a sufficiently long common suffix with the original prefix.
As a last resort when the replacement does not have a long enough common suffix,
we can try to shorten the string, but this only works if the new length is
sufficiently short (typically the case when going from large padding -> normal path)
If the replacement string is longer, or all of the above fails, we error out.
Given a file opened in rb+ mode, apply the string replacements as specified by an ordered
dictionary of prefix to prefix mappings. This method takes special care of null-terminated
C-strings. C-string constants are problematic because compilers and linkers optimize
readonly strings for space by aliasing those that share a common suffix (only suffix since
all of them are null terminated). See https://github.com/spack/spack/pull/31739 and
https://github.com/spack/spack/pull/32253 for details. Our logic matches the original
prefix with a ``suffix_safety_size + 1`` lookahead for null bytes. If no null terminator
is found, we simply pad with leading /, assuming that it's a long C-string; the full
C-string after replacement has a large suffix in common with its original value. If there
*is* a null terminator we can do the same as long as the replacement has a sufficiently
long common suffix with the original prefix. As a last resort when the replacement does
not have a long enough common suffix, we can try to shorten the string, but this only
works if the new length is sufficiently short (typically the case when going from large
padding -> normal path). If the replacement string is longer, or all of the above fails,
we error out.

Arguments:
f: file opened in rb+ mode
@@ -204,9 +201,8 @@ def _apply_to_file(self, f):
"""
assert f.tell() == 0

# We *could* read binary data in chunks to avoid loading all in memory,
# but it's nasty to deal with matches across boundaries, so let's stick to
# something simple.
# We *could* read binary data in chunks to avoid loading all in memory, but it's nasty to
# deal with matches across boundaries, so let's stick to something simple.

modified = False

@@ -218,8 +214,7 @@ def _apply_to_file(self, f):
# Did we find a trailing null within a N + 1 bytes window after the prefix?
null_terminated = match.end(0) > match.end(1)

# Suffix string length, excluding the null byte
# Only makes sense if null_terminated
# Suffix string length, excluding the null byte. Only makes sense if null_terminated
suffix_strlen = match.end(0) - match.end(1) - 1

# How many bytes are we shrinking our string?
@@ -229,9 +224,9 @@ def _apply_to_file(self, f):
if bytes_shorter < 0:
raise CannotGrowString(old, new)

# If we don't know whether this is a null terminated C-string (we're looking
# only N + 1 bytes ahead), or if it is and we have a common suffix, we can
# simply pad with leading dir separators.
# If we don't know whether this is a null terminated C-string (we're looking only N + 1
# bytes ahead), or if it is and we have a common suffix, we can simply pad with leading
# dir separators.
elif (
not null_terminated
or suffix_strlen >= self.suffix_safety_size # == is enough, but let's be defensive
@@ -240,9 +235,9 @@ def _apply_to_file(self, f):
):
replacement = b"/" * bytes_shorter + new

# If it *was* null terminated, all that matters is that we can leave N bytes
# of old suffix in place. Note that > is required since we also insert an
# additional null terminator.
# If it *was* null terminated, all that matters is that we can leave N bytes of old
# suffix in place. Note that > is required since we also insert an additional null
# terminator.
elif bytes_shorter > self.suffix_safety_size:
replacement = new + match.group(2) # includes the trailing null

@@ -257,22 +252,6 @@ def _apply_to_file(self, f):
return modified
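A worked byte-level illustration of the padding rule described in the docstring above, mirroring the `b"/" * bytes_shorter + new` branch. The byte strings are invented for the example.

```python
old = b"/spack/padding/pkg"  # 18 bytes, matched prefix found in the binary
new = b"/opt/pkg"            # 8 bytes, replacement prefix
bytes_shorter = len(old) - len(new)

# Pad with leading dir separators so the overall string length is unchanged
# and the shared C-string suffix stays byte-for-byte intact.
replacement = b"/" * bytes_shorter + new
assert len(replacement) == len(old) and replacement.endswith(new)
```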
class BinaryStringReplacementError(spack.error.SpackError):
def __init__(self, file_path, old_len, new_len):
"""The size of the file changed after binary path substitution

Args:
file_path (str): file with changing size
old_len (str): original length of the file
new_len (str): length of the file after substitution
"""
super().__init__(
"Doing a binary string replacement in %s failed.\n"
"The size of the file changed from %s to %s\n"
"when it should have remained the same." % (file_path, old_len, new_len)
)


class BinaryTextReplaceError(spack.error.SpackError):
def __init__(self, msg):
msg += (
@@ -284,17 +263,16 @@ def __init__(self, msg):

class CannotGrowString(BinaryTextReplaceError):
def __init__(self, old, new):
msg = "Cannot replace {!r} with {!r} because the new prefix is longer.".format(old, new)
super().__init__(msg)
return super().__init__(
f"Cannot replace {old!r} with {new!r} because the new prefix is longer."
)


class CannotShrinkCString(BinaryTextReplaceError):
def __init__(self, old, new, full_old_string):
# Just interpolate binary string to not risk issues with invalid
# unicode, which would be really bad user experience: error in error.
# We have no clue if we actually deal with a real C-string nor what
# encoding it has.
msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
old, new, full_old_string
# Just interpolate binary string to not risk issues with invalid unicode, which would be
# really bad user experience: error in error. We have no clue if we actually deal with a
# real C-string nor what encoding it has.
super().__init__(
f"Cannot replace {old!r} with {new!r} in the C-string {full_old_string!r}."
)
super().__init__(msg)
@@ -14,7 +14,6 @@
import inspect
import itertools
import os
import os.path
import random
import re
import shutil
@@ -466,7 +465,7 @@ def read(self, stream):
"""Read this index from a provided file object."""

@abc.abstractmethod
def update(self, pkg_fullname):
def update(self, pkgs_fullname: Set[str]):
"""Update the index in memory with information about a package."""

@abc.abstractmethod
@@ -483,8 +482,8 @@ def _create(self):
def read(self, stream):
self.index = spack.tag.TagIndex.from_json(stream, self.repository)

def update(self, pkg_fullname):
self.index.update_package(pkg_fullname.split(".")[-1])
def update(self, pkgs_fullname: Set[str]):
self.index.update_packages({p.split(".")[-1] for p in pkgs_fullname})

def write(self, stream):
self.index.to_json(stream)
@@ -499,15 +498,14 @@ def _create(self):
def read(self, stream):
self.index = spack.provider_index.ProviderIndex.from_json(stream, self.repository)

def update(self, pkg_fullname):
name = pkg_fullname.split(".")[-1]
def update(self, pkgs_fullname: Set[str]):
is_virtual = (
not self.repository.exists(name) or self.repository.get_pkg_class(name).virtual
lambda name: not self.repository.exists(name)
or self.repository.get_pkg_class(name).virtual
)
if is_virtual:
return
self.index.remove_provider(pkg_fullname)
self.index.update(pkg_fullname)
non_virtual_pkgs_fullname = {p for p in pkgs_fullname if not is_virtual(p.split(".")[-1])}
self.index.remove_providers(non_virtual_pkgs_fullname)
self.index.update_packages(non_virtual_pkgs_fullname)

def write(self, stream):
self.index.to_json(stream)
@@ -532,8 +530,8 @@ def read(self, stream):
def write(self, stream):
self.index.to_json(stream)

def update(self, pkg_fullname):
self.index.update_package(pkg_fullname)
def update(self, pkgs_fullname: Set[str]):
self.index.update_packages(pkgs_fullname)


class RepoIndex:
@@ -623,9 +621,7 @@ def _build_index(self, name: str, indexer: Indexer):
if new_index_mtime != index_mtime:
needs_update = self.checker.modified_since(new_index_mtime)

for pkg_name in needs_update:
indexer.update(f"{self.namespace}.{pkg_name}")

indexer.update({f"{self.namespace}.{pkg_name}" for pkg_name in needs_update})
indexer.write(new)

return indexer.index
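The `Indexer.update` contract above changes from one fullname per call to a set of fullnames per rebuild. A tiny sketch of the name handling the subclasses rely on (package names invented):

```python
pkgs_fullname = {"builtin.zlib", "builtin.cmake"}

# TagIndexer derives base package names; ProviderIndexer keeps fullnames and
# filters out virtuals before touching the index.
base_names = {p.split(".")[-1] for p in pkgs_fullname}
assert base_names == {"zlib", "cmake"}
```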
@@ -3,7 +3,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import hashlib
import os.path
import os
import platform
import posixpath
import re
@@ -106,7 +106,7 @@ def __init__(self, configuration: CDashConfiguration):
self.site = configuration.site or socket.gethostname()
self.osname = platform.system()
self.osrelease = platform.release()
self.target = spack.platforms.host().target("default_target")
self.target = spack.platforms.host().default_target()
self.starttime = int(time.time())
self.endtime = self.starttime
self.buildstamp = (
@@ -1,7 +1,7 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os.path
import os

import spack.tengine
@@ -3,15 +3,11 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import shutil
import tempfile

import spack.binary_distribution as bindist
import spack.deptypes as dt
import spack.error
import spack.hooks
import spack.platforms
import spack.relocate as relocate
import spack.store


@@ -42,63 +38,11 @@ def rewire_node(spec, explicit):

spack.hooks.pre_install(spec)
bindist.extract_buildcache_tarball(tarball, destination=spec.prefix)
buildinfo = bindist.read_buildinfo_file(spec.prefix)
bindist.relocate_package(spec)

# compute prefix-to-prefix for every node from the build spec to the spliced
# spec
prefix_to_prefix = {spec.build_spec.prefix: spec.prefix}
build_spec_ids = set(id(s) for s in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD))
for s in bindist.specs_to_relocate(spec):
analog = s
if id(s) not in build_spec_ids:
analogs = [
d
for d in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD)
if s._splice_match(d, self_root=spec, other_root=spec.build_spec)
]
if analogs:
# Prefer same-name analogs and prefer higher versions
# This matches the preferences in Spec.splice, so we will find same node
analog = max(analogs, key=lambda a: (a.name == s.name, a.version))

prefix_to_prefix[analog.prefix] = s.prefix

platform = spack.platforms.by_name(spec.platform)

text_to_relocate = [
os.path.join(spec.prefix, rel_path) for rel_path in buildinfo["relocate_textfiles"]
]
if text_to_relocate:
relocate.relocate_text(files=text_to_relocate, prefixes=prefix_to_prefix)
links = [os.path.join(spec.prefix, f) for f in buildinfo["relocate_links"]]
relocate.relocate_links(links, prefix_to_prefix)
bins_to_relocate = [
os.path.join(spec.prefix, rel_path) for rel_path in buildinfo["relocate_binaries"]
]
if bins_to_relocate:
if "macho" in platform.binary_formats:
relocate.relocate_macho_binaries(bins_to_relocate, prefix_to_prefix)
if "elf" in platform.binary_formats:
relocate.relocate_elf_binaries(bins_to_relocate, prefix_to_prefix)
relocate.relocate_text_bin(binaries=bins_to_relocate, prefixes=prefix_to_prefix)
shutil.rmtree(tempdir)
install_manifest = os.path.join(
spec.prefix,
spack.store.STORE.layout.metadata_dir,
spack.store.STORE.layout.manifest_file_name,
)
try:
os.unlink(install_manifest)
except FileNotFoundError:
pass
# Write the spliced spec into spec.json. Without this, Database.add would fail because it
# checks the spec.json in the prefix against the spec being added to look for mismatches
spack.store.STORE.layout.write_spec(spec, spack.store.STORE.layout.spec_file_path(spec))
# add to database, not sure about explicit
spack.store.STORE.db.add(spec, explicit=explicit)

# run post install hooks
# run post install hooks and add to db
spack.hooks.post_install(spec, explicit)
spack.store.STORE.db.add(spec, explicit=explicit)


class RewireError(spack.error.SpackError):
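The analog-selection rule quoted in `rewire_node` above prefers same-name nodes first and higher versions second. A toy sketch of that `max` key, with tuples standing in for specs; the lexicographic version compare is for illustration only:

```python
candidates = [("zlib-ng", "2.1"), ("zlib", "1.2"), ("zlib", "1.3")]

# Same preference shape as max(analogs, key=lambda a: (a.name == s.name, a.version))
best = max(candidates, key=lambda a: (a[0] == "zlib", a[1]))
assert best == ("zlib", "1.3")
```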
@@ -6,6 +6,8 @@
import typing
import warnings

import jsonschema

import llnl.util.lang

from spack.error import SpecSyntaxError
@@ -19,12 +21,8 @@ class DeprecationMessage(typing.NamedTuple):
# jsonschema is imported lazily as it is heavy to import
# and increases the start-up time
def _make_validator():
import jsonschema

def _validate_spec(validator, is_spec, instance, schema):
"""Check if the attributes on instance are valid specs."""
import jsonschema

import spack.spec_parser

if not validator.is_type(instance, "object"):
@@ -33,8 +31,8 @@ def _validate_spec(validator, is_spec, instance, schema):
for spec_str in instance:
try:
spack.spec_parser.parse(spec_str)
except SpecSyntaxError as e:
yield jsonschema.ValidationError(str(e))
except SpecSyntaxError:
yield jsonschema.ValidationError(f"the key '{spec_str}' is not a valid spec")

def _deprecated_properties(validator, deprecated, instance, schema):
if not (validator.is_type(instance, "object") or validator.is_type(instance, "array")):
@@ -67,7 +65,7 @@ def _deprecated_properties(validator, deprecated, instance, schema):
yield jsonschema.ValidationError("\n".join(errors))

return jsonschema.validators.extend(
jsonschema.Draft4Validator,
jsonschema.Draft7Validator,
{"validate_spec": _validate_spec, "deprecatedProperties": _deprecated_properties},
)
@@ -19,7 +19,7 @@
"items": {
"type": "object",
"properties": {"when": {"type": "string"}},
"patternProperties": {r"^(?!when$)\w*": spec_list_schema},
"additionalProperties": spec_list_schema,
},
}
}
lib/spack/spack/schema/env_vars.py (new file, 22 lines)
@@ -0,0 +1,22 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for env_vars.yaml configuration file.

.. literalinclude:: _spack_root/lib/spack/spack/schema/env_vars.py
   :lines: 15-
"""
from typing import Any, Dict

import spack.schema.environment

properties: Dict[str, Any] = {"env_vars": spack.schema.environment.definition}

#: Full schema with metadata
schema = {
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Spack env_vars configuration file schema",
"type": "object",
"additionalProperties": False,
"properties": properties,
}
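A hedged validation sketch for the new `env_vars` section. The payload below assumes `spack.schema.environment.definition` accepts the usual `set`/`unset`/`prepend_path`-style keys; the values are invented:

```python
import jsonschema

import spack.schema.env_vars

config = {
    "env_vars": {
        "set": {"LICENSE_FILE": "/opt/licenses/license.dat"},
        "prepend_path": {"PATH": "/opt/tools/bin"},
    }
}
jsonschema.validate(config, spack.schema.env_vars.schema)  # raises on bad input
```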
@@ -20,6 +20,7 @@
import spack.schema.container
import spack.schema.definitions
import spack.schema.develop
import spack.schema.env_vars
import spack.schema.mirrors
import spack.schema.modules
import spack.schema.packages
@@ -38,6 +39,7 @@
spack.schema.ci.properties,
spack.schema.definitions.properties,
spack.schema.develop.properties,
spack.schema.env_vars.properties,
spack.schema.mirrors.properties,
spack.schema.modules.properties,
spack.schema.packages.properties,
@@ -9,6 +9,8 @@
"""
from typing import Any, Dict

import jsonschema

#: Common properties for connection specification
connection = {
"url": {"type": "string"},
@@ -102,8 +104,6 @@


def update(data):
import jsonschema

errors = []

def check_access_pair(name, section):
@@ -12,22 +12,6 @@
import spack.schema.environment
import spack.schema.projections

#: Matches a spec or a multi-valued variant but not another
#: valid keyword.
#:
#: THIS NEEDS TO BE UPDATED FOR EVERY NEW KEYWORD THAT
#: IS ADDED IMMEDIATELY BELOW THE MODULE TYPE ATTRIBUTE
spec_regex = (
r"(?!hierarchy|core_specs|verbose|hash_length|defaults|filter_hierarchy_specs|hide|"
r"include|exclude|projections|naming_scheme|core_compilers|all)(^\w[\w-]*)"
)

#: Matches a valid name for a module set
valid_module_set_name = r"^(?!prefix_inspections$)\w[\w-]*$"

#: Matches an anonymous spec, i.e. a spec without a root name
anonymous_spec_regex = r"^[\^@%+~]"

#: Definitions for parts of module schema
array_of_strings = {"type": "array", "default": [], "items": {"type": "string"}}

@@ -56,7 +40,7 @@
"suffixes": {
"type": "object",
"validate_spec": True,
"patternProperties": {r"\w[\w-]*": {"type": "string"}}, # key
"additionalProperties": {"type": "string"}, # key
},
"environment": spack.schema.environment.definition,
},
@@ -64,34 +48,40 @@

projections_scheme = spack.schema.projections.properties["projections"]

module_type_configuration = {
module_type_configuration: Dict = {
"type": "object",
"default": {},
"allOf": [
{
"properties": {
"verbose": {"type": "boolean", "default": False},
"hash_length": {"type": "integer", "minimum": 0, "default": 7},
"include": array_of_strings,
"exclude": array_of_strings,
"exclude_implicits": {"type": "boolean", "default": False},
"defaults": array_of_strings,
"hide_implicits": {"type": "boolean", "default": False},
"naming_scheme": {"type": "string"}, # Can we be more specific here?
"projections": projections_scheme,
"all": module_file_configuration,
}
},
{
"validate_spec": True,
"patternProperties": {
spec_regex: module_file_configuration,
anonymous_spec_regex: module_file_configuration,
},
},
],
"validate_spec": True,
"properties": {
"verbose": {"type": "boolean", "default": False},
"hash_length": {"type": "integer", "minimum": 0, "default": 7},
"include": array_of_strings,
"exclude": array_of_strings,
"exclude_implicits": {"type": "boolean", "default": False},
"defaults": array_of_strings,
"hide_implicits": {"type": "boolean", "default": False},
"naming_scheme": {"type": "string"},
"projections": projections_scheme,
"all": module_file_configuration,
},
"additionalProperties": module_file_configuration,
}

tcl_configuration = module_type_configuration.copy()

lmod_configuration = module_type_configuration.copy()
lmod_configuration["properties"].update(
{
"core_compilers": array_of_strings,
"hierarchy": array_of_strings,
"core_specs": array_of_strings,
"filter_hierarchy_specs": {
"type": "object",
"validate_spec": True,
"additionalProperties": array_of_strings,
},
}
)

module_config_properties = {
"use_view": {"anyOf": [{"type": "string"}, {"type": "boolean"}]},
@@ -105,31 +95,8 @@
"default": [],
"items": {"type": "string", "enum": ["tcl", "lmod"]},
},
"lmod": {
"allOf": [
# Base configuration
module_type_configuration,
{
"type": "object",
"properties": {
"core_compilers": array_of_strings,
"hierarchy": array_of_strings,
"core_specs": array_of_strings,
"filter_hierarchy_specs": {
"type": "object",
"patternProperties": {spec_regex: array_of_strings},
},
},
}, # Specific lmod extensions
]
},
"tcl": {
"allOf": [
# Base configuration
module_type_configuration,
{}, # Specific tcl extensions
]
},
"lmod": lmod_configuration,
"tcl": tcl_configuration,
"prefix_inspections": {
"type": "object",
"additionalProperties": False,
@@ -145,7 +112,6 @@
properties: Dict[str, Any] = {
"modules": {
"type": "object",
"additionalProperties": False,
"properties": {
"prefix_inspections": {
"type": "object",
@@ -156,13 +122,11 @@
},
}
},
"patternProperties": {
valid_module_set_name: {
"type": "object",
"default": {},
"additionalProperties": False,
"properties": module_config_properties,
}
"additionalProperties": {
"type": "object",
"default": {},
"additionalProperties": False,
"properties": module_config_properties,
},
}
}
@@ -98,7 +98,6 @@
"packages": {
"type": "object",
"default": {},
"additionalProperties": False,
"properties": {
"all": { # package name
"type": "object",
@@ -140,58 +139,54 @@
},
}
},
"patternProperties": {
r"(?!^all$)(^\w[\w-]*)": { # package name
"type": "object",
"default": {},
"additionalProperties": False,
"properties": {
"require": requirements,
"prefer": prefer_and_conflict,
"conflict": prefer_and_conflict,
"version": {
"type": "array",
"default": [],
# version strings
"items": {"anyOf": [{"type": "string"}, {"type": "number"}]},
},
"buildable": {"type": "boolean", "default": True},
"permissions": permissions,
# If 'get_full_repo' is promoted to a Package-level
# attribute, it could be useful to set it here
"package_attributes": package_attributes,
"variants": variants,
"externals": {
"type": "array",
"items": {
"type": "object",
"properties": {
"spec": {"type": "string"},
"prefix": {"type": "string"},
"modules": {"type": "array", "items": {"type": "string"}},
"extra_attributes": {
"type": "object",
"additionalProperties": True,
"properties": {
"compilers": {
"type": "object",
"patternProperties": {
r"(^\w[\w-]*)": {"type": "string"}
},
},
"environment": spack.schema.environment.definition,
"extra_rpaths": extra_rpaths,
"implicit_rpaths": implicit_rpaths,
"flags": flags,
"additionalProperties": { # package name
"type": "object",
"default": {},
"additionalProperties": False,
"properties": {
"require": requirements,
"prefer": prefer_and_conflict,
"conflict": prefer_and_conflict,
"version": {
"type": "array",
"default": [],
# version strings
"items": {"anyOf": [{"type": "string"}, {"type": "number"}]},
},
"buildable": {"type": "boolean", "default": True},
"permissions": permissions,
# If 'get_full_repo' is promoted to a Package-level
# attribute, it could be useful to set it here
"package_attributes": package_attributes,
"variants": variants,
"externals": {
"type": "array",
"items": {
"type": "object",
"properties": {
"spec": {"type": "string"},
"prefix": {"type": "string"},
"modules": {"type": "array", "items": {"type": "string"}},
"extra_attributes": {
"type": "object",
"additionalProperties": {"type": "string"},
"properties": {
"compilers": {
"type": "object",
"patternProperties": {r"(^\w[\w-]*)": {"type": "string"}},
},
"environment": spack.schema.environment.definition,
"extra_rpaths": extra_rpaths,
"implicit_rpaths": implicit_rpaths,
"flags": flags,
},
},
"additionalProperties": True,
"required": ["spec"],
},
"additionalProperties": True,
"required": ["spec"],
},
},
}
},
},
}
}
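The schema rework above leans on a JSON Schema rule: `additionalProperties` applies only to keys not already matched by `properties`, so the explicit `(?!^all$)` negative-lookahead regex becomes unnecessary. A toy schema demonstrating the rule (not the real packages schema):

```python
import jsonschema

toy_schema = {
    "type": "object",
    "properties": {"all": {"type": "object"}},  # handled separately
    "additionalProperties": {"type": "object", "required": ["require"]},
}

# "all" is validated by `properties`; every other key must carry "require".
jsonschema.validate({"all": {}, "zlib": {"require": "@1.3:"}}, toy_schema)
```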
@@ -2711,7 +2711,7 @@ def setup(
if env:
dev_specs = tuple(
spack.spec.Spec(info["spec"]).constrained(
"dev_path=%s"
'dev_path="%s"'
% spack.util.path.canonicalize_path(info["path"], default_wd=env.path)
)
for name, info in env.dev_specs.items()
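Presumably the added quotes keep the constraint parseable when the canonicalized path contains characters the spec parser would otherwise split on; a sketch with an invented path:

```python
path = "/home/user/my project"  # spaces would break an unquoted attribute
constraint = 'dev_path="%s"' % path
assert constraint == 'dev_path="/home/user/my project"'
```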
@@ -237,23 +237,14 @@ def _make_microarchitecture(name: str) -> archspec.cpu.Microarchitecture:
class ArchSpec:
"""Aggregate the target platform, the operating system and the target microarchitecture."""

@staticmethod
def _return_arch(os_tag, target_tag):
platform = spack.platforms.host()
default_os = platform.operating_system(os_tag)
default_target = platform.target(target_tag)
arch_tuple = str(platform), str(default_os), str(default_target)
return ArchSpec(arch_tuple)

@staticmethod
def default_arch():
"""Return the default architecture"""
return ArchSpec._return_arch("default_os", "default_target")

@staticmethod
def frontend_arch():
"""Return the frontend architecture"""
return ArchSpec._return_arch("frontend", "frontend")
platform = spack.platforms.host()
default_os = platform.default_operating_system()
default_target = platform.default_target()
arch_tuple = str(platform), str(default_os), str(default_target)
return ArchSpec(arch_tuple)

__slots__ = "_platform", "_os", "_target"

@@ -461,6 +452,9 @@ def _target_satisfies(self, other: "ArchSpec", strict: bool) -> bool:
return bool(self._target_intersection(other))

def _target_constrain(self, other: "ArchSpec") -> bool:
if self.target is None and other.target is None:
return False

if not other._target_satisfies(self, strict=False):
raise UnsatisfiableArchitectureSpecError(self, other)

@@ -509,21 +503,56 @@ def _target_intersection(self, other):
if (not s_min or o_comp >= s_min) and (not s_max or o_comp <= s_max):
results.append(o_min)
else:
# Take intersection of two ranges
# Lots of comparisons needed
_s_min = _make_microarchitecture(s_min)
_s_max = _make_microarchitecture(s_max)
_o_min = _make_microarchitecture(o_min)
_o_max = _make_microarchitecture(o_max)
# Take the "min" of the two max, if there is a partial ordering.
n_max = ""
if s_max and o_max:
_s_max = _make_microarchitecture(s_max)
_o_max = _make_microarchitecture(o_max)
if _s_max.family != _o_max.family:
continue
if _s_max <= _o_max:
n_max = s_max
elif _o_max < _s_max:
n_max = o_max
else:
continue
elif s_max:
n_max = s_max
elif o_max:
n_max = o_max

# Take the "max" of the two min.
n_min = ""
if s_min and o_min:
_s_min = _make_microarchitecture(s_min)
_o_min = _make_microarchitecture(o_min)
if _s_min.family != _o_min.family:
continue
if _s_min >= _o_min:
n_min = s_min
elif _o_min > _s_min:
n_min = o_min
else:
continue
elif s_min:
n_min = s_min
elif o_min:
n_min = o_min

if n_min and n_max:
_n_min = _make_microarchitecture(n_min)
_n_max = _make_microarchitecture(n_max)
if _n_min.family != _n_max.family or not _n_min <= _n_max:
continue
if n_min == n_max:
results.append(n_min)
else:
results.append(f"{n_min}:{n_max}")
elif n_min:
results.append(f"{n_min}:")
elif n_max:
results.append(f":{n_max}")

n_min = s_min if _s_min >= _o_min else o_min
n_max = s_max if _s_max <= _o_max else o_max
_n_min = _make_microarchitecture(n_min)
_n_max = _make_microarchitecture(n_max)
if _n_min == _n_max:
results.append(n_min)
elif not n_min or not n_max or _n_min < _n_max:
results.append("%s:%s" % (n_min, n_max))
return results
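A worked example of the new range rules above, using archspec's partial order on microarchitectures. The target names are real archspec entries; the ranges are invented. Intersecting `x86_64:skylake` with `haswell:` keeps the larger of the two minima and the smaller of the two maxima, yielding `haswell:skylake`:

```python
import archspec.cpu

targets = archspec.cpu.TARGETS

# "max of the two mins": haswell dominates x86_64 in the partial order.
assert targets["haswell"] >= targets["x86_64"]
# Both ends must share a family for the intersection to be non-empty.
assert targets["haswell"].family == targets["skylake"].family
# -> intersection of "x86_64:skylake" and "haswell:" is "haswell:skylake"
```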
def constrain(self, other: "ArchSpec") -> bool:
|
||||
@@ -578,14 +607,9 @@ def to_dict(self):
|
||||
target_data = str(self.target)
|
||||
else:
|
||||
# Get rid of compiler flag information before turning the uarch into a dict
|
||||
uarch_dict = self.target.to_dict()
|
||||
uarch_dict.pop("compilers", None)
|
||||
target_data = syaml.syaml_dict(uarch_dict.items())
|
||||
|
||||
d = syaml.syaml_dict(
|
||||
[("platform", self.platform), ("platform_os", self.os), ("target", target_data)]
|
||||
)
|
||||
return syaml.syaml_dict([("arch", d)])
|
||||
target_data = self.target.to_dict()
|
||||
target_data.pop("compilers", None)
|
||||
return {"arch": {"platform": self.platform, "platform_os": self.os, "target": target_data}}
|
||||
|
||||
@staticmethod
|
||||
def from_dict(d):
|
||||
@@ -710,10 +734,7 @@ def _cmp_iter(self):
|
||||
yield self.versions
|
||||
|
||||
def to_dict(self):
|
||||
d = syaml.syaml_dict([("name", self.name)])
|
||||
d.update(self.versions.to_dict())
|
||||
|
||||
return syaml.syaml_dict([("compiler", d)])
|
||||
return {"compiler": {"name": self.name, **self.versions.to_dict()}}
|
||||
|
||||
@staticmethod
|
||||
def from_dict(d):
|
||||
@@ -2290,9 +2311,7 @@ def to_node_dict(self, hash=ht.dag_hash):
|
||||
Arguments:
|
||||
hash (spack.hash_types.SpecHashDescriptor) type of hash to generate.
|
||||
"""
|
||||
d = syaml.syaml_dict()
|
||||
|
||||
d["name"] = self.name
|
||||
d = {"name": self.name}
|
||||
|
||||
if self.versions:
|
||||
d.update(self.versions.to_dict())
|
||||
@@ -2306,7 +2325,7 @@ def to_node_dict(self, hash=ht.dag_hash):
|
||||
if self.namespace:
|
||||
d["namespace"] = self.namespace
|
||||
|
||||
params = syaml.syaml_dict(sorted(v.yaml_entry() for _, v in self.variants.items()))
|
||||
params = dict(sorted(v.yaml_entry() for v in self.variants.values()))
|
||||
|
||||
# Only need the string compiler flag for yaml file
|
||||
params.update(
|
||||
@@ -2337,13 +2356,11 @@ def to_node_dict(self, hash=ht.dag_hash):
|
||||
else:
|
||||
extra_attributes = None
|
||||
|
||||
d["external"] = syaml.syaml_dict(
|
||||
[
|
||||
("path", self.external_path),
|
||||
("module", self.external_modules),
|
||||
("extra_attributes", extra_attributes),
|
||||
]
|
||||
)
|
||||
d["external"] = {
|
||||
"path": self.external_path,
|
||||
"module": self.external_modules,
|
||||
"extra_attributes": extra_attributes,
|
||||
}
|
||||
|
||||
if not self._concrete:
|
||||
d["concrete"] = False
|
||||
@@ -2374,29 +2391,25 @@ def to_node_dict(self, hash=ht.dag_hash):
|
||||
# Note: Relies on sorting dict by keys later in algorithm.
|
||||
deps = self._dependencies_dict(depflag=hash.depflag)
|
||||
if deps:
|
||||
deps_list = []
|
||||
for name, edges_for_name in sorted(deps.items()):
|
||||
name_tuple = ("name", name)
|
||||
for dspec in edges_for_name:
|
||||
hash_tuple = (hash.name, dspec.spec._cached_hash(hash))
|
||||
parameters_tuple = (
|
||||
"parameters",
|
||||
syaml.syaml_dict(
|
||||
(
|
||||
("deptypes", dt.flag_to_tuple(dspec.depflag)),
|
||||
("virtuals", dspec.virtuals),
|
||||
)
|
||||
),
|
||||
)
|
||||
ordered_entries = [name_tuple, hash_tuple, parameters_tuple]
|
||||
deps_list.append(syaml.syaml_dict(ordered_entries))
|
||||
d["dependencies"] = deps_list
|
||||
d["dependencies"] = [
|
||||
{
|
||||
"name": name,
|
||||
hash.name: dspec.spec._cached_hash(hash),
|
||||
"parameters": {
|
||||
"deptypes": dt.flag_to_tuple(dspec.depflag),
|
||||
"virtuals": dspec.virtuals,
|
||||
},
|
||||
}
|
||||
for name, edges_for_name in sorted(deps.items())
|
||||
for dspec in edges_for_name
|
||||
]
|
||||
|
||||
# Name is included in case this is replacing a virtual.
|
||||
if self._build_spec:
|
||||
d["build_spec"] = syaml.syaml_dict(
|
||||
[("name", self.build_spec.name), (hash.name, self.build_spec._cached_hash(hash))]
|
||||
)
|
||||
d["build_spec"] = {
|
||||
"name": self.build_spec.name,
|
||||
hash.name: self.build_spec._cached_hash(hash),
|
||||
}
|
||||
return d
|
||||
|
||||
def to_dict(self, hash=ht.dag_hash):
|
||||
@@ -2498,10 +2511,7 @@ def to_dict(self, hash=ht.dag_hash):
|
||||
node_list.append(node)
|
||||
hash_set.add(node_hash)
|
||||
|
||||
meta_dict = syaml.syaml_dict([("version", SPECFILE_FORMAT_VERSION)])
|
||||
inner_dict = syaml.syaml_dict([("_meta", meta_dict), ("nodes", node_list)])
|
||||
spec_dict = syaml.syaml_dict([("spec", inner_dict)])
|
||||
return spec_dict
|
||||
return {"spec": {"_meta": {"version": SPECFILE_FORMAT_VERSION}, "nodes": node_list}}
|
||||
|
||||
def node_dict_with_hashes(self, hash=ht.dag_hash):
|
||||
"""Returns a node_dict of this spec with the dag hash added. If this
|
||||
@@ -3170,18 +3180,13 @@ def constrain(self, other, deps=True):
|
||||
if not self.variants[v].compatible(other.variants[v]):
|
||||
raise vt.UnsatisfiableVariantSpecError(self.variants[v], other.variants[v])
|
||||
|
||||
# TODO: Check out the logic here
|
||||
sarch, oarch = self.architecture, other.architecture
|
||||
if sarch is not None and oarch is not None:
|
||||
if sarch.platform is not None and oarch.platform is not None:
|
||||
if sarch.platform != oarch.platform:
|
||||
raise UnsatisfiableArchitectureSpecError(sarch, oarch)
|
||||
if sarch.os is not None and oarch.os is not None:
|
||||
if sarch.os != oarch.os:
|
||||
raise UnsatisfiableArchitectureSpecError(sarch, oarch)
|
||||
if sarch.target is not None and oarch.target is not None:
|
||||
if sarch.target != oarch.target:
|
||||
raise UnsatisfiableArchitectureSpecError(sarch, oarch)
|
||||
if (
|
||||
sarch is not None
|
||||
and oarch is not None
|
||||
and not self.architecture.intersects(other.architecture)
|
||||
):
|
||||
raise UnsatisfiableArchitectureSpecError(sarch, oarch)
|
||||
|
||||
changed = False
|
||||
|
||||
@@ -3204,18 +3209,12 @@ def constrain(self, other, deps=True):
|
||||
|
||||
changed |= self.compiler_flags.constrain(other.compiler_flags)
|
||||
|
||||
old = str(self.architecture)
|
||||
sarch, oarch = self.architecture, other.architecture
|
||||
if sarch is None or other.architecture is None:
|
||||
self.architecture = sarch or oarch
|
||||
else:
|
||||
if sarch.platform is None or oarch.platform is None:
|
||||
self.architecture.platform = sarch.platform or oarch.platform
|
||||
if sarch.os is None or oarch.os is None:
|
||||
sarch.os = sarch.os or oarch.os
|
||||
if sarch.target is None or oarch.target is None:
|
||||
sarch.target = sarch.target or oarch.target
|
||||
changed |= str(self.architecture) != old
|
||||
if sarch is not None and oarch is not None:
|
||||
changed |= self.architecture.constrain(other.architecture)
|
||||
elif oarch is not None:
|
||||
self.architecture = oarch
|
||||
changed = True
|
||||
|
||||
if deps:
|
||||
changed |= self._constrain_dependencies(other)
|
||||
@@ -4906,9 +4905,7 @@ def from_node_dict(cls, node):
|
||||
spec.external_modules = node["external"]["module"]
|
||||
if spec.external_modules is False:
|
||||
spec.external_modules = None
|
||||
spec.extra_attributes = node["external"].get(
|
||||
"extra_attributes", syaml.syaml_dict()
|
||||
)
|
||||
spec.extra_attributes = node["external"].get("extra_attributes", {})
|
||||
|
||||
# specs read in are concrete unless marked abstract
|
||||
if node.get("concrete", True):
|
||||
@@ -5185,12 +5182,10 @@ def get_host_environment_metadata() -> Dict[str, str]:


 def get_host_environment() -> Dict[str, Any]:
-    """Return a dictionary (lookup) with host information (not including the
-    os.environ).
-    """
+    """Returns a dictionary with host information (not including the os.environ)."""
     host_platform = spack.platforms.host()
-    host_target = host_platform.target("default_target")
-    host_os = host_platform.operating_system("default_os")
+    host_target = host_platform.default_target()
+    host_os = host_platform.default_operating_system()
     arch_fmt = "platform={0} os={1} target={2}"
     arch_spec = Spec(arch_fmt.format(host_platform, host_os, host_target))
     return {
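For illustration, the `arch_fmt` string assembled above yields a spec fragment like the following (values made up; the real ones come from `spack.platforms.host()`):

    arch_fmt = "platform={0} os={1} target={2}"
    print(arch_fmt.format("linux", "ubuntu22.04", "x86_64"))
    # -> platform=linux os=ubuntu22.04 target=x86_64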
@@ -5,6 +5,7 @@
 import collections
 import copy
 from collections.abc import Mapping
+from typing import Set

 import spack.error
 import spack.repo
@@ -110,23 +111,20 @@ def merge(self, other):
             spkgs, opkgs = self.tags[tag], other.tags[tag]
             self.tags[tag] = sorted(list(set(spkgs + opkgs)))

-    def update_package(self, pkg_name):
-        """Updates a package in the tag index.
-
-        Args:
-            pkg_name (str): name of the package to be removed from the index
-        """
-        pkg_cls = self.repository.get_pkg_class(pkg_name)
-
+    def update_packages(self, pkg_names: Set[str]):
+        """Updates a package in the tag index."""
         # Remove the package from the list of packages, if present
         for pkg_list in self._tag_dict.values():
-            if pkg_name in pkg_list:
-                pkg_list.remove(pkg_name)
+            if pkg_names.isdisjoint(pkg_list):
+                continue
+            pkg_list[:] = [pkg for pkg in pkg_list if pkg not in pkg_names]

         # Add it again under the appropriate tags
-        for tag in getattr(pkg_cls, "tags", []):
-            tag = tag.lower()
-            self._tag_dict[tag].append(pkg_cls.name)
+        for pkg_name in pkg_names:
+            pkg_cls = self.repository.get_pkg_class(pkg_name)
+            for tag in getattr(pkg_cls, "tags", []):
+                tag = tag.lower()
+                self._tag_dict[tag].append(pkg_cls.name)


 class TagIndexError(spack.error.SpackError):
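The new `update_packages` removes a whole batch in one pass: the `isdisjoint` guard skips tag lists that contain none of the packages, and the slice assignment filters in place, which matters because `self._tag_dict` holds references to those list objects. A standalone sketch of just that removal step (data made up):

    # Standalone sketch of the in-place batch removal used by update_packages.
    tag_dict = {"hpc": ["mpich", "openmpi", "zlib"], "compression": ["zlib", "gzip"]}
    pkg_names = {"zlib", "openmpi"}

    for pkg_list in tag_dict.values():
        if pkg_names.isdisjoint(pkg_list):
            continue  # nothing to remove under this tag
        # Slice assignment mutates the existing list object in place.
        pkg_list[:] = [pkg for pkg in pkg_list if pkg not in pkg_names]

    assert tag_dict == {"hpc": ["mpich"], "compression": ["gzip"]}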
@@ -60,8 +60,7 @@ def test_user_input_combination(config, target_str, os_str):
     """Test for all the valid user input combinations that both the target and
     the operating system match.
     """
-    spec_str = "libelf os={} target={}".format(os_str, target_str)
-    spec = Spec(spec_str)
+    spec = Spec(f"libelf os={os_str} target={target_str}")
     assert spec.architecture.os == str(TEST_PLATFORM.operating_system(os_str))
     assert spec.architecture.target == TEST_PLATFORM.target(target_str)

@@ -71,8 +70,8 @@ def test_default_os_and_target(default_mock_concretization):
     after concretization.
     """
     spec = default_mock_concretization("libelf")
-    assert spec.architecture.os == str(TEST_PLATFORM.operating_system("default_os"))
-    assert spec.architecture.target == TEST_PLATFORM.target("default_target")
+    assert spec.architecture.os == str(TEST_PLATFORM.default_operating_system())
+    assert spec.architecture.target == TEST_PLATFORM.default_target()


 def test_operating_system_conversion_to_dict():
@@ -36,13 +36,15 @@
 import spack.mirrors.mirror
 import spack.oci.image
 import spack.paths
+import spack.repo
 import spack.spec
 import spack.store
 import spack.util.gpg
 import spack.util.spack_yaml as syaml
 import spack.util.url as url_util
 import spack.util.web as web_util
-from spack.binary_distribution import CannotListKeys, GenerateIndexError
+from spack.binary_distribution import INDEX_HASH_FILE, CannotListKeys, GenerateIndexError
+from spack.database import INDEX_JSON_FILE
 from spack.installer import PackageInstaller
 from spack.paths import test_path
 from spack.spec import Spec
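The rest of this file's changes follow from these two imports: the literal file names are replaced by the `INDEX_JSON_FILE` and `INDEX_HASH_FILE` constants. A sketch of the idea (the constant values below are assumed; the diff only shows the names):

    # Assumed values; the diff shows only the constant names, not their contents.
    INDEX_JSON_FILE = "index.json"
    INDEX_HASH_FILE = "index.json.hash"

    url = "https://www.example.com/build_cache/index.json.hash"
    assert url.endswith(INDEX_HASH_FILE)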
@@ -93,7 +95,7 @@ def config_directory(tmp_path_factory):


 @pytest.fixture(scope="function")
-def default_config(tmp_path, config_directory, monkeypatch, install_mockery):
+def default_config(tmp_path, config_directory, mock_repo_path, install_mockery):
     # This fixture depends on install_mockery to ensure
     # there is a clear order of initialization. The substitution of the
     # config scopes here is done on top of the substitution that comes with
@@ -108,7 +110,6 @@ def default_config(tmp_path, config_directory, monkeypatch, install_mockery):
     ]

     with spack.config.use_configuration(*scopes):
-        spack.config.CONFIG.set("repos", [spack.paths.mock_packages_path])
         njobs = spack.config.get("config:build_jobs")
         if not njobs:
             spack.config.set("config:build_jobs", 4, scope="user")
@@ -129,8 +130,8 @@ def default_config(tmp_path, config_directory, monkeypatch, install_mockery):
         timeout = spack.config.get("config:connect_timeout")
         if not timeout:
             spack.config.set("config:connect_timeout", 10, scope="user")
-
-        yield spack.config.CONFIG
+        with spack.repo.use_repositories(mock_repo_path):
+            yield spack.config.CONFIG


 @pytest.fixture(scope="function")
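The fixture now yields from inside `spack.repo.use_repositories(...)`, so the repository substitution stays active for the whole test body and is unwound afterwards. A generic pytest sketch of that pattern (the names here are hypothetical, not Spack's):

    # Generic sketch: a fixture that keeps a context manager open across the test.
    import contextlib

    import pytest

    @contextlib.contextmanager
    def use_fake_resource(name):
        print(f"setup {name}")
        yield name
        print(f"teardown {name}")

    @pytest.fixture
    def resource():
        with use_fake_resource("demo") as r:
            yield r  # teardown runs only after the test finishes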
@@ -606,7 +607,7 @@ def test_etag_fetching_304():
     # handled as success, since it means the local cache is up-to-date.
     def response_304(request: urllib.request.Request):
         url = request.get_full_url()
-        if url == "https://www.example.com/build_cache/index.json":
+        if url == f"https://www.example.com/build_cache/{INDEX_JSON_FILE}":
             assert request.get_header("If-none-match") == '"112a8bbc1b3f7f185621c1ee335f0502"'
             raise urllib.error.HTTPError(
                 url, 304, "Not Modified", hdrs={}, fp=None  # type: ignore[arg-type]
@@ -628,7 +629,7 @@ def test_etag_fetching_200():
     # Test conditional fetch with etags. The remote has modified the file.
     def response_200(request: urllib.request.Request):
         url = request.get_full_url()
-        if url == "https://www.example.com/build_cache/index.json":
+        if url == f"https://www.example.com/build_cache/{INDEX_JSON_FILE}":
             assert request.get_header("If-none-match") == '"112a8bbc1b3f7f185621c1ee335f0502"'
             return urllib.response.addinfourl(
                 io.BytesIO(b"Result"),
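Both tests above exercise HTTP conditional requests: the client sends its cached ETag in `If-None-Match`, and the server answers 304 (cache still valid) or 200 with a fresh body and a new ETag. A minimal client-side sketch with `urllib` (URL and etag are placeholders):

    # Minimal sketch of a conditional GET with an ETag; placeholders throughout.
    import urllib.error
    import urllib.request

    def fetch_if_changed(url: str, cached_etag: str):
        request = urllib.request.Request(url, headers={"If-None-Match": cached_etag})
        try:
            with urllib.request.urlopen(request) as response:
                return response.read(), response.headers.get("Etag")
        except urllib.error.HTTPError as e:
            if e.code == 304:
                return None, cached_etag  # local copy is still current
            raise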
@@ -679,7 +680,7 @@ def test_default_index_fetch_200():

     def urlopen(request: urllib.request.Request):
         url = request.get_full_url()
-        if url.endswith("index.json.hash"):
+        if url.endswith(INDEX_HASH_FILE):
             return urllib.response.addinfourl(  # type: ignore[arg-type]
                 io.BytesIO(index_json_hash.encode()),
                 headers={},  # type: ignore[arg-type]
@@ -687,7 +688,7 @@ def urlopen(request: urllib.request.Request):
                 code=200,
             )

-        elif url.endswith("index.json"):
+        elif url.endswith(INDEX_JSON_FILE):
             return urllib.response.addinfourl(
                 io.BytesIO(index_json.encode()),
                 headers={"Etag": '"59bcc3ad6775562f845953cf01624225"'},  # type: ignore[arg-type]
@@ -718,7 +719,7 @@ def test_default_index_dont_fetch_index_json_hash_if_no_local_hash():

     def urlopen(request: urllib.request.Request):
         url = request.get_full_url()
-        if url.endswith("index.json"):
+        if url.endswith(INDEX_JSON_FILE):
             return urllib.response.addinfourl(
                 io.BytesIO(index_json.encode()),
                 headers={"Etag": '"59bcc3ad6775562f845953cf01624225"'},  # type: ignore[arg-type]
@@ -747,7 +748,7 @@ def test_default_index_not_modified():

     def urlopen(request: urllib.request.Request):
         url = request.get_full_url()
-        if url.endswith("index.json.hash"):
+        if url.endswith(INDEX_HASH_FILE):
             return urllib.response.addinfourl(
                 io.BytesIO(index_json_hash.encode()),
                 headers={},  # type: ignore[arg-type]
@@ -792,7 +793,7 @@ def test_default_index_json_404():

     def urlopen(request: urllib.request.Request):
         url = request.get_full_url()
-        if url.endswith("index.json.hash"):
+        if url.endswith(INDEX_HASH_FILE):
             return urllib.response.addinfourl(
                 io.BytesIO(index_json_hash.encode()),
                 headers={},  # type: ignore[arg-type]
@@ -800,7 +801,7 @@ def urlopen(request: urllib.request.Request):
                 code=200,
             )

-        elif url.endswith("index.json"):
+        elif url.endswith(INDEX_JSON_FILE):
             raise urllib.error.HTTPError(
                 url,
                 code=404,
@@ -220,14 +220,12 @@ def test_source_is_disabled(mutable_config):

     # The source is not explicitly enabled or disabled, so the following
     # call should raise to skip using it for bootstrapping
-    with pytest.raises(ValueError):
-        spack.bootstrap.core.source_is_enabled_or_raise(conf)
+    assert not spack.bootstrap.core.source_is_enabled(conf)

     # Try to explicitly disable the source and verify that the behavior
     # is the same as above
     spack.config.add("bootstrap:trusted:{0}:{1}".format(conf["name"], False))
-    with pytest.raises(ValueError):
-        spack.bootstrap.core.source_is_enabled_or_raise(conf)
+    assert not spack.bootstrap.core.source_is_enabled(conf)


 @pytest.mark.regression("45247")
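The bootstrap API change swaps an exception-raising check for a boolean predicate, which the tests above can assert on directly. A generic sketch of the call-site simplification (the function name mirrors the diff, but the body is invented):

    # Invented body; only the raise-vs-bool shape mirrors the diff above.
    def source_is_enabled(conf: dict) -> bool:
        return bool(conf.get("trusted", False))

    conf = {"name": "github-actions", "trusted": False}

    # Old style: wrap the call in try/except (or pytest.raises) just to observe a bool.
    # New style: assert on the predicate directly.
    assert not source_is_enabled(conf)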
@@ -3,7 +3,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import os
-import os.path

 import pytest

Some files were not shown because too many files have changed in this diff.