Compare commits
19 commits: develop ... isolate-ut

ba62db9add
aef5c35065
7ee62d8202
1ff78a7959
0b92a19620
ee36214f83
4516b742dd
6e24ea55ea
bc06e2bc17
330a5c0010
1f0a8755c7
425d3ba8a6
2972dea418
953209fe2d
1d07e4cb8d
3fd543328b
30cafc553a
3a6ad72ac1
736c46e22d
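Taken together, the hunks below show two recurring refactors, consistent with the isolate-ut branch name: configuration values (verify_ssl, timeout, fetch_method, path replacements, job counts, the editor debug flag) are now passed explicitly into the spack.util helpers by their callers instead of being read from spack.config inside the utilities, and the GPG and package-hash modules move out of spack.util (spack.util.gpg becomes spack.gpg, spack.util.package_hash becomes spack.package_hash). The shape of the first change, lifted from the url_exists hunks below:

    # before: the helper resolved these settings internally
    web_util.url_exists(url)

    # after: the caller threads the config-derived values through
    web_util.url_exists(
        url,
        fetch_method=spack.config.get("config:url_fetch_method", "urllib"),
        verify_ssl=spack.config.get("config:verify_ssl"),
        timeout=spack.config.get("config:connect_timeout", 10),
    )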
@@ -499,7 +499,7 @@ def _ensure_packages_are_pickeleable(pkgs, error_cls):
 @package_properties
 def _ensure_packages_are_unparseable(pkgs, error_cls):
     """Ensure that all packages can unparse and that unparsed code is valid Python"""
-    import spack.util.package_hash as ph
+    import spack.package_hash as ph

     errors = []
     for pkg_name in pkgs:
@@ -646,7 +646,11 @@ def _linting_package_file(pkgs, error_cls):
         if pkg_cls.homepage.startswith("http://"):
             https = re.sub("http", "https", pkg_cls.homepage, 1)
             try:
-                response = urlopen(https)
+                response = urlopen(
+                    https,
+                    verify_ssl=spack.config.get("config:verify_ssl", True),
+                    timeout=spack.config.get("config:connect_timeout", 10),
+                )
             except Exception as e:
                 msg = 'Error with attempting https for "{0}": '
                 errors.append(error_cls(msg.format(pkg_cls.name), [str(e)]))
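The urlopen called above now receives verify_ssl and timeout keywords, so it is presumably Spack's own wrapper rather than urllib.request.urlopen, which accepts neither. A minimal sketch of a wrapper with that signature (an assumption, not code from this diff):

    import ssl
    import urllib.request


    def urlopen(url_or_request, verify_ssl=True, timeout=10):
        # Hypothetical wrapper: honor the verify_ssl/timeout the caller passes
        # instead of reading them from global configuration.
        context = None if verify_ssl else ssl._create_unverified_context()
        return urllib.request.urlopen(url_or_request, timeout=timeout, context=context)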
@@ -38,12 +38,14 @@
 import spack.config as config
 import spack.database as spack_db
 import spack.error
+import spack.gpg
 import spack.hooks
 import spack.hooks.sbang
 import spack.mirror
 import spack.oci.image
 import spack.oci.oci
 import spack.oci.opener
+import spack.paths
 import spack.platforms
 import spack.relocate as relocate
 import spack.repo
@@ -52,7 +54,6 @@
 import spack.traverse as traverse
 import spack.util.crypto
 import spack.util.file_cache as file_cache
-import spack.util.gpg
 import spack.util.path
 import spack.util.spack_json as sjson
 import spack.util.spack_yaml as syaml
@@ -486,7 +487,10 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}):
        scheme = urllib.parse.urlparse(mirror_url).scheme

        if scheme != "oci" and not web_util.url_exists(
-            url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, "index.json")
+            url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, "index.json"),
+            fetch_method=spack.config.get("config:url_fetch_method", "urllib"),
+            verify_ssl=spack.config.get("config:verify_ssl"),
+            timeout=spack.config.get("config:connect_timeout", 10),
        ):
            return False

@@ -532,7 +536,9 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}):
 def binary_index_location():
     """Set up a BinaryCacheIndex for remote buildcache dbs in the user's homedir."""
     cache_root = os.path.join(misc_cache_location(), "indices")
-    return spack.util.path.canonicalize_path(cache_root)
+    return spack.util.path.canonicalize_path(
+        cache_root, replacements=spack.paths.path_replacements()
+    )


 #: Default binary cache index instance
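canonicalize_path now takes its substitution map from the caller. spack.paths.path_replacements() is assumed to return the placeholder-to-value mapping ($spack, $user, and similar) that the function previously computed itself; a hedged usage sketch:

    # The caller supplies the replacement map; path_replacements() is assumed
    # to return a dict such as {"spack": ..., "user": ..., "tempdir": ...}.
    cache_root = spack.util.path.canonicalize_path(
        cache_root, replacements=spack.paths.path_replacements()
    )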
@@ -827,7 +833,7 @@ def tarball_path_name(spec, ext):

 def select_signing_key(key=None):
     if key is None:
-        keys = spack.util.gpg.signing_keys()
+        keys = spack.gpg.signing_keys()
         if len(keys) == 1:
             key = keys[0]

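This is the first of many call sites renamed from spack.util.gpg to spack.gpg; the import hunks above (which drop "import spack.util.gpg" and add "import spack.gpg") indicate the module itself moved. Only the module path changes at each call site:

    # before
    import spack.util.gpg
    keys = spack.util.gpg.signing_keys()

    # after
    import spack.gpg
    keys = spack.gpg.signing_keys()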
@@ -852,7 +858,7 @@ def sign_specfile(key, force, specfile_path):
         raise NoOverwriteException(signed_specfile_path)

     key = select_signing_key(key)
-    spack.util.gpg.sign(key, specfile_path, signed_specfile_path, clearsign=True)
+    spack.gpg.sign(key, specfile_path, signed_specfile_path, clearsign=True)


 def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_dir, concurrency):
@@ -904,6 +910,7 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_dir, concurrency):
         url_util.join(cache_prefix, "index.json"),
         keep_original=False,
         extra_args={"ContentType": "application/json", "CacheControl": "no-cache"},
+        verify_ssl=spack.config.get("config:verify_ssl", True),
     )

     # Push the hash
@@ -912,6 +919,7 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_dir, concurrency):
         url_util.join(cache_prefix, "index.json.hash"),
         keep_original=False,
         extra_args={"ContentType": "text/plain", "CacheControl": "no-cache"},
+        verify_ssl=spack.config.get("config:verify_ssl", True),
     )


@@ -976,9 +984,13 @@ def _specs_from_cache_fallback(cache_prefix):
     def url_read_method(url):
         contents = None
         try:
-            _, _, spec_file = web_util.read_from_url(url)
+            _, _, spec_file = web_util.read_from_url(
+                url,
+                verify_ssl=spack.config.get("config:verify_ssl", True),
+                timeout=spack.config.get("config:connect_timeout", 10),
+            )
             contents = codecs.getreader("utf-8")(spec_file).read()
-        except (URLError, web_util.SpackWebError) as url_err:
+        except (URLError, web_util.WebError) as url_err:
             tty.error("Error reading specfile: {0}".format(url))
             tty.error(url_err)
         return contents
@@ -986,7 +998,9 @@ def url_read_method(url):
     try:
         file_list = [
             url_util.join(cache_prefix, entry)
-            for entry in web_util.list_url(cache_prefix)
+            for entry in web_util.list_url(
+                cache_prefix, verify_ssl=spack.config.get("config:verify_ssl", True)
+            )
             if entry.endswith("spec.json") or entry.endswith("spec.json.sig")
         ]
         read_fn = url_read_method
@@ -1084,7 +1098,9 @@ def generate_key_index(key_prefix, tmpdir=None):
     try:
         fingerprints = (
             entry[:-4]
-            for entry in web_util.list_url(key_prefix, recursive=False)
+            for entry in web_util.list_url(
+                key_prefix, recursive=False, verify_ssl=spack.config.get("config:verify_ssl", True)
+            )
             if entry.endswith(".pub")
         )
     except KeyError as inst:
@@ -1121,6 +1137,7 @@ def generate_key_index(key_prefix, tmpdir=None):
             url_util.join(key_prefix, "index.json"),
             keep_original=False,
             extra_args={"ContentType": "application/json"},
+            verify_ssl=spack.config.get("config:verify_ssl", True),
         )
     except Exception as err:
         msg = "Encountered problem pushing key index to {0}: {1}".format(key_prefix, err)
@@ -1364,10 +1381,18 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
     spackfile_path = os.path.join(cache_prefix, tarball_path_name(spec, ".spack"))
     remote_spackfile_path = url_util.join(out_url, os.path.relpath(spackfile_path, stage_dir))

+    fetch_method = (spack.config.get("config:url_fetch_method", "urllib"),)
+    verify_ssl = (spack.config.get("config:verify_ssl"),)
+    timeout = spack.config.get("config:connect_timeout", 10)
+
+    url_args = {"fetch_method": fetch_method, "verify_ssl": verify_ssl, "timeout": timeout}
+
     mkdirp(tarfile_dir)
-    if web_util.url_exists(remote_spackfile_path):
+    if web_util.url_exists(remote_spackfile_path, **url_args):
         if options.force:
-            web_util.remove_url(remote_spackfile_path)
+            web_util.remove_url(
+                remote_spackfile_path, verify_ssl=spack.config.get("config:verify_ssl", True)
+            )
         else:
             raise NoOverwriteException(url_util.format(remote_spackfile_path))

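One detail worth flagging in the hunk above: the trailing commas on the fetch_method and verify_ssl assignments make each value a one-element tuple, which url_args then forwards to every url_exists call:

    fetch_method = ("urllib",)  # the trailing comma builds the tuple ('urllib',)
    fetch_method = "urllib"     # without it, the plain string

If the helpers expect scalar values, dropping the two commas would align these with timeout, which is assigned without one.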
@@ -1387,12 +1412,13 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option

     # If force and exists, overwrite. Otherwise raise exception on collision.
     if options.force:
-        if web_util.url_exists(remote_specfile_path):
-            web_util.remove_url(remote_specfile_path)
-        if web_util.url_exists(remote_signed_specfile_path):
-            web_util.remove_url(remote_signed_specfile_path)
-    elif web_util.url_exists(remote_specfile_path) or web_util.url_exists(
-        remote_signed_specfile_path
+        verify_ssl = spack.config.get("config:verify_ssl", True)
+        if web_util.url_exists(remote_specfile_path, **url_args):
+            web_util.remove_url(remote_specfile_path, verify_ssl=verify_ssl)
+        if web_util.url_exists(remote_signed_specfile_path, **url_args):
+            web_util.remove_url(remote_signed_specfile_path, verify_ssl=verify_ssl)
+    elif web_util.url_exists(remote_specfile_path, **url_args) or web_util.url_exists(
+        remote_signed_specfile_path, **url_args
     ):
         raise NoOverwriteException(url_util.format(remote_specfile_path))

@@ -1426,11 +1452,17 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
         sign_specfile(key, options.force, specfile_path)

     # push tarball and signed spec json to remote mirror
-    web_util.push_to_url(spackfile_path, remote_spackfile_path, keep_original=False)
+    web_util.push_to_url(
+        spackfile_path,
+        remote_spackfile_path,
+        keep_original=False,
+        verify_ssl=spack.config.get("config:verify_ssl", True),
+    )
     web_util.push_to_url(
         signed_specfile_path if not options.unsigned else specfile_path,
         remote_signed_specfile_path if not options.unsigned else remote_specfile_path,
         keep_original=False,
+        verify_ssl=spack.config.get("config:verify_ssl", True),
     )

     # push the key to the build cache's _pgp directory so it can be
@@ -1534,7 +1566,7 @@ def try_verify(specfile_path):
     suppress = config.get("config:suppress_gpg_warnings", False)

     try:
-        spack.util.gpg.verify(specfile_path, suppress_warnings=suppress)
+        spack.gpg.verify(specfile_path, suppress_warnings=suppress)
     except Exception:
         return False

@@ -2005,7 +2037,7 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
     if os.path.exists("%s.asc" % specfile_path):
         suppress = config.get("config:suppress_gpg_warnings", False)
         try:
-            spack.util.gpg.verify("%s.asc" % specfile_path, specfile_path, suppress)
+            spack.gpg.verify("%s.asc" % specfile_path, specfile_path, suppress)
         except Exception:
             raise NoVerifyException(
                 "Spack was unable to verify package "
@@ -2211,7 +2243,8 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
         tty.debug("Verified SHA256 checksum of the build cache")

     # don't print long padded paths while extracting/relocating binaries
-    with spack.util.path.filter_padding():
+    padding = spack.config.get("config:install_tree:padded_length", None)
+    with spack.util.path.filter_padding(padding=padding):
         tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
         extract_tarball(spec, download_result, unsigned, force)
         spack.hooks.post_install(spec, False)
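filter_padding follows the same isolation pattern: the caller now looks up config:install_tree:padded_length and passes it in, rather than the context manager reading configuration itself. A hedged sketch of what the new signature might look like (the _output_filter helper is invented for illustration):

    import contextlib


    @contextlib.contextmanager
    def filter_padding(padding=None):
        # Hypothetical: only filter output when a padding length was configured.
        if not padding:
            yield
            return
        with _output_filter(padding):  # assumed helper that elides padded paths
            yield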
@@ -2250,12 +2283,20 @@ def try_direct_fetch(spec, mirrors=None):
            mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH, signed_specfile_name
        )
        try:
-            _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
+            _, _, fs = web_util.read_from_url(
+                buildcache_fetch_url_signed_json,
+                verify_ssl=spack.config.get("config:verify_ssl", True),
+                timeout=spack.config.get("config:connect_timeout", 10),
+            )
            specfile_is_signed = True
-        except (URLError, web_util.SpackWebError, HTTPError) as url_err:
+        except (URLError, web_util.WebError, HTTPError) as url_err:
            try:
-                _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
-            except (URLError, web_util.SpackWebError, HTTPError) as url_err_x:
+                _, _, fs = web_util.read_from_url(
+                    buildcache_fetch_url_json,
+                    verify_ssl=spack.config.get("config:verify_ssl", True),
+                    timeout=spack.config.get("config:connect_timeout", 10),
+                )
+            except (URLError, web_util.WebError, HTTPError) as url_err_x:
                tty.debug(
                    "Did not find {0} on {1}".format(
                        specfile_name, buildcache_fetch_url_signed_json
@@ -2359,10 +2400,19 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
        tty.debug("Finding public keys in {0}".format(url_util.format(fetch_url)))

        try:
-            _, _, json_file = web_util.read_from_url(keys_index)
+            _, _, json_file = web_util.read_from_url(
+                keys_index,
+                verify_ssl=spack.config.get("config:verify_ssl", True),
+                timeout=spack.config.get("config:connect_timeout", 10),
+            )
            json_index = sjson.load(codecs.getreader("utf-8")(json_file))
-        except (URLError, web_util.SpackWebError) as url_err:
-            if web_util.url_exists(keys_index):
+        except (URLError, web_util.WebError) as url_err:
+            if web_util.url_exists(
+                keys_index,
+                fetch_method=spack.config.get("config:url_fetch_method", "urllib"),
+                verify_ssl=spack.config.get("config:verify_ssl"),
+                timeout=spack.config.get("config:connect_timeout", 10),
+            ):
                err_msg = [
                    "Unable to find public keys in {0},",
                    " caught exception attempting to read from {1}.",
@@ -2393,7 +2443,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
            tty.debug("Found key {0}".format(fingerprint))
            if install:
                if trust:
-                    spack.util.gpg.trust(stage.save_filename)
+                    spack.gpg.trust(stage.save_filename)
                    tty.debug("Added this key to trusted keys.")
                else:
                    tty.debug(
@@ -2411,7 +2461,7 @@ def push_keys(*mirrors, **kwargs):
    tmpdir = kwargs.get("tmpdir")
    remove_tmpdir = False

-    keys = spack.util.gpg.public_keys(*(keys or []))
+    keys = spack.gpg.public_keys(*(keys or []))

    try:
        for mirror in mirrors:
@@ -2443,7 +2493,7 @@ def push_keys(*mirrors, **kwargs):
                export_target = os.path.join(prefix, filename)

                # Export public keys (private is set to False)
-                spack.util.gpg.export_keys(export_target, [fingerprint])
+                spack.gpg.export_keys(export_target, [fingerprint])

                # If mirror is local, the above export writes directly to the
                # mirror (export_target points directly to the mirror).
@@ -2452,7 +2502,10 @@ def push_keys(*mirrors, **kwargs):
                # uploaded to the mirror.
                if not keys_local:
                    spack.util.web.push_to_url(
-                        export_target, url_util.join(keys_url, filename), keep_original=False
+                        export_target,
+                        url_util.join(keys_url, filename),
+                        keep_original=False,
+                        verify_ssl=spack.config.get("config:verify_ssl", True),
                    )

        if regenerate_index:
@@ -2486,7 +2539,12 @@ def needs_rebuild(spec, mirror_url):
    # Only check for the presence of the json version of the spec. If the
    # mirror only has the json version, or doesn't have the spec at all, we
    # need to rebuild.
-    return not web_util.url_exists(specfile_path)
+    return not web_util.url_exists(
+        specfile_path,
+        fetch_method=spack.config.get("config:url_fetch_method", "urllib"),
+        verify_ssl=spack.config.get("config:verify_ssl"),
+        timeout=spack.config.get("config:connect_timeout", 10),
+    )


def check_specs_against_mirrors(mirrors, specs, output_file=None):
@@ -2652,7 +2710,11 @@ def get_remote_hash(self):
        # Failure to fetch index.json.hash is not fatal
        url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
        try:
-            response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
+            response = self.urlopen(
+                urllib.request.Request(url_index_hash, headers=self.headers),
+                verify_ssl=spack.config.get("config:verify_ssl", True),
+                timeout=spack.config.get("config:connect_timeout", 10),
+            )
        except urllib.error.URLError:
            return None

@@ -2674,7 +2736,11 @@ def conditional_fetch(self) -> FetchIndexResult:
        url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")

        try:
-            response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
+            response = self.urlopen(
+                urllib.request.Request(url_index, headers=self.headers),
+                verify_ssl=spack.config.get("config:verify_ssl", True),
+                timeout=spack.config.get("config:connect_timeout", 10),
+            )
        except urllib.error.URLError as e:
            raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e

@@ -2722,7 +2788,11 @@ def conditional_fetch(self) -> FetchIndexResult:
        }

        try:
-            response = self.urlopen(urllib.request.Request(url, headers=headers))
+            response = self.urlopen(
+                urllib.request.Request(url, headers=headers),
+                verify_ssl=spack.config.get("config:verify_ssl", True),
+                timeout=spack.config.get("config:connect_timeout", 10),
+            )
        except urllib.error.HTTPError as e:
            if e.getcode() == 304:
                # Not modified; that means fresh.
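For context, the HTTPError branch above is the tail end of an HTTP conditional fetch: the request presumably carries an If-None-Match header built from the cached ETag, and a 304 response means the cached index is still current. A standalone illustration of the pattern (header names and flow assumed, not this class's exact code):

    import urllib.error
    import urllib.request


    def fetch_if_changed(url, etag, timeout=10):
        # Ask the server for the body only if it no longer matches our ETag.
        request = urllib.request.Request(url, headers={"If-None-Match": f'"{etag}"'})
        try:
            response = urllib.request.urlopen(request, timeout=timeout)
        except urllib.error.HTTPError as e:
            if e.getcode() == 304:
                return None  # not modified; the cached copy is fresh
            raise
        return response.read()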
@@ -45,7 +45,8 @@ def spec_for_current_python() -> str:
def root_path() -> str:
    """Root of all the bootstrap related folders"""
    return spack.util.path.canonicalize_path(
-        spack.config.get("bootstrap:root", spack.paths.default_user_bootstrap_path)
+        spack.config.get("bootstrap:root", spack.paths.default_user_bootstrap_path),
+        replacements=spack.paths.path_replacements(),
    )

@@ -79,12 +80,16 @@ def spack_python_interpreter() -> Generator:

def _store_path() -> str:
    bootstrap_root_path = root_path()
-    return spack.util.path.canonicalize_path(os.path.join(bootstrap_root_path, "store"))
+    return spack.util.path.canonicalize_path(
+        os.path.join(bootstrap_root_path, "store"), replacements=spack.paths.path_replacements()
+    )


def _config_path() -> str:
    bootstrap_root_path = root_path()
-    return spack.util.path.canonicalize_path(os.path.join(bootstrap_root_path, "config"))
+    return spack.util.path.canonicalize_path(
+        os.path.join(bootstrap_root_path, "config"), replacements=spack.paths.path_replacements()
+    )


@contextlib.contextmanager
@@ -92,7 +92,9 @@ class Bootstrapper:
    def __init__(self, conf: ConfigDictionary) -> None:
        self.conf = conf
        self.name = conf["name"]
-        self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])
+        self.metadata_dir = spack.util.path.canonicalize_path(
+            conf["metadata"], replacements=spack.paths.path_replacements()
+        )

        # Promote (relative) paths to file urls
        url = conf["info"]["url"]
@@ -585,7 +587,9 @@ def bootstrapping_sources(scope: Optional[str] = None):
    list_of_sources = []
    for entry in source_configs:
        current = copy.copy(entry)
-        metadata_dir = spack.util.path.canonicalize_path(entry["metadata"])
+        metadata_dir = spack.util.path.canonicalize_path(
+            entry["metadata"], replacements=spack.paths.path_replacements()
+        )
        metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
        with open(metadata_yaml, encoding="utf-8") as stream:
            current.update(spack.util.spack_yaml.load(stream))
@@ -16,6 +16,7 @@
from llnl.util import tty

import spack.environment
+import spack.paths
import spack.tengine
import spack.util.cpus
import spack.util.executable
@@ -50,7 +51,8 @@ def environment_root(cls) -> pathlib.Path:
        environment_dir = f"{python_part}-{arch_part}-{interpreter_part}"
        return pathlib.Path(
            spack.util.path.canonicalize_path(
-                os.path.join(bootstrap_root_path, "environments", environment_dir)
+                os.path.join(bootstrap_root_path, "environments", environment_dir),
+                replacements=spack.paths.path_replacements(),
            )
        )

@@ -137,7 +139,9 @@ def _install_with_depfile(self) -> None:
            "-C",
            str(self.environment_root()),
            "-j",
-            str(spack.util.cpus.determine_number_of_jobs(parallel=True)),
+            str(
+                spack.util.cpus.determine_number_of_jobs(parallel=True, config=spack.config.CONFIG)
+            ),
            **kwargs,
        )

@@ -10,6 +10,7 @@
import llnl.util.tty as tty

import spack.builder
+import spack.config
from spack.build_environment import SPACK_NO_PARALLEL_MAKE
from spack.directives import build_system, extends, maintainers
from spack.package_base import PackageBase
@@ -93,7 +94,7 @@ def install(self, pkg, spec, prefix):
            "--copy",
            "-i",
            "-j",
-            str(determine_number_of_jobs(parallel=parallel)),
+            str(determine_number_of_jobs(parallel=parallel, config=spack.config.CONFIG)),
            "--",
            os.getcwd(),
        ]
@@ -26,7 +26,7 @@ def misc_cache_location():
    providers and for which packages provide which tags.
    """
    path = spack.config.get("config:misc_cache", spack.paths.default_misc_cache_path)
-    return spack.util.path.canonicalize_path(path)
+    return spack.util.path.canonicalize_path(path, replacements=spack.paths.path_replacements())


def _misc_cache():
@@ -49,7 +49,7 @@ def fetch_cache_location():
    path = spack.config.get("config:source_cache")
    if not path:
        path = spack.paths.default_fetch_cache_path
-    path = spack.util.path.canonicalize_path(path)
+    path = spack.util.path.canonicalize_path(path, replacements=spack.paths.path_replacements())
    return path

@@ -31,13 +31,13 @@
import spack.binary_distribution as bindist
import spack.config as cfg
import spack.environment as ev
+import spack.gpg
import spack.main
import spack.mirror
import spack.paths
import spack.repo
import spack.spec
import spack.util.git
-import spack.util.gpg as gpg_util
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web as web_util
@@ -1454,13 +1454,13 @@ def can_sign_binaries():
    """Utility method to determine if this spack instance is capable of
    signing binary packages. This is currently only possible if the
    spack gpg keystore contains exactly one secret key."""
-    return len(gpg_util.signing_keys()) == 1
+    return len(spack.gpg.signing_keys()) == 1


def can_verify_binaries():
    """Utility method to determin if this spack instance is capable (at
    least in theory) of verifying signed binaries."""
-    return len(gpg_util.public_keys()) >= 1
+    return len(spack.gpg.public_keys()) >= 1


def _push_mirror_contents(input_spec, sign_binaries, mirror_url):
@@ -1756,7 +1756,11 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):

    gpg_path = None
    if gpg_url:
-        gpg_path = web_util.fetch_url_text(gpg_url, dest_dir=os.path.join(work_dir, "_pgp"))
+        gpg_path = web_util.fetch_url_text(
+            gpg_url,
+            dest_dir=os.path.join(work_dir, "_pgp"),
+            fetch_method=spack.config.get("config:url_fetch_method"),
+        )
        rel_gpg_path = gpg_path.replace(work_dir, "").lstrip(os.path.sep)

    lock_file = fs.find(work_dir, "spack.lock")[0]
@@ -2113,7 +2117,11 @@ def write_broken_spec(url, pkg_name, stack_name, job_url, pipeline_url, spec_dic
        with open(file_path, "w") as fd:
            fd.write(syaml.dump(broken_spec_details))
        web_util.push_to_url(
-            file_path, url, keep_original=False, extra_args={"ContentType": "text/plain"}
+            file_path,
+            url,
+            keep_original=False,
+            extra_args={"ContentType": "text/plain"},
+            verify_ssl=spack.config.get("config:verify_ssl", True),
        )
    except Exception as err:
        # If there is an S3 error (e.g., access denied or connection
@@ -2130,8 +2138,12 @@ def read_broken_spec(broken_spec_url):
    object.
    """
    try:
-        _, _, fs = web_util.read_from_url(broken_spec_url)
-    except (URLError, web_util.SpackWebError, HTTPError):
+        _, _, fs = web_util.read_from_url(
+            broken_spec_url,
+            verify_ssl=cfg.get("config:verify_ssl", True),
+            timeout=cfg.get("config:connect_timeout", 10),
+        )
+    except (URLError, web_util.WebError, HTTPError):
        tty.warn("Unable to read broken spec from {0}".format(broken_spec_url))
        return None

@@ -18,6 +18,7 @@
import spack.config
import spack.main
import spack.mirror
+import spack.paths
import spack.spec
import spack.stage
import spack.util.path
@@ -191,7 +192,9 @@ def _root(args):

    root = spack.config.get("bootstrap:root", default=None, scope=args.scope)
    if root:
-        root = spack.util.path.canonicalize_path(root)
+        root = spack.util.path.canonicalize_path(
+            root, replacements=spack.paths.path_replacements()
+        )
    print(root)

@@ -335,7 +338,9 @@ def _add(args):
        raise RuntimeError(msg.format(args.name))

    # Check that the metadata file exists
-    metadata_dir = spack.util.path.canonicalize_path(args.metadata_dir)
+    metadata_dir = spack.util.path.canonicalize_path(
+        args.metadata_dir, replacements=spack.paths.path_replacements()
+    )
    if not os.path.exists(metadata_dir) or not os.path.isdir(metadata_dir):
        raise RuntimeError('the directory "{0}" does not exist'.format(args.metadata_dir))

@@ -384,7 +389,9 @@ def _remove(args):


def _mirror(args):
-    mirror_dir = spack.util.path.canonicalize_path(os.path.join(args.root_dir, LOCAL_MIRROR_DIR))
+    mirror_dir = spack.util.path.canonicalize_path(
+        os.path.join(args.root_dir, LOCAL_MIRROR_DIR), replacements=spack.paths.path_replacements()
+    )

    # TODO: Here we are adding gnuconfig manually, but this can be fixed
    # TODO: as soon as we have an option to add to a mirror all the possible
@@ -433,9 +440,24 @@ def write_metadata(subdir, metadata):
        instructions += cmd.format("local-sources", rel_directory)
    if args.binary_packages:
        abs_directory, rel_directory = write_metadata(subdir="binaries", metadata=BINARY_METADATA)
-        shutil.copy(spack.util.path.canonicalize_path(CLINGO_JSON), abs_directory)
-        shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory)
-        shutil.copy(spack.util.path.canonicalize_path(PATCHELF_JSON), abs_directory)
+        shutil.copy(
+            spack.util.path.canonicalize_path(
+                CLINGO_JSON, replacements=spack.paths.path_replacements()
+            ),
+            abs_directory,
+        )
+        shutil.copy(
+            spack.util.path.canonicalize_path(
+                GNUPG_JSON, replacements=spack.paths.path_replacements()
+            ),
+            abs_directory,
+        )
+        shutil.copy(
+            spack.util.path.canonicalize_path(
+                PATCHELF_JSON, replacements=spack.paths.path_replacements()
+            ),
+            abs_directory,
+        )
        instructions += cmd.format("local-binaries", rel_directory)
    print(instructions)

@@ -918,7 +918,12 @@ def copy_buildcache_file(src_url, dest_url, local_path=None):
        try:
            temp_stage.create()
            temp_stage.fetch()
-            web_util.push_to_url(local_path, dest_url, keep_original=True)
+            web_util.push_to_url(
+                local_path,
+                dest_url,
+                keep_original=True,
+                verify_ssl=spack.config.get("config:verify_ssl", True),
+            )
        except spack.error.FetchError as e:
            # Expected, since we have to try all the possible extensions
            tty.debug("no such file: {0}".format(src_url))
@@ -11,6 +11,7 @@
from llnl.util import tty

import spack.cmd
+import spack.config
import spack.repo
import spack.spec
import spack.stage
@@ -275,4 +276,4 @@ def add_versions_to_package(pkg: PackageBase, version_lines: str):
    tty.msg(f"Open {filename} to review the additions.")

    if sys.stdout.isatty():
-        editor(filename)
+        editor(filename, debug=spack.config.get("config:debug"))
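The editor() calls here and in the config_edit, create, and edit_package hunks below all gain an explicit debug keyword sourced from config:debug, suggesting spack.util.editor no longer consults spack.config itself. Assumed shape of the changed helper (a sketch, not the actual implementation):

    def editor(path, debug=False):
        # Hypothetical: the debug flag now arrives as a parameter; when set,
        # the helper might echo the editor command before launching it.
        ...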
@@ -16,9 +16,9 @@
import spack.cmd.buildcache as buildcache
import spack.config as cfg
import spack.environment as ev
+import spack.gpg
import spack.hash_types as ht
import spack.mirror
-import spack.util.gpg as gpg_util
import spack.util.timer as timer
import spack.util.url as url_util
import spack.util.web as web_util
@@ -305,7 +305,7 @@ def ci_rebuild(args):
    # Fail early if signing is required but we don't have a signing key
    sign_binaries = require_signing is not None and require_signing.lower() == "true"
    if sign_binaries and not spack_ci.can_sign_binaries():
-        gpg_util.list(False, True)
+        spack.gpg.list(False, True)
        tty.die("SPACK_REQUIRE_SIGNING=True => spack must have exactly one signing key")

    # Construct absolute paths relative to current $CI_PROJECT_DIR
@@ -730,10 +730,17 @@ def ci_rebuild(args):
            broken_specs_url = ci_config["broken-specs-url"]
            just_built_hash = job_spec.dag_hash()
            broken_spec_path = url_util.join(broken_specs_url, just_built_hash)
-            if web_util.url_exists(broken_spec_path):
+            if web_util.url_exists(
+                broken_spec_path,
+                fetch_method=cfg.get("config:url_fetch_method", "urllib"),
+                verify_ssl=cfg.get("config:verify_ssl"),
+                timeout=cfg.get("config:connect_timeout", 10),
+            ):
                tty.msg("Removing {0} from the list of broken specs".format(broken_spec_path))
                try:
-                    web_util.remove_url(broken_spec_path)
+                    web_util.remove_url(
+                        broken_spec_path, verify_ssl=cfg.get("config:verify_ssl", True)
+                    )
                except Exception as err:
                    # If there is an S3 error (e.g., access denied or connection
                    # error), the first non boto-specific class in the exception
@@ -14,6 +14,7 @@
import spack.caches
import spack.cmd.test
import spack.config
+import spack.paths
import spack.repo
import spack.stage
import spack.store
@@ -133,7 +134,9 @@ def clean(parser, args):
        remove_python_cache()

    if args.bootstrap:
-        bootstrap_prefix = spack.util.path.canonicalize_path(spack.config.get("bootstrap:root"))
+        bootstrap_prefix = spack.util.path.canonicalize_path(
+            spack.config.get("bootstrap:root"), replacements=spack.paths.path_replacements()
+        )
        msg = 'Removing bootstrapped software and configuration in "{0}"'
        tty.msg(msg.format(bootstrap_prefix))
        llnl.util.filesystem.remove_directory_contents(bootstrap_prefix)
@@ -180,7 +180,7 @@ def config_edit(args):
    if args.print_file:
        print(config_file)
    else:
-        editor(config_file)
+        editor(config_file, debug=spack.config.get("config:debug"))


def config_list(args):
@@ -11,6 +11,7 @@
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp

+import spack.config
import spack.repo
import spack.stage
import spack.util.web
@@ -986,4 +987,4 @@ def create(parser, args):

    # Optionally open up the new package file in your $EDITOR
    if not args.skip_editor:
-        editor(pkg_path)
+        editor(pkg_path, debug=spack.config.get("config:debug"))
@@ -8,6 +8,7 @@
import llnl.util.tty as tty

import spack.cmd
+import spack.paths
import spack.spec
import spack.util.path
import spack.version
@@ -55,7 +56,9 @@ def develop(parser, args):
        # download all dev specs
        for name, entry in env.dev_specs.items():
            path = entry.get("path", name)
-            abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
+            abspath = spack.util.path.canonicalize_path(
+                path, default_wd=env.path, replacements=spack.paths.path_replacements()
+            )

            if os.path.exists(abspath):
                msg = "Skipping developer download of %s" % entry["spec"]
@@ -86,7 +89,9 @@ def develop(parser, args):

    # default path is relative path to spec.name
    path = args.path or spec.name
-    abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
+    abspath = spack.util.path.canonicalize_path(
+        path, default_wd=env.path, replacements=spack.paths.path_replacements()
+    )

    # clone default: only if the path doesn't exist
    clone = args.clone
@@ -9,6 +9,7 @@
import llnl.util.tty as tty

import spack.cmd
+import spack.config
import spack.paths
import spack.repo
from spack.spec import Spec
@@ -45,7 +46,7 @@ def edit_package(name, repo_path, namespace):
    else:
        raise spack.repo.UnknownPackageError(spec.name)

-    editor(path)
+    editor(path, debug=spack.config.get("config:debug"))


def setup_parser(subparser):
@@ -7,9 +7,9 @@
import os

import spack.binary_distribution
+import spack.gpg
import spack.mirror
import spack.paths
-import spack.util.gpg
import spack.util.url
from spack.cmd.common import arguments

@@ -129,40 +129,38 @@ def setup_parser(subparser):
def gpg_create(args):
    """create a new key"""
    if args.export or args.secret:
-        old_sec_keys = spack.util.gpg.signing_keys()
+        old_sec_keys = spack.gpg.signing_keys()

    # Create the new key
-    spack.util.gpg.create(
-        name=args.name, email=args.email, comment=args.comment, expires=args.expires
-    )
+    spack.gpg.create(name=args.name, email=args.email, comment=args.comment, expires=args.expires)
    if args.export or args.secret:
-        new_sec_keys = set(spack.util.gpg.signing_keys())
+        new_sec_keys = set(spack.gpg.signing_keys())
        new_keys = new_sec_keys.difference(old_sec_keys)

        if args.export:
-            spack.util.gpg.export_keys(args.export, new_keys)
+            spack.gpg.export_keys(args.export, new_keys)
        if args.secret:
-            spack.util.gpg.export_keys(args.secret, new_keys, secret=True)
+            spack.gpg.export_keys(args.secret, new_keys, secret=True)


def gpg_export(args):
    """export a gpg key, optionally including secret key"""
    keys = args.keys
    if not keys:
-        keys = spack.util.gpg.signing_keys()
-    spack.util.gpg.export_keys(args.location, keys, args.secret)
+        keys = spack.gpg.signing_keys()
+    spack.gpg.export_keys(args.location, keys, args.secret)


def gpg_list(args):
    """list keys available in the keyring"""
-    spack.util.gpg.list(args.trusted, args.signing)
+    spack.gpg.list(args.trusted, args.signing)


def gpg_sign(args):
    """sign a package"""
    key = args.key
    if key is None:
-        keys = spack.util.gpg.signing_keys()
+        keys = spack.gpg.signing_keys()
        if len(keys) == 1:
            key = keys[0]
        elif not keys:
@@ -173,12 +171,12 @@ def gpg_sign(args):
    if not output:
        output = args.spec[0] + ".asc"
    # TODO: Support the package format Spack creates.
-    spack.util.gpg.sign(key, " ".join(args.spec), output, args.clearsign)
+    spack.gpg.sign(key, " ".join(args.spec), output, args.clearsign)


def gpg_trust(args):
    """add a key to the keyring"""
-    spack.util.gpg.trust(args.keyfile)
+    spack.gpg.trust(args.keyfile)


def gpg_init(args):
@@ -191,12 +189,12 @@ def gpg_init(args):
        for filename in filenames:
            if not filename.endswith(".key"):
                continue
-            spack.util.gpg.trust(os.path.join(root, filename))
+            spack.gpg.trust(os.path.join(root, filename))


def gpg_untrust(args):
    """remove a key from the keyring"""
-    spack.util.gpg.untrust(args.signing, *args.keys)
+    spack.gpg.untrust(args.signing, *args.keys)


def gpg_verify(args):
@@ -205,7 +203,7 @@ def gpg_verify(args):
    signature = args.signature
    if signature is None:
        signature = args.spec[0] + ".asc"
-    spack.util.gpg.verify(signature, " ".join(args.spec))
+    spack.gpg.verify(signature, " ".join(args.spec))


def gpg_publish(args):
@@ -495,7 +495,9 @@ def mirror_destroy(args):
    elif args.mirror_url:
        mirror_url = args.mirror_url

-    web_util.remove_url(mirror_url, recursive=True)
+    web_util.remove_url(
+        mirror_url, recursive=True, verify_ssl=spack.config.get("config:verify_ssl", True)
+    )


def mirror(parser, args):
@@ -12,10 +12,10 @@
from llnl.util.tty.colify import colify

import spack.cmd
+import spack.package_hash as ph
import spack.paths
import spack.repo
import spack.util.executable as exe
-import spack.util.package_hash as ph
from spack.cmd.common import arguments

description = "query packages associated with particular git revisions"
@@ -9,6 +9,7 @@
import llnl.util.tty as tty

import spack.config
+import spack.paths
import spack.repo
import spack.util.path
from spack.cmd.common import arguments
|
|||||||
path = args.path
|
path = args.path
|
||||||
|
|
||||||
# real_path is absolute and handles substitution.
|
# real_path is absolute and handles substitution.
|
||||||
canon_path = spack.util.path.canonicalize_path(path)
|
canon_path = spack.util.path.canonicalize_path(
|
||||||
|
path, replacements=spack.paths.path_replacements()
|
||||||
|
)
|
||||||
|
|
||||||
# check if the path exists
|
# check if the path exists
|
||||||
if not os.path.exists(canon_path):
|
if not os.path.exists(canon_path):
|
||||||
@ -115,9 +118,13 @@ def repo_remove(args):
|
|||||||
namespace_or_path = args.namespace_or_path
|
namespace_or_path = args.namespace_or_path
|
||||||
|
|
||||||
# If the argument is a path, remove that repository from config.
|
# If the argument is a path, remove that repository from config.
|
||||||
canon_path = spack.util.path.canonicalize_path(namespace_or_path)
|
canon_path = spack.util.path.canonicalize_path(
|
||||||
|
namespace_or_path, replacements=spack.paths.path_replacements()
|
||||||
|
)
|
||||||
for repo_path in repos:
|
for repo_path in repos:
|
||||||
repo_canon_path = spack.util.path.canonicalize_path(repo_path)
|
repo_canon_path = spack.util.path.canonicalize_path(
|
||||||
|
repo_path, replacements=spack.paths.path_replacements()
|
||||||
|
)
|
||||||
if canon_path == repo_canon_path:
|
if canon_path == repo_canon_path:
|
||||||
repos.remove(repo_path)
|
repos.remove(repo_path)
|
||||||
spack.config.set("repos", repos, args.scope)
|
spack.config.set("repos", repos, args.scope)
|
||||||
|
@@ -11,9 +11,9 @@

import spack
import spack.config
+import spack.gpg
import spack.paths
import spack.util.git
-import spack.util.gpg
from spack.cmd.common import arguments
from spack.util.spack_yaml import syaml_dict

@@ -76,7 +76,7 @@ def tutorial(parser, args):
    spack.config.set("mirrors", mirror_config, scope="user")

    tty.msg("Ensuring that we trust tutorial binaries", f"spack gpg trust {tutorial_key}")
-    spack.util.gpg.trust(tutorial_key)
+    spack.gpg.trust(tutorial_key)

    # Note that checkout MUST be last. It changes Spack under our feet.
    # If you don't put this last, you'll get import errors for the code
|
|||||||
import spack.config
|
import spack.config
|
||||||
import spack.environment
|
import spack.environment
|
||||||
import spack.error
|
import spack.error
|
||||||
|
import spack.paths
|
||||||
import spack.platforms
|
import spack.platforms
|
||||||
import spack.repo
|
import spack.repo
|
||||||
import spack.spec
|
import spack.spec
|
||||||
@ -91,7 +92,9 @@ def concretize_develop(self, spec):
|
|||||||
if not dev_info:
|
if not dev_info:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
path = spack.util.path.canonicalize_path(dev_info["path"], default_wd=env.path)
|
path = spack.util.path.canonicalize_path(
|
||||||
|
dev_info["path"], default_wd=env.path, replacements=spack.paths.path_replacements()
|
||||||
|
)
|
||||||
|
|
||||||
if "dev_path" in spec.variants:
|
if "dev_path" in spec.variants:
|
||||||
assert spec.variants["dev_path"].value == path
|
assert spec.variants["dev_path"].value == path
|
||||||
@ -1451,7 +1451,9 @@ def fetch_remote_configs(url: str, dest_dir: str, skip_existing: bool = True) ->
     def _fetch_file(url):
         raw = raw_github_gitlab_url(url)
         tty.debug("Reading config from url {0}".format(raw))
-        return web_util.fetch_url_text(raw, dest_dir=dest_dir)
+        return web_util.fetch_url_text(
+            raw, dest_dir=dest_dir, fetch_method=CONFIG.get("config:url_fetch_method")
+        )

     if not url:
         raise ConfigFileError("Cannot retrieve configuration without a URL")
@ -90,7 +90,8 @@
 def env_root_path():
     """Override default root path if the user specified it"""
     return spack.util.path.canonicalize_path(
-        spack.config.get("config:environments_root", default=default_env_path)
+        spack.config.get("config:environments_root", default=default_env_path),
+        replacements=spack.paths.path_replacements(),
     )


@ -478,7 +479,9 @@ def __init__(
     ):
         self.base = base_path
         self.raw_root = root
-        self.root = spack.util.path.canonicalize_path(root, default_wd=base_path)
+        self.root = spack.util.path.canonicalize_path(
+            root, default_wd=base_path, replacements=spack.paths.path_replacements()
+        )
         self.projections = projections
         self.select = select
         self.exclude = exclude
@ -493,7 +496,9 @@ def exclude_fn(self, spec):

     def update_root(self, new_path):
         self.raw_root = new_path
-        self.root = spack.util.path.canonicalize_path(new_path, default_wd=self.base)
+        self.root = spack.util.path.canonicalize_path(
+            new_path, default_wd=self.base, replacements=spack.paths.path_replacements()
+        )

     def __eq__(self, other):
         return all(
@ -985,7 +990,9 @@ def included_config_scopes(self):
         missing = []
         for i, config_path in enumerate(reversed(includes)):
             # allow paths to contain spack config/environment variables, etc.
-            config_path = substitute_path_variables(config_path)
+            config_path = substitute_path_variables(
+                config_path, replacements=spack.paths.path_replacements()
+            )

             include_url = urllib.parse.urlparse(config_path)

@ -1296,7 +1303,9 @@ def develop(self, spec: Spec, path: str, clone: bool = False) -> bool:
         # to be created, then copy it afterwards somewhere else. It would be
         # better if we can create the `source_path` directly into its final
         # destination.
-        abspath = spack.util.path.canonicalize_path(path, default_wd=self.path)
+        abspath = spack.util.path.canonicalize_path(
+            path, default_wd=self.path, replacements=spack.paths.path_replacements()
+        )
         pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
         # We construct a package class ourselves, rather than asking for
         # Spec.package, since Spec only allows this when it is concrete
@ -16,6 +16,7 @@

 import spack.config
 import spack.error
+import spack.paths
 import spack.util.path

 _extension_regexp = re.compile(r"spack-(\w[-\w]*)$")
@ -109,7 +110,8 @@ def ensure_package_creation(name):
 def get_extension_paths():
     """Return the list of canonicalized extension paths from config:extensions."""
     extension_paths = spack.config.get("config:extensions") or []
-    paths = [spack.util.path.canonicalize_path(p) for p in extension_paths]
+    r = spack.paths.path_replacements()
+    paths = [spack.util.path.canonicalize_path(p, replacements=r) for p in extension_paths]
     return paths
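Note how get_extension_paths builds the replacement table once and reuses it across the comprehension, since path_replacements() re-evaluates host and environment lookups on each call. The same hoisting pattern, sketched with illustrative inputs:

import spack.paths
import spack.util.path

dirs = ["$spack/var/spack/extensions", "~/spack-extensions"]  # example inputs
r = spack.paths.path_replacements()  # build once, outside the loop
paths = [spack.util.path.canonicalize_path(d, replacements=r) for d in dirs]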
@ -301,7 +301,12 @@ def fetch(self):
         url = None
         errors = []
         for url in self.candidate_urls:
-            if not web_util.url_exists(url):
+            if not web_util.url_exists(
+                url,
+                fetch_method=spack.config.get("config:url_fetch_method", "urllib"),
+                verify_ssl=spack.config.get("config:verify_ssl"),
+                timeout=spack.config.get("config:connect_timeout", 10),
+            ):
                 tty.debug("URL does not exist: " + url)
                 continue

@ -338,8 +343,12 @@ def _fetch_urllib(self, url):

         # Run urllib but grab the mime type from the http headers
         try:
-            url, headers, response = web_util.read_from_url(url)
-        except web_util.SpackWebError as e:
+            url, headers, response = web_util.read_from_url(
+                url,
+                verify_ssl=spack.config.get("config:verify_ssl", True),
+                timeout=spack.config.get("config:connect_timeout", 10),
+            )
+        except web_util.WebError as e:
             # clean up archive on failure.
             if self.archive_file:
                 os.remove(self.archive_file)
@ -385,7 +394,15 @@ def _fetch_curl(self, url):

         timeout = self.extra_options.get("timeout")

-        base_args = web_util.base_curl_fetch_args(url, timeout)
+        connect_timeout = spack.config.get("config:connect_timeout", 10)
+        if timeout:
+            timeout = max(int(timeout), int(connect_timeout))
+        else:
+            timeout = int(connect_timeout)
+
+        base_args = web_util.base_curl_fetch_args(
+            url, timeout=timeout, verify_ssl=spack.config.get("config:verify_ssl")
+        )
         curl_args = save_args + base_args + cookie_args

         # Run curl but grab the mime type from the http headers
@ -403,7 +420,7 @@ def _fetch_curl(self, url):

         try:
             web_util.check_curl_code(curl.returncode)
-        except spack.error.FetchError as err:
+        except web_util.WebError as err:
             raise spack.fetch_strategy.FailedDownloadError(url, str(err))

         self._check_headers(headers)
@ -463,7 +480,10 @@ def archive(self, destination):
             raise NoArchiveFileError("Cannot call archive() before fetching.")

         web_util.push_to_url(
-            self.archive_file, url_util.path_to_file_url(destination), keep_original=True
+            self.archive_file,
+            url_util.path_to_file_url(destination),
+            keep_original=True,
+            verify_ssl=spack.config.get("config:verify_ssl", True),
         )

     @_needs_stage
@ -1330,7 +1350,11 @@ def fetch(self):
         basename = os.path.basename(parsed_url.path)

         with working_dir(self.stage.path):
-            _, headers, stream = web_util.read_from_url(self.url)
+            _, headers, stream = web_util.read_from_url(
+                self.url,
+                verify_ssl=spack.config.get("config:verify_ssl", True),
+                timeout=spack.config.get("config:connect_timeout", 10),
+            )

             with open(basename, "wb") as f:
                 shutil.copyfileobj(stream, f)
@ -1377,7 +1401,11 @@ def fetch(self):
         basename = os.path.basename(parsed_url.path)

         with working_dir(self.stage.path):
-            _, headers, stream = web_util.read_from_url(self.url)
+            _, headers, stream = web_util.read_from_url(
+                self.url,
+                verify_ssl=spack.config.get("config:verify_ssl", True),
+                timeout=spack.config.get("config:connect_timeout", 10),
+            )

             with open(basename, "wb") as f:
                 shutil.copyfileobj(stream, f)
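url_exists and read_from_url now receive fetch behavior from the caller instead of importing spack.config inside spack.util.web. A hedged sketch of a caller under the new signatures shown above (the URL is illustrative):

import spack.config
import spack.util.web as web_util

exists = web_util.url_exists(
    "https://example.com/pkg-1.0.tar.gz",  # illustrative URL
    fetch_method=spack.config.get("config:url_fetch_method", "urllib"),
    verify_ssl=spack.config.get("config:verify_ssl", True),
    timeout=spack.config.get("config:connect_timeout", 10),
)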
@ -8,10 +8,8 @@
 import os
 import re

-import spack.error
-import spack.paths
+import spack.util.error
 import spack.util.executable
-import spack.version

 #: Executable instance for "gpg", initialized lazily
 GPG = None
@ -29,7 +27,7 @@ def clear():
     GPG, GPGCONF, SOCKET_DIR, GNUPGHOME = None, None, None, None


-def init(gnupghome=None, force=False):
+def init(gnupghome=None, force=False, gpg_path=None):
     """Initialize the global objects in the module, if not set.

     When calling any gpg executable, the GNUPGHOME environment
@ -56,7 +54,7 @@ def init(gnupghome=None, force=False):
         return

     # Set the value of GNUPGHOME to be used in this module
-    GNUPGHOME = gnupghome or os.getenv("SPACK_GNUPGHOME") or spack.paths.gpg_path
+    GNUPGHOME = gnupghome or os.getenv("SPACK_GNUPGHOME") or gpg_path

     # Set the executable objects for "gpg" and "gpgconf"
     with spack.bootstrap.ensure_bootstrap_configuration():
@ -165,7 +163,7 @@ def _get_unimported_public_keys(output):
     return keys


-class SpackGPGError(spack.error.SpackError):
+class SpackGPGError(spack.util.error.UtilityError):
     """Class raised when GPG errors are detected."""


@ -334,11 +332,11 @@ def _verify_exe_or_raise(exe):
         raise SpackGPGError(msg)

     output = exe("--version", output=str)
-    match = re.search(r"^gpg(conf)? \(GnuPG\) (.*)$", output, re.M)
+    match = re.search(r"^gpg(conf)? \(GnuPG\) (\d+).*$", output, re.M)
     if not match:
         raise SpackGPGError('Could not determine "{0}" version'.format(exe.name))

-    if spack.version.Version(match.group(2)) < spack.version.Version("2"):
+    if int(match.group(2)) < 2:
         raise SpackGPGError(msg)
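The init() signature change removes the last spack.paths dependency from the gpg module: callers that want the historical default keyring location now pass it in themselves. A minimal usage sketch, mirroring the updated tests later in this diff:

import spack.gpg
import spack.paths

# Force re-initialization and supply the default GNUPGHOME explicitly;
# previously the module fell back to spack.paths.gpg_path on its own.
spack.gpg.init(force=True, gpg_path=spack.paths.gpg_path)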
@ -9,6 +9,7 @@
 from llnl.util.filesystem import mkdirp
 from llnl.util.symlink import symlink

+import spack.config
 import spack.util.editor as ed


@ -39,7 +40,7 @@ def set_up_license(pkg):
             write_license_file(pkg, license_path)

             # use spack.util.executable so the editor does not hang on return here
-            ed.editor(license_path, exec_fn=ed.executable)
+            ed.editor(license_path, exec_fn=ed.executable, debug=spack.config.get("config:debug"))
         else:
             # Use already existing license file
             tty.msg("Found already existing license %s" % license_path)
@ -5,18 +5,18 @@

 import os

-import spack.util.file_permissions as fp
+import spack.package_prefs as pp


 def post_install(spec, explicit=None):
     if not spec.external:
-        fp.set_permissions_by_spec(spec.prefix, spec)
+        pp.set_permissions_by_spec(spec.prefix, spec)

         # os.walk explicitly set not to follow links
         for root, dirs, files in os.walk(spec.prefix, followlinks=False):
             for d in dirs:
                 if not os.path.islink(os.path.join(root, d)):
-                    fp.set_permissions_by_spec(os.path.join(root, d), spec)
+                    pp.set_permissions_by_spec(os.path.join(root, d), spec)
             for f in files:
                 if not os.path.islink(os.path.join(root, f)):
-                    fp.set_permissions_by_spec(os.path.join(root, f), spec)
+                    pp.set_permissions_by_spec(os.path.join(root, f), spec)
@ -91,7 +91,8 @@ def get_test_stage_dir():
         the default test stage path
     """
     return spack.util.path.canonicalize_path(
-        spack.config.get("config:test_stage", spack.paths.default_test_path)
+        spack.config.get("config:test_stage", spack.paths.default_test_path),
+        replacements=spack.paths.path_replacements(),
     )
@ -491,7 +491,8 @@ def _process_binary_cache_tarball(

    tty.msg(f"Extracting {package_id(pkg)} from binary cache")

-    with timer.measure("install"), spack.util.path.filter_padding():
+    padding = spack.config.get("config:install_tree:padded_length", None)
+    with timer.measure("install"), spack.util.path.filter_padding(padding=padding):
        binary_distribution.extract_tarball(
            pkg.spec, download_result, unsigned=unsigned, force=False, timer=timer
        )
@ -2492,7 +2493,8 @@ def build_process(pkg: "spack.package_base.PackageBase", install_args: dict) ->
    installer = BuildProcessInstaller(pkg, install_args)

    # don't print long padded paths in executable debug output.
-    with spack.util.path.filter_padding():
+    padding = spack.config.get("config:install_tree:padded_length", None)
+    with spack.util.path.filter_padding(padding=padding):
        return installer.run()
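Both call sites now read the padding length from configuration themselves instead of letting filter_padding consult spack.config internally. A hedged sketch of the new pattern (the with-block body is elided):

import spack.config
import spack.util.path

padding = spack.config.get("config:install_tree:padded_length", None)
with spack.util.path.filter_padding(padding=padding):
    pass  # run install/build steps whose log output should hide padded paths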
@ -30,6 +30,7 @@
 import spack.fetch_strategy
 import spack.mirror
 import spack.oci.image
+import spack.paths
 import spack.spec
 import spack.util.path
 import spack.util.spack_json as sjson
@ -51,7 +52,11 @@ def _url_or_path_to_url(url_or_path: str) -> str:
         return url_or_path

     # Otherwise we interpret it as path, and we should promote it to file:// URL.
-    return url_util.path_to_file_url(spack.util.path.canonicalize_path(url_or_path))
+    return url_util.path_to_file_url(
+        spack.util.path.canonicalize_path(
+            url_or_path, replacements=spack.paths.path_replacements()
+        )
+    )


 class Mirror:
@ -46,6 +46,7 @@
 import spack.environment
 import spack.error
 import spack.modules.common
+import spack.package_prefs as pp
 import spack.paths
 import spack.projections as proj
 import spack.repo
@ -54,7 +55,6 @@
 import spack.store
 import spack.tengine as tengine
 import spack.util.environment
-import spack.util.file_permissions as fp
 import spack.util.path
 import spack.util.spack_yaml as syaml
 from spack.context import Context
@ -225,7 +225,7 @@ def root_path(name, module_set_name):
     roots = spack.config.merge_yaml(defaults, roots)

     path = roots.get(name, os.path.join(spack.paths.share_path, name))
-    return spack.util.path.canonicalize_path(path)
+    return spack.util.path.canonicalize_path(path, replacements=spack.paths.path_replacements())


 def generate_module_index(root, modules, overwrite=False):
@ -968,7 +968,7 @@ def write(self, overwrite=False):

         # Set the file permissions of the module to match that of the package
         if os.path.exists(self.layout.filename):
-            fp.set_permissions_by_spec(self.layout.filename, self.spec)
+            pp.set_permissions_by_spec(self.layout.filename, self.spec)

         # Symlink defaults if needed
         self.update_module_defaults()
@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 # flake8: noqa: F401
-"""spack.util.package is a set of useful build tools and directives for packages.
+"""spack.package is a set of useful build tools and directives for packages.

 Everything in this module is automatically imported into Spack package files.
 """
@ -101,7 +101,6 @@
     on_package_attributes,
 )
 from spack.spec import InvalidSpecDetected, Spec
-from spack.util.cpus import determine_number_of_jobs
 from spack.util.executable import *
 from spack.variant import (
     any_combination_of,
@ -63,9 +63,9 @@
     install_test_root,
 )
 from spack.installer import InstallError, PackageInstaller
+from spack.package_hash import package_hash
 from spack.stage import DIYStage, ResourceStage, Stage, StageComposite, compute_stage_name
 from spack.util.executable import ProcessError, which
-from spack.util.package_hash import package_hash
 from spack.version import GitVersion, StandardVersion, Version

 FLAG_HANDLER_RETURN_TYPE = Tuple[
@ -829,7 +829,9 @@ def name(cls):
     @classproperty
     def global_license_dir(cls):
         """Returns the directory where license files for all packages are stored."""
-        return spack.util.path.canonicalize_path(spack.config.get("config:license_dir"))
+        return spack.util.path.canonicalize_path(
+            spack.config.get("config:license_dir"), replacements=spack.paths.path_replacements()
+        )

     @property
     def global_license_file(self):
@ -985,7 +987,12 @@ def find_valid_url_for_version(self, version):
         urls = self.all_urls_for_version(version)

         for u in urls:
-            if spack.util.web.url_exists(u):
+            if spack.util.web.url_exists(
+                u,
+                fetch_method=spack.config.get("config:url_fetch_method", "urllib"),
+                verify_ssl=spack.config.get("config:verify_ssl"),
+                timeout=spack.config.get("config:connect_timeout", 10),
+            ):
                 return u

         return None
@ -2,11 +2,14 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import os
 import stat
 import warnings

 import spack.error
+import spack.paths
 import spack.repo
+import spack.util.file_permissions as fp
 from spack.config import ConfigError
 from spack.util.path import canonicalize_path
 from spack.version import Version
@ -177,7 +180,9 @@ def _package(maybe_abstract_spec):
     spec_str = entry["spec"]
     external_path = entry.get("prefix", None)
     if external_path:
         external_path = canonicalize_path(
-        external_path = canonicalize_path(external_path)
+        external_path = canonicalize_path(
+            external_path, replacements=spack.paths.path_replacements()
+        )
     external_modules = entry.get("modules", None)
     external_spec = spack.spec.Spec.from_detection(
         spack.spec.Spec(
@ -294,5 +299,16 @@ def get_package_group(spec):
     return group


+def set_permissions_by_spec(path, spec):
+    # Get permissions for spec
+    if os.path.isdir(path):
+        perms = get_package_dir_permissions(spec)
+    else:
+        perms = get_package_permissions(spec)
+    group = get_package_group(spec)
+
+    fp.set_permissions(path, perms, group)
+
+
 class VirtualInPackagesYAMLError(spack.error.SpackError):
     """Raised when a disallowed virtual is found in packages.yaml"""
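set_permissions_by_spec moves here from spack.util.file_permissions so that it sits next to the permission and group lookups it depends on. A usage sketch, assuming spec is a concrete installed spec (the walk mirrors the post_install hook shown earlier in this diff):

import os

import spack.package_prefs as pp


def fix_tree_permissions(spec):
    # Apply package-preference permissions to the prefix and everything below.
    pp.set_permissions_by_spec(spec.prefix, spec)
    for root, dirs, files in os.walk(spec.prefix, followlinks=False):
        for name in dirs + files:
            path = os.path.join(root, name)
            if not os.path.islink(path):
                pp.set_permissions_by_spec(path, spec)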
@ -9,11 +9,16 @@
 throughout Spack and should bring in a minimal number of external
 dependencies.
 """
+import getpass
 import os
+import tempfile
+from datetime import date
 from pathlib import PurePath

 import llnl.util.filesystem

+from spack.util.path import NOMATCH
+
 #: This file lives in $prefix/lib/spack/spack/__file__
 prefix = str(PurePath(llnl.util.filesystem.ancestor(__file__, 4)))

@ -136,3 +141,50 @@ def _get_system_config_path():

 #: System configuration location
 system_config_path = _get_system_config_path()
+
+
+def architecture():
+    # break circular import
+    import spack.platforms
+    import spack.spec
+
+    host_platform = spack.platforms.host()
+    host_os = host_platform.operating_system("default_os")
+    host_target = host_platform.target("default_target")
+
+    return spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))
+
+
+def get_user():
+    # User pwd where available because it accounts for effective uids when using ksu and similar
+    try:
+        # user pwd for unix systems
+        import pwd
+
+        return pwd.getpwuid(os.geteuid()).pw_name
+    except ImportError:
+        # fallback on getpass
+        return getpass.getuser()
+
+
+def path_replacements():
+    # break circular imports
+    import spack.environment as ev
+
+    arch = architecture()
+
+    return {
+        "spack": lambda: prefix,
+        "user": lambda: get_user(),
+        "tempdir": lambda: tempfile.gettempdir(),
+        "user_cache_path": lambda: user_cache_path,
+        "architecture": lambda: arch,
+        "arch": lambda: arch,
+        "platform": lambda: arch.platform,
+        "operating_system": lambda: arch.os,
+        "os": lambda: arch.os,
+        "target": lambda: arch.target,
+        "target_family": lambda: arch.target.microarchitecture.family,
+        "date": lambda: date.today().strftime("%Y-%m-%d"),
+        "env": lambda: ev.active_environment().path if ev.active_environment() else NOMATCH,
+    }
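path_replacements() maps each variable name to a zero-argument callable, so expensive lookups (user name, architecture, active environment) are deferred until a path actually references them. A self-contained toy illustrating the same lazy-callable pattern (the names here are made up for illustration, not Spack's API):

import tempfile

# Toy substitution table in the spirit of path_replacements(); the real
# table also carries architecture, date, and environment entries.
replacements = {
    "tempdir": lambda: tempfile.gettempdir(),
    "user": lambda: "alice",  # hypothetical user name, for illustration
}


def substitute(path: str) -> str:
    for name, value in replacements.items():
        token = "$" + name
        if token in path:
            # The callable only runs when its variable actually appears.
            path = path.replace(token, str(value()))
    return path


print(substitute("$tempdir/$user/stage"))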
@ -37,6 +37,7 @@
 import spack.config
 import spack.error
 import spack.patch
+import spack.paths
 import spack.provider_index
 import spack.spec
 import spack.tag
@ -928,7 +929,9 @@ def __init__(self, root, cache=None):
         """
         # Root directory, containing _repo.yaml and package dirs
         # Allow roots to by spack-relative by starting with '$spack'
-        self.root = spack.util.path.canonicalize_path(root)
+        self.root = spack.util.path.canonicalize_path(
+            root, replacements=spack.paths.path_replacements()
+        )

         # check and raise BadRepoError on fail.
         def check(condition, msg):
@ -1327,7 +1330,7 @@ def create_repo(root, namespace=None, subdir=packages_dir_name):
     If the namespace is not provided, use basename of root.
     Return the canonicalized path and namespace of the created repository.
     """
-    root = spack.util.path.canonicalize_path(root)
+    root = spack.util.path.canonicalize_path(root, replacements=spack.paths.path_replacements())
     if not namespace:
         namespace = os.path.basename(root)
@ -41,6 +41,7 @@
 import spack.error
 import spack.package_base
 import spack.package_prefs
+import spack.paths
 import spack.platforms
 import spack.repo
 import spack.spec
@ -2602,7 +2603,11 @@ def setup(
         dev_specs = tuple(
             spack.spec.Spec(info["spec"]).constrained(
                 "dev_path=%s"
-                % spack.util.path.canonicalize_path(info["path"], default_wd=env.path)
+                % spack.util.path.canonicalize_path(
+                    info["path"],
+                    default_wd=env.path,
+                    replacements=spack.paths.path_replacements(),
+                )
             )
             for name, info in env.dev_specs.items()
         )
@ -3119,7 +3124,9 @@ def _develop_specs_from_env(spec, env):
     if not dev_info:
         return

-    path = spack.util.path.canonicalize_path(dev_info["path"], default_wd=env.path)
+    path = spack.util.path.canonicalize_path(
+        dev_info["path"], default_wd=env.path, replacements=spack.paths.path_replacements()
+    )

     if "dev_path" in spec.variants:
         error_msg = (
@ -150,7 +150,7 @@ def _resolve_paths(candidates):
     Adjustments involve removing extra $user from $tempdir if $tempdir includes
     $user and appending $user if it is not present in the path.
     """
-    temp_path = sup.canonicalize_path("$tempdir")
+    temp_path = sup.canonicalize_path("$tempdir", replacements=spack.paths.path_replacements())
     user = getpass.getuser()
     tmp_has_usr = user in temp_path.split(os.path.sep)

@ -162,7 +162,7 @@ def _resolve_paths(candidates):
         path = path.replace("/$user", "", 1)

         # Ensure the path is unique per user.
-        can_path = sup.canonicalize_path(path)
+        can_path = sup.canonicalize_path(path, replacements=spack.paths.path_replacements())
         # When multiple users share a stage root, we can avoid conflicts between
         # them by adding a per-user subdirectory.
         # Avoid doing this on Windows to keep stage absolute path as short as possible.
@ -199,9 +199,10 @@ def get_stage_root():
 def _mirror_roots():
     mirrors = spack.config.get("mirrors")
     return [
-        sup.substitute_path_variables(root)
+        sup.substitute_path_variables(root, replacements=spack.paths.path_replacements())
         if root.endswith(os.sep)
-        else sup.substitute_path_variables(root) + os.sep
+        else sup.substitute_path_variables(root, replacements=spack.paths.path_replacements())
+        + os.sep
         for root in mirrors.values()
     ]
@ -77,7 +77,9 @@ def parse_install_tree(config_dict):
     if isinstance(install_tree, str):
         tty.warn("Using deprecated format for configuring install_tree")
         unpadded_root = install_tree
-        unpadded_root = spack.util.path.canonicalize_path(unpadded_root)
+        unpadded_root = spack.util.path.canonicalize_path(
+            unpadded_root, replacements=spack.paths.path_replacements()
+        )
         # construct projection from previous values for backwards compatibility
         all_projection = config_dict.get(
             "install_path_scheme", spack.directory_layout.default_projections["all"]
@ -86,7 +88,9 @@ def parse_install_tree(config_dict):
         projections = {"all": all_projection}
     else:
         unpadded_root = install_tree.get("root", DEFAULT_INSTALL_TREE_ROOT)
-        unpadded_root = spack.util.path.canonicalize_path(unpadded_root)
+        unpadded_root = spack.util.path.canonicalize_path(
+            unpadded_root, replacements=spack.paths.path_replacements()
+        )

         padded_length = install_tree.get("padded_length", False)
         if padded_length is True:
@ -267,7 +271,9 @@ def _construct_upstream_dbs_from_install_roots(
     for install_root in reversed(install_roots):
         upstream_dbs = list(accumulated_upstream_dbs)
         next_db = spack.database.Database(
-            spack.util.path.canonicalize_path(install_root),
+            spack.util.path.canonicalize_path(
+                install_root, replacements=spack.paths.path_replacements()
+            ),
             is_upstream=True,
             upstream_dbs=upstream_dbs,
         )
@ -10,6 +10,7 @@

 import spack.config
 import spack.extensions
+import spack.paths
 from spack.util.path import canonicalize_path


@ -76,7 +77,10 @@ def make_environment(dirs: Optional[Tuple[str, ...]] = None):
         # Default directories where to search for templates
         builtins = spack.config.get("config:template_dirs", ["$spack/share/spack/templates"])
         extensions = spack.extensions.get_template_dirs()
-        dirs = tuple(canonicalize_path(d) for d in itertools.chain(builtins, extensions))
+        r = spack.paths.path_replacements()
+        dirs = tuple(
+            canonicalize_path(d, replacements=r) for d in itertools.chain(builtins, extensions)
+        )

     # Loader for the templates
     loader = jinja2.FileSystemLoader(dirs)
@ -25,12 +25,12 @@
 import spack.caches
 import spack.config
 import spack.fetch_strategy
+import spack.gpg
 import spack.hooks.sbang as sbang
 import spack.main
 import spack.mirror
 import spack.repo
 import spack.store
-import spack.util.gpg
 import spack.util.spack_yaml as syaml
 import spack.util.url as url_util
 import spack.util.web as web_util
@ -344,10 +344,10 @@ def test_push_and_fetch_keys(mock_gnupghome):

     # dir 1: create a new key, record its fingerprint, and push it to a new
     # mirror
-    with spack.util.gpg.gnupghome_override(gpg_dir1):
-        spack.util.gpg.create(name="test-key", email="fake@test.key", expires="0", comment=None)
+    with spack.gpg.gnupghome_override(gpg_dir1):
+        spack.gpg.create(name="test-key", email="fake@test.key", expires="0", comment=None)

-        keys = spack.util.gpg.public_keys()
+        keys = spack.gpg.public_keys()
         assert len(keys) == 1
         fpr = keys[0]

@ -355,12 +355,12 @@ def test_push_and_fetch_keys(mock_gnupghome):

     # dir 2: import the key from the mirror, and confirm that its fingerprint
     # matches the one created above
-    with spack.util.gpg.gnupghome_override(gpg_dir2):
-        assert len(spack.util.gpg.public_keys()) == 0
+    with spack.gpg.gnupghome_override(gpg_dir2):
+        assert len(spack.gpg.public_keys()) == 0

         bindist.get_keys(mirrors=mirrors, install=True, trust=True, force=True)

-        new_keys = spack.util.gpg.public_keys()
+        new_keys = spack.gpg.public_keys()
         assert len(new_keys) == 1
         assert new_keys[0] == fpr

@ -672,7 +672,7 @@ def test_etag_fetching_304():
     # Test conditional fetch with etags. If the remote hasn't modified the file
     # it returns 304, which is an HTTPError in urllib-land. That should be
     # handled as success, since it means the local cache is up-to-date.
-    def response_304(request: urllib.request.Request):
+    def response_304(request: urllib.request.Request, verify_ssl=True, timeout=10):
         url = request.get_full_url()
         if url == "https://www.example.com/build_cache/index.json":
             assert request.get_header("If-none-match") == '"112a8bbc1b3f7f185621c1ee335f0502"'
@ -694,7 +694,7 @@ def response_304(request: urllib.request.Request):

 def test_etag_fetching_200():
     # Test conditional fetch with etags. The remote has modified the file.
-    def response_200(request: urllib.request.Request):
+    def response_200(request: urllib.request.Request, verify_ssl=True, timeout=10):
         url = request.get_full_url()
         if url == "https://www.example.com/build_cache/index.json":
             assert request.get_header("If-none-match") == '"112a8bbc1b3f7f185621c1ee335f0502"'
@ -722,7 +722,7 @@ def response_200(request: urllib.request.Request):

 def test_etag_fetching_404():
     # Test conditional fetch with etags. The remote has modified the file.
-    def response_404(request: urllib.request.Request):
+    def response_404(request: urllib.request.Request, verify_ssl=True, timeout=10):
         raise urllib.error.HTTPError(
             request.get_full_url(),
             404,
@ -745,7 +745,7 @@ def test_default_index_fetch_200():
     index_json = '{"Hello": "World"}'
     index_json_hash = bindist.compute_hash(index_json)

-    def urlopen(request: urllib.request.Request):
+    def urlopen(request: urllib.request.Request, **kwargs):
         url = request.get_full_url()
         if url.endswith("index.json.hash"):
             return urllib.response.addinfourl(  # type: ignore[arg-type]
@ -784,7 +784,7 @@ def test_default_index_dont_fetch_index_json_hash_if_no_local_hash():
     index_json = '{"Hello": "World"}'
     index_json_hash = bindist.compute_hash(index_json)

-    def urlopen(request: urllib.request.Request):
+    def urlopen(request: urllib.request.Request, **kwargs):
         url = request.get_full_url()
         if url.endswith("index.json"):
             return urllib.response.addinfourl(
@ -813,7 +813,7 @@ def test_default_index_not_modified():
     index_json = '{"Hello": "World"}'
     index_json_hash = bindist.compute_hash(index_json)

-    def urlopen(request: urllib.request.Request):
+    def urlopen(request: urllib.request.Request, **kwargs):
         url = request.get_full_url()
         if url.endswith("index.json.hash"):
             return urllib.response.addinfourl(
@ -838,7 +838,7 @@ def test_default_index_invalid_hash_file(index_json):
     # Test invalid unicode / invalid hash type
     index_json_hash = bindist.compute_hash(index_json)

-    def urlopen(request: urllib.request.Request):
+    def urlopen(request: urllib.request.Request, **kwargs):
         return urllib.response.addinfourl(
             io.BytesIO(),
             headers={},  # type: ignore[arg-type]
@ -858,7 +858,7 @@ def test_default_index_json_404():
     index_json = '{"Hello": "World"}'
     index_json_hash = bindist.compute_hash(index_json)

-    def urlopen(request: urllib.request.Request):
+    def urlopen(request: urllib.request.Request, **kwargs):
         url = request.get_full_url()
         if url.endswith("index.json.hash"):
             return urllib.response.addinfourl(
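Because read_from_url now forwards verify_ssl and timeout to the opener, test doubles that stand in for urlopen must tolerate extra keyword arguments. A minimal sketch of such a stub (response body and status are illustrative):

import io
import urllib.request
import urllib.response


def fake_urlopen(request: urllib.request.Request, **kwargs):
    # Accept and ignore verify_ssl=..., timeout=... passed by the caller.
    return urllib.response.addinfourl(
        io.BytesIO(b'{"Hello": "World"}'),
        headers={},  # type: ignore[arg-type]
        url=request.get_full_url(),
        code=200,
    )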
@ -10,6 +10,7 @@
 import spack.bootstrap.core
 import spack.compilers
 import spack.environment
+import spack.paths
 import spack.store
 import spack.util.path

@ -81,7 +82,9 @@ def test_store_path_customization(config_value, expected, mutable_config):

     # Check the store path
     current = spack.bootstrap.config.store_path()
-    assert current == spack.util.path.canonicalize_path(expected)
+    assert current == spack.util.path.canonicalize_path(
+        expected, replacements=spack.paths.path_replacements()
+    )


 def test_raising_exception_if_bootstrap_disabled(mutable_config):
@ -16,9 +16,9 @@
 import spack.config
 import spack.environment as ev
 import spack.error
+import spack.gpg
 import spack.paths as spack_paths
 import spack.util.git
-import spack.util.gpg
 import spack.util.spack_yaml as syaml
@ -18,11 +18,11 @@
 import spack.ci as ci
 import spack.config
 import spack.environment as ev
+import spack.gpg
 import spack.hash_types as ht
 import spack.main
 import spack.paths as spack_paths
 import spack.repo as repo
-import spack.util.gpg
 import spack.util.spack_yaml as syaml
 import spack.util.url as url_util
 from spack.schema.buildcache_spec import schema as specfile_schema
@ -10,6 +10,7 @@
 import llnl.util.filesystem as fs

 import spack.environment as ev
+import spack.paths
 import spack.spec
 from spack.main import SpackCommand

@ -106,7 +107,9 @@ def test_develop_canonicalize_path(self, monkeypatch, config):
         env("create", "test")
         with ev.read("test") as e:
             path = "../$user"
-            abspath = spack.util.path.canonicalize_path(path, e.path)
+            abspath = spack.util.path.canonicalize_path(
+                path, e.path, replacements=spack.paths.path_replacements()
+            )

             def check_path(stage, dest):
                 assert dest == abspath
@ -123,7 +126,9 @@ def test_develop_canonicalize_path_no_args(self, monkeypatch, config):
         env("create", "test")
         with ev.read("test") as e:
             path = "$user"
-            abspath = spack.util.path.canonicalize_path(path, e.path)
+            abspath = spack.util.path.canonicalize_path(
+                path, e.path, replacements=spack.paths.path_replacements()
+            )

             def check_path(stage, dest):
                 assert dest == abspath
@ -904,7 +904,9 @@ def test_env_with_included_config_var_path(tmpdir, packages_file):
     spack_yaml = env_path / ev.manifest_name
     spack_yaml.write_text(mpileaks_env_config(config_var_path))

-    config_real_path = substitute_path_variables(config_var_path)
+    config_real_path = substitute_path_variables(
+        config_var_path, replacements=spack.paths.path_replacements()
+    )
     shutil.move(included_file, config_real_path)
     assert os.path.exists(config_real_path)
@ -10,8 +10,9 @@
 import llnl.util.filesystem as fs

 import spack.bootstrap
+import spack.gpg
+import spack.paths
 import spack.util.executable
-import spack.util.gpg
 from spack.main import SpackCommand
 from spack.paths import mock_gpg_data_path, mock_gpg_keys_path
 from spack.util.executable import ProcessError
@ -45,19 +46,19 @@ def test_find_gpg(cmd_name, version, tmpdir, mock_gnupghome, monkeypatch):

     monkeypatch.setenv("PATH", str(tmpdir))
     if version == "undetectable" or version.endswith("1.3.4"):
-        with pytest.raises(spack.util.gpg.SpackGPGError):
-            spack.util.gpg.init(force=True)
+        with pytest.raises(spack.gpg.SpackGPGError):
+            spack.gpg.init(force=True, gpg_path=spack.paths.gpg_path)
     else:
-        spack.util.gpg.init(force=True)
-        assert spack.util.gpg.GPG is not None
-        assert spack.util.gpg.GPGCONF is not None
+        spack.gpg.init(force=True, gpg_path=spack.paths.gpg_path)
+        assert spack.gpg.GPG is not None
+        assert spack.gpg.GPGCONF is not None


 def test_no_gpg_in_path(tmpdir, mock_gnupghome, monkeypatch, mutable_config):
     monkeypatch.setenv("PATH", str(tmpdir))
     bootstrap("disable")
     with pytest.raises(RuntimeError):
-        spack.util.gpg.init(force=True)
+        spack.gpg.init(force=True, gpg_path=spack.paths.gpg_path)


 @pytest.mark.maybeslow
@ -105,7 +106,7 @@ def test_gpg(tmpdir, mutable_config, mock_gnupghome):
         "Spack testing 1",
         "spack@googlegroups.com",
     )
-    keyfp = spack.util.gpg.signing_keys()[0]
+    keyfp = spack.gpg.signing_keys()[0]

     # List the keys.
     # TODO: Test the output here.
@ -338,50 +338,51 @@ def __init__(self, path):

 def test_substitute_config_variables(mock_low_high_config, monkeypatch):
     prefix = spack.paths.prefix.lstrip("/")
+    r = spack.paths.path_replacements()
     assert cross_plat_join(
         os.sep + os.path.join("foo", "bar", "baz"), prefix
-    ) == spack_path.canonicalize_path("/foo/bar/baz/$spack")
+    ) == spack_path.canonicalize_path("/foo/bar/baz/$spack", replacements=r)

     assert cross_plat_join(
         spack.paths.prefix, os.path.join("foo", "bar", "baz")
-    ) == spack_path.canonicalize_path("$spack/foo/bar/baz/")
+    ) == spack_path.canonicalize_path("$spack/foo/bar/baz/", replacements=r)

     assert cross_plat_join(
         os.sep + os.path.join("foo", "bar", "baz"), prefix, os.path.join("foo", "bar", "baz")
-    ) == spack_path.canonicalize_path("/foo/bar/baz/$spack/foo/bar/baz/")
+    ) == spack_path.canonicalize_path("/foo/bar/baz/$spack/foo/bar/baz/", replacements=r)

     assert cross_plat_join(
         os.sep + os.path.join("foo", "bar", "baz"), prefix
-    ) == spack_path.canonicalize_path("/foo/bar/baz/${spack}")
+    ) == spack_path.canonicalize_path("/foo/bar/baz/${spack}", replacements=r)

     assert cross_plat_join(
         spack.paths.prefix, os.path.join("foo", "bar", "baz")
-    ) == spack_path.canonicalize_path("${spack}/foo/bar/baz/")
+    ) == spack_path.canonicalize_path("${spack}/foo/bar/baz/", replacements=r)

     assert cross_plat_join(
         os.sep + os.path.join("foo", "bar", "baz"), prefix, os.path.join("foo", "bar", "baz")
-    ) == spack_path.canonicalize_path("/foo/bar/baz/${spack}/foo/bar/baz/")
+    ) == spack_path.canonicalize_path("/foo/bar/baz/${spack}/foo/bar/baz/", replacements=r)

     assert cross_plat_join(
         os.sep + os.path.join("foo", "bar", "baz"), prefix, os.path.join("foo", "bar", "baz")
-    ) != spack_path.canonicalize_path("/foo/bar/baz/${spack/foo/bar/baz/")
+    ) != spack_path.canonicalize_path("/foo/bar/baz/${spack/foo/bar/baz/", replacements=r)

     # $env replacement is a no-op when no environment is active
     assert spack_path.canonicalize_path(
-        os.sep + os.path.join("foo", "bar", "baz", "$env")
+        os.sep + os.path.join("foo", "bar", "baz", "$env"), replacements=r
     ) == os.sep + os.path.join("foo", "bar", "baz", "$env")

     # Fake an active environment and $env is replaced properly
     fake_env_path = os.sep + os.path.join("quux", "quuux")
     monkeypatch.setattr(ev, "active_environment", lambda: MockEnv(fake_env_path))
-    assert spack_path.canonicalize_path("$env/foo/bar/baz") == os.path.join(
+    assert spack_path.canonicalize_path("$env/foo/bar/baz", replacements=r) == os.path.join(
         fake_env_path, os.path.join("foo", "bar", "baz")
     )

     # relative paths without source information are relative to cwd
-    assert spack_path.canonicalize_path(os.path.join("foo", "bar", "baz")) == os.path.abspath(
-        os.path.join("foo", "bar", "baz")
-    )
+    assert spack_path.canonicalize_path(
+        os.path.join("foo", "bar", "baz"), replacements=r
+    ) == os.path.abspath(os.path.join("foo", "bar", "baz"))

     # relative paths with source information are relative to the file
     spack.config.set(
@ -389,19 +390,19 @@ def test_substitute_config_variables(mock_low_high_config, monkeypatch):
     )
     spack.config.CONFIG.clear_caches()
     path = spack.config.get("modules:default:roots:lmod")
-    assert spack_path.canonicalize_path(path) == os.path.normpath(
+    assert spack_path.canonicalize_path(path, replacements=r) == os.path.normpath(
         os.path.join(mock_low_high_config.scopes["low"].path, os.path.join("foo", "bar", "baz"))
     )

     # test architecture information is in replacements
     assert spack_path.canonicalize_path(
-        os.path.join("foo", "$platform", "bar")
+        os.path.join("foo", "$platform", "bar"), replacements=r
     ) == os.path.abspath(os.path.join("foo", "test", "bar"))

     host_target = spack.platforms.host().target("default_target")
     host_target_family = str(host_target.microarchitecture.family)
     assert spack_path.canonicalize_path(
-        os.path.join("foo", "$target_family", "bar")
+        os.path.join("foo", "$target_family", "bar"), replacements=r
     ) == os.path.abspath(os.path.join("foo", host_target_family, "bar"))


@ -438,28 +439,33 @@ def test_substitute_user(mock_low_high_config):
     assert os.sep + os.path.join(
         "foo", "bar"
     ) + os.sep + user + os.sep + "baz" == spack_path.canonicalize_path(
-        os.sep + os.path.join("foo", "bar", "$user", "baz")
+        os.sep + os.path.join("foo", "bar", "$user", "baz"),
+        replacements=spack.paths.path_replacements(),
     )
|
||||||
def test_substitute_user_cache(mock_low_high_config):
|
def test_substitute_user_cache(mock_low_high_config):
|
||||||
user_cache_path = spack.paths.user_cache_path
|
user_cache_path = spack.paths.user_cache_path
|
||||||
assert user_cache_path + os.sep + "baz" == spack_path.canonicalize_path(
|
assert user_cache_path + os.sep + "baz" == spack_path.canonicalize_path(
|
||||||
os.path.join("$user_cache_path", "baz")
|
os.path.join("$user_cache_path", "baz"), replacements=spack.paths.path_replacements()
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def test_substitute_tempdir(mock_low_high_config):
|
def test_substitute_tempdir(mock_low_high_config):
|
||||||
tempdir = tempfile.gettempdir()
|
tempdir = tempfile.gettempdir()
|
||||||
assert tempdir == spack_path.canonicalize_path("$tempdir")
|
assert tempdir == spack_path.canonicalize_path(
|
||||||
|
"$tempdir", replacements=spack.paths.path_replacements()
|
||||||
|
)
|
||||||
assert tempdir + os.sep + os.path.join("foo", "bar", "baz") == spack_path.canonicalize_path(
|
assert tempdir + os.sep + os.path.join("foo", "bar", "baz") == spack_path.canonicalize_path(
|
||||||
os.path.join("$tempdir", "foo", "bar", "baz")
|
os.path.join("$tempdir", "foo", "bar", "baz"), replacements=spack.paths.path_replacements()
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def test_substitute_date(mock_low_high_config):
|
def test_substitute_date(mock_low_high_config):
|
||||||
test_path = os.path.join("hello", "world", "on", "$date")
|
test_path = os.path.join("hello", "world", "on", "$date")
|
||||||
new_path = spack_path.canonicalize_path(test_path)
|
new_path = spack_path.canonicalize_path(
|
||||||
|
test_path, replacements=spack.paths.path_replacements()
|
||||||
|
)
|
||||||
assert "$date" in test_path
|
assert "$date" in test_path
|
||||||
assert date.today().strftime("%Y-%m-%d") in new_path
|
assert date.today().strftime("%Y-%m-%d") in new_path
|
||||||
|
|
||||||
|
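The tests above all hand an explicit `replacements` mapping to `spack_path.canonicalize_path` instead of letting the path utility compute one from global state. A minimal stdlib-only sketch of that substitution contract, assuming (as the `spack.util.path` hunks later in this branch show) that each key maps to a zero-argument callable:

```python
import re

# Hypothetical stand-in for spack.paths.path_replacements(): keys map to
# zero-argument callables so lookups run only when a path mentions them.
replacements = {"spack": lambda: "/opt/spack", "user": lambda: "alice"}


def substitute(path, replacements):
    # Replace $var or ${var} tokens from the caller-supplied mapping and
    # leave unknown tokens untouched, mirroring repl() in spack.util.path.
    def repl(match):
        token = match.group(0)
        key = token.strip("${}").lower()
        return str(replacements.get(key, lambda: token)())

    return re.sub(r"(\$\w+\b|\$\{\w+\})", repl, path)


print(substitute("$spack/var/${user}/cache", replacements))  # /opt/spack/var/alice/cache
```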
@@ -39,6 +39,7 @@
 import spack.directory_layout
 import spack.environment as ev
 import spack.error
+import spack.gpg
 import spack.package_base
 import spack.package_prefs
 import spack.paths
@@ -50,7 +51,6 @@
 import spack.test.cray_manifest
 import spack.util.executable
 import spack.util.git
-import spack.util.gpg
 import spack.util.spack_yaml as syaml
 import spack.util.url as url_util
 from spack.fetch_strategy import URLFetchStrategy
@@ -1074,13 +1074,13 @@ def mock_gnupghome(monkeypatch):
     # This comes up because tmp paths on macOS are already long-ish, and
     # pytest makes them longer.
     try:
-        spack.util.gpg.init()
-    except spack.util.gpg.SpackGPGError:
-        if not spack.util.gpg.GPG:
+        spack.gpg.init(gpg_path=spack.paths.gpg_path)
+    except spack.gpg.SpackGPGError:
+        if not spack.gpg.GPG:
             pytest.skip("This test requires gpg")

     short_name_tmpdir = tempfile.mkdtemp()
-    with spack.util.gpg.gnupghome_override(short_name_tmpdir):
+    with spack.gpg.gnupghome_override(short_name_tmpdir):
         yield short_name_tmpdir

     # clean up, since we are doing this manually
@@ -9,9 +9,9 @@
 import pytest

 import spack.directives
+import spack.package_hash as ph
 import spack.paths
 import spack.repo
-import spack.util.package_hash as ph
 from spack.spec import Spec
 from spack.util.unparse import unparse
@@ -21,10 +21,10 @@
 import spack.binary_distribution as bindist
 import spack.cmd.buildcache as buildcache
 import spack.error
+import spack.gpg
 import spack.package_base
 import spack.repo
 import spack.store
-import spack.util.gpg
 import spack.util.url as url_util
 from spack.fetch_strategy import URLFetchStrategy
 from spack.paths import mock_gpg_keys_path
@@ -72,7 +72,7 @@ def test_buildcache(mock_archive, tmp_path, monkeypatch, mutable_config):

     create_args = ["create", "-f", "--rebuild-index", mirror_path, pkghash]
     # Create a private key to sign package with if gpg2 available
-    spack.util.gpg.create(
+    spack.gpg.create(
         name="test key 1",
         expires="0",
         email="spack@googlegroups.com",
@@ -734,7 +734,7 @@ def test_resolve_paths(self):
         assert spack.stage._resolve_paths(paths) == paths

         tempdir = "$tempdir"
-        can_tempdir = canonicalize_path(tempdir)
+        can_tempdir = canonicalize_path(tempdir, replacements=spack.paths.path_replacements())
         user = getpass.getuser()
         temp_has_user = user in can_tempdir.split(os.sep)
         paths = [
@@ -744,7 +744,8 @@ def test_resolve_paths(self):
             os.path.join(tempdir, "$user", "stage", "$user"),
         ]

-        res_paths = [canonicalize_path(p) for p in paths]
+        r = spack.paths.path_replacements()
+        res_paths = [canonicalize_path(p, replacements=r) for p in paths]
         if temp_has_user:
             res_paths[1] = can_tempdir
             res_paths[2] = os.path.join(can_tempdir, user)
@@ -7,6 +7,7 @@
 import pytest

 import spack.config
+import spack.paths
 import spack.tengine as tengine
 from spack.util.path import canonicalize_path

@@ -70,8 +71,9 @@ class TestTengineEnvironment:
     def test_template_retrieval(self):
         """Tests the template retrieval mechanism hooked into config files"""
         # Check the directories are correct
+        r = spack.paths.path_replacements()
         template_dirs = spack.config.get("config:template_dirs")
-        template_dirs = tuple([canonicalize_path(x) for x in template_dirs])
+        template_dirs = tuple([canonicalize_path(x, replacements=r) for x in template_dirs])
         assert len(template_dirs) == 3

         env = tengine.make_environment(template_dirs)
@@ -350,8 +350,8 @@ def _which(*args, **kwargs):


 def test_url_fetch_text_without_url(tmpdir):
-    with pytest.raises(spack.error.FetchError, match="URL is required"):
-        web_util.fetch_url_text(None)
+    with pytest.raises(web_util.WebError, match="URL is required"):
+        web_util.fetch_url_text(None, fetch_method=spack.config.get("config:url_fetch_method"))


 def test_url_fetch_text_curl_failures(tmpdir, monkeypatch):
@@ -367,18 +367,20 @@ def _which(*args, **kwargs):
     monkeypatch.setattr(spack.util.web, "which", _which)

     with spack.config.override("config:url_fetch_method", "curl"):
-        with pytest.raises(spack.error.FetchError, match="Missing required curl"):
-            web_util.fetch_url_text("https://github.com/")
+        with pytest.raises(web_util.WebError, match="Missing required curl"):
+            web_util.fetch_url_text(
+                "https://github.com/", fetch_method=spack.config.get("config:url_fetch_method")
+            )


 def test_url_check_curl_errors():
     """Check that standard curl error returncodes raise expected errors."""
     # Check returncode 22 (i.e., 404)
-    with pytest.raises(spack.error.FetchError, match="not found"):
+    with pytest.raises(web_util.WebError, match="not found"):
         web_util.check_curl_code(22)

     # Check returncode 60 (certificate error)
-    with pytest.raises(spack.error.FetchError, match="invalid certificate"):
+    with pytest.raises(web_util.WebError, match="invalid certificate"):
         web_util.check_curl_code(60)


@@ -395,8 +397,11 @@ def _which(*args, **kwargs):
     monkeypatch.setattr(spack.util.web, "which", _which)

     with spack.config.override("config:url_fetch_method", "curl"):
-        with pytest.raises(spack.error.FetchError, match="Missing required curl"):
-            web_util.url_exists("https://github.com/")
+        with pytest.raises(web_util.WebError, match="Missing required curl"):
+            web_util.url_exists(
+                "https://github.com/",
+                fetch_method=spack.config.get("config:url_fetch_method", "urllib"),
+            )


 def test_url_fetch_text_urllib_bad_returncode(tmpdir, monkeypatch):
@@ -410,16 +415,20 @@ def _read_from_url(*args, **kwargs):
     monkeypatch.setattr(spack.util.web, "read_from_url", _read_from_url)

     with spack.config.override("config:url_fetch_method", "urllib"):
-        with pytest.raises(spack.error.FetchError, match="failed with error code"):
-            web_util.fetch_url_text("https://github.com/")
+        with pytest.raises(web_util.WebError, match="failed with error code"):
+            web_util.fetch_url_text(
+                "https://github.com/", fetch_method=spack.config.get("config:url_fetch_method")
+            )


 def test_url_fetch_text_urllib_web_error(tmpdir, monkeypatch):
     def _raise_web_error(*args, **kwargs):
-        raise web_util.SpackWebError("bad url")
+        raise web_util.WebError("bad url")

     monkeypatch.setattr(spack.util.web, "read_from_url", _raise_web_error)

     with spack.config.override("config:url_fetch_method", "urllib"):
-        with pytest.raises(spack.error.FetchError, match="fetch failed to verify"):
-            web_util.fetch_url_text("https://github.com/")
+        with pytest.raises(web_util.WebError, match="fetch failed to verify"):
+            web_util.fetch_url_text(
+                "https://github.com/", fetch_method=spack.config.get("config:url_fetch_method")
+            )
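These tests now expect `web_util.WebError` and pass `fetch_method` in explicitly, so `spack.util.web` no longer consults global configuration itself. A hedged sketch of that inversion of control (the body below is illustrative, not the real implementation):

```python
def fetch_url_text(url, fetch_method="urllib"):
    # The utility receives the method as plain data; whoever calls it decides
    # whether the value comes from spack.config or from a test harness.
    if url is None:
        raise ValueError("URL is required to fetch its text")
    if fetch_method == "curl":
        return "<body fetched via curl>"  # placeholder for the curl code path
    return "<body fetched via urllib>"    # placeholder for the urllib code path


# A production call site reads config once; tests can pass a literal:
print(fetch_url_text("https://example.com", fetch_method="curl"))
```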
@@ -79,25 +79,26 @@ def test_output_filtering(self, capfd, install_mockery, mutable_config):
         # test filtering when padding is enabled
         with spack.config.override("config:install_tree", {"padded_length": 256}):
             # tty.msg with filtering on the first argument
-            with sup.filter_padding():
+            padding = spack.config.get("config:install_tree:padded_length", None)
+            with sup.filter_padding(padding=padding):
                 tty.msg("here is a long path: %s/with/a/suffix" % long_path)
             out, err = capfd.readouterr()
             assert padding_string in out

             # tty.msg with filtering on a later argument
-            with sup.filter_padding():
+            with sup.filter_padding(padding=padding):
                 tty.msg("here is a long path:", "%s/with/a/suffix" % long_path)
             out, err = capfd.readouterr()
             assert padding_string in out

             # tty.error with filtering on the first argument
-            with sup.filter_padding():
+            with sup.filter_padding(padding=padding):
                 tty.error("here is a long path: %s/with/a/suffix" % long_path)
             out, err = capfd.readouterr()
             assert padding_string in err

             # tty.error with filtering on a later argument
-            with sup.filter_padding():
+            with sup.filter_padding(padding=padding):
                 tty.error("here is a long path:", "%s/with/a/suffix" % long_path)
             out, err = capfd.readouterr()
             assert padding_string in err
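`filter_padding` now receives the padding length from its caller instead of importing `spack.config` internally. A small self-contained sketch of a context manager with that shape:

```python
import contextlib


@contextlib.contextmanager
def filter_padding(padding=None):
    # Hypothetical reduction of spack.util.path.filter_padding: only install
    # an output filter when a padding length was actually configured.
    if padding:
        print(f"[padding filter on, length={padding}]")
        try:
            yield
        finally:
            print("[padding filter off]")
    else:
        yield


# The caller looks the value up once and hands it over:
padding = 256  # stands in for spack.config.get("config:install_tree:padded_length", None)
with filter_padding(padding=padding):
    print("here is a long path: /padded/prefix/with/a/suffix")
```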
@@ -11,6 +11,7 @@
 from llnl.util.filesystem import getuid, group_ids

 import spack.config
+import spack.util.error
 import spack.util.lock as lk


@@ -54,7 +55,7 @@ def test_lock_checks_user(tmpdir):

     # unsafe
     tmpdir.chmod(0o777)
-    with pytest.raises(spack.error.SpackError):
+    with pytest.raises(spack.util.error.UtilityError):
         lk.check_lock_safety(path)

     # safe
@@ -85,12 +86,12 @@ def test_lock_checks_group(tmpdir):

     # unsafe
     tmpdir.chmod(0o774)
-    with pytest.raises(spack.error.SpackError):
+    with pytest.raises(spack.util.error.UtilityError):
         lk.check_lock_safety(path)

     # unsafe
     tmpdir.chmod(0o777)
-    with pytest.raises(spack.error.SpackError):
+    with pytest.raises(spack.util.error.UtilityError):
         lk.check_lock_safety(path)

     # safe
@@ -7,13 +7,14 @@

 import pytest

-import spack.util.gpg
+import spack.gpg
+import spack.paths


 @pytest.fixture()
 def has_socket_dir():
-    spack.util.gpg.init()
-    return bool(spack.util.gpg.SOCKET_DIR)
+    spack.gpg.init(gpg_path=spack.paths.gpg_path)
+    return bool(spack.gpg.SOCKET_DIR)


 def test_parse_gpg_output_case_one():
@@ -27,7 +28,7 @@ def test_parse_gpg_output_case_one():
 uid:::::::AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA::Joe (Test) <j.s@s.com>:
 ssb::2048:1:AAAAAAAAAAAAAAAA:AAAAAAAAAA::::::::::
 """
-    keys = spack.util.gpg._parse_secret_keys_output(output)
+    keys = spack.gpg._parse_secret_keys_output(output)

     assert len(keys) == 2
     assert keys[0] == "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
@@ -44,7 +45,7 @@ def test_parse_gpg_output_case_two():
 fpr:::::::::YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY:
 grp:::::::::AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA:
 """
-    keys = spack.util.gpg._parse_secret_keys_output(output)
+    keys = spack.gpg._parse_secret_keys_output(output)

     assert len(keys) == 1
     assert keys[0] == "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
@@ -63,7 +64,7 @@ def test_parse_gpg_output_case_three():
 ssb::2048:1:AAAAAAAAAAAAAAAA:AAAAAAAAAA::::::::::
 fpr:::::::::ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ:"""

-    keys = spack.util.gpg._parse_secret_keys_output(output)
+    keys = spack.gpg._parse_secret_keys_output(output)

     assert len(keys) == 2
     assert keys[0] == "WWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWW"
@@ -83,8 +84,8 @@ def test_really_long_gnupghome_dir(tmpdir, has_socket_dir):
     tdir = tdir[:N].rstrip(os.sep)
     tdir += "0" * (N - len(tdir))

-    with spack.util.gpg.gnupghome_override(tdir):
-        spack.util.gpg.create(
+    with spack.gpg.gnupghome_override(tdir):
+        spack.gpg.create(
             name="Spack testing 1", email="test@spack.io", comment="Spack testing key", expires="0"
         )
-        spack.util.gpg.list(True, True)
+        spack.gpg.list(True, True)
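These hunks consistently relocate `spack.util.gpg` to `spack.gpg` and make `init()` take the gpg executable location as an argument. Assuming the signatures shown in the diff, usage would look roughly like:

```python
import spack.gpg
import spack.paths

# Point the module at gpg explicitly instead of having init() discover it
# from global configuration.
spack.gpg.init(gpg_path=spack.paths.gpg_path)

# _parse_secret_keys_output() consumes `gpg --list-secret-keys` colon records:
output = """sec::2048:1:AAAAAAAAAAAAAAAA:AAAAAAAAAA::::::::::
fpr:::::::::XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX:
"""
print(spack.gpg._parse_secret_keys_output(output))  # list of fingerprints
```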
@@ -287,7 +287,7 @@ def test_gather_s3_information(monkeypatch, capfd):
         }
     )

-    session_args, client_args = spack.util.s3.get_mirror_s3_connection_info(mirror, "push")
+    session_args, client_args = spack.util.s3.get_mirror_s3_connection_info(mirror, "push", False)

     # Session args are used to create the S3 Session object
     assert "aws_session_token" in session_args
@@ -307,7 +307,7 @@ def test_gather_s3_information(monkeypatch, capfd):
 def test_remove_s3_url(monkeypatch, capfd):
     fake_s3_url = "s3://my-bucket/subdirectory/mirror"

-    def get_s3_session(url, method="fetch"):
+    def get_s3_session(url, method="fetch", verify_ssl=True):
         return MockS3Client()

     monkeypatch.setattr(spack.util.web, "get_s3_session", get_s3_session)
@@ -315,7 +315,9 @@ def get_s3_session(url, method="fetch"):
     current_debug_level = tty.debug_level()
     tty.set_debug(1)

-    spack.util.web.remove_url(fake_s3_url, recursive=True)
+    spack.util.web.remove_url(
+        fake_s3_url, recursive=True, verify_ssl=spack.config.get("config:verify_ssl", True)
+    )
     err = capfd.readouterr()[1]

     tty.set_debug(current_debug_level)
@@ -326,16 +328,26 @@ def get_s3_session(url, method="fetch"):


 def test_s3_url_exists(monkeypatch, capfd):
-    def get_s3_session(url, method="fetch"):
+    def get_s3_session(url, method="fetch", verify_ssl=True):
         return MockS3Client()

     monkeypatch.setattr(spack.util.s3, "get_s3_session", get_s3_session)

     fake_s3_url_exists = "s3://my-bucket/subdirectory/my-file"
-    assert spack.util.web.url_exists(fake_s3_url_exists)
+    assert spack.util.web.url_exists(
+        fake_s3_url_exists,
+        fetch_method=spack.config.get("config:url_fetch_method", "urllib"),
+        verify_ssl=spack.config.get("config:verify_ssl"),
+        timeout=spack.config.get("config:connect_timeout", 10),
+    )

     fake_s3_url_does_not_exist = "s3://my-bucket/subdirectory/my-notfound-file"
-    assert not spack.util.web.url_exists(fake_s3_url_does_not_exist)
+    assert not spack.util.web.url_exists(
+        fake_s3_url_does_not_exist,
+        fetch_method=spack.config.get("config:url_fetch_method", "urllib"),
+        verify_ssl=spack.config.get("config:verify_ssl"),
+        timeout=spack.config.get("config:connect_timeout", 10),
+    )


 def test_s3_url_parsing():
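The S3 tests show the same pattern as the web tests: every setting (`url_fetch_method`, `verify_ssl`, `connect_timeout`) is resolved at the call site and passed down. A hedged sketch of a call site that gathers them once:

```python
import spack.config
import spack.util.web

# Assumed pattern from the hunks above: resolve settings once at the boundary
# so spack.util.web and spack.util.s3 never import spack.config themselves.
web_kwargs = {
    "fetch_method": spack.config.get("config:url_fetch_method", "urllib"),
    "verify_ssl": spack.config.get("config:verify_ssl", True),
    "timeout": spack.config.get("config:connect_timeout", 10),
}

if spack.util.web.url_exists("s3://my-bucket/subdirectory/my-file", **web_kwargs):
    print("mirror object is reachable")
```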
@@ -13,7 +13,7 @@
 import llnl.url
 from llnl.util import tty

-from spack.error import SpackError
+from spack.util.error import UtilityError
 from spack.util.executable import CommandNotFoundError, which

 try:
@@ -400,7 +400,7 @@ def decompressor_for_win(extension):
     compression_extension = llnl.url.compression_ext_from_compressed_archive(extension)
     decompressor = _determine_py_decomp_archive_strategy(compression_extension)
     if not decompressor:
-        raise SpackError(
+        raise UtilityError(
             "Spack was unable to determine a proper decompression strategy for "
            f"valid extension: {extension}. "
            "This is a bug, please file an issue at https://github.com/spack/spack/issues"
@@ -7,8 +7,6 @@
 import os
 from typing import Optional

-import spack.config
-

 def cpus_available():
     """
@@ -27,7 +25,8 @@ def determine_number_of_jobs(
     *,
     parallel: bool = False,
     max_cpus: int = cpus_available(),
-    config: Optional["spack.config.Configuration"] = None,
+    config=None,
+    config_path: str = "config:build_jobs",
 ) -> int:
     """
     Packages that require sequential builds need 1 job. Otherwise we use the
@@ -39,18 +38,24 @@ def determine_number_of_jobs(
         parallel: true when package supports parallel builds
         max_cpus: maximum number of CPUs to use (defaults to cpus_available())
         config: configuration object (defaults to global config)
+        config_path: configuration path to read number of jobs from
     """
     if not parallel:
         return 1

-    cfg = config or spack.config.CONFIG
-
     # Command line overrides all
+    config_jobs = 16
     try:
-        command_line = cfg.get("config:build_jobs", default=None, scope="command_line")
+        command_line = None
+        if config is not None:
+            command_line = config.get(config_path, default=None, scope="command_line")

         if command_line is not None:
             return command_line
     except ValueError:
         pass

-    return min(max_cpus, cfg.get("config:build_jobs", 16))
+    if config is not None:
+        config_jobs = config.get(config_path, 16)
+
+    return min(max_cpus, config_jobs)
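With this change `determine_number_of_jobs` no longer reaches for the global `spack.config.CONFIG`; the caller supplies both the configuration object and the key to read. A usage sketch (the enclosing module path is assumed):

```python
import spack.config
from spack.util.cpus import determine_number_of_jobs  # module path assumed

# Sequential packages short-circuit to a single job without touching config.
assert determine_number_of_jobs(parallel=False) == 1

# Parallel builds read the jobs count from the object they are handed;
# with config=None the function falls back to min(max_cpus, 16).
jobs = determine_number_of_jobs(
    parallel=True,
    max_cpus=8,
    config=spack.config.CONFIG,
    config_path="config:build_jobs",
)
print(jobs)
```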
@@ -18,7 +18,6 @@

 import llnl.util.tty as tty

-import spack.config
 import spack.util.executable

 #: editors to try if VISUAL and EDITOR are not set
@@ -61,7 +60,9 @@ def executable(exe: str, args: List[str]) -> int:
     return cmd.returncode


-def editor(*args: str, exec_fn: Callable[[str, List[str]], int] = os.execv) -> bool:
+def editor(
+    *args: str, exec_fn: Callable[[str, List[str]], int] = os.execv, debug: bool = False
+) -> bool:
     """Invoke the user's editor.

     This will try to execute the following, in order:
@@ -100,7 +101,7 @@ def try_exec(exe, args, var=None):
             return exec_fn(exe, args) == 0

         except (OSError, spack.util.executable.ProcessError) as e:
-            if spack.config.get("config:debug"):
+            if debug:
                 raise

             # Show variable we were trying to use, if it's from one
lib/spack/spack/util/error.py (new file, 87 lines)
@@ -0,0 +1,87 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import inspect
+import sys
+
+import llnl.util.tty as tty
+
+#: at what level we should write stack traces or short error messages
+#: this is module-scoped because it needs to be set very early
+debug = 0
+
+
+class UtilityError(Exception):
+    """This is the superclass for all Spack errors.
+    Subclasses can be found in the modules they have to do with.
+    """
+
+    def __init__(self, message, long_message=None):
+        super().__init__()
+        self.message = message
+        self._long_message = long_message
+
+        # for exceptions raised from child build processes, we save the
+        # traceback as a string and print it in the parent.
+        self.traceback = None
+
+        # we allow exceptions to print debug info via print_context()
+        # before they are caught at the top level. If they *haven't*
+        # printed context early, we do it by default when die() is
+        # called, so we need to remember whether it's been called.
+        self.printed = False
+
+    @property
+    def long_message(self):
+        return self._long_message
+
+    def print_context(self):
+        """Print extended debug information about this exception.
+
+        This is usually printed when the top-level Spack error handler
+        calls ``die()``, but it can be called separately beforehand if a
+        lower-level error handler needs to print error context and
+        continue without raising the exception to the top level.
+        """
+        if self.printed:
+            return
+
+        # basic debug message
+        tty.error(self.message)
+        if self.long_message:
+            sys.stderr.write(self.long_message)
+            sys.stderr.write("\n")
+
+        # stack trace, etc. in debug mode.
+        if debug:
+            if self.traceback:
+                # exception came from a build child, already got
+                # traceback in child, so print it.
+                sys.stderr.write(self.traceback)
+            else:
+                # run parent exception hook.
+                sys.excepthook(*sys.exc_info())
+
+        sys.stderr.flush()
+        self.printed = True
+
+    def die(self):
+        self.print_context()
+        sys.exit(1)
+
+    def __str__(self):
+        msg = self.message
+        if self._long_message:
+            msg += "\n    %s" % self._long_message
+        return msg
+
+    def __repr__(self):
+        args = [repr(self.message), repr(self.long_message)]
+        args = ",".join(args)
+        qualified_name = inspect.getmodule(self).__name__ + "." + type(self).__name__
+        return qualified_name + "(" + args + ")"
+
+    def __reduce__(self):
+        return type(self), (self.message, self.long_message)
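The new `UtilityError` deliberately mirrors the shape of `spack.error.SpackError` so low-level utilities can raise rich errors without importing higher-level Spack modules. A hedged sketch of how a utility-level subclass would behave (`MirrorConfigError` is invented for illustration):

```python
from spack.util.error import UtilityError


class MirrorConfigError(UtilityError):
    """Hypothetical subclass; real ones live in the utility modules themselves."""


try:
    raise MirrorConfigError("mirror misconfigured", long_message="set url: in mirrors.yaml")
except MirrorConfigError as e:
    print(str(e))   # message plus the indented long_message
    print(repr(e))  # module-qualified class name with both arguments
    # At the top level, e.die() would print_context() and then sys.exit(1).
```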
@@ -11,7 +11,7 @@

 import llnl.util.tty as tty

-import spack.error
+import spack.util.error

 __all__ = ["Executable", "which", "ProcessError"]

@@ -362,7 +362,11 @@ def which(*args, **kwargs):
     return Executable(exe) if exe else None


-class ProcessError(spack.error.SpackError):
+class EditorError(spack.util.error.UtilityError):
+    """Base error for all errors from the executable utility"""
+
+
+class ProcessError(EditorError):
     """ProcessErrors are raised when Executables exit with an error code."""


@@ -371,5 +375,5 @@ class ProcessTimeoutError(ProcessError):
     specified timeout exceed that time"""


-class CommandNotFoundError(spack.error.SpackError):
+class CommandNotFoundError(EditorError):
     """Raised when ``which()`` can't find a required executable."""
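Rebasing `ProcessError` and `CommandNotFoundError` onto the shared `EditorError` base means callers can catch one class for any failure out of the executable utility. A sketch, assuming `which` keeps raising `CommandNotFoundError` when `required=True`:

```python
import spack.util.executable as exe

try:
    tool = exe.which("definitely-not-a-real-tool", required=True)
    tool("--version")
except exe.EditorError as e:  # covers ProcessError and CommandNotFoundError
    print(f"executable utility failed: {e}")
```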
@@ -10,7 +10,6 @@

 from llnl.util.filesystem import mkdirp, rename

-from spack.error import SpackError
 from spack.util.lock import Lock, ReadTransaction, WriteTransaction


@@ -178,5 +177,5 @@ def remove(self, key):
         lock.release_write()


-class CacheError(SpackError):
+class CacheError(Exception):
     pass
@@ -8,19 +8,7 @@

 import llnl.util.filesystem as fs

-import spack.package_prefs as pp
-from spack.error import SpackError
-
-
-def set_permissions_by_spec(path, spec):
-    # Get permissions for spec
-    if os.path.isdir(path):
-        perms = pp.get_package_dir_permissions(spec)
-    else:
-        perms = pp.get_package_permissions(spec)
-    group = pp.get_package_group(spec)
-
-    set_permissions(path, perms, group)
+from spack.util.error import UtilityError


 def set_permissions(path, perms, group=None):
@@ -44,5 +32,5 @@ def set_permissions(path, perms, group=None):
     fs.chgrp(path, group, follow_symlinks=False)


-class InvalidPermissionsError(SpackError):
+class InvalidPermissionsError(UtilityError):
     """Error class for invalid permission setters"""
@@ -18,8 +18,7 @@
 from llnl.util.lock import ReadTransaction  # noqa: F401
 from llnl.util.lock import WriteTransaction  # noqa: F401

-import spack.error
-import spack.paths
+import spack.util.error


 class Lock(llnl.util.lock.Lock):
@@ -100,4 +99,4 @@ def check_lock_safety(path: str) -> None:
             f"Running a shared spack without locks is unsafe. You must "
             f"restrict permissions on {path} or enable locks."
         )
-        raise spack.error.SpackError(msg, long_msg)
+        raise spack.util.error.UtilityError(msg, long_msg)
@@ -8,7 +8,7 @@
 import re
 import string

-import spack.error
+import spack.util.error

 __all__ = [
     "mod_to_class",
@@ -159,7 +159,7 @@ def validate_fully_qualified_module_name(mod_name):
     raise InvalidFullyQualifiedModuleNameError(mod_name)


-class InvalidModuleNameError(spack.error.SpackError):
+class InvalidModuleNameError(spack.util.error.UtilityError):
     """Raised when we encounter a bad module name."""

     def __init__(self, name):
@@ -167,7 +167,7 @@ def __init__(self, name):
         self.name = name


-class InvalidFullyQualifiedModuleNameError(spack.error.SpackError):
+class InvalidFullyQualifiedModuleNameError(spack.util.error.UtilityError):
     """Raised when we encounter a bad full package name."""

     def __init__(self, name):
@@ -8,75 +8,27 @@
 TODO: this is really part of spack.config. Consolidate it.
 """
 import contextlib
-import getpass
 import os
 import re
 import subprocess
 import sys
-import tempfile
-from datetime import date

 import llnl.util.tty as tty
 from llnl.util.lang import memoized

 import spack.util.spack_yaml as syaml

-__all__ = ["substitute_config_variables", "substitute_path_variables", "canonicalize_path"]
-
-
-def architecture():
-    # break circular import
-    import spack.platforms
-    import spack.spec
-
-    host_platform = spack.platforms.host()
-    host_os = host_platform.operating_system("default_os")
-    host_target = host_platform.target("default_target")
-
-    return spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))
-
-
-def get_user():
-    # Use pwd where available because it accounts for effective uids when using ksu and similar
-    try:
-        # user pwd for unix systems
-        import pwd
-
-        return pwd.getpwuid(os.geteuid()).pw_name
-    except ImportError:
-        # fallback on getpass
-        return getpass.getuser()
-
+__all__ = [
+    "substitute_config_variables",
+    "substitute_path_variables",
+    "canonicalize_path",
+    "NOMATCH",
+]

 # return value for replacements with no match
 NOMATCH = object()


-# Substitutions to perform
-def replacements():
-    # break circular imports
-    import spack.environment as ev
-    import spack.paths
-
-    arch = architecture()
-
-    return {
-        "spack": lambda: spack.paths.prefix,
-        "user": lambda: get_user(),
-        "tempdir": lambda: tempfile.gettempdir(),
-        "user_cache_path": lambda: spack.paths.user_cache_path,
-        "architecture": lambda: arch,
-        "arch": lambda: arch,
-        "platform": lambda: arch.platform,
-        "operating_system": lambda: arch.os,
-        "os": lambda: arch.os,
-        "target": lambda: arch.target,
-        "target_family": lambda: arch.target.microarchitecture.family,
-        "date": lambda: date.today().strftime("%Y-%m-%d"),
-        "env": lambda: ev.active_environment().path if ev.active_environment() else NOMATCH,
-    }
-
-
 # This is intended to be longer than the part of the install path
 # spack generates from the root path we give it. Included in the
 # estimate:
@@ -144,7 +96,7 @@ def get_system_path_max():
     return sys_max_path_length


-def substitute_config_variables(path):
+def substitute_config_variables(path, replacements={}):
     """Substitute placeholders into paths.

     Spack allows paths in configs to have some placeholders, as follows:
@@ -168,22 +120,21 @@ def substitute_config_variables(path):
     replaced if there is an active environment, and should only be used in
     environment yaml files.
     """
-    _replacements = replacements()
-
     # Look up replacements
     def repl(match):
         m = match.group(0)
         key = m.strip("${}").lower()
-        repl = _replacements.get(key, lambda: m)()
+        repl = replacements.get(key, lambda: m)()
         return m if repl is NOMATCH else str(repl)

     # Replace $var or ${var}.
     return re.sub(r"(\$\w+\b|\$\{\w+\})", repl, path)


-def substitute_path_variables(path):
+def substitute_path_variables(path, replacements={}):
     """Substitute config vars, expand environment vars, expand user home."""
-    path = substitute_config_variables(path)
+    path = substitute_config_variables(path, replacements=replacements)
     path = os.path.expandvars(path)
     path = os.path.expanduser(path)
     return path
@@ -225,7 +176,7 @@ def add_padding(path, length):
     return os.path.join(path, padding)


-def canonicalize_path(path, default_wd=None):
+def canonicalize_path(path, default_wd=None, replacements=None):
     """Same as substitute_path_variables, but also take absolute path.

     If the string is a yaml object with file annotations, make absolute paths
@@ -234,6 +185,7 @@ def canonicalize_path(path, default_wd=None):

     Arguments:
         path (str): path being converted as needed
+        replacements (dict): dictionary of replacements to use

     Returns:
         (str): An absolute path with path variable substitution
@@ -245,7 +197,18 @@ def canonicalize_path(path, default_wd=None):
         filename = os.path.dirname(path._start_mark.name)
         assert path._start_mark.name == path._end_mark.name

-    path = substitute_path_variables(path)
+    if replacements is None:
+        _replacements = {}
+    else:
+        _replacements = replacements
+
+    if not isinstance(_replacements, dict):
+        tty.die(
+            "Replacements returned by replacements func are of type "
+            f"{type(replacements)} and not of the expected type of dict."
+        )
+
+    path = substitute_path_variables(path, replacements=_replacements)
     if not os.path.isabs(path):
         if filename:
             path = os.path.join(filename, path)
@@ -326,15 +289,12 @@ def replacer(match):


 @contextlib.contextmanager
-def filter_padding():
+def filter_padding(padding=None):
     """Context manager to safely disable path padding in all Spack output.

     This is needed because Spack's debug output gets extremely long when we use a
     long padded installation path.
     """
-    import spack.config
-
-    padding = spack.config.get("config:install_tree:padded_length", None)
     if padding:
         # filter out all padding from the install command output
         with tty.output_filter(padding_filter):
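One consequence of this refactor worth noting: with the new signature, `canonicalize_path` performs no `$variable` substitution at all unless the caller passes a mapping, since the default collapses to an empty dict. A hedged illustration (assuming no clashing environment variables are set):

```python
import os
from spack.util.path import canonicalize_path

# No replacements supplied: $spack survives substitution and the relative
# result is simply made absolute against the current directory.
assert canonicalize_path("$spack/foo") == os.path.abspath("$spack/foo")

# The token is only expanded when the caller provides the mapping:
assert canonicalize_path(
    "$spack/foo", replacements={"spack": lambda: "/opt/spack"}
) == "/opt/spack/foo"
```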
@@ -10,13 +10,11 @@
 from io import BufferedReader, BytesIO, IOBase
 from typing import Any, Dict, Tuple

-import spack.config
-
 #: Map (mirror name, method) tuples to s3 client instances.
 s3_client_cache: Dict[Tuple[str, str], Any] = dict()


-def get_s3_session(url, method="fetch"):
+def get_s3_session(url, method="fetch", verify_ssl=True):
     # import boto and friends as late as possible. We don't want to require boto as a
     # dependency unless the user actually wants to access S3 mirrors.
     from boto3 import Session
@@ -62,7 +60,7 @@ def get_mirror_url(mirror):
         return s3_client_cache[key]

     # Otherwise, create it.
-    s3_connection, s3_client_args = get_mirror_s3_connection_info(mirror, method)
+    s3_connection, s3_client_args = get_mirror_s3_connection_info(mirror, method, verify_ssl)

     session = Session(**s3_connection)
     # if no access credentials provided above, then access anonymously
@@ -84,13 +82,13 @@ def _parse_s3_endpoint_url(endpoint_url):
     return endpoint_url


-def get_mirror_s3_connection_info(mirror, method):
+def get_mirror_s3_connection_info(mirror, method, verify_ssl):
     """Create s3 config for session/client from a Mirror instance (or just set defaults
     when no mirror is given.)"""
     from spack.mirror import Mirror

     s3_connection = {}
-    s3_client_args = {"use_ssl": spack.config.get("config:verify_ssl")}
+    s3_client_args = {"use_ssl": verify_ssl}

     # access token
     if isinstance(mirror, Mirror):
@@ -150,9 +148,9 @@ def __getattr__(self, key):
         return getattr(self.raw, key)


-def _s3_open(url, method="GET"):
+def _s3_open(url, method="GET", verify_ssl=True):
     parsed = urllib.parse.urlparse(url)
-    s3 = get_s3_session(url, method="fetch")
+    s3 = get_s3_session(url, method="fetch", verify_ssl=verify_ssl)

     bucket = parsed.netloc
     key = parsed.path
@@ -182,7 +180,13 @@ def _s3_open(url, method="GET"):


 class UrllibS3Handler(urllib.request.BaseHandler):
+    def __init__(self, verify_ssl=True):
+        super().__init__()
+        self.verify_ssl = verify_ssl
+
     def s3_open(self, req):
         orig_url = req.get_full_url()
-        url, headers, stream = _s3_open(orig_url, method=req.get_method())
+        url, headers, stream = _s3_open(
+            orig_url, method=req.get_method(), verify_ssl=self.verify_ssl
+        )
         return urllib.response.addinfourl(stream, headers, url)
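`UrllibS3Handler` now carries its SSL mode as instance state, which is what lets `_urlopen()` in the next file build one handler per opener. A minimal wiring sketch (the bucket URL is illustrative; actually opening it would require boto3 and credentials):

```python
import urllib.request
from spack.util.s3 import UrllibS3Handler

# One handler per SSL mode, mirroring s3_with_ssl / s3_no_ssl in spack.util.web.
verified = urllib.request.build_opener(UrllibS3Handler(verify_ssl=True))
unverified = urllib.request.build_opener(UrllibS3Handler(verify_ssl=False))

# unverified.open("s3://my-bucket/subdirectory/my-file") would route through
# s3_open() above and on to _s3_open() with the chosen verify_ssl value.
```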
@@ -7,7 +7,7 @@
 import json
 from typing import Any, Dict, Optional

-import spack.error
+from .error import UtilityError

 __all__ = ["load", "dump", "SpackJSONError"]

@@ -29,7 +29,7 @@ def dump(data: Dict, stream: Optional[Any] = None) -> Optional[str]:
     return None


-class SpackJSONError(spack.error.SpackError):
+class SpackJSONError(UtilityError):
     """Raised when there are issues with JSON parsing."""

     def __init__(self, msg: str, json_error: BaseException):
@@ -27,7 +27,7 @@

 from llnl.util.tty.color import cextra, clen, colorize

-import spack.error
+from .error import UtilityError

 # Only export load and dump
 __all__ = ["load", "dump", "SpackYAMLError"]
@@ -493,7 +493,7 @@ def name_mark(name):
     return error.StringMark(name, None, None, None, None, None)


-class SpackYAMLError(spack.error.SpackError):
+class SpackYAMLError(UtilityError):
     """Raised when there are issues with YAML parsing."""

     def __init__(self, msg, yaml_error):
@@ -25,8 +25,7 @@
 from llnl.util import lang, tty
 from llnl.util.filesystem import mkdirp, rename, working_dir

-import spack.config
-import spack.error
+import spack.util.error
 import spack.util.url as url_util

 from .executable import CommandNotFoundError, which
@@ -60,24 +59,24 @@ def http_error_default(self, req, fp, code, msg, hdrs):


 def _urlopen():
-    s3 = UrllibS3Handler()
+    s3_with_ssl = UrllibS3Handler(verify_ssl=True)
+    s3_no_ssl = UrllibS3Handler(verify_ssl=False)
     gcs = GCSHandler()
     error_handler = SpackHTTPDefaultErrorHandler()

     # One opener with HTTPS ssl enabled
     with_ssl = build_opener(
-        s3, gcs, HTTPSHandler(context=ssl.create_default_context()), error_handler
+        s3_with_ssl, gcs, HTTPSHandler(context=ssl.create_default_context()), error_handler
     )

     # One opener with HTTPS ssl disabled
     without_ssl = build_opener(
-        s3, gcs, HTTPSHandler(context=ssl._create_unverified_context()), error_handler
+        s3_no_ssl, gcs, HTTPSHandler(context=ssl._create_unverified_context()), error_handler
     )

     # And dynamically dispatch based on the verify_ssl argument.
-    def dispatch_open(fullurl, data=None, timeout=None):
-        opener = with_ssl if spack.config.get("config:verify_ssl", True) else without_ssl
-        timeout = timeout or spack.config.get("config:connect_timeout", 10)
+    def dispatch_open(fullurl, data=None, timeout=10, verify_ssl=True):
+        opener = with_ssl if verify_ssl else without_ssl
         return opener.open(fullurl, data, timeout)

     return dispatch_open
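After this hunk the opener choice is a pure function of the arguments, so the config lookup happens wherever `urlopen` is called, as the `url_exists` and `read_from_url` changes elsewhere in this branch show. A hedged call sketch:

```python
import spack.config
from spack.util.web import urlopen  # the dispatch_open built by _urlopen()

response = urlopen(
    "https://mirror.spack.io/",  # example URL
    timeout=spack.config.get("config:connect_timeout", 10),
    verify_ssl=spack.config.get("config:verify_ssl", True),
)
print(response.status)
```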
@@ -134,7 +133,7 @@ def handle_starttag(self, tag, attrs):
                     self.base_url = val


-def read_from_url(url, accept_content_type=None):
+def read_from_url(url, accept_content_type=None, verify_ssl=True, timeout=10, **kwargs):
     if isinstance(url, str):
         url = urllib.parse.urlparse(url)

@@ -142,9 +141,9 @@ def read_from_url(url, accept_content_type=None):
     request = Request(url.geturl(), headers={"User-Agent": SPACK_USER_AGENT})

     try:
-        response = urlopen(request)
+        response = urlopen(request, **kwargs)
     except URLError as err:
-        raise SpackWebError("Download failed: {}".format(str(err)))
+        raise WebError("Download failed: {}".format(str(err)))

     if accept_content_type:
         try:

@@ -164,7 +163,9 @@ def read_from_url(url, accept_content_type=None):
     return response.geturl(), response.headers, response


-def push_to_url(local_file_path, remote_path, keep_original=True, extra_args=None):
+def push_to_url(
+    local_file_path, remote_path, keep_original=True, extra_args=None, verify_ssl=True
+):
     remote_url = urllib.parse.urlparse(remote_path)
     if remote_url.scheme == "file":
         remote_file_path = url_util.local_file_path(remote_url)

@@ -193,7 +194,7 @@ def push_to_url(local_file_path, remote_path, keep_original=True, extra_args=Non
         while remote_path.startswith("/"):
             remote_path = remote_path[1:]

-        s3 = get_s3_session(remote_url, method="push")
+        s3 = get_s3_session(remote_url, method="push", verify_ssl=verify_ssl)
         s3.upload_file(local_file_path, remote_url.netloc, remote_path, ExtraArgs=extra_args)

         if not keep_original:

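read_from_url and push_to_url now take the SSL and timeout settings as parameters rather than reading them from config internally. A hedged caller sketch under that assumption (the URL and content type are made up for illustration):

    import spack.config
    import spack.util.web as web_util

    verify_ssl = spack.config.get("config:verify_ssl", True)
    timeout = spack.config.get("config:connect_timeout", 10)

    # read_from_url returns (final_url, headers, response) per the hunk above.
    url, headers, response = web_util.read_from_url(
        "https://mirror.example.com/index.json",  # hypothetical URL
        accept_content_type="application/json",
        verify_ssl=verify_ssl,
        timeout=timeout,
    )
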
@@ -211,23 +212,23 @@ def push_to_url(local_file_path, remote_path, keep_original=True, extra_args=Non
     )


-def base_curl_fetch_args(url, timeout=0):
+def base_curl_fetch_args(url, verify_ssl=True, timeout=0):
     """Return the basic fetch arguments typically used in calls to curl.

     The arguments include those for ensuring behaviors such as failing on
     errors for codes over 400, printing HTML headers, resolving 3xx redirects,
     status or failure handling, and connection timeouts.

-    It also uses the following configuration option to set an additional
+    It also uses the following input arguments to set an additional
     argument as needed:

-    * config:connect_timeout (int): connection timeout
-    * config:verify_ssl (str): Perform SSL verification
+    * timeout (int): connection timeout
+    * verify_ssl (bool): Perform SSL verification

     Arguments:
         url (str): URL whose contents will be fetched
-        timeout (int): Connection timeout, which is only used if higher than
-            config:connect_timeout
+        timeout (int): Connection timeout
+        verify_ssl (bool): Performing SSL verification

     Returns (list): list of argument strings
     """

@@ -238,7 +239,7 @@ def base_curl_fetch_args(url, timeout=0):
         "-L",  # resolve 3xx redirects
         url,
     ]
-    if not spack.config.get("config:verify_ssl"):
+    if not verify_ssl:
         curl_args.append("-k")

     if sys.stdout.isatty() and tty.msg_enabled():

@@ -246,11 +247,8 @@ def base_curl_fetch_args(url, timeout=0):
     else:
         curl_args.append("-sS")  # show errors if fail

-    connect_timeout = spack.config.get("config:connect_timeout", 10)
-    if timeout:
-        connect_timeout = max(int(connect_timeout), int(timeout))
-    if connect_timeout > 0:
-        curl_args.extend(["--connect-timeout", str(connect_timeout)])
+    if timeout > 0:
+        curl_args.extend(["--connect-timeout", str(timeout)])

     return curl_args

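The timeout handling is simplified: rather than reconciling an explicit timeout with config:connect_timeout, the function now uses whatever positive timeout the caller supplies. A small sketch of the resulting call (the URL is hypothetical; the full flag list also depends on tty state and arguments outside these hunks):

    import spack.util.web as web_util

    args = web_util.base_curl_fetch_args(
        "https://example.com/file.tar.gz",  # hypothetical URL
        verify_ssl=False,  # adds "-k"
        timeout=30,        # adds ["--connect-timeout", "30"]
    )
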
@@ -266,11 +264,11 @@ def check_curl_code(returncode):
     if returncode != 0:
         if returncode == 22:
             # This is a 404. Curl will print the error.
-            raise spack.error.FetchError("URL was not found!")
+            raise WebError("URL was not found!")

         if returncode == 60:
             # This is a certificate error. Suggest spack -k
-            raise spack.error.FetchError(
+            raise WebError(
                 "Curl was unable to fetch due to invalid certificate. "
                 "This is either an attack, or your cluster's SSL "
                 "configuration is bad. If you believe your SSL "

@@ -279,7 +277,7 @@ def check_curl_code(returncode):
                 "Use this at your own risk."
             )

-        raise spack.error.FetchError("Curl failed with error {0}".format(returncode))
+        raise WebError("Curl failed with error {0}".format(returncode))


 def _curl(curl=None):

@@ -288,11 +286,11 @@ def _curl(curl=None):
         curl = which("curl", required=True)
     except CommandNotFoundError as exc:
         tty.error(str(exc))
-        raise spack.error.FetchError("Missing required curl fetch method")
+        raise WebError("Missing required curl fetch method")
     return curl


-def fetch_url_text(url, curl=None, dest_dir="."):
+def fetch_url_text(url, curl=None, dest_dir=".", fetch_method=None):
     """Retrieves text-only URL content using the configured fetch method.
     It determines the fetch method from:

@ -316,19 +314,18 @@ def fetch_url_text(url, curl=None, dest_dir="."):
|
|||||||
Raises FetchError if the curl returncode indicates failure
|
Raises FetchError if the curl returncode indicates failure
|
||||||
"""
|
"""
|
||||||
if not url:
|
if not url:
|
||||||
raise spack.error.FetchError("A URL is required to fetch its text")
|
raise WebError("A URL is required to fetch its text")
|
||||||
|
|
||||||
tty.debug("Fetching text at {0}".format(url))
|
tty.debug("Fetching text at {0}".format(url))
|
||||||
|
|
||||||
filename = os.path.basename(url)
|
filename = os.path.basename(url)
|
||||||
path = os.path.join(dest_dir, filename)
|
path = os.path.join(dest_dir, filename)
|
||||||
|
|
||||||
fetch_method = spack.config.get("config:url_fetch_method")
|
|
||||||
tty.debug("Using '{0}' to fetch {1} into {2}".format(fetch_method, url, path))
|
tty.debug("Using '{0}' to fetch {1} into {2}".format(fetch_method, url, path))
|
||||||
if fetch_method == "curl":
|
if fetch_method == "curl":
|
||||||
curl_exe = _curl(curl)
|
curl_exe = _curl(curl)
|
||||||
if not curl_exe:
|
if not curl_exe:
|
||||||
raise spack.error.FetchError("Missing required fetch method (curl)")
|
raise WebError("Missing required fetch method (curl)")
|
||||||
|
|
||||||
curl_args = ["-O"]
|
curl_args = ["-O"]
|
||||||
curl_args.extend(base_curl_fetch_args(url))
|
curl_args.extend(base_curl_fetch_args(url))
|
||||||
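fetch_url_text no longer looks up config:url_fetch_method itself; the method arrives as the new fetch_method parameter. A sketch of an updated call site (reading the option from config at the call site is an assumption consistent with the rest of this branch):

    import spack.config
    import spack.util.web as web_util

    path = web_util.fetch_url_text(
        "https://example.com/NOTICE.txt",  # hypothetical URL
        dest_dir=".",
        fetch_method=spack.config.get("config:url_fetch_method", "urllib"),
    )
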
@ -346,9 +343,7 @@ def fetch_url_text(url, curl=None, dest_dir="."):
|
|||||||
|
|
||||||
returncode = response.getcode()
|
returncode = response.getcode()
|
||||||
if returncode and returncode != 200:
|
if returncode and returncode != 200:
|
||||||
raise spack.error.FetchError(
|
raise WebError("Urllib failed with error code {0}".format(returncode))
|
||||||
"Urllib failed with error code {0}".format(returncode)
|
|
||||||
)
|
|
||||||
|
|
||||||
output = codecs.getreader("utf-8")(response).read()
|
output = codecs.getreader("utf-8")(response).read()
|
||||||
if output:
|
if output:
|
||||||
@ -358,13 +353,13 @@ def fetch_url_text(url, curl=None, dest_dir="."):
|
|||||||
|
|
||||||
return path
|
return path
|
||||||
|
|
||||||
except SpackWebError as err:
|
except WebError as err:
|
||||||
raise spack.error.FetchError("Urllib fetch failed to verify url: {0}".format(str(err)))
|
raise WebError("Urllib fetch failed to verify url: {0}".format(str(err)))
|
||||||
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def url_exists(url, curl=None):
|
def url_exists(url, curl=None, fetch_method=None, verify_ssl=True, timeout=10):
|
||||||
"""Determines whether url exists.
|
"""Determines whether url exists.
|
||||||
|
|
||||||
A scheme-specific process is used for Google Storage (`gs`) and Amazon
|
A scheme-specific process is used for Google Storage (`gs`) and Amazon
|
||||||
@@ -382,9 +377,7 @@ def url_exists(url, curl=None):
     url_result = urllib.parse.urlparse(url)

     # Use curl if configured to do so
-    use_curl = spack.config.get(
-        "config:url_fetch_method", "urllib"
-    ) == "curl" and url_result.scheme not in ("gs", "s3")
+    use_curl = fetch_method == "curl" and url_result.scheme not in ("gs", "s3")
     if use_curl:
         curl_exe = _curl(curl)
         if not curl_exe:

@@ -393,7 +386,7 @@ def url_exists(url, curl=None):
         # Telling curl to fetch the first byte (-r 0-0) is supposed to be
         # portable.
         curl_args = ["--stderr", "-", "-s", "-f", "-r", "0-0", url]
-        if not spack.config.get("config:verify_ssl"):
+        if not verify_ssl:
            curl_args.append("-k")
        _ = curl_exe(*curl_args, fail_on_error=False, output=os.devnull)
        return curl_exe.returncode == 0

@@ -401,8 +394,7 @@ def url_exists(url, curl=None):
     # Otherwise use urllib.
     try:
         urlopen(
-            Request(url, method="HEAD", headers={"User-Agent": SPACK_USER_AGENT}),
-            timeout=spack.config.get("config:connect_timeout", 10),
+            Request(url, method="HEAD", headers={"User-Agent": SPACK_USER_AGENT}), timeout=timeout
         )
         return True
     except URLError as e:

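url_exists follows the same pattern: fetch_method, verify_ssl, and timeout are all explicit parameters with defaults. A hedged caller sketch (the S3 URL is made up):

    import spack.config
    import spack.util.web as web_util

    exists = web_util.url_exists(
        "s3://my-bucket/build_cache/index.json",  # hypothetical URL
        fetch_method=spack.config.get("config:url_fetch_method", "urllib"),
        verify_ssl=spack.config.get("config:verify_ssl", True),
        timeout=spack.config.get("config:connect_timeout", 10),
    )
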
@@ -419,7 +411,7 @@ def _debug_print_delete_results(result):
         tty.debug("Failed to delete {0} ({1})".format(e["Key"], e["Message"]))


-def remove_url(url, recursive=False):
+def remove_url(url, recursive=False, verify_ssl=True):
     url = urllib.parse.urlparse(url)

     local_path = url_util.local_file_path(url)

@@ -432,7 +424,7 @@ def remove_url(url, recursive=False):

     if url.scheme == "s3":
         # Try to find a mirror for potential connection information
-        s3 = get_s3_session(url, method="push")
+        s3 = get_s3_session(url, method="push", verify_ssl=verify_ssl)
         bucket = url.netloc
         if recursive:
             # Because list_objects_v2 can only return up to 1000 items

@@ -528,7 +520,7 @@ def _iter_local_prefix(path):
            yield os.path.relpath(os.path.join(root, f), path)


-def list_url(url, recursive=False):
+def list_url(url, recursive=False, verify_ssl=True):
     url = urllib.parse.urlparse(url)
     local_path = url_util.local_file_path(url)

@@ -543,7 +535,7 @@ def list_url(url, recursive=False):
        ]

     if url.scheme == "s3":
-        s3 = get_s3_session(url, method="fetch")
+        s3 = get_s3_session(url, method="fetch", verify_ssl=verify_ssl)
        if recursive:
            return list(_iter_s3_prefix(s3, url))

@@ -771,11 +763,11 @@ def parse_etag(header_value):
     return valid.group(1) if valid else None


-class SpackWebError(spack.error.SpackError):
+class WebError(spack.util.error.UtilityError):
     """Superclass for Spack web spidering errors."""


-class NoNetworkConnectionError(SpackWebError):
+class NoNetworkConnectionError(WebError):
     """Raised when an operation can't get an internet connection."""

     def __init__(self, message, url):

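With SpackWebError renamed to WebError and rebased onto spack.util.error.UtilityError, the web utilities no longer raise spack.error exceptions at all. Callers that used to catch spack.error.FetchError around these helpers would switch to WebError; a minimal sketch:

    import spack.util.web as web_util

    returncode = 22  # curl's exit code when -f sees an HTTP error such as 404
    try:
        web_util.check_curl_code(returncode)
    except web_util.WebError as e:
        # Failures now surface as WebError (a UtilityError), not FetchError.
        print("fetch failed: {0}".format(e))
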
@@ -11,8 +11,8 @@
 import llnl.util.tty as tty

 import spack.filesystem_view
+import spack.package_prefs as pp
 import spack.store
-import spack.util.file_permissions as fp
 import spack.util.spack_json as sjson
 from spack.package_base import spack_times_log

@@ -68,7 +68,7 @@ def write_manifest(spec):
     with open(manifest_file, "w") as f:
         sjson.dump(manifest, f)

-    fp.set_permissions_by_spec(manifest_file, spec)
+    pp.set_permissions_by_spec(manifest_file, spec)


 def check_entry(path, data):