isolate util/s3

commit 0b92a19620
parent ee36214f83
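The pattern throughout the hunks below: callers now read config:url_fetch_method, config:verify_ssl, and config:connect_timeout themselves and pass them to the web utilities, instead of spack.util.web and spack.util.s3 querying spack.config internally. A minimal sketch of the new calling convention (the index URL is a placeholder, not taken from this commit):

    import spack.config
    import spack.util.web as web_util

    # Hypothetical index URL, for illustration only.
    index_url = "https://mirror.example.com/build_cache/index.json"

    found = web_util.url_exists(
        index_url,
        fetch_method=spack.config.get("config:url_fetch_method", "urllib"),
        verify_ssl=spack.config.get("config:verify_ssl"),
        timeout=spack.config.get("config:connect_timeout", 10),
    )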
@@ -488,9 +488,9 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}):
         if scheme != "oci" and not web_util.url_exists(
             url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, "index.json"),
-            fetch_method=spack.config.get('config:url_fetch_method', 'urllib'),
-            verify_ssl=spack.config.get('config:verify_ssl'),
-            timeout=spack.config.get('config:connect_timeout', 10)
+            fetch_method=spack.config.get("config:url_fetch_method", "urllib"),
+            verify_ssl=spack.config.get("config:verify_ssl"),
+            timeout=spack.config.get("config:connect_timeout", 10),
         ):
             return False
@@ -536,8 +536,9 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}):
 def binary_index_location():
     """Set up a BinaryCacheIndex for remote buildcache dbs in the user's homedir."""
     cache_root = os.path.join(misc_cache_location(), "indices")
-    return spack.util.path.canonicalize_path(cache_root,
-                                             replacements=spack.paths.path_replacements())
+    return spack.util.path.canonicalize_path(
+        cache_root, replacements=spack.paths.path_replacements()
+    )


 #: Default binary cache index instance
@@ -909,6 +910,7 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di
         url_util.join(cache_prefix, "index.json"),
         keep_original=False,
         extra_args={"ContentType": "application/json", "CacheControl": "no-cache"},
+        verify_ssl=spack.config.get("config:verify_ssl", True),
     )

     # Push the hash
@@ -917,6 +919,7 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di
         url_util.join(cache_prefix, "index.json.hash"),
         keep_original=False,
         extra_args={"ContentType": "text/plain", "CacheControl": "no-cache"},
+        verify_ssl=spack.config.get("config:verify_ssl", True),
     )

@@ -984,7 +987,7 @@ def url_read_method(url):
             _, _, spec_file = web_util.read_from_url(
                 url,
                 verify_ssl=spack.config.get("config:verify_ssl", True),
-                timeout=spack.config.get("config:connect_timeout", 10)
+                timeout=spack.config.get("config:connect_timeout", 10),
             )
             contents = codecs.getreader("utf-8")(spec_file).read()
         except (URLError, web_util.WebError) as url_err:
@@ -995,7 +998,9 @@ def url_read_method(url):
     try:
         file_list = [
             url_util.join(cache_prefix, entry)
-            for entry in web_util.list_url(cache_prefix)
+            for entry in web_util.list_url(
+                cache_prefix, verify_ssl=spack.config.get("config:verify_ssl", True)
+            )
             if entry.endswith("spec.json") or entry.endswith("spec.json.sig")
         ]
         read_fn = url_read_method
@@ -1093,7 +1098,9 @@ def generate_key_index(key_prefix, tmpdir=None):
     try:
         fingerprints = (
             entry[:-4]
-            for entry in web_util.list_url(key_prefix, recursive=False)
+            for entry in web_util.list_url(
+                key_prefix, recursive=False, verify_ssl=spack.config.get("config:verify_ssl", True)
+            )
             if entry.endswith(".pub")
         )
     except KeyError as inst:
@@ -1130,6 +1137,7 @@ def generate_key_index(key_prefix, tmpdir=None):
             url_util.join(key_prefix, "index.json"),
             keep_original=False,
             extra_args={"ContentType": "application/json"},
+            verify_ssl=spack.config.get("config:verify_ssl", True),
         )
     except Exception as err:
         msg = "Encountered problem pushing key index to {0}: {1}".format(key_prefix, err)
@@ -1373,20 +1381,18 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
     spackfile_path = os.path.join(cache_prefix, tarball_path_name(spec, ".spack"))
     remote_spackfile_path = url_util.join(out_url, os.path.relpath(spackfile_path, stage_dir))

-    fetch_method=spack.config.get('config:url_fetch_method', 'urllib'),
-    verify_ssl=spack.config.get('config:verify_ssl'),
-    timeout=spack.config.get('config:connect_timeout', 10)
+    fetch_method = spack.config.get("config:url_fetch_method", "urllib")
+    verify_ssl = spack.config.get("config:verify_ssl")
+    timeout = spack.config.get("config:connect_timeout", 10)

-    url_args = {
-        'fetch_method': fetch_method,
-        'verify_ssl': verify_ssl,
-        'timeout': timeout
-    }
+    url_args = {"fetch_method": fetch_method, "verify_ssl": verify_ssl, "timeout": timeout}

     mkdirp(tarfile_dir)
     if web_util.url_exists(remote_spackfile_path, **url_args):
         if options.force:
-            web_util.remove_url(remote_spackfile_path)
+            web_util.remove_url(
+                remote_spackfile_path, verify_ssl=spack.config.get("config:verify_ssl", True)
+            )
         else:
             raise NoOverwriteException(url_util.format(remote_spackfile_path))
@@ -1406,10 +1412,11 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option

     # If force and exists, overwrite. Otherwise raise exception on collision.
     if options.force:
+        verify_ssl = spack.config.get("config:verify_ssl", True)
         if web_util.url_exists(remote_specfile_path, **url_args):
-            web_util.remove_url(remote_specfile_path)
+            web_util.remove_url(remote_specfile_path, verify_ssl=verify_ssl)
         if web_util.url_exists(remote_signed_specfile_path, **url_args):
-            web_util.remove_url(remote_signed_specfile_path)
+            web_util.remove_url(remote_signed_specfile_path, verify_ssl=verify_ssl)
     elif web_util.url_exists(remote_specfile_path, **url_args) or web_util.url_exists(
         remote_signed_specfile_path, **url_args
     ):
@@ -1445,11 +1452,17 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
         sign_specfile(key, options.force, specfile_path)

     # push tarball and signed spec json to remote mirror
-    web_util.push_to_url(spackfile_path, remote_spackfile_path, keep_original=False)
+    web_util.push_to_url(
+        spackfile_path,
+        remote_spackfile_path,
+        keep_original=False,
+        verify_ssl=spack.config.get("config:verify_ssl", True),
+    )
     web_util.push_to_url(
         signed_specfile_path if not options.unsigned else specfile_path,
         remote_signed_specfile_path if not options.unsigned else remote_specfile_path,
         keep_original=False,
+        verify_ssl=spack.config.get("config:verify_ssl", True),
     )

     # push the key to the build cache's _pgp directory so it can be
@@ -2230,7 +2243,7 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
         tty.debug("Verified SHA256 checksum of the build cache")

     # don't print long padded paths while extracting/relocating binaries
-    padding = spack.config.get('config:install_tree:padded_length', None)
+    padding = spack.config.get("config:install_tree:padded_length", None)
     with spack.util.path.filter_padding(padding=padding):
         tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
         extract_tarball(spec, download_result, unsigned, force)
@@ -2273,7 +2286,7 @@ def try_direct_fetch(spec, mirrors=None):
             _, _, fs = web_util.read_from_url(
                 buildcache_fetch_url_signed_json,
                 verify_ssl=spack.config.get("config:verify_ssl", True),
-                timeout=spack.config.get("config:connect_timeout", 10)
+                timeout=spack.config.get("config:connect_timeout", 10),
             )
             specfile_is_signed = True
         except (URLError, web_util.WebError, HTTPError) as url_err:
@@ -2281,7 +2294,7 @@ def try_direct_fetch(spec, mirrors=None):
             _, _, fs = web_util.read_from_url(
                 buildcache_fetch_url_json,
                 verify_ssl=spack.config.get("config:verify_ssl", True),
-                timeout=spack.config.get("config:connect_timeout", 10)
+                timeout=spack.config.get("config:connect_timeout", 10),
             )
         except (URLError, web_util.WebError, HTTPError) as url_err_x:
             tty.debug(
@@ -2389,17 +2402,17 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
         try:
             _, _, json_file = web_util.read_from_url(
                 keys_index,
-                verify_ssl=spack.config.get('config:verify_ssl', True),
-                timeout=spack.config.get('config:connect_timeout', 10)
+                verify_ssl=spack.config.get("config:verify_ssl", True),
+                timeout=spack.config.get("config:connect_timeout", 10),
             )
             json_index = sjson.load(codecs.getreader("utf-8")(json_file))
         except (URLError, web_util.WebError) as url_err:
             if web_util.url_exists(
-                keys_index,
-                fetch_method=spack.config.get('config:url_fetch_method', 'urllib'),
-                verify_ssl=spack.config.get('config:verify_ssl'),
-                timeout=spack.config.get('config:connect_timeout', 10)
-            ):
+                keys_index,
+                fetch_method=spack.config.get("config:url_fetch_method", "urllib"),
+                verify_ssl=spack.config.get("config:verify_ssl"),
+                timeout=spack.config.get("config:connect_timeout", 10),
+            ):
                 err_msg = [
                     "Unable to find public keys in {0},",
                     " caught exception attempting to read from {1}.",
@@ -2489,7 +2502,10 @@ def push_keys(*mirrors, **kwargs):
             # uploaded to the mirror.
             if not keys_local:
                 spack.util.web.push_to_url(
-                    export_target, url_util.join(keys_url, filename), keep_original=False
+                    export_target,
+                    url_util.join(keys_url, filename),
+                    keep_original=False,
+                    verify_ssl=spack.config.get("config:verify_ssl", True),
                 )

         if regenerate_index:
@@ -2525,9 +2541,9 @@ def needs_rebuild(spec, mirror_url):
     # need to rebuild.
     return not web_util.url_exists(
         specfile_path,
-        fetch_method=spack.config.get('config:url_fetch_method', 'urllib'),
-        verify_ssl=spack.config.get('config:verify_ssl'),
-        timeout=spack.config.get('config:connect_timeout', 10)
+        fetch_method=spack.config.get("config:url_fetch_method", "urllib"),
+        verify_ssl=spack.config.get("config:verify_ssl"),
+        timeout=spack.config.get("config:connect_timeout", 10),
     )

@@ -2694,9 +2710,11 @@ def get_remote_hash(self):
         # Failure to fetch index.json.hash is not fatal
         url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
         try:
-            response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers),
-                verify_ssl=spack.config.get("config:verify_ssl", True),
-                timeout=spack.config.get("config:connect_timeout", 10))
+            response = self.urlopen(
+                urllib.request.Request(url_index_hash, headers=self.headers),
+                verify_ssl=spack.config.get("config:verify_ssl", True),
+                timeout=spack.config.get("config:connect_timeout", 10),
+            )
         except urllib.error.URLError:
             return None

@@ -2718,9 +2736,11 @@ def conditional_fetch(self) -> FetchIndexResult:
         url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")

         try:
-            response = self.urlopen(urllib.request.Request(url_index, headers=self.headers),
-                verify_ssl=spack.config.get("config:verify_ssl", True),
-                timeout=spack.config.get("config:connect_timeout", 10))
+            response = self.urlopen(
+                urllib.request.Request(url_index, headers=self.headers),
+                verify_ssl=spack.config.get("config:verify_ssl", True),
+                timeout=spack.config.get("config:connect_timeout", 10),
+            )
         except urllib.error.URLError as e:
             raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e

@@ -2768,9 +2788,11 @@ def conditional_fetch(self) -> FetchIndexResult:
         }

         try:
-            response = self.urlopen(urllib.request.Request(url, headers=headers),
-                verify_ssl=spack.config.get("config:verify_ssl", True),
-                timeout=spack.config.get("config:connect_timeout", 10))
+            response = self.urlopen(
+                urllib.request.Request(url, headers=headers),
+                verify_ssl=spack.config.get("config:verify_ssl", True),
+                timeout=spack.config.get("config:connect_timeout", 10),
+            )
         except urllib.error.HTTPError as e:
             if e.getcode() == 304:
                 # Not modified; that means fresh.
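For reference, the index fetchers in the last three hunks forward the same two settings on every request. A compact sketch of that call shape, with the opener passed in as a parameter since the real fetchers invoke it through self.urlopen:

    import urllib.request
    import spack.config

    def fetch_index_response(urlopen, url, headers):
        # Sketch only: mirrors the self.urlopen(...) calls in the hunks above.
        return urlopen(
            urllib.request.Request(url, headers=headers),
            verify_ssl=spack.config.get("config:verify_ssl", True),
            timeout=spack.config.get("config:connect_timeout", 10),
        )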
@@ -1759,7 +1759,7 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
         gpg_path = web_util.fetch_url_text(
             gpg_url,
             dest_dir=os.path.join(work_dir, "_pgp"),
-            fetch_method=spack.config.get('config:url_fetch_method')
+            fetch_method=spack.config.get("config:url_fetch_method"),
         )
         rel_gpg_path = gpg_path.replace(work_dir, "").lstrip(os.path.sep)

@@ -2117,7 +2117,11 @@ def write_broken_spec(url, pkg_name, stack_name, job_url, pipeline_url, spec_dic
         with open(file_path, "w") as fd:
             fd.write(syaml.dump(broken_spec_details))
         web_util.push_to_url(
-            file_path, url, keep_original=False, extra_args={"ContentType": "text/plain"}
+            file_path,
+            url,
+            keep_original=False,
+            extra_args={"ContentType": "text/plain"},
+            verify_ssl=spack.config.get("config:verify_ssl", True),
         )
     except Exception as err:
         # If there is an S3 error (e.g., access denied or connection
@@ -2136,8 +2140,8 @@ def read_broken_spec(broken_spec_url):
     try:
         _, _, fs = web_util.read_from_url(
             broken_spec_url,
-            verify_ssl=cfg.get('config:verify_ssl', True),
-            timeout=cfg.get('config:connect_timeout', 10)
+            verify_ssl=cfg.get("config:verify_ssl", True),
+            timeout=cfg.get("config:connect_timeout", 10),
         )
     except (URLError, web_util.WebError, HTTPError):
         tty.warn("Unable to read broken spec from {0}".format(broken_spec_url))
@@ -918,7 +918,12 @@ def copy_buildcache_file(src_url, dest_url, local_path=None):
         try:
             temp_stage.create()
             temp_stage.fetch()
-            web_util.push_to_url(local_path, dest_url, keep_original=True)
+            web_util.push_to_url(
+                local_path,
+                dest_url,
+                keep_original=True,
+                verify_ssl=spack.config.get("config:verify_ssl", True),
+            )
         except spack.error.FetchError as e:
             # Expected, since we have to try all the possible extensions
             tty.debug("no such file: {0}".format(src_url))
@@ -730,13 +730,17 @@ def ci_rebuild(args):
         broken_specs_url = ci_config["broken-specs-url"]
         just_built_hash = job_spec.dag_hash()
         broken_spec_path = url_util.join(broken_specs_url, just_built_hash)
-        if web_util.url_exists(broken_spec_path,
-            fetch_method=cfg.get('config:url_fetch_method', 'urllib'),
-            verify_ssl=cfg.get('config:verify_ssl'),
-            timeout=cfg.get('config:connect_timeout', 10)):
+        if web_util.url_exists(
+            broken_spec_path,
+            fetch_method=cfg.get("config:url_fetch_method", "urllib"),
+            verify_ssl=cfg.get("config:verify_ssl"),
+            timeout=cfg.get("config:connect_timeout", 10),
+        ):
             tty.msg("Removing {0} from the list of broken specs".format(broken_spec_path))
             try:
-                web_util.remove_url(broken_spec_path)
+                web_util.remove_url(
+                    broken_spec_path, verify_ssl=cfg.get("config:verify_ssl", True)
+                )
             except Exception as err:
                 # If there is an S3 error (e.g., access denied or connection
                 # error), the first non boto-specific class in the exception
@@ -495,7 +495,9 @@ def mirror_destroy(args):
     elif args.mirror_url:
         mirror_url = args.mirror_url

-    web_util.remove_url(mirror_url, recursive=True)
+    web_util.remove_url(
+        mirror_url, recursive=True, verify_ssl=spack.config.get("config:verify_ssl", True)
+    )


 def mirror(parser, args):
@@ -302,11 +302,11 @@ def fetch(self):
         errors = []
         for url in self.candidate_urls:
             if not web_util.url_exists(
-                url,
-                fetch_method=spack.config.get('config:url_fetch_method', 'urllib'),
-                verify_ssl=spack.config.get('config:verify_ssl'),
-                timeout=spack.config.get('config:connect_timeout', 10)
-            ):
+                url,
+                fetch_method=spack.config.get("config:url_fetch_method", "urllib"),
+                verify_ssl=spack.config.get("config:verify_ssl"),
+                timeout=spack.config.get("config:connect_timeout", 10),
+            ):
                 tty.debug("URL does not exist: " + url)
                 continue

@@ -345,8 +345,8 @@ def _fetch_urllib(self, url):
         try:
             url, headers, response = web_util.read_from_url(
                 url,
-                verify_ssl=spack.config.get('config:verify_ssl', True),
-                timeout=spack.config.get('config:connect_timeout', 10)
+                verify_ssl=spack.config.get("config:verify_ssl", True),
+                timeout=spack.config.get("config:connect_timeout", 10),
             )
         except web_util.WebError as e:
             # clean up archive on failure.
@@ -394,14 +394,15 @@ def _fetch_curl(self, url):

         timeout = self.extra_options.get("timeout")

-        connect_timeout = spack.config.get('config:connect_timeout', 10)
+        connect_timeout = spack.config.get("config:connect_timeout", 10)
         if timeout:
             timeout = max(int(timeout), int(connect_timeout))
         else:
             timeout = int(connect_timeout)

-        base_args = web_util.base_curl_fetch_args(url, timeout=timeout,
-            verify_ssl=spack.config.get('config:verify_ssl'))
+        base_args = web_util.base_curl_fetch_args(
+            url, timeout=timeout, verify_ssl=spack.config.get("config:verify_ssl")
+        )
         curl_args = save_args + base_args + cookie_args

         # Run curl but grab the mime type from the http headers
@@ -479,7 +480,10 @@ def archive(self, destination):
             raise NoArchiveFileError("Cannot call archive() before fetching.")

         web_util.push_to_url(
-            self.archive_file, url_util.path_to_file_url(destination), keep_original=True
+            self.archive_file,
+            url_util.path_to_file_url(destination),
+            keep_original=True,
+            verify_ssl=spack.config.get("config:verify_ssl", True),
         )

     @_needs_stage
@@ -1348,8 +1352,8 @@ def fetch(self):
         with working_dir(self.stage.path):
             _, headers, stream = web_util.read_from_url(
                 self.url,
-                verify_ssl=spack.config.get('config:verify_ssl', True),
-                timeout=spack.config.get('config:connect_timeout', 10)
+                verify_ssl=spack.config.get("config:verify_ssl", True),
+                timeout=spack.config.get("config:connect_timeout", 10),
             )

             with open(basename, "wb") as f:
@@ -1399,8 +1403,8 @@ def fetch(self):
         with working_dir(self.stage.path):
             _, headers, stream = web_util.read_from_url(
                 self.url,
-                verify_ssl=spack.config.get('config:verify_ssl', True),
-                timeout=spack.config.get('config:connect_timeout', 10)
+                verify_ssl=spack.config.get("config:verify_ssl", True),
+                timeout=spack.config.get("config:connect_timeout", 10),
             )

             with open(basename, "wb") as f:
@@ -287,7 +287,7 @@ def test_gather_s3_information(monkeypatch, capfd):
         }
     )

-    session_args, client_args = spack.util.s3.get_mirror_s3_connection_info(mirror, "push")
+    session_args, client_args = spack.util.s3.get_mirror_s3_connection_info(mirror, "push", False)

     # Session args are used to create the S3 Session object
     assert "aws_session_token" in session_args
@@ -307,7 +307,7 @@ def test_gather_s3_information(monkeypatch, capfd):
 def test_remove_s3_url(monkeypatch, capfd):
     fake_s3_url = "s3://my-bucket/subdirectory/mirror"

-    def get_s3_session(url, method="fetch"):
+    def get_s3_session(url, method="fetch", verify_ssl=True):
         return MockS3Client()

     monkeypatch.setattr(spack.util.web, "get_s3_session", get_s3_session)
@@ -315,7 +315,9 @@ def get_s3_session(url, method="fetch"):
     current_debug_level = tty.debug_level()
     tty.set_debug(1)

-    spack.util.web.remove_url(fake_s3_url, recursive=True)
+    spack.util.web.remove_url(
+        fake_s3_url, recursive=True, verify_ssl=spack.config.get("config:verify_ssl", True)
+    )
     err = capfd.readouterr()[1]

     tty.set_debug(current_debug_level)
@@ -326,26 +328,26 @@ def get_s3_session(url, method="fetch"):


 def test_s3_url_exists(monkeypatch, capfd):
-    def get_s3_session(url, method="fetch"):
+    def get_s3_session(url, method="fetch", verify_ssl=True):
         return MockS3Client()

     monkeypatch.setattr(spack.util.s3, "get_s3_session", get_s3_session)

     fake_s3_url_exists = "s3://my-bucket/subdirectory/my-file"
     assert spack.util.web.url_exists(
-        fake_s3_url_exists,
-        fetch_method=spack.config.get('config:url_fetch_method', 'urllib'),
-        verify_ssl=spack.config.get('config:verify_ssl'),
-        timeout=spack.config.get('config:connect_timeout', 10)
-    )
+        fake_s3_url_exists,
+        fetch_method=spack.config.get("config:url_fetch_method", "urllib"),
+        verify_ssl=spack.config.get("config:verify_ssl"),
+        timeout=spack.config.get("config:connect_timeout", 10),
+    )

     fake_s3_url_does_not_exist = "s3://my-bucket/subdirectory/my-notfound-file"
     assert not spack.util.web.url_exists(
-        fake_s3_url_does_not_exist,
-        fetch_method=spack.config.get('config:url_fetch_method', 'urllib'),
-        verify_ssl=spack.config.get('config:verify_ssl'),
-        timeout=spack.config.get('config:connect_timeout', 10)
-    )
+        fake_s3_url_does_not_exist,
+        fetch_method=spack.config.get("config:url_fetch_method", "urllib"),
+        verify_ssl=spack.config.get("config:verify_ssl"),
+        timeout=spack.config.get("config:connect_timeout", 10),
+    )


 def test_s3_url_parsing():
@@ -10,13 +10,11 @@
 from io import BufferedReader, BytesIO, IOBase
 from typing import Any, Dict, Tuple

-import spack.config
-
 #: Map (mirror name, method) tuples to s3 client instances.
 s3_client_cache: Dict[Tuple[str, str], Any] = dict()


-def get_s3_session(url, method="fetch"):
+def get_s3_session(url, method="fetch", verify_ssl=True):
     # import boto and friends as late as possible. We don't want to require boto as a
     # dependency unless the user actually wants to access S3 mirrors.
     from boto3 import Session
@@ -62,7 +60,7 @@ def get_mirror_url(mirror):
         return s3_client_cache[key]

     # Otherwise, create it.
-    s3_connection, s3_client_args = get_mirror_s3_connection_info(mirror, method)
+    s3_connection, s3_client_args = get_mirror_s3_connection_info(mirror, method, verify_ssl)

     session = Session(**s3_connection)
     # if no access credentials provided above, then access anonymously
@@ -84,13 +82,13 @@ def _parse_s3_endpoint_url(endpoint_url):
     return endpoint_url


-def get_mirror_s3_connection_info(mirror, method):
+def get_mirror_s3_connection_info(mirror, method, verify_ssl):
     """Create s3 config for session/client from a Mirror instance (or just set defaults
     when no mirror is given.)"""
     from spack.mirror import Mirror

     s3_connection = {}
-    s3_client_args = {"use_ssl": spack.config.get("config:verify_ssl")}
+    s3_client_args = {"use_ssl": verify_ssl}

     # access token
     if isinstance(mirror, Mirror):
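With import spack.config removed from this module, the SSL preference has to arrive as an argument. A rough sketch of how a caller outside spack.util.s3 would now obtain a client (the bucket URL is a placeholder, and the caller is assumed to have boto3 available):

    import spack.config
    import spack.util.s3 as s3_util

    # The caller resolves verify_ssl; util/s3 no longer reads the config itself.
    verify_ssl = spack.config.get("config:verify_ssl", True)
    s3 = s3_util.get_s3_session("s3://my-bucket/mirror", method="push", verify_ssl=verify_ssl)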
@@ -150,9 +148,9 @@ def __getattr__(self, key):
         return getattr(self.raw, key)


-def _s3_open(url, method="GET"):
+def _s3_open(url, method="GET", verify_ssl=True):
     parsed = urllib.parse.urlparse(url)
-    s3 = get_s3_session(url, method="fetch")
+    s3 = get_s3_session(url, method="fetch", verify_ssl=verify_ssl)

     bucket = parsed.netloc
     key = parsed.path
@@ -182,7 +180,13 @@ def _s3_open(url, method="GET"):


 class UrllibS3Handler(urllib.request.BaseHandler):
+    def __init__(self, verify_ssl=True):
+        super().__init__()
+        self.verify_ssl = verify_ssl
+
     def s3_open(self, req):
         orig_url = req.get_full_url()
-        url, headers, stream = _s3_open(orig_url, method=req.get_method())
+        url, headers, stream = _s3_open(
+            orig_url, method=req.get_method(), verify_ssl=self.verify_ssl
+        )
         return urllib.response.addinfourl(stream, headers, url)
@@ -59,18 +59,19 @@ def http_error_default(self, req, fp, code, msg, hdrs):


 def _urlopen():
-    s3 = UrllibS3Handler()
+    s3_with_ssl = UrllibS3Handler(verify_ssl=True)
+    s3_no_ssl = UrllibS3Handler(verify_ssl=False)
     gcs = GCSHandler()
     error_handler = SpackHTTPDefaultErrorHandler()

     # One opener with HTTPS ssl enabled
     with_ssl = build_opener(
-        s3, gcs, HTTPSHandler(context=ssl.create_default_context()), error_handler
+        s3_with_ssl, gcs, HTTPSHandler(context=ssl.create_default_context()), error_handler
     )

     # One opener with HTTPS ssl disabled
     without_ssl = build_opener(
-        s3, gcs, HTTPSHandler(context=ssl._create_unverified_context()), error_handler
+        s3_no_ssl, gcs, HTTPSHandler(context=ssl._create_unverified_context()), error_handler
     )

     # And dynamically dispatch based on the config:verify_ssl.
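Because each UrllibS3Handler now fixes its own verify_ssl, _urlopen keeps two fully built openers and can select one per request. A sketch of a closure _urlopen might return for that dispatch, assuming the caller supplies the flag (the actual dispatch body is not shown in this hunk):

    def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
        # Pick the opener whose S3 and HTTPS handlers match the requested SSL behavior.
        opener = with_ssl if verify_ssl else without_ssl
        return opener.open(fullurl, data, timeout)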
@@ -162,7 +163,9 @@ def read_from_url(url, accept_content_type=None, verify_ssl=True, timeout=10, **
     return response.geturl(), response.headers, response


-def push_to_url(local_file_path, remote_path, keep_original=True, extra_args=None):
+def push_to_url(
+    local_file_path, remote_path, keep_original=True, extra_args=None, verify_ssl=True
+):
     remote_url = urllib.parse.urlparse(remote_path)
     if remote_url.scheme == "file":
         remote_file_path = url_util.local_file_path(remote_url)
@@ -191,7 +194,7 @@ def push_to_url(local_file_path, remote_path, keep_original=True, extra_args=Non
         while remote_path.startswith("/"):
             remote_path = remote_path[1:]

-        s3 = get_s3_session(remote_url, method="push")
+        s3 = get_s3_session(remote_url, method="push", verify_ssl=verify_ssl)
         s3.upload_file(local_file_path, remote_url.netloc, remote_path, ExtraArgs=extra_args)

         if not keep_original:
@@ -340,9 +343,7 @@ def fetch_url_text(url, curl=None, dest_dir=".", fetch_method=None):

         returncode = response.getcode()
         if returncode and returncode != 200:
-            raise WebError(
-                "Urllib failed with error code {0}".format(returncode)
-            )
+            raise WebError("Urllib failed with error code {0}".format(returncode))

         output = codecs.getreader("utf-8")(response).read()
         if output:
@@ -393,8 +394,7 @@ def url_exists(url, curl=None, fetch_method=None, verify_ssl=True, timeout=10):
     # Otherwise use urllib.
     try:
         urlopen(
-            Request(url, method="HEAD", headers={"User-Agent": SPACK_USER_AGENT}),
-            timeout=timeout,
+            Request(url, method="HEAD", headers={"User-Agent": SPACK_USER_AGENT}), timeout=timeout
         )
         return True
     except URLError as e:
@@ -411,7 +411,7 @@ def _debug_print_delete_results(result):
             tty.debug("Failed to delete {0} ({1})".format(e["Key"], e["Message"]))


-def remove_url(url, recursive=False):
+def remove_url(url, recursive=False, verify_ssl=True):
     url = urllib.parse.urlparse(url)

     local_path = url_util.local_file_path(url)
@@ -424,7 +424,7 @@ def remove_url(url, recursive=False):

     if url.scheme == "s3":
         # Try to find a mirror for potential connection information
-        s3 = get_s3_session(url, method="push")
+        s3 = get_s3_session(url, method="push", verify_ssl=verify_ssl)
         bucket = url.netloc
         if recursive:
             # Because list_objects_v2 can only return up to 1000 items
@@ -520,7 +520,7 @@ def _iter_local_prefix(path):
             yield os.path.relpath(os.path.join(root, f), path)


-def list_url(url, recursive=False):
+def list_url(url, recursive=False, verify_ssl=True):
     url = urllib.parse.urlparse(url)
     local_path = url_util.local_file_path(url)

@@ -535,7 +535,7 @@ def list_url(url, recursive=False):
         ]

     if url.scheme == "s3":
-        s3 = get_s3_session(url, method="fetch")
+        s3 = get_s3_session(url, method="fetch", verify_ssl=verify_ssl)
         if recursive:
            return list(_iter_s3_prefix(s3, url))

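A small usage sketch of the updated list_url, mirroring the spec-file comprehension earlier in this commit (the buildcache prefix is a placeholder):

    import spack.config
    import spack.util.url as url_util
    import spack.util.web as web_util

    verify_ssl = spack.config.get("config:verify_ssl", True)

    # Hypothetical buildcache prefix, for illustration only.
    prefix = "s3://my-bucket/mirror/build_cache"
    spec_files = [
        url_util.join(prefix, entry)
        for entry in web_util.list_url(prefix, verify_ssl=verify_ssl)
        if entry.endswith("spec.json") or entry.endswith("spec.json.sig")
    ]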