Show underlying errors on fetch failure (#45714)

- unwrap/flatten nested exceptions
- improve tests
- unify curl lookup
This commit is contained in:
Harmen Stoppels 2024-08-14 10:15:15 +02:00 committed by GitHub
parent b61cd74707
commit 7b10aae356
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
9 changed files with 198 additions and 240 deletions

View File

@ -30,6 +30,7 @@
import shutil
import urllib.error
import urllib.parse
import urllib.request
from pathlib import PurePath
from typing import List, Optional
@ -273,10 +274,7 @@ def __init__(self, url=None, checksum=None, **kwargs):
@property
def curl(self):
if not self._curl:
try:
self._curl = which("curl", required=True)
except CommandNotFoundError as exc:
tty.error(str(exc))
self._curl = web_util.require_curl()
return self._curl
def source_id(self):
@ -297,27 +295,23 @@ def candidate_urls(self):
@_needs_stage
def fetch(self):
if self.archive_file:
tty.debug("Already downloaded {0}".format(self.archive_file))
tty.debug(f"Already downloaded {self.archive_file}")
return
url = None
errors = []
errors: List[Exception] = []
for url in self.candidate_urls:
if not web_util.url_exists(url):
tty.debug("URL does not exist: " + url)
continue
try:
self._fetch_from_url(url)
break
except FailedDownloadError as e:
errors.append(str(e))
for msg in errors:
tty.debug(msg)
errors.extend(e.exceptions)
else:
raise FailedDownloadError(*errors)
if not self.archive_file:
raise FailedDownloadError(url)
raise FailedDownloadError(
RuntimeError(f"Missing archive {self.archive_file} after fetching")
)
def _fetch_from_url(self, url):
if spack.config.get("config:url_fetch_method") == "curl":
@ -336,19 +330,20 @@ def _check_headers(self, headers):
@_needs_stage
def _fetch_urllib(self, url):
save_file = self.stage.save_filename
tty.msg("Fetching {0}".format(url))
# Run urllib but grab the mime type from the http headers
request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT})
try:
url, headers, response = web_util.read_from_url(url)
except web_util.SpackWebError as e:
response = web_util.urlopen(request)
except (TimeoutError, urllib.error.URLError) as e:
# clean up archive on failure.
if self.archive_file:
os.remove(self.archive_file)
if os.path.lexists(save_file):
os.remove(save_file)
msg = "urllib failed to fetch with error {0}".format(e)
raise FailedDownloadError(url, msg)
raise FailedDownloadError(e) from e
tty.msg(f"Fetching {url}")
if os.path.lexists(save_file):
os.remove(save_file)
@ -356,7 +351,7 @@ def _fetch_urllib(self, url):
with open(save_file, "wb") as _open_file:
shutil.copyfileobj(response, _open_file)
self._check_headers(str(headers))
self._check_headers(str(response.headers))
@_needs_stage
def _fetch_curl(self, url):
@ -365,7 +360,7 @@ def _fetch_curl(self, url):
if self.stage.save_filename:
save_file = self.stage.save_filename
partial_file = self.stage.save_filename + ".part"
tty.msg("Fetching {0}".format(url))
tty.msg(f"Fetching {url}")
if partial_file:
save_args = [
"-C",
@ -405,8 +400,8 @@ def _fetch_curl(self, url):
try:
web_util.check_curl_code(curl.returncode)
except spack.error.FetchError as err:
raise spack.fetch_strategy.FailedDownloadError(url, str(err))
except spack.error.FetchError as e:
raise FailedDownloadError(e) from e
self._check_headers(headers)
@ -560,7 +555,7 @@ def fetch(self):
os.remove(self.archive_file)
if os.path.lexists(file):
os.remove(file)
raise FailedDownloadError(self.url, f"Failed to fetch {self.url}: {e}") from e
raise FailedDownloadError(e) from e
if os.path.lexists(file):
os.remove(file)
@ -1312,35 +1307,41 @@ def __init__(self, *args, **kwargs):
@_needs_stage
def fetch(self):
if self.archive_file:
tty.debug("Already downloaded {0}".format(self.archive_file))
tty.debug(f"Already downloaded {self.archive_file}")
return
parsed_url = urllib.parse.urlparse(self.url)
if parsed_url.scheme != "s3":
raise spack.error.FetchError("S3FetchStrategy can only fetch from s3:// urls.")
tty.debug("Fetching {0}".format(self.url))
basename = os.path.basename(parsed_url.path)
request = urllib.request.Request(
self.url, headers={"User-Agent": web_util.SPACK_USER_AGENT}
)
with working_dir(self.stage.path):
_, headers, stream = web_util.read_from_url(self.url)
try:
response = web_util.urlopen(request)
except (TimeoutError, urllib.error.URLError) as e:
raise FailedDownloadError(e) from e
tty.debug(f"Fetching {self.url}")
with open(basename, "wb") as f:
shutil.copyfileobj(stream, f)
shutil.copyfileobj(response, f)
content_type = web_util.get_header(headers, "Content-type")
content_type = web_util.get_header(response.headers, "Content-type")
if content_type == "text/html":
warn_content_type_mismatch(self.archive_file or "the archive")
if self.stage.save_filename:
llnl.util.filesystem.rename(
os.path.join(self.stage.path, basename), self.stage.save_filename
)
fs.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)
if not self.archive_file:
raise FailedDownloadError(self.url)
raise FailedDownloadError(
RuntimeError(f"Missing archive {self.archive_file} after fetching")
)
@fetcher
@ -1366,17 +1367,23 @@ def fetch(self):
if parsed_url.scheme != "gs":
raise spack.error.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")
tty.debug("Fetching {0}".format(self.url))
basename = os.path.basename(parsed_url.path)
request = urllib.request.Request(
self.url, headers={"User-Agent": web_util.SPACK_USER_AGENT}
)
with working_dir(self.stage.path):
_, headers, stream = web_util.read_from_url(self.url)
try:
response = web_util.urlopen(request)
except (TimeoutError, urllib.error.URLError) as e:
raise FailedDownloadError(e) from e
tty.debug(f"Fetching {self.url}")
with open(basename, "wb") as f:
shutil.copyfileobj(stream, f)
shutil.copyfileobj(response, f)
content_type = web_util.get_header(headers, "Content-type")
content_type = web_util.get_header(response.headers, "Content-type")
if content_type == "text/html":
warn_content_type_mismatch(self.archive_file or "the archive")
@ -1385,7 +1392,9 @@ def fetch(self):
os.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)
if not self.archive_file:
raise FailedDownloadError(self.url)
raise FailedDownloadError(
RuntimeError(f"Missing archive {self.archive_file} after fetching")
)
@fetcher
@ -1722,9 +1731,9 @@ class NoCacheError(spack.error.FetchError):
class FailedDownloadError(spack.error.FetchError):
"""Raised when a download fails."""
def __init__(self, url, msg=""):
super().__init__("Failed to fetch file from URL: %s" % url, msg)
self.url = url
def __init__(self, *exceptions: Exception):
    """Wrap one or more underlying exceptions that caused a download to fail.

    Args:
        *exceptions: the original errors (e.g. ``urllib.error.URLError``,
            curl ``FetchError``) raised while attempting the download.
    """
    super().__init__("Failed to download")
    # Keep the underlying exceptions so callers can unwrap/flatten them
    # (e.g. Stage.fetch extends its error list with ``e.exceptions``).
    self.exceptions = exceptions
class NoArchiveFileError(spack.error.FetchError):

View File

@ -13,7 +13,7 @@
import stat
import sys
import tempfile
from typing import Callable, Dict, Iterable, Optional, Set
from typing import Callable, Dict, Iterable, List, Optional, Set
import llnl.string
import llnl.util.lang
@ -40,6 +40,7 @@
import spack.resource
import spack.spec
import spack.stage
import spack.util.crypto
import spack.util.lock
import spack.util.path as sup
import spack.util.pattern as pattern
@ -534,32 +535,29 @@ def generate_fetchers():
for fetcher in dynamic_fetchers:
yield fetcher
def print_errors(errors):
    # Emit each accumulated per-fetcher error message at debug verbosity
    # so failures are visible when running with increased verbosity.
    for msg in errors:
        tty.debug(msg)
errors = []
errors: List[str] = []
for fetcher in generate_fetchers():
try:
fetcher.stage = self
self.fetcher = fetcher
self.fetcher.fetch()
break
except spack.fetch_strategy.NoCacheError:
except fs.NoCacheError:
# Don't bother reporting when something is not cached.
continue
except fs.FailedDownloadError as f:
errors.extend(f"{fetcher}: {e.__class__.__name__}: {e}" for e in f.exceptions)
continue
except spack.error.SpackError as e:
errors.append("Fetching from {0} failed.".format(fetcher))
tty.debug(e)
errors.append(f"{fetcher}: {e.__class__.__name__}: {e}")
continue
else:
print_errors(errors)
self.fetcher = self.default_fetcher
default_msg = "All fetchers failed for {0}".format(self.name)
raise spack.error.FetchError(err_msg or default_msg, None)
print_errors(errors)
if err_msg:
raise spack.error.FetchError(err_msg)
raise spack.error.FetchError(
f"All fetchers failed for {self.name}", "\n".join(f" {e}" for e in errors)
)
def steal_source(self, dest):
"""Copy the source_path directory in its entirety to directory dest
@ -1188,7 +1186,7 @@ def _fetch_and_checksum(url, options, keep_stage, action_fn=None):
# Checksum the archive and add it to the list
checksum = spack.util.crypto.checksum(hashlib.sha256, stage.archive_file)
return checksum, None
except FailedDownloadError:
except fs.FailedDownloadError:
return None, f"[WORKER] Failed to fetch {url}"
except Exception as e:
return None, f"[WORKER] Something failed on {url}, skipping. ({e})"
@ -1208,7 +1206,3 @@ class RestageError(StageError):
class VersionFetchError(StageError):
"""Raised when we can't determine a URL to fetch a package."""
# Keep this in namespace for convenience
FailedDownloadError = fs.FailedDownloadError

View File

@ -18,6 +18,7 @@
import spack.config
import spack.directory_layout
import spack.environment as ev
import spack.fetch_strategy
import spack.main
import spack.package_base
import spack.paths

View File

@ -59,6 +59,7 @@
import spack.util.parallel
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web
import spack.version
from spack.fetch_strategy import URLFetchStrategy
from spack.util.pattern import Bunch
@ -1812,12 +1813,7 @@ def __call__(self, *args, **kwargs):
tty.msg("curl: (22) The requested URL returned error: 404")
self.returncode = 22
def mock_curl(*args):
return MockCurl()
monkeypatch.setattr(spack.util.web, "_curl", mock_curl)
yield
monkeypatch.setattr(spack.util.web, "require_curl", MockCurl)
@pytest.fixture(scope="function")

View File

@ -11,6 +11,7 @@
import pathlib
import platform
import shutil
import urllib.error
from collections import OrderedDict
import pytest
@ -21,6 +22,7 @@
import spack.binary_distribution as bindist
import spack.cmd.buildcache as buildcache
import spack.error
import spack.fetch_strategy
import spack.package_base
import spack.repo
import spack.store
@ -478,7 +480,7 @@ def test_macho_make_paths():
@pytest.fixture()
def mock_download():
def mock_download(monkeypatch):
"""Mock a failing download strategy."""
class FailedDownloadStrategy(spack.fetch_strategy.FetchStrategy):
@ -487,19 +489,14 @@ def mirror_id(self):
def fetch(self):
raise spack.fetch_strategy.FailedDownloadError(
"<non-existent URL>", "This FetchStrategy always fails"
urllib.error.URLError("This FetchStrategy always fails")
)
fetcher = FailedDownloadStrategy()
@property
def fake_fn(self):
return fetcher
return FailedDownloadStrategy()
orig_fn = spack.package_base.PackageBase.fetcher
spack.package_base.PackageBase.fetcher = fake_fn
yield
spack.package_base.PackageBase.fetcher = orig_fn
monkeypatch.setattr(spack.package_base.PackageBase, "fetcher", fake_fn)
@pytest.mark.parametrize(

View File

@ -18,6 +18,7 @@
from llnl.util.symlink import readlink
import spack.error
import spack.fetch_strategy
import spack.paths
import spack.stage
import spack.util.executable
@ -323,17 +324,11 @@ def _mock():
return _mock
@pytest.fixture
def failing_fetch_strategy():
"""Returns a fetch strategy that fails."""
class FailingFetchStrategy(spack.fetch_strategy.FetchStrategy):
def fetch(self):
raise spack.fetch_strategy.FailedDownloadError(
"<non-existent URL>", "This implementation of FetchStrategy always fails"
)
return FailingFetchStrategy()
class FailingFetchStrategy(spack.fetch_strategy.FetchStrategy):
    """Test double: a fetch strategy whose ``fetch`` always fails.

    Used by stage tests to exercise the error paths of ``Stage.fetch``.
    """

    def fetch(self):
        # Always raise, simulating an unconditionally failing download.
        raise spack.fetch_strategy.FailedDownloadError(
            "<non-existent URL>", "This implementation of FetchStrategy always fails"
        )
@pytest.fixture
@ -511,8 +506,8 @@ def test_no_search_if_default_succeeds(self, mock_stage_archive, failing_search_
stage.fetch()
check_destroy(stage, self.stage_name)
def test_no_search_mirror_only(self, failing_fetch_strategy, failing_search_fn):
stage = Stage(failing_fetch_strategy, name=self.stage_name, search_fn=failing_search_fn)
def test_no_search_mirror_only(self, failing_search_fn):
stage = Stage(FailingFetchStrategy(), name=self.stage_name, search_fn=failing_search_fn)
with stage:
try:
stage.fetch(mirror_only=True)
@ -527,8 +522,8 @@ def test_no_search_mirror_only(self, failing_fetch_strategy, failing_search_fn):
(None, "All fetchers failed"),
],
)
def test_search_if_default_fails(self, failing_fetch_strategy, search_fn, err_msg, expected):
stage = Stage(failing_fetch_strategy, name=self.stage_name, search_fn=search_fn)
def test_search_if_default_fails(self, search_fn, err_msg, expected):
stage = Stage(FailingFetchStrategy(), name=self.stage_name, search_fn=search_fn)
with stage:
with pytest.raises(spack.error.FetchError, match=expected):

View File

@ -4,8 +4,10 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import filecmp
import os
import sys
import urllib.error
import pytest
@ -24,6 +26,14 @@
from spack.util.executable import which
@pytest.fixture
def missing_curl(monkeypatch):
    """Simulate a system without curl by making ``require_curl`` raise.

    Patches ``web_util.require_curl`` so any code path that needs curl
    fails with a ``FetchError``, as it would when curl is not installed.
    """

    def require_curl():
        raise spack.error.FetchError("curl is required but not found")

    monkeypatch.setattr(web_util, "require_curl", require_curl)
@pytest.fixture(params=list(crypto.hashes.keys()))
def checksum_type(request):
    """Parametrized fixture yielding each supported checksum algorithm name."""
    return request.param
@ -66,66 +76,62 @@ def fn_urls(v):
return factory
@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_urlfetchstrategy_sans_url(_fetch_method):
def test_urlfetchstrategy_sans_url():
"""Ensure constructor with no URL fails."""
with spack.config.override("config:url_fetch_method", _fetch_method):
with pytest.raises(ValueError):
with fs.URLFetchStrategy(None):
pass
with pytest.raises(ValueError):
fs.URLFetchStrategy(None)
@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_urlfetchstrategy_bad_url(tmpdir, _fetch_method):
@pytest.mark.parametrize("method", ["curl", "urllib"])
def test_urlfetchstrategy_bad_url(tmp_path, mutable_config, method):
"""Ensure fetch with bad URL fails as expected."""
testpath = str(tmpdir)
with spack.config.override("config:url_fetch_method", _fetch_method):
with pytest.raises(fs.FailedDownloadError):
fetcher = fs.URLFetchStrategy(url="file:///does-not-exist")
assert fetcher is not None
mutable_config.set("config:url_fetch_method", method)
fetcher = fs.URLFetchStrategy(url=(tmp_path / "does-not-exist").as_uri())
with Stage(fetcher, path=testpath) as stage:
assert stage is not None
assert fetcher.archive_file is None
fetcher.fetch()
with Stage(fetcher, path=str(tmp_path / "stage")):
with pytest.raises(fs.FailedDownloadError) as exc:
fetcher.fetch()
assert len(exc.value.exceptions) == 1
exception = exc.value.exceptions[0]
if method == "curl":
assert isinstance(exception, spack.error.FetchError)
assert "Curl failed with error 37" in str(exception) # FILE_COULDNT_READ_FILE
elif method == "urllib":
assert isinstance(exception, urllib.error.URLError)
assert isinstance(exception.reason, FileNotFoundError)
def test_fetch_options(tmpdir, mock_archive):
testpath = str(tmpdir)
def test_fetch_options(tmp_path, mock_archive):
with spack.config.override("config:url_fetch_method", "curl"):
fetcher = fs.URLFetchStrategy(
url=mock_archive.url, fetch_options={"cookie": "True", "timeout": 10}
)
assert fetcher is not None
with Stage(fetcher, path=testpath) as stage:
assert stage is not None
with Stage(fetcher, path=str(tmp_path)):
assert fetcher.archive_file is None
fetcher.fetch()
assert filecmp.cmp(fetcher.archive_file, mock_archive.archive_file)
@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_archive_file_errors(tmpdir, mock_archive, _fetch_method):
def test_archive_file_errors(tmp_path, mutable_config, mock_archive, _fetch_method):
"""Ensure FetchStrategy commands may only be used as intended"""
testpath = str(tmpdir)
with spack.config.override("config:url_fetch_method", _fetch_method):
fetcher = fs.URLFetchStrategy(url=mock_archive.url)
assert fetcher is not None
with pytest.raises(fs.FailedDownloadError):
with Stage(fetcher, path=testpath) as stage:
assert stage is not None
assert fetcher.archive_file is None
with pytest.raises(fs.NoArchiveFileError):
fetcher.archive(testpath)
with pytest.raises(fs.NoArchiveFileError):
fetcher.expand()
with pytest.raises(fs.NoArchiveFileError):
fetcher.reset()
stage.fetch()
with pytest.raises(fs.NoDigestError):
fetcher.check()
assert fetcher.archive_file is not None
fetcher._fetch_from_url("file:///does-not-exist")
with Stage(fetcher, path=str(tmp_path)) as stage:
assert fetcher.archive_file is None
with pytest.raises(fs.NoArchiveFileError):
fetcher.archive(str(tmp_path))
with pytest.raises(fs.NoArchiveFileError):
fetcher.expand()
with pytest.raises(fs.NoArchiveFileError):
fetcher.reset()
stage.fetch()
with pytest.raises(fs.NoDigestError):
fetcher.check()
assert filecmp.cmp(fetcher.archive_file, mock_archive.archive_file)
files = [(".tar.gz", "z"), (".tgz", "z")]
@ -271,16 +277,15 @@ def is_true():
@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_url_extra_fetch(tmpdir, mock_archive, _fetch_method):
def test_url_extra_fetch(tmp_path, mutable_config, mock_archive, _fetch_method):
"""Ensure a fetch after downloading is effectively a no-op."""
with spack.config.override("config:url_fetch_method", _fetch_method):
testpath = str(tmpdir)
fetcher = fs.URLFetchStrategy(mock_archive.url)
with Stage(fetcher, path=testpath) as stage:
assert fetcher.archive_file is None
stage.fetch()
assert fetcher.archive_file is not None
fetcher.fetch()
mutable_config.set("config:url_fetch_method", _fetch_method)
fetcher = fs.URLFetchStrategy(mock_archive.url)
with Stage(fetcher, path=str(tmp_path)) as stage:
assert fetcher.archive_file is None
stage.fetch()
assert filecmp.cmp(fetcher.archive_file, mock_archive.archive_file)
fetcher.fetch()
@pytest.mark.parametrize(
@ -316,49 +321,25 @@ def test_candidate_urls(pkg_factory, url, urls, version, expected, _fetch_method
@pytest.mark.regression("19673")
def test_missing_curl(tmpdir, monkeypatch):
def test_missing_curl(tmp_path, missing_curl, mutable_config, monkeypatch):
"""Ensure a fetch involving missing curl package reports the error."""
err_fmt = "No such command {0}"
def _which(*args, **kwargs):
err_msg = err_fmt.format(args[0])
raise spack.util.executable.CommandNotFoundError(err_msg)
# Patching the 'which' symbol imported by fetch_strategy needed due
# to 'from spack.util.executable import which' in this module.
monkeypatch.setattr(fs, "which", _which)
testpath = str(tmpdir)
url = "http://github.com/spack/spack"
with spack.config.override("config:url_fetch_method", "curl"):
fetcher = fs.URLFetchStrategy(url=url)
assert fetcher is not None
with pytest.raises(TypeError, match="object is not callable"):
with Stage(fetcher, path=testpath) as stage:
out = stage.fetch()
assert err_fmt.format("curl") in out
mutable_config.set("config:url_fetch_method", "curl")
fetcher = fs.URLFetchStrategy(url="http://example.com/file.tar.gz")
with pytest.raises(spack.error.FetchError, match="curl is required but not found"):
with Stage(fetcher, path=str(tmp_path)) as stage:
stage.fetch()
def test_url_fetch_text_without_url(tmpdir):
def test_url_fetch_text_without_url():
    """``fetch_url_text`` must reject a missing URL with a FetchError."""
    with pytest.raises(spack.error.FetchError, match="URL is required"):
        web_util.fetch_url_text(None)
def test_url_fetch_text_curl_failures(tmpdir, monkeypatch):
def test_url_fetch_text_curl_failures(mutable_config, missing_curl, monkeypatch):
"""Check fetch_url_text if URL's curl is missing."""
err_fmt = "No such command {0}"
def _which(*args, **kwargs):
err_msg = err_fmt.format(args[0])
raise spack.util.executable.CommandNotFoundError(err_msg)
# Patching the 'which' symbol imported by spack.util.web needed due
# to 'from spack.util.executable import which' in this module.
monkeypatch.setattr(spack.util.web, "which", _which)
with spack.config.override("config:url_fetch_method", "curl"):
with pytest.raises(spack.error.FetchError, match="Missing required curl"):
web_util.fetch_url_text("https://github.com/")
mutable_config.set("config:url_fetch_method", "curl")
with pytest.raises(spack.error.FetchError, match="curl is required but not found"):
web_util.fetch_url_text("https://example.com/")
def test_url_check_curl_errors():
@ -372,24 +353,14 @@ def test_url_check_curl_errors():
web_util.check_curl_code(60)
def test_url_missing_curl(tmpdir, monkeypatch):
def test_url_missing_curl(mutable_config, missing_curl, monkeypatch):
"""Check url_exists failures if URL's curl is missing."""
err_fmt = "No such command {0}"
def _which(*args, **kwargs):
err_msg = err_fmt.format(args[0])
raise spack.util.executable.CommandNotFoundError(err_msg)
# Patching the 'which' symbol imported by spack.util.web needed due
# to 'from spack.util.executable import which' in this module.
monkeypatch.setattr(spack.util.web, "which", _which)
with spack.config.override("config:url_fetch_method", "curl"):
with pytest.raises(spack.error.FetchError, match="Missing required curl"):
web_util.url_exists("https://github.com/")
mutable_config.set("config:url_fetch_method", "curl")
with pytest.raises(spack.error.FetchError, match="curl is required but not found"):
web_util.url_exists("https://example.com/")
def test_url_fetch_text_urllib_bad_returncode(tmpdir, monkeypatch):
def test_url_fetch_text_urllib_bad_returncode(mutable_config, monkeypatch):
class response:
def getcode(self):
return 404
@ -397,19 +368,19 @@ def getcode(self):
def _read_from_url(*args, **kwargs):
return None, None, response()
monkeypatch.setattr(spack.util.web, "read_from_url", _read_from_url)
monkeypatch.setattr(web_util, "read_from_url", _read_from_url)
mutable_config.set("config:url_fetch_method", "urllib")
with spack.config.override("config:url_fetch_method", "urllib"):
with pytest.raises(spack.error.FetchError, match="failed with error code"):
web_util.fetch_url_text("https://github.com/")
with pytest.raises(spack.error.FetchError, match="failed with error code"):
web_util.fetch_url_text("https://example.com/")
def test_url_fetch_text_urllib_web_error(tmpdir, monkeypatch):
def test_url_fetch_text_urllib_web_error(mutable_config, monkeypatch):
def _raise_web_error(*args, **kwargs):
raise web_util.SpackWebError("bad url")
monkeypatch.setattr(spack.util.web, "read_from_url", _raise_web_error)
monkeypatch.setattr(web_util, "read_from_url", _raise_web_error)
mutable_config.set("config:url_fetch_method", "urllib")
with spack.config.override("config:url_fetch_method", "urllib"):
with pytest.raises(spack.error.FetchError, match="fetch failed to verify"):
web_util.fetch_url_text("https://github.com/")
with pytest.raises(spack.error.FetchError, match="fetch failed to verify"):
web_util.fetch_url_text("https://example.com/")

View File

@ -432,7 +432,7 @@ def test_ssl_curl_cert_file(cert_exists, tmpdir, ssl_scrubbed_env, mutable_confi
if cert_exists:
open(mock_cert, "w").close()
assert os.path.isfile(mock_cert)
curl = spack.util.web._curl()
curl = spack.util.web.require_curl()
# arbitrary call to query the run env
dump_env = {}

View File

@ -28,10 +28,11 @@
import spack.config
import spack.error
import spack.util.executable
import spack.util.path
import spack.util.url as url_util
from .executable import CommandNotFoundError, Executable, which
from .executable import CommandNotFoundError, Executable
from .gcs import GCSBlob, GCSBucket, GCSHandler
from .s3 import UrllibS3Handler, get_s3_session
@ -198,7 +199,7 @@ def read_from_url(url, accept_content_type=None):
try:
response = urlopen(request)
except (TimeoutError, URLError) as e:
raise SpackWebError(f"Download of {url.geturl()} failed: {e}")
raise SpackWebError(f"Download of {url.geturl()} failed: {e.__class__.__name__}: {e}")
if accept_content_type:
try:
@ -307,45 +308,44 @@ def base_curl_fetch_args(url, timeout=0):
return curl_args
def check_curl_code(returncode):
def check_curl_code(returncode: int) -> None:
"""Check standard return code failures for provided arguments.
Arguments:
returncode (int): curl return code
returncode: curl return code
Raises FetchError if the curl returncode indicates failure
"""
if returncode != 0:
if returncode == 22:
# This is a 404. Curl will print the error.
raise spack.error.FetchError("URL was not found!")
if returncode == 0:
return
elif returncode == 22:
# This is a 404. Curl will print the error.
raise spack.error.FetchError("URL was not found!")
elif returncode == 60:
# This is a certificate error. Suggest spack -k
raise spack.error.FetchError(
"Curl was unable to fetch due to invalid certificate. "
"This is either an attack, or your cluster's SSL "
"configuration is bad. If you believe your SSL "
"configuration is bad, you can try running spack -k, "
"which will not check SSL certificates."
"Use this at your own risk."
)
if returncode == 60:
# This is a certificate error. Suggest spack -k
raise spack.error.FetchError(
"Curl was unable to fetch due to invalid certificate. "
"This is either an attack, or your cluster's SSL "
"configuration is bad. If you believe your SSL "
"configuration is bad, you can try running spack -k, "
"which will not check SSL certificates."
"Use this at your own risk."
)
raise spack.error.FetchError("Curl failed with error {0}".format(returncode))
raise spack.error.FetchError(f"Curl failed with error {returncode}")
def _curl(curl=None):
if not curl:
try:
curl = which("curl", required=True)
except CommandNotFoundError as exc:
tty.error(str(exc))
raise spack.error.FetchError("Missing required curl fetch method")
def require_curl() -> Executable:
    """Return a curl ``Executable`` configured for SSL, or raise if curl is missing.

    Returns:
        An ``Executable`` wrapping the ``curl`` binary found on ``PATH``.

    Raises:
        spack.error.FetchError: if no ``curl`` command can be located; the
            original ``CommandNotFoundError`` is chained as the cause so the
            underlying error remains visible to callers.
    """
    try:
        path = spack.util.executable.which_string("curl", required=True)
    except CommandNotFoundError as e:
        raise spack.error.FetchError(f"curl is required but not found: {e}") from e
    curl = spack.util.executable.Executable(path)
    # Point curl at the configured SSL certificates before handing it out.
    set_curl_env_for_ssl_certs(curl)
    return curl
def fetch_url_text(url, curl=None, dest_dir="."):
def fetch_url_text(url, curl: Optional[Executable] = None, dest_dir="."):
"""Retrieves text-only URL content using the configured fetch method.
It determines the fetch method from:
@ -379,10 +379,7 @@ def fetch_url_text(url, curl=None, dest_dir="."):
fetch_method = spack.config.get("config:url_fetch_method")
tty.debug("Using '{0}' to fetch {1} into {2}".format(fetch_method, url, path))
if fetch_method == "curl":
curl_exe = _curl(curl)
if not curl_exe:
raise spack.error.FetchError("Missing required fetch method (curl)")
curl_exe = curl or require_curl()
curl_args = ["-O"]
curl_args.extend(base_curl_fetch_args(url))
@ -439,9 +436,7 @@ def url_exists(url, curl=None):
"config:url_fetch_method", "urllib"
) == "curl" and url_result.scheme not in ("gs", "s3")
if use_curl:
curl_exe = _curl(curl)
if not curl_exe:
return False
curl_exe = curl or require_curl()
# Telling curl to fetch the first byte (-r 0-0) is supposed to be
# portable.