Get timeout for web requests with urllib from spack config, same as for curl (#30468)

Author: Dom Heinzeller, 2022-05-09 19:35:17 +02:00 (committed by GitHub)
parent 9bcf496f21
commit c49508648a

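In short: urllib-based requests in spack.util.web previously used a hard-coded, module-level timeout of 10 seconds. The diff below reads the value from the config:connect_timeout setting instead, falling back to 10 seconds when the setting is absent, so these requests honor the same configuration knob that curl-based fetches already use. A runnable sketch of the pattern follows the diff.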

@@ -49,9 +49,6 @@
 class HTMLParseError(Exception):
     pass
 
-# Timeout in seconds for web requests
-_timeout = 10
-
 
 class LinkParser(HTMLParser):
     """This parser just takes an HTML page and strips out the hrefs on the
@@ -100,6 +97,9 @@ def read_from_url(url, accept_content_type=None):
 
     verify_ssl = spack.config.get('config:verify_ssl')
 
+    # Timeout in seconds for web requests
+    timeout = spack.config.get('config:connect_timeout', 10)
+
     # Don't even bother with a context unless the URL scheme is one that uses
     # SSL certs.
     if uses_ssl(url):
@@ -131,7 +131,7 @@ def read_from_url(url, accept_content_type=None):
         # one round-trip. However, most servers seem to ignore the header
         # if you ask for a tarball with Accept: text/html.
         req.get_method = lambda: "HEAD"
-        resp = _urlopen(req, timeout=_timeout, context=context)
+        resp = _urlopen(req, timeout=timeout, context=context)
 
         content_type = get_header(resp.headers, 'Content-type')
@@ -139,7 +139,7 @@ def read_from_url(url, accept_content_type=None):
     req.get_method = lambda: "GET"
 
     try:
-        response = _urlopen(req, timeout=_timeout, context=context)
+        response = _urlopen(req, timeout=timeout, context=context)
     except URLError as err:
         raise SpackWebError('Download failed: {ERROR}'.format(
             ERROR=str(err)))
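Taken together, the change swaps a fixed module constant for a per-call configuration lookup. Below is a minimal, self-contained sketch of the same pattern outside Spack; the plain config dict and the fetch() helper are illustrative stand-ins, not Spack's actual API.

import urllib.request
from urllib.error import URLError

# Stand-in for Spack's configuration; Spack itself calls
# spack.config.get('config:connect_timeout', 10), as the diff shows.
config = {}

def fetch(url):
    # Fall back to 10 seconds when connect_timeout is not configured,
    # mirroring the default in the diff above.
    timeout = config.get('connect_timeout', 10)
    req = urllib.request.Request(url)
    try:
        # urlopen gives up after `timeout` seconds without a response,
        # instead of hanging on the platform's default TCP timeout.
        return urllib.request.urlopen(req, timeout=timeout)
    except URLError as err:
        raise RuntimeError('Download failed: {ERROR}'.format(ERROR=str(err)))

As a usage note, a single setting now governs both fetch paths: raising connect_timeout in the config section of a Spack configuration scope lengthens the timeout for curl and for these urllib calls alike, while installations that never set it keep the previous 10-second behavior.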