Gracefully handle lack of network connection.

Todd Gamblin 2013-11-23 21:01:07 -08:00
parent 1247036141
commit 3de3efc75d
3 changed files with 29 additions and 17 deletions


@@ -11,3 +11,10 @@ class UnsupportedPlatformError(SpackError):
     """Raised by packages when a platform is not supported"""
     def __init__(self, message):
         super(UnsupportedPlatformError, self).__init__(message)
+
+
+class NoNetworkConnectionError(SpackError):
+    """Raised when an operation needs an internet connection."""
+    def __init__(self, message, url):
+        super(NoNetworkConnectionError, self).__init__(message)
+        self.url = url

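For orientation, a minimal sketch (not part of the commit) of how an error like this is raised and consumed. The fetch_index helper and the URL are hypothetical; the syntax is Python 2 to match the code above.

import urllib2

import spack.error


def fetch_index(url):
    # Hypothetical helper: fetch a page, converting a connection
    # failure into Spack's NoNetworkConnectionError.
    try:
        return urllib2.urlopen(url).read()
    except urllib2.URLError, e:
        # Attach the URL so callers can tell the user exactly what failed.
        raise spack.error.NoNetworkConnectionError(str(e.reason), url)


try:
    page = fetch_index("http://example.com/downloads")  # hypothetical URL
except spack.error.NoNetworkConnectionError, e:
    print "No network connection: %s (%s)" % (e.message, e.url)
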

@@ -664,7 +664,9 @@ def fetch_available_versions(self):
         url_regex = os.path.basename(url.wildcard_version(self.url))
         wildcard = self.version.wildcard()
-        page_map = get_pages(self.list_url, depth=self.list_depth)
-        for site, page in page_map.iteritems():
-            strings = re.findall(url_regex, page)
+        try:
+            page_map = get_pages(self.list_url, depth=self.list_depth)
+            for site, page in page_map.iteritems():
+                strings = re.findall(url_regex, page)
@@ -680,6 +682,10 @@ def fetch_available_versions(self):
                      + self.name + " package.",
                      "Use them to tell Spack where to look for versions.")
+
+        except spack.error.NoNetworkConnectionError, e:
+            tty.die("Package.fetch_available_versions couldn't connect to:",
+                    e.url, e.message)

         return self._available_versions


@@ -6,6 +6,7 @@
 from HTMLParser import HTMLParser
 import spack
+import spack.error
 import spack.tty as tty
 from spack.util.compression import ALLOWED_ARCHIVE_TYPES
@@ -90,12 +91,10 @@ def _spider(args):
         for d in dicts:
             pages.update(d)
-    except urllib2.HTTPError, e:
+    except urllib2.URLError, e:
         # Only report it if it's the root page.  We ignore errors when spidering.
         if depth == 1:
-            tty.warn("Could not connect to %s" % url, e.reason,
-                     "Package.available_versions requires an internet connection.",
-                     "Version list may be incomplete.")
+            raise spack.error.NoNetworkConnectionError(e.reason, url)
     return pages
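
Taken together, the intended flow is: _spider turns a urllib2.URLError on the root page into a NoNetworkConnectionError that carries the failing URL, and Package.fetch_available_versions catches it and exits with a readable message instead of a traceback. Below is a self-contained sketch of the same pattern; the names get_page and list_versions are hypothetical, and it runs without Spack (Python 2).

import sys
import urllib2


class NoNetworkConnectionError(Exception):
    # Stand-in for the exception added in this commit, so the demo runs alone.
    def __init__(self, message, url):
        super(NoNetworkConnectionError, self).__init__(message)
        self.url = url


def get_page(url):
    # Convert the low-level network error into the domain-specific one.
    try:
        return urllib2.urlopen(url).read()
    except urllib2.URLError, e:
        raise NoNetworkConnectionError(str(e.reason), url)


def list_versions(url):
    # Catch at the top level and die gracefully, as the package code does.
    try:
        print "fetched %d bytes" % len(get_page(url))
    except NoNetworkConnectionError, e:
        sys.exit("Couldn't connect to %s: %s" % (e.url, e.message))


if __name__ == '__main__':
    list_versions("http://example.com/downloads")  # hypothetical URL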