Handle packages with unparseable extensions (#1758)
This closes #1757, which gives an example of a URL scheme in which the version appears after the extension. Rather than extending the parsing logic to cover this case, this commit lets the user specify the extension type explicitly. This helps Spack choose the appropriate decompressor and the mirror archive filename.
This commit is contained in:
parent 6f0b32a71f
commit 508d79c475
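For context, a minimal sketch of what this change enables in a package recipe. The package name, URL, and digest below are hypothetical; what is real, per the hunks that follow, is that an 'extension' keyword on a version is stored with that version and read by the fetch strategy:

from spack import *

class Example(Package):
    """Hypothetical package whose download URL has no parseable
    archive extension (the version comes after the extension)."""
    homepage = "http://www.example.com"
    url = "http://www.example.com/download.php?file=example&version=1.0"

    # Spack cannot deduce the archive type from the URL above, so we
    # declare it; URLFetchStrategy reads it as kwargs.get('extension').
    version('1.0', '0123456789abcdef0123456789abcdef', extension='tar.gz')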
@@ -159,6 +159,8 @@ def __init__(self, url=None, digest=None, **kwargs):
         self.expand_archive = kwargs.get('expand', True)
+        self.extension = kwargs.get('extension', None)
+
         if not self.url:
             raise ValueError("URLFetchStrategy requires a url for fetching.")
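A hedged sketch of the resulting constructor behavior (URL and digest hypothetical); 'extension' is an optional kwarg that defaults to None, so existing callers are unaffected:

import spack.fetch_strategy as fs

fetcher = fs.URLFetchStrategy(
    'http://www.example.com/download.php?file=example&version=1.0',
    '0123456789abcdef0123456789abcdef',
    expand=True, extension='tar.gz')
assert fetcher.extension == 'tar.gz'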
@@ -270,7 +272,7 @@ def expand(self):
                 "URLFetchStrategy couldn't find archive file",
                 "Failed on expand() for URL %s" % self.url)
 
-        decompress = decompressor_for(self.archive_file)
+        decompress = decompressor_for(self.archive_file, self.extension)
 
         # Expand all tarballs in their own directory to contain
         # exploding tarballs.
@@ -52,7 +52,14 @@ def mirror_archive_filename(spec, fetcher):
     if isinstance(fetcher, fs.URLFetchStrategy):
         if fetcher.expand_archive:
             # If we fetch with a URLFetchStrategy, use URL's archive type
-            ext = url.downloaded_file_extension(fetcher.url)
+            ext = url.determine_url_file_extension(fetcher.url)
+            ext = ext or spec.package.versions[spec.package.version].get(
+                'extension', None)
+            ext = ext.lstrip('.')
+            if not ext:
+                raise MirrorError(
+                    "%s version does not specify an extension" % spec.name +
+                    " and could not parse extension from %s" % fetcher.url)
         else:
             # If the archive shouldn't be expanded, don't check extension.
             ext = None
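Restated as a self-contained sketch, the lookup order is: the URL first, then the version's 'extension' kwarg, then a hard error. The helper name pick_extension is hypothetical, and unlike the diff (which calls ext.lstrip('.') before checking for None) it guards the all-None case explicitly:

def pick_extension(parsed_ext, version_kwargs, name, url):
    # 1. Prefer the extension parsed from the URL itself.
    ext = parsed_ext or version_kwargs.get('extension', None)
    # 2. Normalize a leading dot, guarding the case where both
    #    sources came up empty.
    ext = ext.lstrip('.') if ext else None
    # 3. Refuse to guess a mirror archive filename.
    if not ext:
        raise ValueError(
            "%s version does not specify an extension"
            " and could not parse extension from %s" % (name, url))
    return ext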
@@ -53,14 +53,12 @@
 import spack.url
 import spack.util.web
 
-from urlparse import urlparse
 from StringIO import StringIO
 from llnl.util.filesystem import *
 from llnl.util.lang import *
 from llnl.util.link_tree import LinkTree
 from llnl.util.tty.log import log_output
 from spack.stage import Stage, ResourceStage, StageComposite
-from spack.util.compression import allowed_archive
 from spack.util.environment import dump_environment
 from spack.util.executable import ProcessError, which
 from spack.version import *
@@ -1473,16 +1471,6 @@ def flatten_dependencies(spec, flat_dir):
         dep_files.merge(flat_dir + '/' + name)
 
 
-def validate_package_url(url_string):
-    """Determine whether spack can handle a particular URL or not."""
-    url = urlparse(url_string)
-    if url.scheme not in _ALLOWED_URL_SCHEMES:
-        tty.die("Invalid protocol in URL: '%s'" % url_string)
-
-    if not allowed_archive(url_string):
-        tty.die("Invalid file type in URL: '%s'" % url_string)
-
-
 def dump_packages(spec, path):
     """Dump all package information for a spec and its dependencies.
@@ -302,9 +302,11 @@ def fetch(self, mirror_only=False):
             # the checksum will be the same.
             digest = None
             expand = True
+            extension = None
             if isinstance(self.default_fetcher, fs.URLFetchStrategy):
                 digest = self.default_fetcher.digest
                 expand = self.default_fetcher.expand_archive
+                extension = self.default_fetcher.extension
 
             # Have to skip the checksum for things archived from
             # repositories. How can this be made safer?
@@ -313,10 +315,12 @@ def fetch(self, mirror_only=False):
             # Add URL strategies for all the mirrors with the digest
             for url in urls:
                 fetchers.insert(
-                    0, fs.URLFetchStrategy(url, digest, expand=expand))
+                    0, fs.URLFetchStrategy(
+                        url, digest, expand=expand, extension=extension))
             fetchers.insert(
                 0, spack.fetch_cache.fetcher(
-                    self.mirror_path, digest, expand=expand))
+                    self.mirror_path, digest, expand=expand,
+                    extension=extension))
 
             # Look for the archive in list_url
             package_name = os.path.dirname(self.mirror_path)
@@ -142,7 +142,7 @@ def split_url_extension(path):
     return prefix, ext, suffix
 
 
-def downloaded_file_extension(path):
+def determine_url_file_extension(path):
     """This returns the type of archive a URL refers to. This is
     sometimes confusing because of URLs like:
@@ -160,8 +160,6 @@ def downloaded_file_extension(path):
         return 'tar.gz'
 
     prefix, ext, suffix = split_url_extension(path)
-    if not ext:
-        raise UrlParseError("Cannot deduce archive type in %s" % path, path)
     return ext
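With the UrlParseError gone, the function now reports failure by returning a falsy extension, which callers must handle themselves, as mirror_archive_filename does above. A hedged usage sketch (URL hypothetical):

import spack.url as url

ext = url.determine_url_file_extension(
    'http://www.example.com/download.php?file=example&version=1.0')
if not ext:
    # No parseable extension; fall back to the version's declared
    # 'extension' instead of aborting here.
    ext = 'tar.gz'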
@@ -40,9 +40,10 @@ def allowed_archive(path):
     return any(path.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)
 
 
-def decompressor_for(path):
+def decompressor_for(path, extension=None):
     """Get the appropriate decompressor for a path."""
-    if path.endswith(".zip"):
+    if ((extension and re.match(r'\.?zip$', extension)) or
+            path.endswith('.zip')):
         unzip = which('unzip', required=True)
         return unzip
     tar = which('tar', required=True)
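A hedged sketch of the updated helper in use (paths hypothetical). which() returns a callable Executable, and an explicit extension can route an extension-free filename to unzip:

from spack.util.compression import decompressor_for

# The downloaded file's name gives no hint, but the package declared
# extension='zip', so unzip is selected.
decompress = decompressor_for('/tmp/example-download', extension='zip')
decompress('/tmp/example-download')

# Without the hint, only a literal '.zip' suffix selects unzip;
# everything else falls through to tar.
decompress = decompressor_for('/tmp/example-1.0.tar.gz')
decompress('/tmp/example-1.0.tar.gz')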