Fix fetching non-expanded resources from mirrors (#1310)

This closes #1308, where fetching a non-expanded resource from a mirror
would cause an error.

This also ensures that when a URL resource is fetched from a mirror,
it will be named as though it were retrieved from the original
URL. This is particularly useful for non-expanded resources since it
ensures that the resource name is consistent for the installation
(this is less important for expanded resources because the build takes
place inside the expanded resource).
This commit is contained in:
scheibelp 2016-09-07 07:02:24 -07:00 committed by Todd Gamblin
parent d55b17dd65
commit fd02a140c4
3 changed files with 25 additions and 22 deletions

View File

@@ -56,7 +56,7 @@ class MockCache(object):
def store(self, copyCmd, relativeDst):
pass
def fetcher(self, targetPath, digest):
def fetcher(self, targetPath, digest, **kwargs):
return MockCacheFetcher()

View File

@@ -170,12 +170,11 @@ def fetch(self):
tty.msg("Already downloaded %s" % self.archive_file)
return
possible_files = self.stage.expected_archive_files
save_file = None
partial_file = None
if possible_files:
save_file = self.stage.expected_archive_files[0]
partial_file = self.stage.expected_archive_files[0] + '.part'
if self.stage.save_filename:
save_file = self.stage.save_filename
partial_file = self.stage.save_filename + '.part'
tty.msg("Trying to fetch from %s" % self.url)
@@ -858,9 +857,9 @@ def store(self, fetcher, relativeDst):
mkdirp(os.path.dirname(dst))
fetcher.archive(dst)
def fetcher(self, targetPath, digest):
def fetcher(self, targetPath, digest, **kwargs):
url = "file://" + join_path(self.root, targetPath)
return CacheURLFetchStrategy(url, digest)
return CacheURLFetchStrategy(url, digest, **kwargs)
def destroy(self):
shutil.rmtree(self.root, ignore_errors=True)

View File

@@ -216,9 +216,9 @@ def _need_to_create_path(self):
def expected_archive_files(self):
"""Possible archive file paths."""
paths = []
if isinstance(self.fetcher, fs.URLFetchStrategy):
if isinstance(self.default_fetcher, fs.URLFetchStrategy):
paths.append(os.path.join(
self.path, os.path.basename(self.fetcher.url)))
self.path, os.path.basename(self.default_fetcher.url)))
if self.mirror_path:
paths.append(os.path.join(
@@ -226,19 +226,19 @@ def expected_archive_files(self):
return paths
@property
def save_filename(self):
possible_filenames = self.expected_archive_files
if possible_filenames:
# This prefers using the URL associated with the default fetcher if
# available, so that the fetched resource name matches the remote
# name
return possible_filenames[0]
@property
def archive_file(self):
"""Path to the source archive within this stage directory."""
paths = []
if isinstance(self.fetcher, fs.URLFetchStrategy):
paths.append(os.path.join(
self.path, os.path.basename(self.fetcher.url)))
if self.mirror_path:
paths.append(os.path.join(
self.path, os.path.basename(self.mirror_path)))
for path in paths:
for path in self.expected_archive_files:
if os.path.exists(path):
return path
else:
@@ -301,8 +301,10 @@ def fetch(self, mirror_only=False):
# then use the same digest. `spack mirror` ensures that
# the checksum will be the same.
digest = None
expand = True
if isinstance(self.default_fetcher, fs.URLFetchStrategy):
digest = self.default_fetcher.digest
expand = self.default_fetcher.expand_archive
# Have to skip the checksum for things archived from
# repositories. How can this be made safer?
@@ -310,9 +312,11 @@ def fetch(self, mirror_only=False):
# Add URL strategies for all the mirrors with the digest
for url in urls:
fetchers.insert(0, fs.URLFetchStrategy(url, digest))
fetchers.insert(0, spack.fetch_cache.fetcher(self.mirror_path,
digest))
fetchers.insert(
0, fs.URLFetchStrategy(url, digest, expand=expand))
fetchers.insert(
0, spack.fetch_cache.fetcher(
self.mirror_path, digest, expand=expand))
# Look for the archive in list_url
package_name = os.path.dirname(self.mirror_path)