Waste less space when fetching cached archives, simplify fetch messages. (#2264)
* Waste less space when fetching cached archives, simplify fetch messages.
  - Just symlink cached files into the stage instead of copying them with curl.
  - Don't copy linked files back into the cache when done fetching.
* Fixes for review.
* more updates
* last update
parent: 08477f6624
commit: 908ba6e3d6
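The core idea of the change, before the hunks below: instead of having curl copy an already-cached archive into the stage, the cached file is symlinked into place, and files that are already links back into the cache are never re-archived. A minimal standalone sketch of that symlink-into-stage behaviour, using plain os calls rather than Spack's actual classes (link_cached_archive, cache_path, stage_dir and archive_name are hypothetical names for illustration):

    import os

    def link_cached_archive(cache_path, stage_dir, archive_name):
        """Sketch: place a cached archive into a stage by symlinking, not copying."""
        if not os.path.isfile(cache_path):
            raise IOError("No cache of %s" % cache_path)

        target = os.path.join(stage_dir, archive_name)

        # Remove a stale link or file so os.symlink() does not fail with EEXIST.
        if os.path.lexists(target):
            os.remove(target)

        # Symlink instead of copying: no duplicate bytes on disk.
        os.symlink(cache_path, target)
        return target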
@@ -194,7 +194,7 @@ def fetch(self):
             save_file = self.stage.save_filename
             partial_file = self.stage.save_filename + '.part'

-        tty.msg("Trying to fetch from %s" % self.url)
+        tty.msg("Fetching %s" % self.url)

         if partial_file:
             save_args = ['-C',
@@ -295,7 +295,7 @@ def expand(self):
         self.stage.chdir()
         if not self.archive_file:
             raise NoArchiveFileError(
-                "URLFetchStrategy couldn't find archive file",
+                "Couldn't find archive file",
                 "Failed on expand() for URL %s" % self.url)

         if not self.extension:
@@ -392,16 +392,34 @@ def __init__(self, *args, **kwargs):

     @_needs_stage
     def fetch(self):
-        super(CacheURLFetchStrategy, self).fetch()
+        path = re.sub('^file://', '', self.url)
+
+        # check whether the cache file exists.
+        if not os.path.isfile(path):
+            raise NoCacheError('No cache of %s' % path)
+
+        self.stage.chdir()
+
+        # remove old symlink if one is there.
+        filename = self.stage.save_filename
+        if os.path.exists(filename):
+            os.remove(filename)
+
+        # Symlink to local cached archive.
+        os.symlink(path, filename)
+
+        # Remove link if checksum fails, or subsequent fetchers
+        # will assume they don't need to download.
         if self.digest:
             try:
                 self.check()
             except ChecksumError:
-                # Future fetchers will assume they don't need to
-                # download if the file remains
                 os.remove(self.archive_file)
                 raise

+        # Notify the user how we fetched.
+        tty.msg('Using cached archive: %s' % path)
+

 class VCSFetchStrategy(FetchStrategy):

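Worth noting in the hunk above: if the checksum of the symlinked archive fails, the link itself is removed before the error is re-raised, so later fetch strategies do not find an archive file and wrongly skip downloading. A rough standalone illustration of that pattern, not Spack's own check() implementation (verify_or_unlink and expected_md5 are made-up names; Spack performs the real digest comparison internally):

    import hashlib
    import os

    def verify_or_unlink(link_path, expected_md5):
        """Sketch: verify a symlinked archive; drop the link if the digest is wrong."""
        md5 = hashlib.md5()
        with open(link_path, 'rb') as f:
            for chunk in iter(lambda: f.read(65536), b''):
                md5.update(chunk)

        if md5.hexdigest() != expected_md5:
            # Remove the link, or subsequent fetchers would assume the
            # archive is already present and skip downloading it.
            os.remove(link_path)
            raise ValueError("Checksum mismatch for %s" % link_path)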
@@ -907,31 +925,36 @@ def __init__(self, root):
         self.root = os.path.abspath(root)

     def store(self, fetcher, relativeDst):
+        # skip fetchers that aren't cachable
         if not fetcher.cachable:
             return

+        # Don't store things that are already cached.
+        if isinstance(fetcher, CacheURLFetchStrategy):
+            return
+
         dst = join_path(self.root, relativeDst)
         mkdirp(os.path.dirname(dst))
         fetcher.archive(dst)

     def fetcher(self, targetPath, digest, **kwargs):
-        url = "file://" + join_path(self.root, targetPath)
-        return CacheURLFetchStrategy(url, digest, **kwargs)
+        path = join_path(self.root, targetPath)
+        return CacheURLFetchStrategy(path, digest, **kwargs)

     def destroy(self):
         shutil.rmtree(self.root, ignore_errors=True)


 class FetchError(spack.error.SpackError):
-
-    def __init__(self, msg, long_msg=None):
-        super(FetchError, self).__init__(msg, long_msg)
+    """Superclass fo fetcher errors."""
+
+
+class NoCacheError(FetchError):
+    """Raised when there is no cached archive for a package."""


 class FailedDownloadError(FetchError):

     """Raised wen a download fails."""

     def __init__(self, url, msg=""):
         super(FailedDownloadError, self).__init__(
             "Failed to fetch file from URL: %s" % url, msg)
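The fetcher() change above pairs with the re.sub('^file://', '', self.url) line added to CacheURLFetchStrategy.fetch(): whether the strategy is handed a bare filesystem path or a file:// URL, it ends up operating on the same local path. A two-line check of that normalization (the example path is made up):

    import re

    # Both forms reduce to the same local path the cache fetcher symlinks from.
    for url in ['/var/spack/cache/foo-1.0.tar.gz',
                'file:///var/spack/cache/foo-1.0.tar.gz']:
        print(re.sub('^file://', '', url))
    # prints /var/spack/cache/foo-1.0.tar.gz both times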
@ -939,19 +962,14 @@ def __init__(self, url, msg=""):
|
|||||||
|
|
||||||
|
|
||||||
class NoArchiveFileError(FetchError):
|
class NoArchiveFileError(FetchError):
|
||||||
|
""""Raised when an archive file is expected but none exists."""
|
||||||
def __init__(self, msg, long_msg):
|
|
||||||
super(NoArchiveFileError, self).__init__(msg, long_msg)
|
|
||||||
|
|
||||||
|
|
||||||
class NoDigestError(FetchError):
|
class NoDigestError(FetchError):
|
||||||
|
"""Raised after attempt to checksum when URL has no digest."""
|
||||||
def __init__(self, msg, long_msg=None):
|
|
||||||
super(NoDigestError, self).__init__(msg, long_msg)
|
|
||||||
|
|
||||||
|
|
||||||
class InvalidArgsError(FetchError):
|
class InvalidArgsError(FetchError):
|
||||||
|
|
||||||
def __init__(self, pkg, version):
|
def __init__(self, pkg, version):
|
||||||
msg = ("Could not construct a fetch strategy for package %s at "
|
msg = ("Could not construct a fetch strategy for package %s at "
|
||||||
"version %s")
|
"version %s")
|
||||||
@ -960,17 +978,11 @@ def __init__(self, pkg, version):
|
|||||||
|
|
||||||
|
|
||||||
class ChecksumError(FetchError):
|
class ChecksumError(FetchError):
|
||||||
|
|
||||||
"""Raised when archive fails to checksum."""
|
"""Raised when archive fails to checksum."""
|
||||||
|
|
||||||
def __init__(self, message, long_msg=None):
|
|
||||||
super(ChecksumError, self).__init__(message, long_msg)
|
|
||||||
|
|
||||||
|
|
||||||
class NoStageError(FetchError):
|
class NoStageError(FetchError):
|
||||||
|
|
||||||
"""Raised when fetch operations are called before set_stage()."""
|
"""Raised when fetch operations are called before set_stage()."""
|
||||||
|
|
||||||
def __init__(self, method):
|
def __init__(self, method):
|
||||||
super(NoStageError, self).__init__(
|
super(NoStageError, self).__init__(
|
||||||
"Must call FetchStrategy.set_stage() before calling %s" %
|
"Must call FetchStrategy.set_stage() before calling %s" %
|
||||||
|
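The error-class hunk above can drop the hand-written __init__ overrides because the base FetchError (via spack.error.SpackError) already accepts a message and an optional long message; the subclasses only need a docstring. The same pattern in plain Python, with generic names standing in for Spack's classes (this is a sketch, not the SpackError implementation):

    class FetchError(Exception):
        """Stand-in superclass for fetcher errors."""

        def __init__(self, msg, long_msg=None):
            super(FetchError, self).__init__(msg)
            self.long_message = long_msg


    class NoCacheError(FetchError):
        """Raised when there is no cached archive for a package."""


    # The subclass inherits FetchError.__init__, so no boilerplate is needed:
    err = NoCacheError('No cache of /some/path')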
@@ -425,6 +425,9 @@ def fetch(self, mirror_only=False):
                 self.fetcher = fetcher
                 self.fetcher.fetch()
                 break
+            except spack.fetch_strategy.NoCacheError as e:
+                # Don't bother reporting when something is not cached.
+                continue
             except spack.error.SpackError as e:
                 tty.msg("Fetching from %s failed." % fetcher)
                 tty.debug(e)
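The last hunk wires the new NoCacheError into the fetch loop: a missing cache entry is skipped silently and the next fetcher is tried, while any other fetch failure is reported before moving on. A rough sketch of that control flow with stand-in names (try_fetchers, NoCacheError and log here are hypothetical, not Spack's Stage.fetch):

    class NoCacheError(Exception):
        pass

    def try_fetchers(fetchers, log=print):
        for fetcher in fetchers:
            try:
                fetcher()
                return True
            except NoCacheError:
                continue  # nothing cached; not worth a message
            except Exception as e:
                log("Fetching from %s failed: %s" % (fetcher, e))
        return False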