Merge pull request #1346 from glennpj/url_list_fetch

Url list fetch
becker33 2016-08-01 13:37:57 -07:00 committed by GitHub
commit 630ff6871d

lib/spack/spack/stage.py

@@ -37,6 +37,7 @@
 import spack.config
 import spack.fetch_strategy as fs
 import spack.error
+from spack.version import *
 
 STAGE_PREFIX = 'spack-stage-'
@@ -51,10 +52,12 @@ class Stage(object):
     lifecycle looks like this:
 
     ```
-    with Stage() as stage:      # Context manager creates and destroys the stage directory
+    with Stage() as stage:      # Context manager creates and destroys the
+                                # stage directory
         stage.fetch()           # Fetch a source archive into the stage.
         stage.expand_archive()  # Expand the source archive.
-        <install>               # Build and install the archive.  (handled by user of Stage)
+        <install>               # Build and install the archive.  (handled by
+                                # user of Stage)
     ```
 
     When used as a context manager, the stage is automatically
@@ -71,7 +74,8 @@ class Stage(object):
         stage.create()          # Explicitly create the stage directory.
         stage.fetch()           # Fetch a source archive into the stage.
         stage.expand_archive()  # Expand the source archive.
-        <install>               # Build and install the archive.  (handled by user of Stage)
+        <install>               # Build and install the archive.  (handled by
+                                # user of Stage)
     finally:
         stage.destroy()         # Explicitly destroy the stage directory.
     ```
@@ -120,13 +124,17 @@ def __init__(self, url_or_fetch_strategy,
         elif isinstance(url_or_fetch_strategy, fs.FetchStrategy):
             self.fetcher = url_or_fetch_strategy
         else:
-            raise ValueError("Can't construct Stage without url or fetch strategy")
+            raise ValueError(
+                "Can't construct Stage without url or fetch strategy")
         self.fetcher.set_stage(self)
-        self.default_fetcher = self.fetcher  # self.fetcher can change with mirrors.
-        self.skip_checksum_for_mirror = True  # used for mirrored archives of repositories.
+        # self.fetcher can change with mirrors.
+        self.default_fetcher = self.fetcher
+        # used for mirrored archives of repositories.
+        self.skip_checksum_for_mirror = True
 
-        # TODO : this uses a protected member of tempfile, but seemed the only way to get a temporary name
-        # TODO : besides, the temporary link name won't be the same as the temporary stage area in tmp_root
+        # TODO : this uses a protected member of tempfile, but seemed the only
+        # TODO : way to get a temporary name besides, the temporary link name
+        # TODO : won't be the same as the temporary stage area in tmp_root
         self.name = name
         if name is None:
             self.name = STAGE_PREFIX + next(tempfile._get_candidate_names())
@@ -143,7 +151,6 @@ def __init__(self, url_or_fetch_strategy,
         # Flag to decide whether to delete the stage folder on exit or not
         self.keep = keep
 
-
     def __enter__(self):
         """
         Entering a stage context will create the stage directory
@@ -154,7 +161,6 @@ def __enter__(self):
         self.create()
         return self
 
-
     def __exit__(self, exc_type, exc_val, exc_tb):
         """
         Exiting from a stage context will delete the stage directory unless:
@@ -173,12 +179,10 @@ def __exit__(self, exc_type, exc_val, exc_tb):
         if exc_type is None and not self.keep:
             self.destroy()
 
-
     def _need_to_create_path(self):
         """Makes sure nothing weird has happened since the last time we
         looked at path. Returns True if path already exists and is ok.
-        Returns False if path needs to be created.
-        """
+        Returns False if path needs to be created."""
         # Path doesn't exist yet. Will need to create it.
         if not os.path.exists(self.path):
             return True
@@ -196,7 +200,8 @@ def _need_to_create_path(self):
             if spack.use_tmp_stage:
                 # If we're using a tmp dir, it's a link, and it points at the
                 # right spot, then keep it.
-                if (real_path.startswith(real_tmp) and os.path.exists(real_path)):
+                if (real_path.startswith(real_tmp) and
+                        os.path.exists(real_path)):
                     return False
                 else:
                     # otherwise, just unlink it and start over.
@@ -204,7 +209,8 @@ def _need_to_create_path(self):
                     return True
 
             else:
-                # If we're not tmp mode, then it's a link and we want a directory.
+                # If we're not tmp mode, then it's a link and we want a
+                # directory.
                 os.unlink(self.path)
                 return True
@@ -215,10 +221,12 @@ def expected_archive_files(self):
         """Possible archive file paths."""
         paths = []
         if isinstance(self.fetcher, fs.URLFetchStrategy):
-            paths.append(os.path.join(self.path, os.path.basename(self.fetcher.url)))
+            paths.append(os.path.join(
+                self.path, os.path.basename(self.fetcher.url)))
 
         if self.mirror_path:
-            paths.append(os.path.join(self.path, os.path.basename(self.mirror_path)))
+            paths.append(os.path.join(
+                self.path, os.path.basename(self.mirror_path)))
 
         return paths
@@ -227,10 +235,12 @@ def archive_file(self):
         """Path to the source archive within this stage directory."""
         paths = []
         if isinstance(self.fetcher, fs.URLFetchStrategy):
-            paths.append(os.path.join(self.path, os.path.basename(self.fetcher.url)))
+            paths.append(os.path.join(
+                self.path, os.path.basename(self.fetcher.url)))
 
         if self.mirror_path:
-            paths.append(os.path.join(self.path, os.path.basename(self.mirror_path)))
+            paths.append(os.path.join(
+                self.path, os.path.basename(self.mirror_path)))
 
         for path in paths:
             if os.path.exists(path):
@@ -262,7 +272,8 @@ def source_path(self):
         return None
 
     def chdir(self):
-        """Changes directory to the stage path. Or dies if it is not set up."""
+        """Changes directory to the stage path. Or dies if it is not set
+        up."""
         if os.path.isdir(self.path):
             os.chdir(self.path)
         else:
@@ -306,6 +317,19 @@ def fetch(self, mirror_only=False):
                 fetchers.insert(0, fs.URLFetchStrategy(url, digest))
             fetchers.insert(0, spack.cache.fetcher(self.mirror_path, digest))
 
+            # Look for the archive in list_url
+            archive_version = spack.url.parse_version(self.default_fetcher.url)
+            package_name = os.path.dirname(self.mirror_path)
+            pkg = spack.repo.get(package_name)
+            if pkg.list_url is not None and pkg.url is not None:
+                versions = pkg.fetch_remote_versions()
+                try:
+                    url_from_list = versions[Version(archive_version)]
+                    fetchers.append(fs.URLFetchStrategy(url_from_list, digest))
+                except KeyError:
+                    tty.msg("Can not find version %s in url_list" %
+                            archive_version)
+
         for fetcher in fetchers:
             try:
                 fetcher.set_stage(self)
@@ -321,11 +345,11 @@ def fetch(self, mirror_only=False):
             self.fetcher = self.default_fetcher
             raise fs.FetchError(errMessage, None)
 
-
     def check(self):
         """Check the downloaded archive against a checksum digest.
            No-op if this stage checks code out of a repository."""
-        if self.fetcher is not self.default_fetcher and self.skip_checksum_for_mirror:
+        if self.fetcher is not self.default_fetcher and \
+           self.skip_checksum_for_mirror:
             tty.warn("Fetching from mirror without a checksum!",
                      "This package is normally checked out from a version "
                      "control system, but it has been archived on a spack "
@@ -335,16 +359,13 @@ def check(self):
         else:
             self.fetcher.check()
 
-
     def cache_local(self):
         spack.cache.store(self.fetcher, self.mirror_path)
 
-
     def expand_archive(self):
         """Changes to the stage directory and attempt to expand the downloaded
         archive. Fail if the stage is not set up or if the archive is not yet
-        downloaded.
-        """
+        downloaded."""
         archive_dir = self.source_path
         if not archive_dir:
             self.fetcher.expand()
@@ -386,8 +407,8 @@ def create(self):
         # Create the top-level stage directory
         mkdirp(spack.stage_path)
         remove_dead_links(spack.stage_path)
-        # If a tmp_root exists then create a directory there and then link it in the stage area,
-        # otherwise create the stage directory in self.path
+        # If a tmp_root exists then create a directory there and then link it
+        # in the stage area, otherwise create the stage directory in self.path
         if self._need_to_create_path():
             if self.tmp_root:
                 tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root)
@@ -409,6 +430,7 @@ def destroy(self):
 
 
 class ResourceStage(Stage):
+
     def __init__(self, url_or_fetch_strategy, root, resource, **kwargs):
         super(ResourceStage, self).__init__(url_or_fetch_strategy, **kwargs)
         self.root_stage = root
@@ -418,12 +440,15 @@ def expand_archive(self):
         super(ResourceStage, self).expand_archive()
         root_stage = self.root_stage
         resource = self.resource
-        placement = os.path.basename(self.source_path) if resource.placement is None else resource.placement
+        placement = os.path.basename(self.source_path) \
+            if resource.placement is None \
+            else resource.placement
         if not isinstance(placement, dict):
             placement = {'': placement}
 
         # Make the paths in the dictionary absolute and link
         for key, value in placement.iteritems():
-            target_path = join_path(root_stage.source_path, resource.destination)
+            target_path = join_path(
+                root_stage.source_path, resource.destination)
             destination_path = join_path(target_path, value)
             source_path = join_path(self.source_path, key)
@@ -437,21 +462,23 @@ def expand_archive(self):
             if not os.path.exists(destination_path):
                 # Create a symlink
-                tty.info('Moving resource stage\n\tsource : {stage}\n\tdestination : {destination}'.format(
-                    stage=source_path, destination=destination_path
-                ))
+                tty.info('Moving resource stage\n\tsource : '
+                         '{stage}\n\tdestination : {destination}'.format(
+                             stage=source_path, destination=destination_path
+                         ))
                 shutil.move(source_path, destination_path)
 
 
-@pattern.composite(method_list=['fetch', 'create', 'check', 'expand_archive', 'restage', 'destroy', 'cache_local'])
+@pattern.composite(method_list=['fetch', 'create', 'check', 'expand_archive',
+                                'restage', 'destroy', 'cache_local'])
 class StageComposite:
-    """
-    Composite for Stage type objects. The first item in this composite is considered to be the root package, and
-    operations that return a value are forwarded to it.
-    """
+    """Composite for Stage type objects. The first item in this composite is
+    considered to be the root package, and operations that return a value are
+    forwarded to it."""
     #
     # __enter__ and __exit__ delegate to all stages in the composite.
     #
 
     def __enter__(self):
         for item in self:
             item.__enter__()
@@ -496,8 +523,11 @@ def chdir(self):
             raise ChdirError("Setup failed: no such directory: " + self.path)
 
     # DIY stages do nothing as context managers.
-    def __enter__(self): pass
-    def __exit__(self, exc_type, exc_val, exc_tb): pass
+    def __enter__(self):
+        pass
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        pass
 
     def chdir_to_source(self):
         self.chdir()
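
For context on the behavior this commit adds to Stage.fetch(): fetch_remote_versions() scrapes the package's list_url and returns a mapping from version to download URL, the wanted version is looked up by key, and a URLFetchStrategy built from that URL is appended as a last-resort fetcher (a KeyError is only logged if the listing does not contain the version). Below is a minimal, self-contained sketch of that lookup-and-fallback pattern; the package name, URLs, and plain-string versions are hypothetical stand-ins for Spack's Version objects and a real remote listing, not part of the commit.

    # Sketch of the list_url fallback pattern used in Stage.fetch() above.
    # Hypothetical data: in Spack, pkg.fetch_remote_versions() would return a
    # mapping of Version objects to URLs scraped from the package's list_url.
    remote_versions = {
        '1.2.3': 'https://example.com/downloads/foo-1.2.3.tar.gz',
        '1.2.4': 'https://example.com/downloads/foo-1.2.4.tar.gz',
    }

    def add_list_url_fetcher(fetchers, wanted_version, digest=None):
        """Append a last-resort fetcher if the version appears in the listing."""
        try:
            url_from_list = remote_versions[wanted_version]
        except KeyError:
            # Mirrors the tty.msg() call in the commit: missing versions are
            # reported, not fatal.
            print("Can not find version %s in url_list" % wanted_version)
            return fetchers
        # In Spack this would be fs.URLFetchStrategy(url_from_list, digest);
        # a (url, digest) tuple stands in for it in this sketch.
        fetchers.append((url_from_list, digest))
        return fetchers

    fetchers = [('https://example.com/primary/foo-1.2.3.tar.gz', 'abc123')]
    add_list_url_fetcher(fetchers, '1.2.3')
    print(fetchers)  # primary fetcher first, list_url fallback appended last

Note the ordering: mirrors and the local cache are added with fetchers.insert(0, ...) so they are tried before the default fetcher, while the new list_url lookup uses fetchers.append(...), making it the lowest-priority source, consulted only after the primary URL, mirrors, and cache have all failed.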