More thorough mirror test: test the full round-trip
- Old test: did not attempt to actually fetch mirrored packages.
- New test: (1) creates a temporary mirror, (2) registers it with Spack, (3) fetches from it, and (4) verifies that the fetched archive matches the original.
- This test should make `spack mirror` less brittle from now on.
This commit is contained in:
@@ -172,6 +172,7 @@ def mirror_create(args):
|
||||
specs = [Spec(n) for n in spack.repo.all_package_names()]
|
||||
specs.sort(key=lambda s: s.format("$_$@").lower())
|
||||
|
||||
# If the user asked for dependencies, traverse spec DAG get them.
|
||||
if args.dependencies:
|
||||
new_specs = set()
|
||||
for spec in specs:
|
||||
|
@@ -631,7 +631,7 @@ def remove_prefix(self):
|
||||
spack.install_layout.remove_install_directory(self.spec)
|
||||
|
||||
|
||||
def do_fetch(self):
|
||||
def do_fetch(self, mirror_only=False):
|
||||
"""Creates a stage directory and downloads the taball for this package.
|
||||
Working directory will be set to the stage directory.
|
||||
"""
|
||||
@@ -656,7 +656,7 @@ def do_fetch(self):
|
||||
raise FetchError(
|
||||
"Will not fetch %s." % self.spec.format('$_$@'), checksum_msg)
|
||||
|
||||
self.stage.fetch()
|
||||
self.stage.fetch(mirror_only)
|
||||
|
||||
##########
|
||||
# Fetch resources
|
||||
@@ -677,7 +677,8 @@ def do_fetch(self):
|
||||
if spack.do_checksum and self.version in self.versions:
|
||||
self.stage.check()
|
||||
|
||||
def do_stage(self):
|
||||
|
||||
def do_stage(self, mirror_only=False):
|
||||
"""Unpacks the fetched tarball, then changes into the expanded tarball
|
||||
directory."""
|
||||
if not self.spec.concrete:
|
||||
@@ -691,8 +692,7 @@ def _expand_archive(stage, name=self.name):
|
||||
else:
|
||||
tty.msg("Already staged %s in %s." % (name, stage.path))
|
||||
|
||||
|
||||
self.do_fetch()
|
||||
self.do_fetch(mirror_only)
|
||||
_expand_archive(self.stage)
|
||||
|
||||
##########
|
||||
@@ -835,10 +835,6 @@ def _resource_stage(self, resource):
|
||||
resource_stage_folder = '-'.join(pieces)
|
||||
return resource_stage_folder
|
||||
|
||||
def _build_logger(self, log_path):
|
||||
"""Create a context manager to log build output."""
|
||||
|
||||
|
||||
|
||||
def do_install(self,
|
||||
keep_prefix=False, keep_stage=False, ignore_deps=False,
|
||||
|
@@ -97,7 +97,6 @@ def __init__(self, url_or_fetch_strategy, **kwargs):
|
||||
|
||||
self.name = kwargs.get('name')
|
||||
self.mirror_path = kwargs.get('mirror_path')
|
||||
|
||||
self.tmp_root = find_tmp_root()
|
||||
|
||||
self.path = None
|
||||
@@ -240,11 +239,13 @@ def chdir(self):
|
||||
tty.die("Setup failed: no such directory: " + self.path)
|
||||
|
||||
|
||||
def fetch(self):
|
||||
def fetch(self, mirror_only=False):
|
||||
"""Downloads an archive or checks out code from a repository."""
|
||||
self.chdir()
|
||||
|
||||
fetchers = [self.default_fetcher]
|
||||
fetchers = []
|
||||
if not mirror_only:
|
||||
fetchers.append(self.default_fetcher)
|
||||
|
||||
# TODO: move mirror logic out of here and clean it up!
|
||||
# TODO: Or @alalazo may have some ideas about how to use a
|
||||
@@ -267,10 +268,11 @@ def fetch(self):
|
||||
if isinstance(self.default_fetcher, fs.URLFetchStrategy):
|
||||
digest = self.default_fetcher.digest
|
||||
|
||||
# Have to skip the checkesum for things archived from
|
||||
# Have to skip the checksum for things archived from
|
||||
# repositories. How can this be made safer?
|
||||
self.skip_checksum_for_mirror = not bool(digest)
|
||||
|
||||
# Add URL strategies for all the mirrors with the digest
|
||||
for url in urls:
|
||||
fetchers.insert(0, fs.URLFetchStrategy(url, digest))
|
||||
|
||||
|
@@ -77,6 +77,10 @@ def check_mirror(self):
|
||||
stage = Stage('spack-mirror-test')
|
||||
mirror_root = join_path(stage.path, 'test-mirror')
|
||||
|
||||
# register mirror with spack config
|
||||
mirrors = { 'spack-mirror-test' : 'file://' + mirror_root }
|
||||
spack.config.update_config('mirrors', mirrors)
|
||||
|
||||
try:
|
||||
os.chdir(stage.path)
|
||||
spack.mirror.create(
|
||||
@@ -85,7 +89,7 @@ def check_mirror(self):
|
||||
# Stage directory exists
|
||||
self.assertTrue(os.path.isdir(mirror_root))
|
||||
|
||||
# subdirs for each package
|
||||
# check that there are subdirs for each package
|
||||
for name in self.repos:
|
||||
subdir = join_path(mirror_root, name)
|
||||
self.assertTrue(os.path.isdir(subdir))
|
||||
@@ -93,38 +97,35 @@ def check_mirror(self):
|
||||
files = os.listdir(subdir)
|
||||
self.assertEqual(len(files), 1)
|
||||
|
||||
# Decompress archive in the mirror
|
||||
archive = files[0]
|
||||
archive_path = join_path(subdir, archive)
|
||||
decomp = decompressor_for(archive_path)
|
||||
# Now try to fetch each package.
|
||||
for name, mock_repo in self.repos.items():
|
||||
spec = Spec(name).concretized()
|
||||
pkg = spec.package
|
||||
|
||||
with working_dir(subdir):
|
||||
decomp(archive_path)
|
||||
|
||||
# Find the untarred archive directory.
|
||||
files = os.listdir(subdir)
|
||||
self.assertEqual(len(files), 2)
|
||||
self.assertTrue(archive in files)
|
||||
files.remove(archive)
|
||||
|
||||
expanded_archive = join_path(subdir, files[0])
|
||||
self.assertTrue(os.path.isdir(expanded_archive))
|
||||
saved_checksum_setting = spack.do_checksum
|
||||
try:
|
||||
# Stage the archive from the mirror and cd to it.
|
||||
spack.do_checksum = False
|
||||
pkg.do_stage(mirror_only=True)
|
||||
|
||||
# Compare the original repo with the expanded archive
|
||||
repo = self.repos[name]
|
||||
if not 'svn' in name:
|
||||
original_path = repo.path
|
||||
else:
|
||||
co = 'checked_out'
|
||||
svn('checkout', repo.url, co)
|
||||
original_path = join_path(subdir, co)
|
||||
original_path = mock_repo.path
|
||||
if 'svn' in name:
|
||||
# have to check out the svn repo to compare.
|
||||
original_path = join_path(mock_repo.path, 'checked_out')
|
||||
svn('checkout', mock_repo.url, original_path)
|
||||
|
||||
dcmp = dircmp(original_path, expanded_archive)
|
||||
dcmp = dircmp(original_path, pkg.stage.source_path)
|
||||
|
||||
# make sure there are no new files in the expanded tarball
|
||||
self.assertFalse(dcmp.right_only)
|
||||
|
||||
# and that all original files are present.
|
||||
self.assertTrue(all(l in exclude for l in dcmp.left_only))
|
||||
|
||||
finally:
|
||||
spack.do_checksum = saved_checksum_setting
|
||||
pkg.do_clean()
|
||||
finally:
|
||||
stage.destroy()
|
||||
|
||||
|
Reference in New Issue
Block a user