Resources: use expanded archive name by default (#11688)
For resources, it is desirable to use the expanded archive name of the
resource as the name of the directory when adding it to the root staging
area. #11528 established 'spack-src' as the universal directory where
source files are placed, which also affected the behavior of resources
managed with Stages. This adds a new property ('srcdir') to Stage to
remember the name of the expanded source directory, and uses this as the
default name when placing a resource directory in the root staging area.

This also:

* Ensures that downloaded sources are archived using the expanded archive
  name (otherwise Spack will not be able to determine the original
  directory name when using a cached archive).
* Updates the working_dir context manager to guarantee restoration of the
  original working directory when an exception occurs.
* Adds a "temp_cwd" context manager which creates a temporary directory
  and sets it as the working directory.
parent 4858d8c275
commit 284ae9d1cc
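A rough sketch of how the new pieces below fit together: a VCS fetcher now
clones into a throwaway directory, records the repository's natural directory
name on the stage, and only then moves the checkout into the stage's
source_path. The sketch is simplified and hypothetical (fetch_repo_into and
clone are invented names); only temp_cwd, get_single_file, srcdir and
source_path come from this diff, and it assumes Spack's llnl.util.filesystem
module is importable.

    import shutil

    from llnl.util.filesystem import get_single_file, temp_cwd


    def fetch_repo_into(stage, clone):
        """Hypothetical helper: ``clone`` is any callable that creates exactly
        one directory in the current working directory, e.g. ``git clone URL``.
        """
        with temp_cwd():                      # work inside a temporary dir
            clone()                           # e.g. git('clone', '--quiet', url)
            repo_name = get_single_file('.')  # the repository's own dir name
            stage.srcdir = repo_name          # remembered for later archiving
            shutil.move(repo_name, stage.source_path)

Because working_dir now wraps its yield in try/finally, the original working
directory is restored even when the clone or checkout step raises.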
@@ -455,8 +455,10 @@ def working_dir(dirname, **kwargs):
     orig_dir = os.getcwd()
     os.chdir(dirname)
-    yield
-    os.chdir(orig_dir)
+    try:
+        yield
+    finally:
+        os.chdir(orig_dir)


 @contextmanager
@@ -605,6 +607,36 @@ def ancestor(dir, n=1):
     return parent


+def get_single_file(directory):
+    fnames = os.listdir(directory)
+    if len(fnames) != 1:
+        raise ValueError("Expected exactly 1 file, got {0}"
+                         .format(str(len(fnames))))
+    return fnames[0]
+
+
+@contextmanager
+def temp_cwd():
+    tmp_dir = tempfile.mkdtemp()
+    try:
+        with working_dir(tmp_dir):
+            yield tmp_dir
+    finally:
+        shutil.rmtree(tmp_dir)
+
+
+@contextmanager
+def temp_rename(orig_path, temp_path):
+    same_path = os.path.realpath(orig_path) == os.path.realpath(temp_path)
+    if not same_path:
+        shutil.move(orig_path, temp_path)
+    try:
+        yield
+    finally:
+        if not same_path:
+            shutil.move(temp_path, orig_path)
+
+
 def can_access(file_name):
     """True if we have read/write access to the file."""
     return os.access(file_name, os.R_OK | os.W_OK)
@@ -32,7 +32,8 @@
 from six import string_types, with_metaclass

 import llnl.util.tty as tty
-from llnl.util.filesystem import working_dir, mkdirp
+from llnl.util.filesystem import (
+    working_dir, mkdirp, temp_rename, temp_cwd, get_single_file)

 import spack.config
 import spack.error
@@ -374,6 +375,7 @@ def expand(self):
         if len(non_hidden) == 1:
             src = os.path.join(tarball_container, non_hidden[0])
             if os.path.isdir(src):
+                self.stage.srcdir = non_hidden[0]
                 shutil.move(src, self.stage.source_path)
                 if len(files) > 1:
                     files.remove(non_hidden[0])
@@ -523,7 +525,16 @@ def archive(self, destination, **kwargs):
             tar.add_default_arg('--exclude=%s' % p)

         with working_dir(self.stage.path):
-            tar('-czf', destination, os.path.basename(self.stage.source_path))
+            if self.stage.srcdir:
+                # Here we create an archive with the default repository name.
+                # The 'tar' command has options for changing the name of a
+                # directory that is included in the archive, but they differ
+                # based on OS, so we temporarily rename the repo
+                with temp_rename(self.stage.source_path, self.stage.srcdir):
+                    tar('-czf', destination, self.stage.srcdir)
+            else:
+                tar('-czf', destination,
+                    os.path.basename(self.stage.source_path))

     def __str__(self):
         return "VCS: %s" % self.url
@@ -692,16 +703,22 @@ def fetch(self):
         if self.commit:
             # Need to do a regular clone and check out everything if
             # they asked for a particular commit.
-            args = ['clone', self.url, self.stage.source_path]
-            if not spack.config.get('config:debug'):
-                args.insert(1, '--quiet')
-            git(*args)
+            debug = spack.config.get('config:debug')
+
+            clone_args = ['clone', self.url]
+            if not debug:
+                clone_args.insert(1, '--quiet')
+            with temp_cwd():
+                git(*clone_args)
+                repo_name = get_single_file('.')
+                self.stage.srcdir = repo_name
+                shutil.move(repo_name, self.stage.source_path)

             with working_dir(self.stage.source_path):
-                args = ['checkout', self.commit]
-                if not spack.config.get('config:debug'):
-                    args.insert(1, '--quiet')
-                git(*args)
+                checkout_args = ['checkout', self.commit]
+                if not debug:
+                    checkout_args.insert(1, '--quiet')
+                git(*checkout_args)

         else:
             # Can be more efficient if not checking out a specific commit.
@@ -720,21 +737,25 @@ def fetch(self):
             if self.git_version > ver('1.7.10'):
                 args.append('--single-branch')

-            cloned = False
-            # Yet more efficiency, only download a 1-commit deep tree
-            if self.git_version >= ver('1.7.1'):
-                try:
-                    git(*(args + ['--depth', '1', self.url,
-                                  self.stage.source_path]))
-                    cloned = True
-                except spack.error.SpackError as e:
-                    # This will fail with the dumb HTTP transport
-                    # continue and try without depth, cleanup first
-                    tty.debug(e)
+            with temp_cwd():
+                cloned = False
+                # Yet more efficiency, only download a 1-commit deep tree
+                if self.git_version >= ver('1.7.1'):
+                    try:
+                        git(*(args + ['--depth', '1', self.url]))
+                        cloned = True
+                    except spack.error.SpackError as e:
+                        # This will fail with the dumb HTTP transport
+                        # continue and try without depth, cleanup first
+                        tty.debug(e)

-            if not cloned:
-                args.extend([self.url, self.stage.source_path])
-                git(*args)
+                if not cloned:
+                    args.extend([self.url])
+                    git(*args)
+
+                repo_name = get_single_file('.')
+                self.stage.srcdir = repo_name
+                shutil.move(repo_name, self.stage.source_path)

             with working_dir(self.stage.source_path):
                 # For tags, be conservative and check them out AFTER
@@ -838,8 +859,13 @@ def fetch(self):
         args = ['checkout', '--force', '--quiet']
         if self.revision:
             args += ['-r', self.revision]
-        args.extend([self.url, self.stage.source_path])
-        self.svn(*args)
+        args.extend([self.url])
+
+        with temp_cwd():
+            self.svn(*args)
+            repo_name = get_single_file('.')
+            self.stage.srcdir = repo_name
+            shutil.move(repo_name, self.stage.source_path)

     def _remove_untracked_files(self):
         """Removes untracked files in an svn repository."""
@@ -949,8 +975,13 @@ def fetch(self):
         if self.revision:
             args.extend(['-r', self.revision])

-        args.extend([self.url, self.stage.source_path])
-        self.hg(*args)
+        args.extend([self.url])
+
+        with temp_cwd():
+            self.hg(*args)
+            repo_name = get_single_file('.')
+            self.stage.srcdir = repo_name
+            shutil.move(repo_name, self.stage.source_path)

     def archive(self, destination):
         super(HgFetchStrategy, self).archive(destination, exclude='.hg')
@@ -182,6 +182,8 @@ def __init__(
         # used for mirrored archives of repositories.
         self.skip_checksum_for_mirror = True

+        self.srcdir = None
+
         # TODO : this uses a protected member of tempfile, but seemed the only
         # TODO : way to get a temporary name besides, the temporary link name
         # TODO : won't be the same as the temporary stage area in tmp_root
@@ -294,18 +296,13 @@ def _need_to_create_path(self):
     def expected_archive_files(self):
         """Possible archive file paths."""
         paths = []
-        roots = [self.path]
-        if self.expanded:
-            roots.insert(0, self.source_path)
+        if isinstance(self.default_fetcher, fs.URLFetchStrategy):
+            paths.append(os.path.join(
+                self.path, os.path.basename(self.default_fetcher.url)))

-        for path in roots:
-            if isinstance(self.default_fetcher, fs.URLFetchStrategy):
-                paths.append(os.path.join(
-                    path, os.path.basename(self.default_fetcher.url)))
-
-            if self.mirror_path:
-                paths.append(os.path.join(
-                    path, os.path.basename(self.mirror_path)))
+        if self.mirror_path:
+            paths.append(os.path.join(
+                self.path, os.path.basename(self.mirror_path)))

         return paths
@@ -512,9 +509,14 @@ def _add_to_root_stage(self):
         """
         root_stage = self.root_stage
         resource = self.resource
-        placement = os.path.basename(self.source_path) \
-            if resource.placement is None \
-            else resource.placement
+
+        if resource.placement:
+            placement = resource.placement
+        elif self.srcdir:
+            placement = self.srcdir
+        else:
+            placement = self.source_path

         if not isinstance(placement, dict):
             placement = {'': placement}
@@ -511,6 +511,31 @@ def test_composite_stage_with_expand_resource(
                 root_stage.source_path, 'resource-dir', fname)
             assert os.path.exists(file_path)

+    @pytest.mark.disable_clean_stage_check
+    @pytest.mark.usefixtures('tmpdir_for_stage')
+    def test_composite_stage_with_expand_resource_default_placement(
+            self, mock_stage_archive, mock_expand_resource,
+            composite_stage_with_expanding_resource):
+        """For a resource which refers to a compressed archive which expands
+           to a directory, check that by default the resource is placed in
+           the source_path of the root stage with the name of the decompressed
+           directory.
+        """
+
+        composite_stage, root_stage, resource_stage = (
+            composite_stage_with_expanding_resource)
+
+        resource_stage.resource.placement = None
+
+        composite_stage.create()
+        composite_stage.fetch()
+        composite_stage.expand_archive()
+
+        for fname in mock_expand_resource.files:
+            file_path = os.path.join(
+                root_stage.source_path, 'resource-expand', fname)
+            assert os.path.exists(file_path)
+
     def test_setup_and_destroy_no_name_without_tmp(self, mock_stage_archive):
         archive = mock_stage_archive()
         with Stage(archive.url) as stage:
@@ -41,7 +41,7 @@ class IntelXed(Package):
         version(vers, commit=xed_hash)

         resource(name='mbuild',
                  git='https://github.com/intelxed/mbuild.git',
-                 commit=mbuild_hash, placement='mbuild',
+                 commit=mbuild_hash,
                  when='@{0}'.format(vers))

     variant('debug', default=False, description='Enable debug symbols')
@@ -40,19 +40,16 @@ class Warpx(MakefilePackage):
     resource(name='amrex',
              git='https://github.com/AMReX-Codes/amrex.git',
              when='@master',
-             tag='master',
-             placement='amrex')
+             tag='master')

     resource(name='amrex',
              git='https://github.com/AMReX-Codes/amrex.git',
              when='@dev',
-             tag='development',
-             placement='amrex')
+             tag='development')

     resource(name='picsar',
              git='https://bitbucket.org/berkeleylab/picsar.git',
-             tag='master',
-             placement='picsar')
+             tag='master')

     @property
     def build_targets(self):
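With the default placement in effect, recipes like intel-xed and warpx above
can drop placement= from expanding git resources. A minimal, hypothetical
recipe relying on the new default (class name, URLs, and checksum are invented
for illustration) might look like:

    # Hypothetical recipe sketch, not a package from this commit.
    from spack import *


    class Example(Package):
        """Demo: the git resource below is staged under the root stage's
        source directory using the repository's own name, with no need for
        an explicit placement= argument."""

        homepage = "https://example.com/example"
        url = "https://example.com/example-1.0.tar.gz"

        version('1.0', sha256='0' * 64)  # placeholder checksum

        resource(name='helper',
                 git='https://example.com/helper.git',
                 tag='v1.0',
                 when='@1.0')

        def install(self, spec, prefix):
            # The 'helper' checkout sits next to the package sources under
            # spack-src, which is what placement='helper' used to spell out.
            mkdirp(prefix.bin)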