Spack core and tests no longer use os.chdir()

- Spack's core package interface was previously overly stateful, in that
  calling methods like `do_stage()` could change your working directory.

- This removes Stage's `chdir` and `chdir_to_source` methods and replaces
  their usages with `with working_dir(stage.path)` and `with
  working_dir(stage.source_path)`, respectively.  These ensure the
  original working directory is preserved.

- This not only makes the API more usable, it makes the tests more
  deterministic, as previously a test could leave the current working
  directory in a bad state and cause subsequent tests to fail
  mysteriously.
This commit is contained in:
Todd Gamblin 2017-10-14 00:30:17 -07:00
parent 890c5ad329
commit c14f2dc7b4
14 changed files with 375 additions and 432 deletions

View File

@ -25,7 +25,7 @@
import os import os
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.filesystem import join_path, mkdirp from llnl.util.filesystem import mkdirp, working_dir
import spack import spack
from spack.util.executable import ProcessError, which from spack.util.executable import ProcessError, which
@ -47,7 +47,7 @@ def setup_parser(subparser):
def get_origin_info(remote): def get_origin_info(remote):
git_dir = join_path(spack.prefix, '.git') git_dir = os.path.join(spack.prefix, '.git')
git = which('git', required=True) git = which('git', required=True)
try: try:
branch = git('symbolic-ref', '--short', 'HEAD', output=str) branch = git('symbolic-ref', '--short', 'HEAD', output=str)
@ -81,7 +81,7 @@ def clone(parser, args):
mkdirp(prefix) mkdirp(prefix)
if os.path.exists(join_path(prefix, '.git')): if os.path.exists(os.path.join(prefix, '.git')):
tty.die("There already seems to be a git repository in %s" % prefix) tty.die("There already seems to be a git repository in %s" % prefix)
files_in_the_way = os.listdir(prefix) files_in_the_way = os.listdir(prefix)
@ -94,7 +94,7 @@ def clone(parser, args):
"%s/bin/spack" % prefix, "%s/bin/spack" % prefix,
"%s/lib/spack/..." % prefix) "%s/lib/spack/..." % prefix)
os.chdir(prefix) with working_dir(prefix):
git = which('git', required=True) git = which('git', required=True)
git('init', '--shared', '-q') git('init', '--shared', '-q')
git('remote', 'add', 'origin', origin_url) git('remote', 'add', 'origin', origin_url)

View File

@ -153,16 +153,15 @@ def matches(cls, args):
@pattern.composite(interface=FetchStrategy) @pattern.composite(interface=FetchStrategy)
class FetchStrategyComposite(object): class FetchStrategyComposite(object):
"""Composite for a FetchStrategy object.
""" Implements the GoF composite pattern.
Composite for a FetchStrategy object. Implements the GoF composite pattern.
""" """
matches = FetchStrategy.matches matches = FetchStrategy.matches
set_stage = FetchStrategy.set_stage set_stage = FetchStrategy.set_stage
class URLFetchStrategy(FetchStrategy): class URLFetchStrategy(FetchStrategy):
"""FetchStrategy that pulls source code from a URL for an archive, """FetchStrategy that pulls source code from a URL for an archive,
    checks the archive against a checksum, and decompresses the archive.     checks the archive against a checksum, and decompresses the archive.
""" """
@ -200,8 +199,6 @@ def curl(self):
@_needs_stage @_needs_stage
def fetch(self): def fetch(self):
self.stage.chdir()
if self.archive_file: if self.archive_file:
tty.msg("Already downloaded %s" % self.archive_file) tty.msg("Already downloaded %s" % self.archive_file)
return return
@ -242,6 +239,7 @@ def fetch(self):
# Run curl but grab the mime type from the http headers # Run curl but grab the mime type from the http headers
curl = self.curl curl = self.curl
with working_dir(self.stage.path):
headers = curl(*curl_args, output=str, fail_on_error=False) headers = curl(*curl_args, output=str, fail_on_error=False)
if curl.returncode != 0: if curl.returncode != 0:
@ -310,7 +308,6 @@ def expand(self):
tty.msg("Staging archive: %s" % self.archive_file) tty.msg("Staging archive: %s" % self.archive_file)
self.stage.chdir()
if not self.archive_file: if not self.archive_file:
raise NoArchiveFileError( raise NoArchiveFileError(
"Couldn't find archive file", "Couldn't find archive file",
@ -318,14 +315,16 @@ def expand(self):
if not self.extension: if not self.extension:
self.extension = extension(self.archive_file) self.extension = extension(self.archive_file)
decompress = decompressor_for(self.archive_file, self.extension) decompress = decompressor_for(self.archive_file, self.extension)
# Expand all tarballs in their own directory to contain # Expand all tarballs in their own directory to contain
# exploding tarballs. # exploding tarballs.
tarball_container = os.path.join(self.stage.path, tarball_container = os.path.join(self.stage.path,
"spack-expanded-archive") "spack-expanded-archive")
mkdirp(tarball_container) mkdirp(tarball_container)
os.chdir(tarball_container) with working_dir(tarball_container):
decompress(self.archive_file) decompress(self.archive_file)
# Check for an exploding tarball, i.e. one that doesn't expand # Check for an exploding tarball, i.e. one that doesn't expand
@ -345,10 +344,9 @@ def expand(self):
shutil.move(os.path.join(tarball_container, f), shutil.move(os.path.join(tarball_container, f),
os.path.join(self.stage.path, f)) os.path.join(self.stage.path, f))
os.rmdir(tarball_container) os.rmdir(tarball_container)
if not files: if not files:
os.rmdir(tarball_container) os.rmdir(tarball_container)
# Set the wd back to the stage when done.
self.stage.chdir()
def archive(self, destination): def archive(self, destination):
"""Just moves this archive to the destination.""" """Just moves this archive to the destination."""
@ -416,8 +414,6 @@ def fetch(self):
if not os.path.isfile(path): if not os.path.isfile(path):
raise NoCacheError('No cache of %s' % path) raise NoCacheError('No cache of %s' % path)
self.stage.chdir()
# remove old symlink if one is there. # remove old symlink if one is there.
filename = self.stage.save_filename filename = self.stage.save_filename
if os.path.exists(filename): if os.path.exists(filename):
@ -484,7 +480,7 @@ def archive(self, destination, **kwargs):
for p in patterns: for p in patterns:
tar.add_default_arg('--exclude=%s' % p) tar.add_default_arg('--exclude=%s' % p)
self.stage.chdir() with working_dir(self.stage.path):
tar('-czf', destination, os.path.basename(self.stage.source_path)) tar('-czf', destination, os.path.basename(self.stage.source_path))
def __str__(self): def __str__(self):
@ -495,9 +491,8 @@ def __repr__(self):
class GoFetchStrategy(VCSFetchStrategy): class GoFetchStrategy(VCSFetchStrategy):
"""Fetch strategy that employs the `go get` infrastructure.
"""
Fetch strategy that employs the `go get` infrastructure
Use like this in a package: Use like this in a package:
version('name', version('name',
@ -530,10 +525,9 @@ def go(self):
@_needs_stage @_needs_stage
def fetch(self): def fetch(self):
self.stage.chdir()
tty.msg("Trying to get go resource:", self.url) tty.msg("Trying to get go resource:", self.url)
with working_dir(self.stage.path):
try: try:
os.mkdir('go') os.mkdir('go')
except OSError: except OSError:
@ -547,7 +541,7 @@ def archive(self, destination):
@_needs_stage @_needs_stage
def reset(self): def reset(self):
self.stage.chdir_to_source() with working_dir(self.stage.source_path):
self.go('clean') self.go('clean')
def __str__(self): def __str__(self):
@ -607,10 +601,7 @@ def git(self):
def cachable(self): def cachable(self):
return bool(self.commit or self.tag) return bool(self.commit or self.tag)
@_needs_stage
def fetch(self): def fetch(self):
self.stage.chdir()
if self.stage.source_path: if self.stage.source_path:
tty.msg("Already fetched %s" % self.stage.source_path) tty.msg("Already fetched %s" % self.stage.source_path)
return return
@ -622,20 +613,24 @@ def fetch(self):
args = 'at tag %s' % self.tag args = 'at tag %s' % self.tag
elif self.branch: elif self.branch:
args = 'on branch %s' % self.branch args = 'on branch %s' % self.branch
tty.msg("Trying to clone git repository: %s %s" % (self.url, args)) tty.msg("Trying to clone git repository: %s %s" % (self.url, args))
git = self.git
if self.commit: if self.commit:
# Need to do a regular clone and check out everything if # Need to do a regular clone and check out everything if
# they asked for a particular commit. # they asked for a particular commit.
with working_dir(self.stage.path):
if spack.debug: if spack.debug:
self.git('clone', self.url) git('clone', self.url)
else: else:
self.git('clone', '--quiet', self.url) git('clone', '--quiet', self.url)
self.stage.chdir_to_source()
with working_dir(self.stage.source_path):
if spack.debug: if spack.debug:
self.git('checkout', self.commit) git('checkout', self.commit)
else: else:
self.git('checkout', '--quiet', self.commit) git('checkout', '--quiet', self.commit)
else: else:
# Can be more efficient if not checking out a specific commit. # Can be more efficient if not checking out a specific commit.
@ -654,11 +649,12 @@ def fetch(self):
if self.git_version > ver('1.7.10'): if self.git_version > ver('1.7.10'):
args.append('--single-branch') args.append('--single-branch')
with working_dir(self.stage.path):
cloned = False cloned = False
# Yet more efficiency, only download a 1-commit deep tree # Yet more efficiency, only download a 1-commit deep tree
if self.git_version >= ver('1.7.1'): if self.git_version >= ver('1.7.1'):
try: try:
self.git(*(args + ['--depth', '1', self.url])) git(*(args + ['--depth', '1', self.url]))
cloned = True cloned = True
except spack.error.SpackError: except spack.error.SpackError:
# This will fail with the dumb HTTP transport # This will fail with the dumb HTTP transport
@ -667,10 +663,9 @@ def fetch(self):
if not cloned: if not cloned:
args.append(self.url) args.append(self.url)
self.git(*args) git(*args)
self.stage.chdir_to_source()
with working_dir(self.stage.source_path):
# For tags, be conservative and check them out AFTER # For tags, be conservative and check them out AFTER
# cloning. Later git versions can do this with clone # cloning. Later git versions can do this with clone
# --branch, but older ones fail. # --branch, but older ones fail.
@ -679,18 +674,19 @@ def fetch(self):
# older versions that we have to ignore. # older versions that we have to ignore.
# see: https://github.com/git/git/commit/19d122b # see: https://github.com/git/git/commit/19d122b
if spack.debug: if spack.debug:
self.git('pull', '--tags', ignore_errors=1) git('pull', '--tags', ignore_errors=1)
self.git('checkout', self.tag) git('checkout', self.tag)
else: else:
self.git('pull', '--quiet', '--tags', ignore_errors=1) git('pull', '--quiet', '--tags', ignore_errors=1)
self.git('checkout', '--quiet', self.tag) git('checkout', '--quiet', self.tag)
with working_dir(self.stage.source_path):
# Init submodules if the user asked for them. # Init submodules if the user asked for them.
if self.submodules: if self.submodules:
if spack.debug: if spack.debug:
self.git('submodule', 'update', '--init', '--recursive') git('submodule', 'update', '--init', '--recursive')
else: else:
self.git('submodule', '--quiet', 'update', '--init', git('submodule', '--quiet', 'update', '--init',
'--recursive') '--recursive')
def archive(self, destination): def archive(self, destination):
@ -698,7 +694,7 @@ def archive(self, destination):
@_needs_stage @_needs_stage
def reset(self): def reset(self):
self.stage.chdir_to_source() with working_dir(self.stage.source_path):
if spack.debug: if spack.debug:
self.git('checkout', '.') self.git('checkout', '.')
self.git('clean', '-f') self.git('clean', '-f')
@ -749,8 +745,6 @@ def cachable(self):
@_needs_stage @_needs_stage
def fetch(self): def fetch(self):
self.stage.chdir()
if self.stage.source_path: if self.stage.source_path:
tty.msg("Already fetched %s" % self.stage.source_path) tty.msg("Already fetched %s" % self.stage.source_path)
return return
@ -762,11 +756,12 @@ def fetch(self):
args += ['-r', self.revision] args += ['-r', self.revision]
args.append(self.url) args.append(self.url)
with working_dir(self.stage.path):
self.svn(*args) self.svn(*args)
self.stage.chdir_to_source()
def _remove_untracked_files(self): def _remove_untracked_files(self):
"""Removes untracked files in an svn repository.""" """Removes untracked files in an svn repository."""
with working_dir(self.stage.source_path):
status = self.svn('status', '--no-ignore', output=str) status = self.svn('status', '--no-ignore', output=str)
self.svn('status', '--no-ignore') self.svn('status', '--no-ignore')
for line in status.split('\n'): for line in status.split('\n'):
@ -783,8 +778,8 @@ def archive(self, destination):
@_needs_stage @_needs_stage
def reset(self): def reset(self):
self.stage.chdir_to_source()
self._remove_untracked_files() self._remove_untracked_files()
with working_dir(self.stage.source_path):
self.svn('revert', '.', '-R') self.svn('revert', '.', '-R')
def __str__(self): def __str__(self):
@ -845,8 +840,6 @@ def cachable(self):
@_needs_stage @_needs_stage
def fetch(self): def fetch(self):
self.stage.chdir()
if self.stage.source_path: if self.stage.source_path:
tty.msg("Already fetched %s" % self.stage.source_path) tty.msg("Already fetched %s" % self.stage.source_path)
return return
@ -866,6 +859,7 @@ def fetch(self):
if self.revision: if self.revision:
args.extend(['-r', self.revision]) args.extend(['-r', self.revision])
with working_dir(self.stage.path):
self.hg(*args) self.hg(*args)
def archive(self, destination): def archive(self, destination):
@ -873,8 +867,7 @@ def archive(self, destination):
@_needs_stage @_needs_stage
def reset(self): def reset(self):
self.stage.chdir() with working_dir(self.stage.path):
source_path = self.stage.source_path source_path = self.stage.source_path
scrubbed = "scrubbed-source-tmp" scrubbed = "scrubbed-source-tmp"
@ -886,7 +879,6 @@ def reset(self):
shutil.rmtree(source_path, ignore_errors=True) shutil.rmtree(source_path, ignore_errors=True)
shutil.move(scrubbed, source_path) shutil.move(scrubbed, source_path)
self.stage.chdir_to_source()
def __str__(self): def __str__(self):
return "[hg] %s" % self.url return "[hg] %s" % self.url

View File

@ -984,14 +984,15 @@ def do_fetch(self, mirror_only=False):
self.stage.cache_local() self.stage.cache_local()
def do_stage(self, mirror_only=False): def do_stage(self, mirror_only=False):
"""Unpacks the fetched tarball, then changes into the expanded tarball """Unpacks and expands the fetched tarball."""
directory."""
if not self.spec.concrete: if not self.spec.concrete:
raise ValueError("Can only stage concrete packages.") raise ValueError("Can only stage concrete packages.")
self.do_fetch(mirror_only) self.do_fetch(mirror_only)
self.stage.expand_archive() self.stage.expand_archive()
self.stage.chdir_to_source()
if not os.listdir(self.stage.path):
raise FetchError("Archive was empty for %s" % self.name)
@classmethod @classmethod
def lookup_patch(cls, sha256): def lookup_patch(cls, sha256):
@ -1059,8 +1060,6 @@ def do_patch(self):
tty.msg("Patching failed last time. Restaging.") tty.msg("Patching failed last time. Restaging.")
self.stage.restage() self.stage.restage()
self.stage.chdir_to_source()
# If this file exists, then we already applied all the patches. # If this file exists, then we already applied all the patches.
if os.path.isfile(good_file): if os.path.isfile(good_file):
tty.msg("Already patched %s" % self.name) tty.msg("Already patched %s" % self.name)
@ -1073,6 +1072,7 @@ def do_patch(self):
patched = False patched = False
for patch in patches: for patch in patches:
try: try:
with working_dir(self.stage.source_path):
patch.apply(self.stage) patch.apply(self.stage)
tty.msg('Applied patch %s' % patch.path_or_url) tty.msg('Applied patch %s' % patch.path_or_url)
patched = True patched = True
@ -1084,6 +1084,7 @@ def do_patch(self):
if has_patch_fun: if has_patch_fun:
try: try:
with working_dir(self.stage.source_path):
self.patch() self.patch()
tty.msg("Ran patch() for %s" % self.name) tty.msg("Ran patch() for %s" % self.name)
patched = True patched = True
@ -1384,8 +1385,7 @@ def build_process():
install_tree(self.stage.source_path, src_target) install_tree(self.stage.source_path, src_target)
# Do the real install in the source directory. # Do the real install in the source directory.
self.stage.chdir_to_source() with working_dir(self.stage.source_path):
# Save the build environment in a file before building. # Save the build environment in a file before building.
dump_environment(self.env_path) dump_environment(self.env_path)
@ -1396,7 +1396,8 @@ def build_process():
self.phases, self._InstallPhase_phases): self.phases, self._InstallPhase_phases):
with logger.force_echo(): with logger.force_echo():
tty.msg("Executing phase: '%s'" % phase_name) tty.msg(
"Executing phase: '%s'" % phase_name)
# Redirect stdout and stderr to daemon pipe # Redirect stdout and stderr to daemon pipe
phase = getattr(self, phase_attr) phase = getattr(self, phase_attr)

View File

@ -366,18 +366,8 @@ def source_path(self):
return p return p
return None return None
def chdir(self):
"""Changes directory to the stage path. Or dies if it is not set
up."""
if os.path.isdir(self.path):
os.chdir(self.path)
else:
raise ChdirError("Setup failed: no such directory: " + self.path)
def fetch(self, mirror_only=False): def fetch(self, mirror_only=False):
"""Downloads an archive or checks out code from a repository.""" """Downloads an archive or checks out code from a repository."""
self.chdir()
fetchers = [] fetchers = []
if not mirror_only: if not mirror_only:
fetchers.append(self.default_fetcher) fetchers.append(self.default_fetcher)
@ -478,18 +468,6 @@ def expand_archive(self):
else: else:
tty.msg("Already staged %s in %s" % (self.name, self.path)) tty.msg("Already staged %s in %s" % (self.name, self.path))
def chdir_to_source(self):
"""Changes directory to the expanded archive directory.
Dies with an error if there was no expanded archive.
"""
path = self.source_path
if not path:
raise StageError("Attempt to chdir before expanding archive.")
else:
os.chdir(path)
if not os.listdir(path):
raise StageError("Archive was empty for %s" % self.name)
def restage(self): def restage(self):
"""Removes the expanded archive path if it exists, then re-expands """Removes the expanded archive path if it exists, then re-expands
the archive. the archive.
@ -613,9 +591,6 @@ def source_path(self):
def path(self): def path(self):
return self[0].path return self[0].path
def chdir_to_source(self):
return self[0].chdir_to_source()
@property @property
def archive_file(self): def archive_file(self):
return self[0].archive_file return self[0].archive_file
@ -634,12 +609,6 @@ def __init__(self, path):
self.source_path = path self.source_path = path
self.created = True self.created = True
def chdir(self):
if os.path.isdir(self.path):
os.chdir(self.path)
else:
raise ChdirError("Setup failed: no such directory: " + self.path)
# DIY stages do nothing as context managers. # DIY stages do nothing as context managers.
def __enter__(self): def __enter__(self):
pass pass
@ -647,9 +616,6 @@ def __enter__(self):
def __exit__(self, exc_type, exc_val, exc_tb): def __exit__(self, exc_type, exc_val, exc_tb):
pass pass
def chdir_to_source(self):
self.chdir()
def fetch(self, *args, **kwargs): def fetch(self, *args, **kwargs):
tty.msg("No need to fetch for DIY.") tty.msg("No need to fetch for DIY.")
@ -701,9 +667,5 @@ class RestageError(StageError):
""""Error encountered during restaging.""" """"Error encountered during restaging."""
class ChdirError(StageError):
"""Raised when Spack can't change directories."""
# Keep this in namespace for convenience # Keep this in namespace for convenience
FailedDownloadError = fs.FailedDownloadError FailedDownloadError = fs.FailedDownloadError

View File

@ -57,7 +57,7 @@ def test_install_package_and_dependency(
tmpdir, builtin_mock, mock_archive, mock_fetch, config, tmpdir, builtin_mock, mock_archive, mock_fetch, config,
install_mockery): install_mockery):
tmpdir.chdir() with tmpdir.as_cwd():
install('--log-format=junit', '--log-file=test.xml', 'libdwarf') install('--log-format=junit', '--log-file=test.xml', 'libdwarf')
files = tmpdir.listdir() files = tmpdir.listdir()
@ -104,7 +104,7 @@ def test_install_package_already_installed(
tmpdir, builtin_mock, mock_archive, mock_fetch, config, tmpdir, builtin_mock, mock_archive, mock_fetch, config,
install_mockery): install_mockery):
tmpdir.chdir() with tmpdir.as_cwd():
install('libdwarf') install('libdwarf')
install('--log-format=junit', '--log-file=test.xml', 'libdwarf') install('--log-format=junit', '--log-file=test.xml', 'libdwarf')

View File

@ -49,9 +49,28 @@
from spack.version import Version from spack.version import Version
########## #
# Monkey-patching that is applied to all tests # These fixtures are applied to all tests
########## #
@pytest.fixture(scope='function', autouse=True)
def no_chdir():
"""Ensure that no test changes Spack's working dirctory.
This prevents Spack tests (and therefore Spack commands) from
changing the working directory and causing other tests to fail
mysteriously. Tests should use ``working_dir`` or ``py.path``'s
``.as_cwd()`` instead of ``os.chdir`` to avoid failing this check.
We assert that the working directory hasn't changed, unless the
original wd somehow ceased to exist.
"""
original_wd = os.getcwd()
yield
if os.path.isdir(original_wd):
assert os.getcwd() == original_wd
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
def mock_fetch_cache(monkeypatch): def mock_fetch_cache(monkeypatch):
"""Substitutes spack.fetch_cache with a mock object that does nothing """Substitutes spack.fetch_cache with a mock object that does nothing
@ -200,12 +219,11 @@ def database(tmpdir_factory, builtin_mock, config):
# Make a fake install directory # Make a fake install directory
install_path = tmpdir_factory.mktemp('install_for_database') install_path = tmpdir_factory.mktemp('install_for_database')
spack_install_path = py.path.local(spack.store.root) spack_install_path = spack.store.root
spack.store.root = str(install_path)
spack.store.root = str(install_path)
install_layout = spack.directory_layout.YamlDirectoryLayout( install_layout = spack.directory_layout.YamlDirectoryLayout(
str(install_path) str(install_path))
)
spack_install_layout = spack.store.layout spack_install_layout = spack.store.layout
spack.store.layout = install_layout spack.store.layout = install_layout
@ -216,14 +234,12 @@ def database(tmpdir_factory, builtin_mock, config):
Entry = collections.namedtuple('Entry', ['path', 'layout', 'db']) Entry = collections.namedtuple('Entry', ['path', 'layout', 'db'])
Database = collections.namedtuple( Database = collections.namedtuple(
'Database', ['real', 'mock', 'install', 'uninstall', 'refresh'] 'Database', ['real', 'mock', 'install', 'uninstall', 'refresh'])
)
real = Entry( real = Entry(
path=spack_install_path, path=spack_install_path,
layout=spack_install_layout, layout=spack_install_layout,
db=spack_install_db db=spack_install_db)
)
mock = Entry(path=install_path, layout=install_layout, db=install_db) mock = Entry(path=install_path, layout=install_layout, db=install_db)
def _install(spec): def _install(spec):
@ -249,8 +265,8 @@ def _refresh():
mock=mock, mock=mock,
install=_install, install=_install,
uninstall=_uninstall, uninstall=_uninstall,
refresh=_refresh refresh=_refresh)
)
# Transaction used to avoid repeated writes. # Transaction used to avoid repeated writes.
with spack.store.db.write_transaction(): with spack.store.db.write_transaction():
t.install('mpileaks ^mpich') t.install('mpileaks ^mpich')
@ -268,7 +284,7 @@ def _refresh():
spack.store.db.remove(spec) spack.store.db.remove(spec)
install_path.remove(rec=1) install_path.remove(rec=1)
spack.store.root = str(spack_install_path) spack.store.root = spack_install_path
spack.store.layout = spack_install_layout spack.store.layout = spack_install_layout
spack.store.db = spack_install_db spack.store.db = spack_install_db
@ -285,11 +301,13 @@ def install_mockery(tmpdir, config, builtin_mock):
"""Hooks a fake install directory and a fake db into Spack.""" """Hooks a fake install directory and a fake db into Spack."""
layout = spack.store.layout layout = spack.store.layout
db = spack.store.db db = spack.store.db
new_opt = str(tmpdir.join('opt'))
# Use a fake install directory to avoid conflicts bt/w # Use a fake install directory to avoid conflicts bt/w
# installed pkgs and mock packages. # installed pkgs and mock packages.
spack.store.layout = spack.directory_layout.YamlDirectoryLayout( spack.store.layout = spack.directory_layout.YamlDirectoryLayout(new_opt)
str(tmpdir)) spack.store.db = spack.database.Database(new_opt)
spack.store.db = spack.database.Database(str(tmpdir))
# We use a fake package, so skip the checksum. # We use a fake package, so skip the checksum.
spack.do_checksum = False spack.do_checksum = False
yield yield
@ -328,6 +346,7 @@ def mock_archive():
""" """
tar = spack.util.executable.which('tar', required=True) tar = spack.util.executable.which('tar', required=True)
stage = spack.stage.Stage('mock-archive-stage') stage = spack.stage.Stage('mock-archive-stage')
tmpdir = py.path.local(stage.path) tmpdir = py.path.local(stage.path)
repo_name = 'mock-archive-repo' repo_name = 'mock-archive-repo'
tmpdir.ensure(repo_name, dir=True) tmpdir.ensure(repo_name, dir=True)
@ -350,10 +369,10 @@ def mock_archive():
os.chmod(configure_path, 0o755) os.chmod(configure_path, 0o755)
# Archive it # Archive it
current = tmpdir.chdir() with tmpdir.as_cwd():
archive_name = '{0}.tar.gz'.format(repo_name) archive_name = '{0}.tar.gz'.format(repo_name)
tar('-czf', archive_name, repo_name) tar('-czf', archive_name, repo_name)
current.chdir()
Archive = collections.namedtuple('Archive', Archive = collections.namedtuple('Archive',
['url', 'path', 'archive_file']) ['url', 'path', 'archive_file'])
archive_file = str(tmpdir.join(archive_name)) archive_file = str(tmpdir.join(archive_name))
@ -379,7 +398,7 @@ def mock_git_repository():
repodir = tmpdir.join(repo_name) repodir = tmpdir.join(repo_name)
# Initialize the repository # Initialize the repository
current = repodir.chdir() with repodir.as_cwd():
git('init') git('init')
url = 'file://' + str(repodir) url = 'file://' + str(repodir)
@ -418,7 +437,6 @@ def mock_git_repository():
rev_hash = lambda x: git('rev-parse', x, output=str).strip() rev_hash = lambda x: git('rev-parse', x, output=str).strip()
r1 = rev_hash(branch) r1 = rev_hash(branch)
r1_file = branch_file r1_file = branch_file
current.chdir()
Bunch = spack.util.pattern.Bunch Bunch = spack.util.pattern.Bunch
@ -457,22 +475,23 @@ def mock_hg_repository():
get_rev = lambda: hg('id', '-i', output=str).strip() get_rev = lambda: hg('id', '-i', output=str).strip()
# Initialize the repository # Initialize the repository
current = repodir.chdir() with repodir.as_cwd():
url = 'file://' + str(repodir) url = 'file://' + str(repodir)
hg('init') hg('init')
# Commit file r0 # Commit file r0
r0_file = 'r0_file' r0_file = 'r0_file'
repodir.ensure(r0_file) repodir.ensure(r0_file)
hg('add', r0_file) hg('add', r0_file)
hg('commit', '-m', 'revision 0', '-u', 'test') hg('commit', '-m', 'revision 0', '-u', 'test')
r0 = get_rev() r0 = get_rev()
# Commit file r1 # Commit file r1
r1_file = 'r1_file' r1_file = 'r1_file'
repodir.ensure(r1_file) repodir.ensure(r1_file)
hg('add', r1_file) hg('add', r1_file)
hg('commit', '-m' 'revision 1', '-u', 'test') hg('commit', '-m' 'revision 1', '-u', 'test')
r1 = get_rev() r1 = get_rev()
current.chdir()
Bunch = spack.util.pattern.Bunch Bunch = spack.util.pattern.Bunch
@ -497,13 +516,15 @@ def mock_svn_repository():
svn = spack.util.executable.which('svn', required=True) svn = spack.util.executable.which('svn', required=True)
svnadmin = spack.util.executable.which('svnadmin', required=True) svnadmin = spack.util.executable.which('svnadmin', required=True)
stage = spack.stage.Stage('mock-svn-stage') stage = spack.stage.Stage('mock-svn-stage')
tmpdir = py.path.local(stage.path) tmpdir = py.path.local(stage.path)
repo_name = 'mock-svn-repo' repo_name = 'mock-svn-repo'
tmpdir.ensure(repo_name, dir=True) tmpdir.ensure(repo_name, dir=True)
repodir = tmpdir.join(repo_name) repodir = tmpdir.join(repo_name)
url = 'file://' + str(repodir) url = 'file://' + str(repodir)
# Initialize the repository # Initialize the repository
current = repodir.chdir() with repodir.as_cwd():
# NOTE: Adding --pre-1.5-compatible works for NERSC # NOTE: Adding --pre-1.5-compatible works for NERSC
# Unknown if this is also an issue at other sites. # Unknown if this is also an issue at other sites.
svnadmin('create', '--pre-1.5-compatible', str(repodir)) svnadmin('create', '--pre-1.5-compatible', str(repodir))
@ -511,23 +532,24 @@ def mock_svn_repository():
# Import a structure (first commit) # Import a structure (first commit)
r0_file = 'r0_file' r0_file = 'r0_file'
tmpdir.ensure('tmp-path', r0_file) tmpdir.ensure('tmp-path', r0_file)
svn( tmp_path = tmpdir.join('tmp-path')
'import', svn('import',
str(tmpdir.join('tmp-path')), str(tmp_path),
url, url,
'-m', '-m',
'Initial import r0' 'Initial import r0')
) tmp_path.remove()
shutil.rmtree(str(tmpdir.join('tmp-path')))
# Second commit # Second commit
r1_file = 'r1_file' r1_file = 'r1_file'
svn('checkout', url, str(tmpdir.join('tmp-path'))) svn('checkout', url, str(tmp_path))
tmpdir.ensure('tmp-path', r1_file) tmpdir.ensure('tmp-path', r1_file)
tmpdir.join('tmp-path').chdir()
with tmp_path.as_cwd():
svn('add', str(tmpdir.ensure('tmp-path', r1_file))) svn('add', str(tmpdir.ensure('tmp-path', r1_file)))
svn('ci', '-m', 'second revision r1') svn('ci', '-m', 'second revision r1')
repodir.chdir()
shutil.rmtree(str(tmpdir.join('tmp-path'))) tmp_path.remove()
r0 = '1' r0 = '1'
r1 = '2' r1 = '2'
@ -535,13 +557,10 @@ def mock_svn_repository():
checks = { checks = {
'default': Bunch( 'default': Bunch(
revision=r1, file=r1_file, args={'svn': url} revision=r1, file=r1_file, args={'svn': url}),
),
'rev0': Bunch( 'rev0': Bunch(
revision=r0, file=r0_file, args={ revision=r0, file=r0_file, args={
'svn': url, 'revision': r0 'svn': url, 'revision': r0})
}
)
} }
def get_rev(): def get_rev():
@ -553,8 +572,8 @@ def get_rev():
return match.group(1) return match.group(1)
t = Bunch(checks=checks, url=url, hash=get_rev, path=str(repodir)) t = Bunch(checks=checks, url=url, hash=get_rev, path=str(repodir))
yield t yield t
current.chdir()
########## ##########

View File

@ -47,7 +47,6 @@ def test_inspect_path(tmpdir):
'': ['CMAKE_PREFIX_PATH'] '': ['CMAKE_PREFIX_PATH']
} }
tmpdir.chdir()
tmpdir.mkdir('bin') tmpdir.mkdir('bin')
tmpdir.mkdir('lib') tmpdir.mkdir('lib')
tmpdir.mkdir('include') tmpdir.mkdir('include')

View File

@ -72,6 +72,7 @@ def test_fetch(
finally: finally:
spack.insecure = False spack.insecure = False
with working_dir(pkg.stage.source_path):
assert h('HEAD') == h(t.revision) assert h('HEAD') == h(t.revision)
file_path = join_path(pkg.stage.source_path, t.file) file_path = join_path(pkg.stage.source_path, t.file)

View File

@ -72,6 +72,7 @@ def test_fetch(
finally: finally:
spack.insecure = False spack.insecure = False
with working_dir(pkg.stage.source_path):
assert h() == t.revision assert h() == t.revision
file_path = join_path(pkg.stage.source_path, t.file) file_path = join_path(pkg.stage.source_path, t.file)

View File

@ -70,11 +70,7 @@ def check_mirror():
# register mirror with spack config # register mirror with spack config
mirrors = {'spack-mirror-test': 'file://' + mirror_root} mirrors = {'spack-mirror-test': 'file://' + mirror_root}
spack.config.update_config('mirrors', mirrors) spack.config.update_config('mirrors', mirrors)
spack.mirror.create(mirror_root, repos, no_checksum=True)
os.chdir(stage.path)
spack.mirror.create(
mirror_root, repos, no_checksum=True
)
# Stage directory exists # Stage directory exists
assert os.path.isdir(mirror_root) assert os.path.isdir(mirror_root)

View File

@ -25,20 +25,22 @@
""" """
This test checks the binary packaging infrastructure This test checks the binary packaging infrastructure
""" """
import pytest
import spack
import spack.store
from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite
from spack.spec import Spec
import spack.binary_distribution as bindist
from llnl.util.filesystem import join_path, mkdirp
import argparse
import spack.cmd.buildcache as buildcache
from spack.relocate import *
import os import os
import stat import stat
import sys import sys
import shutil import shutil
import pytest
import argparse
from llnl.util.filesystem import mkdirp
import spack
import spack.store
import spack.binary_distribution as bindist
import spack.cmd.buildcache as buildcache
from spack.spec import Spec
from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite
from spack.relocate import *
from spack.util.executable import ProcessError from spack.util.executable import ProcessError
@ -72,8 +74,8 @@ def test_packaging(mock_archive, tmpdir):
spec.concretize() spec.concretize()
pkg = spack.repo.get(spec) pkg = spack.repo.get(spec)
fake_fetchify(pkg.fetcher, pkg) fake_fetchify(pkg.fetcher, pkg)
mkdirp(join_path(pkg.prefix, "bin")) mkdirp(os.path.join(pkg.prefix, "bin"))
patchelfscr = join_path(pkg.prefix, "bin", "patchelf") patchelfscr = os.path.join(pkg.prefix, "bin", "patchelf")
f = open(patchelfscr, 'w') f = open(patchelfscr, 'w')
body = """#!/bin/bash body = """#!/bin/bash
echo $PATH""" echo $PATH"""
@ -91,14 +93,14 @@ def test_packaging(mock_archive, tmpdir):
pkg.do_install() pkg.do_install()
# Put some non-relocatable file in there # Put some non-relocatable file in there
filename = join_path(spec.prefix, "dummy.txt") filename = os.path.join(spec.prefix, "dummy.txt")
with open(filename, "w") as script: with open(filename, "w") as script:
script.write(spec.prefix) script.write(spec.prefix)
# Create the build cache and # Create the build cache and
# put it directly into the mirror # put it directly into the mirror
mirror_path = join_path(tmpdir, 'test-mirror') mirror_path = os.path.join(str(tmpdir), 'test-mirror')
specs = [spec] specs = [spec]
spack.mirror.create( spack.mirror.create(
mirror_path, specs, no_checksum=True mirror_path, specs, no_checksum=True
@ -286,17 +288,24 @@ def test_relocate():
@pytest.mark.skipif(sys.platform != 'darwin', @pytest.mark.skipif(sys.platform != 'darwin',
reason="only works with Mach-o objects") reason="only works with Mach-o objects")
def test_relocate_macho(): def test_relocate_macho(tmpdir):
with tmpdir.as_cwd():
get_patchelf() get_patchelf()
assert (needs_binary_relocation('Mach-O') is True) assert (needs_binary_relocation('Mach-O') is True)
macho_get_paths('/bin/bash') macho_get_paths('/bin/bash')
shutil.copyfile('/bin/bash', 'bash') shutil.copyfile('/bin/bash', 'bash')
modify_macho_object('bash', '/bin/bash', '/usr', '/opt', False) modify_macho_object('bash', '/bin/bash', '/usr', '/opt', False)
modify_macho_object('bash', '/bin/bash', '/usr', '/opt', True) modify_macho_object('bash', '/bin/bash', '/usr', '/opt', True)
shutil.copyfile('/usr/lib/libncurses.5.4.dylib', 'libncurses.5.4.dylib')
modify_macho_object('libncurses.5.4.dylib', shutil.copyfile(
'/usr/lib/libncurses.5.4.dylib', 'libncurses.5.4.dylib')
modify_macho_object(
'libncurses.5.4.dylib',
'/usr/lib/libncurses.5.4.dylib', '/usr', '/opt', False) '/usr/lib/libncurses.5.4.dylib', '/usr', '/opt', False)
modify_macho_object('libncurses.5.4.dylib', modify_macho_object(
'libncurses.5.4.dylib',
'/usr/lib/libncurses.5.4.dylib', '/usr', '/opt', True) '/usr/lib/libncurses.5.4.dylib', '/usr', '/opt', True)

View File

@ -30,18 +30,10 @@
import spack import spack
import spack.stage import spack.stage
import spack.util.executable import spack.util.executable
from llnl.util.filesystem import join_path from llnl.util.filesystem import join_path, working_dir
from spack.stage import Stage from spack.stage import Stage
def check_chdir_to_source(stage, stage_name):
stage_path = get_stage_path(stage, stage_name)
archive_dir = 'test-files'
assert join_path(
os.path.realpath(stage_path), archive_dir
) == os.getcwd()
def check_expand_archive(stage, stage_name, mock_archive): def check_expand_archive(stage, stage_name, mock_archive):
stage_path = get_stage_path(stage, stage_name) stage_path = get_stage_path(stage, stage_name)
archive_name = 'test-files.tar.gz' archive_name = 'test-files.tar.gz'
@ -64,11 +56,6 @@ def check_fetch(stage, stage_name):
assert join_path(stage_path, archive_name) == stage.fetcher.archive_file assert join_path(stage_path, archive_name) == stage.fetcher.archive_file
def check_chdir(stage, stage_name):
stage_path = get_stage_path(stage, stage_name)
assert os.path.realpath(stage_path) == os.getcwd()
def check_destroy(stage, stage_name): def check_destroy(stage, stage_name):
"""Figure out whether a stage was destroyed correctly.""" """Figure out whether a stage was destroyed correctly."""
stage_path = get_stage_path(stage, stage_name) stage_path = get_stage_path(stage, stage_name)
@ -162,10 +149,9 @@ def mock_archive(tmpdir, monkeypatch):
test_tmp_path.ensure(dir=True) test_tmp_path.ensure(dir=True)
test_readme.write('hello world!\n') test_readme.write('hello world!\n')
current = tmpdir.chdir() with tmpdir.as_cwd():
tar = spack.util.executable.which('tar', required=True) tar = spack.util.executable.which('tar', required=True)
tar('czf', str(archive_name), 'test-files') tar('czf', str(archive_name), 'test-files')
current.chdir()
# Make spack use the test environment for tmp stuff. # Make spack use the test environment for tmp stuff.
monkeypatch.setattr(spack.stage, '_tmp_root', None) monkeypatch.setattr(spack.stage, '_tmp_root', None)
@ -180,9 +166,6 @@ def mock_archive(tmpdir, monkeypatch):
test_tmp_dir=test_tmp_path, test_tmp_dir=test_tmp_path,
archive_dir=archive_dir archive_dir=archive_dir
) )
# record this since this test changes to directories that will
# be removed.
current.chdir()
@pytest.fixture() @pytest.fixture()
@ -245,18 +228,10 @@ def test_setup_and_destroy_no_name_without_tmp(self, mock_archive):
check_setup(stage, None, mock_archive) check_setup(stage, None, mock_archive)
check_destroy(stage, None) check_destroy(stage, None)
def test_chdir(self, mock_archive):
with Stage(mock_archive.url, name=self.stage_name) as stage:
stage.chdir()
check_setup(stage, self.stage_name, mock_archive)
check_chdir(stage, self.stage_name)
check_destroy(stage, self.stage_name)
def test_fetch(self, mock_archive): def test_fetch(self, mock_archive):
with Stage(mock_archive.url, name=self.stage_name) as stage: with Stage(mock_archive.url, name=self.stage_name) as stage:
stage.fetch() stage.fetch()
check_setup(stage, self.stage_name, mock_archive) check_setup(stage, self.stage_name, mock_archive)
check_chdir(stage, self.stage_name)
check_fetch(stage, self.stage_name) check_fetch(stage, self.stage_name)
check_destroy(stage, self.stage_name) check_destroy(stage, self.stage_name)
@ -307,24 +282,13 @@ def test_expand_archive(self, mock_archive):
check_expand_archive(stage, self.stage_name, mock_archive) check_expand_archive(stage, self.stage_name, mock_archive)
check_destroy(stage, self.stage_name) check_destroy(stage, self.stage_name)
def test_expand_archive_with_chdir(self, mock_archive):
with Stage(mock_archive.url, name=self.stage_name) as stage:
stage.fetch()
check_setup(stage, self.stage_name, mock_archive)
check_fetch(stage, self.stage_name)
stage.expand_archive()
stage.chdir_to_source()
check_expand_archive(stage, self.stage_name, mock_archive)
check_chdir_to_source(stage, self.stage_name)
check_destroy(stage, self.stage_name)
def test_restage(self, mock_archive): def test_restage(self, mock_archive):
with Stage(mock_archive.url, name=self.stage_name) as stage: with Stage(mock_archive.url, name=self.stage_name) as stage:
stage.fetch() stage.fetch()
stage.expand_archive() stage.expand_archive()
stage.chdir_to_source()
with working_dir(stage.source_path):
check_expand_archive(stage, self.stage_name, mock_archive) check_expand_archive(stage, self.stage_name, mock_archive)
check_chdir_to_source(stage, self.stage_name)
# Try to make a file in the old archive dir # Try to make a file in the old archive dir
with open('foobar', 'w') as file: with open('foobar', 'w') as file:
@ -334,10 +298,7 @@ def test_restage(self, mock_archive):
# Make sure the file is not there after restage. # Make sure the file is not there after restage.
stage.restage() stage.restage()
check_chdir(stage, self.stage_name)
check_fetch(stage, self.stage_name) check_fetch(stage, self.stage_name)
stage.chdir_to_source()
check_chdir_to_source(stage, self.stage_name)
assert 'foobar' not in os.listdir(stage.source_path) assert 'foobar' not in os.listdir(stage.source_path)
check_destroy(stage, self.stage_name) check_destroy(stage, self.stage_name)

View File

@ -72,6 +72,7 @@ def test_fetch(
finally: finally:
spack.insecure = False spack.insecure = False
with working_dir(pkg.stage.source_path):
assert h() == t.revision assert h() == t.revision
file_path = join_path(pkg.stage.source_path, t.file) file_path = join_path(pkg.stage.source_path, t.file)

View File

@ -72,6 +72,7 @@ def test_fetch(
finally: finally:
spack.insecure = False spack.insecure = False
with working_dir(pkg.stage.source_path):
assert os.path.exists('configure') assert os.path.exists('configure')
assert is_exe('configure') assert is_exe('configure')