Spack core and tests no longer use os.chdir()
- Spack's core package interface was previously overly stateful, in that calling methods like `do_stage()` could change your working directory.
- This removes Stage's `chdir` and `chdir_to_source` methods and replaces their usages with `with working_dir(stage.path)` and `with working_dir(stage.source_path)`, respectively. These ensure the original working directory is preserved.
- This not only makes the API more usable, it also makes the tests more deterministic: previously, a test could leave the current working directory in a bad state and cause subsequent tests to fail mysteriously.
This commit is contained in:
parent 890c5ad329
commit c14f2dc7b4
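The `with working_dir(path)` pattern that replaces `Stage.chdir()` throughout this commit comes from `llnl.util.filesystem`. As a reading aid, here is a minimal sketch of such a context manager, assuming only the save/chdir/restore behavior described in the commit message (the real implementation may differ in details):

    import os
    from contextlib import contextmanager

    @contextmanager
    def working_dir(dirname):
        """Temporarily change the working directory, then restore it."""
        orig_dir = os.getcwd()   # remember the caller's directory
        os.chdir(dirname)
        try:
            yield
        finally:
            os.chdir(orig_dir)   # restore even if the body raises

Because the restore happens in a `finally` block, a failed fetch or patch step can no longer strand the process inside a stage directory, which is what made the old chdir-based code and tests order-dependent.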
@@ -25,7 +25,7 @@
 import os
 
 import llnl.util.tty as tty
-from llnl.util.filesystem import join_path, mkdirp
+from llnl.util.filesystem import mkdirp, working_dir
 
 import spack
 from spack.util.executable import ProcessError, which
@@ -47,7 +47,7 @@ def setup_parser(subparser):
 
 
 def get_origin_info(remote):
-    git_dir = join_path(spack.prefix, '.git')
+    git_dir = os.path.join(spack.prefix, '.git')
     git = which('git', required=True)
     try:
         branch = git('symbolic-ref', '--short', 'HEAD', output=str)
@@ -81,7 +81,7 @@ def clone(parser, args):
 
     mkdirp(prefix)
 
-    if os.path.exists(join_path(prefix, '.git')):
+    if os.path.exists(os.path.join(prefix, '.git')):
        tty.die("There already seems to be a git repository in %s" % prefix)
 
     files_in_the_way = os.listdir(prefix)
@@ -94,14 +94,14 @@ def clone(parser, args):
             "%s/bin/spack" % prefix,
             "%s/lib/spack/..." % prefix)
 
-    os.chdir(prefix)
-    git = which('git', required=True)
-    git('init', '--shared', '-q')
-    git('remote', 'add', 'origin', origin_url)
-    git('fetch', 'origin', '%s:refs/remotes/origin/%s' % (branch, branch),
-        '-n', '-q')
-    git('reset', '--hard', 'origin/%s' % branch, '-q')
-    git('checkout', '-B', branch, 'origin/%s' % branch, '-q')
+    with working_dir(prefix):
+        git = which('git', required=True)
+        git('init', '--shared', '-q')
+        git('remote', 'add', 'origin', origin_url)
+        git('fetch', 'origin', '%s:refs/remotes/origin/%s' % (branch, branch),
+            '-n', '-q')
+        git('reset', '--hard', 'origin/%s' % branch, '-q')
+        git('checkout', '-B', branch, 'origin/%s' % branch, '-q')
 
     tty.msg("Successfully created a new spack in %s" % prefix,
             "Run %s/bin/spack to use this installation." % prefix)
@@ -153,16 +153,15 @@ def matches(cls, args):
 
 @pattern.composite(interface=FetchStrategy)
 class FetchStrategyComposite(object):
-
-    """
-    Composite for a FetchStrategy object. Implements the GoF composite pattern.
+    """Composite for a FetchStrategy object.
+
+    Implements the GoF composite pattern.
     """
     matches = FetchStrategy.matches
     set_stage = FetchStrategy.set_stage
 
 
 class URLFetchStrategy(FetchStrategy):
-
     """FetchStrategy that pulls source code from a URL for an archive,
     checks the archive against a checksum,and decompresses the archive.
     """
@@ -200,8 +199,6 @@ def curl(self):
 
     @_needs_stage
     def fetch(self):
-        self.stage.chdir()
-
         if self.archive_file:
             tty.msg("Already downloaded %s" % self.archive_file)
             return
@@ -242,7 +239,8 @@ def fetch(self):
 
         # Run curl but grab the mime type from the http headers
         curl = self.curl
-        headers = curl(*curl_args, output=str, fail_on_error=False)
+        with working_dir(self.stage.path):
+            headers = curl(*curl_args, output=str, fail_on_error=False)
 
         if curl.returncode != 0:
             # clean up archive on failure.
@@ -310,7 +308,6 @@ def expand(self):
 
         tty.msg("Staging archive: %s" % self.archive_file)
 
-        self.stage.chdir()
         if not self.archive_file:
             raise NoArchiveFileError(
                 "Couldn't find archive file",
@@ -318,15 +315,17 @@ def expand(self):
 
         if not self.extension:
             self.extension = extension(self.archive_file)
 
         decompress = decompressor_for(self.archive_file, self.extension)
 
         # Expand all tarballs in their own directory to contain
         # exploding tarballs.
         tarball_container = os.path.join(self.stage.path,
                                          "spack-expanded-archive")
 
         mkdirp(tarball_container)
-        os.chdir(tarball_container)
-        decompress(self.archive_file)
+        with working_dir(tarball_container):
+            decompress(self.archive_file)
 
         # Check for an exploding tarball, i.e. one that doesn't expand
         # to a single directory. If the tarball *didn't* explode,
@@ -345,10 +344,9 @@ def expand(self):
                 shutil.move(os.path.join(tarball_container, f),
                             os.path.join(self.stage.path, f))
             os.rmdir(tarball_container)
 
-        # Set the wd back to the stage when done.
-        self.stage.chdir()
+        if not files:
+            os.rmdir(tarball_container)
 
     def archive(self, destination):
         """Just moves this archive to the destination."""
@@ -416,8 +414,6 @@ def fetch(self):
         if not os.path.isfile(path):
             raise NoCacheError('No cache of %s' % path)
 
-        self.stage.chdir()
-
         # remove old symlink if one is there.
         filename = self.stage.save_filename
         if os.path.exists(filename):
@@ -484,8 +480,8 @@ def archive(self, destination, **kwargs):
             for p in patterns:
                 tar.add_default_arg('--exclude=%s' % p)
 
-        self.stage.chdir()
-        tar('-czf', destination, os.path.basename(self.stage.source_path))
+        with working_dir(self.stage.path):
+            tar('-czf', destination, os.path.basename(self.stage.source_path))
 
     def __str__(self):
         return "VCS: %s" % self.url
@@ -495,9 +491,8 @@ def __repr__(self):
 
 
 class GoFetchStrategy(VCSFetchStrategy):
-
-    """
-    Fetch strategy that employs the `go get` infrastructure
+    """Fetch strategy that employs the `go get` infrastructure.
+
     Use like this in a package:
 
        version('name',
@@ -530,25 +525,24 @@ def go(self):
 
     @_needs_stage
     def fetch(self):
-        self.stage.chdir()
-
         tty.msg("Trying to get go resource:", self.url)
 
-        try:
-            os.mkdir('go')
-        except OSError:
-            pass
-        env = dict(os.environ)
-        env['GOPATH'] = os.path.join(os.getcwd(), 'go')
-        self.go('get', '-v', '-d', self.url, env=env)
+        with working_dir(self.stage.path):
+            try:
+                os.mkdir('go')
+            except OSError:
+                pass
+            env = dict(os.environ)
+            env['GOPATH'] = os.path.join(os.getcwd(), 'go')
+            self.go('get', '-v', '-d', self.url, env=env)
 
     def archive(self, destination):
         super(GoFetchStrategy, self).archive(destination, exclude='.git')
 
     @_needs_stage
     def reset(self):
-        self.stage.chdir_to_source()
-        self.go('clean')
+        with working_dir(self.stage.source_path):
+            self.go('clean')
 
     def __str__(self):
         return "[go] %s" % self.url
@@ -607,10 +601,7 @@ def git(self):
     def cachable(self):
         return bool(self.commit or self.tag)
 
     @_needs_stage
     def fetch(self):
-        self.stage.chdir()
-
         if self.stage.source_path:
             tty.msg("Already fetched %s" % self.stage.source_path)
             return
@@ -622,20 +613,24 @@ def fetch(self):
             args = 'at tag %s' % self.tag
         elif self.branch:
             args = 'on branch %s' % self.branch
 
         tty.msg("Trying to clone git repository: %s %s" % (self.url, args))
 
+        git = self.git
         if self.commit:
             # Need to do a regular clone and check out everything if
             # they asked for a particular commit.
-            if spack.debug:
-                self.git('clone', self.url)
-            else:
-                self.git('clone', '--quiet', self.url)
-            self.stage.chdir_to_source()
-            if spack.debug:
-                self.git('checkout', self.commit)
-            else:
-                self.git('checkout', '--quiet', self.commit)
+            with working_dir(self.stage.path):
+                if spack.debug:
+                    git('clone', self.url)
+                else:
+                    git('clone', '--quiet', self.url)
+
+            with working_dir(self.stage.source_path):
+                if spack.debug:
+                    git('checkout', self.commit)
+                else:
+                    git('checkout', '--quiet', self.commit)
 
         else:
             # Can be more efficient if not checking out a specific commit.
@@ -654,57 +649,58 @@ def fetch(self):
             if self.git_version > ver('1.7.10'):
                 args.append('--single-branch')
 
-            cloned = False
-            # Yet more efficiency, only download a 1-commit deep tree
-            if self.git_version >= ver('1.7.1'):
-                try:
-                    self.git(*(args + ['--depth', '1', self.url]))
-                    cloned = True
-                except spack.error.SpackError:
-                    # This will fail with the dumb HTTP transport
-                    # continue and try without depth, cleanup first
-                    pass
+            with working_dir(self.stage.path):
+                cloned = False
+                # Yet more efficiency, only download a 1-commit deep tree
+                if self.git_version >= ver('1.7.1'):
+                    try:
+                        git(*(args + ['--depth', '1', self.url]))
+                        cloned = True
+                    except spack.error.SpackError:
+                        # This will fail with the dumb HTTP transport
+                        # continue and try without depth, cleanup first
+                        pass
 
-            if not cloned:
-                args.append(self.url)
-                self.git(*args)
+                if not cloned:
+                    args.append(self.url)
+                    git(*args)
 
-            self.stage.chdir_to_source()
-
-            # For tags, be conservative and check them out AFTER
-            # cloning. Later git versions can do this with clone
-            # --branch, but older ones fail.
-            if self.tag and self.git_version < ver('1.8.5.2'):
-                # pull --tags returns a "special" error code of 1 in
-                # older versions that we have to ignore.
-                # see: https://github.com/git/git/commit/19d122b
-                if spack.debug:
-                    self.git('pull', '--tags', ignore_errors=1)
-                    self.git('checkout', self.tag)
-                else:
-                    self.git('pull', '--quiet', '--tags', ignore_errors=1)
-                    self.git('checkout', '--quiet', self.tag)
+            with working_dir(self.stage.source_path):
+                # For tags, be conservative and check them out AFTER
+                # cloning. Later git versions can do this with clone
+                # --branch, but older ones fail.
+                if self.tag and self.git_version < ver('1.8.5.2'):
+                    # pull --tags returns a "special" error code of 1 in
+                    # older versions that we have to ignore.
+                    # see: https://github.com/git/git/commit/19d122b
+                    if spack.debug:
+                        git('pull', '--tags', ignore_errors=1)
+                        git('checkout', self.tag)
+                    else:
+                        git('pull', '--quiet', '--tags', ignore_errors=1)
+                        git('checkout', '--quiet', self.tag)
 
-        # Init submodules if the user asked for them.
-        if self.submodules:
-            if spack.debug:
-                self.git('submodule', 'update', '--init', '--recursive')
-            else:
-                self.git('submodule', '--quiet', 'update', '--init',
-                         '--recursive')
+        with working_dir(self.stage.source_path):
+            # Init submodules if the user asked for them.
+            if self.submodules:
+                if spack.debug:
+                    git('submodule', 'update', '--init', '--recursive')
+                else:
+                    git('submodule', '--quiet', 'update', '--init',
+                        '--recursive')
 
     def archive(self, destination):
         super(GitFetchStrategy, self).archive(destination, exclude='.git')
 
     @_needs_stage
     def reset(self):
-        self.stage.chdir_to_source()
-        if spack.debug:
-            self.git('checkout', '.')
-            self.git('clean', '-f')
-        else:
-            self.git('checkout', '--quiet', '.')
-            self.git('clean', '--quiet', '-f')
+        with working_dir(self.stage.source_path):
+            if spack.debug:
+                self.git('checkout', '.')
+                self.git('clean', '-f')
+            else:
+                self.git('checkout', '--quiet', '.')
+                self.git('clean', '--quiet', '-f')
 
     def __str__(self):
         return "[git] %s" % self.url
@@ -749,8 +745,6 @@ def cachable(self):
 
     @_needs_stage
     def fetch(self):
-        self.stage.chdir()
-
         if self.stage.source_path:
             tty.msg("Already fetched %s" % self.stage.source_path)
             return
@@ -762,30 +756,31 @@ def fetch(self):
             args += ['-r', self.revision]
         args.append(self.url)
 
-        self.svn(*args)
-        self.stage.chdir_to_source()
+        with working_dir(self.stage.path):
+            self.svn(*args)
 
     def _remove_untracked_files(self):
         """Removes untracked files in an svn repository."""
-        status = self.svn('status', '--no-ignore', output=str)
-        self.svn('status', '--no-ignore')
-        for line in status.split('\n'):
-            if not re.match('^[I?]', line):
-                continue
-            path = line[8:].strip()
-            if os.path.isfile(path):
-                os.unlink(path)
-            elif os.path.isdir(path):
-                shutil.rmtree(path, ignore_errors=True)
+        with working_dir(self.stage.source_path):
+            status = self.svn('status', '--no-ignore', output=str)
+            self.svn('status', '--no-ignore')
+            for line in status.split('\n'):
+                if not re.match('^[I?]', line):
+                    continue
+                path = line[8:].strip()
+                if os.path.isfile(path):
+                    os.unlink(path)
+                elif os.path.isdir(path):
+                    shutil.rmtree(path, ignore_errors=True)
 
     def archive(self, destination):
         super(SvnFetchStrategy, self).archive(destination, exclude='.svn')
 
     @_needs_stage
     def reset(self):
-        self.stage.chdir_to_source()
         self._remove_untracked_files()
-        self.svn('revert', '.', '-R')
+        with working_dir(self.stage.source_path):
+            self.svn('revert', '.', '-R')
 
     def __str__(self):
         return "[svn] %s" % self.url
@@ -845,8 +840,6 @@ def cachable(self):
 
     @_needs_stage
     def fetch(self):
-        self.stage.chdir()
-
         if self.stage.source_path:
             tty.msg("Already fetched %s" % self.stage.source_path)
             return
@@ -866,27 +859,26 @@ def fetch(self):
         if self.revision:
             args.extend(['-r', self.revision])
 
-        self.hg(*args)
+        with working_dir(self.stage.path):
+            self.hg(*args)
 
     def archive(self, destination):
         super(HgFetchStrategy, self).archive(destination, exclude='.hg')
 
     @_needs_stage
     def reset(self):
-        self.stage.chdir()
-
-        source_path = self.stage.source_path
-        scrubbed = "scrubbed-source-tmp"
-
-        args = ['clone']
-        if self.revision:
-            args += ['-r', self.revision]
-        args += [source_path, scrubbed]
-        self.hg(*args)
-
-        shutil.rmtree(source_path, ignore_errors=True)
-        shutil.move(scrubbed, source_path)
-        self.stage.chdir_to_source()
+        with working_dir(self.stage.path):
+            source_path = self.stage.source_path
+            scrubbed = "scrubbed-source-tmp"
+
+            args = ['clone']
+            if self.revision:
+                args += ['-r', self.revision]
+            args += [source_path, scrubbed]
+            self.hg(*args)
+
+            shutil.rmtree(source_path, ignore_errors=True)
+            shutil.move(scrubbed, source_path)
 
     def __str__(self):
         return "[hg] %s" % self.url
@@ -984,14 +984,15 @@ def do_fetch(self, mirror_only=False):
         self.stage.cache_local()
 
     def do_stage(self, mirror_only=False):
-        """Unpacks the fetched tarball, then changes into the expanded tarball
-           directory."""
+        """Unpacks and expands the fetched tarball."""
         if not self.spec.concrete:
             raise ValueError("Can only stage concrete packages.")
 
         self.do_fetch(mirror_only)
         self.stage.expand_archive()
-        self.stage.chdir_to_source()
+
+        if not os.listdir(self.stage.path):
+            raise FetchError("Archive was empty for %s" % self.name)
 
     @classmethod
     def lookup_patch(cls, sha256):
@@ -1059,8 +1060,6 @@ def do_patch(self):
             tty.msg("Patching failed last time. Restaging.")
             self.stage.restage()
 
-        self.stage.chdir_to_source()
-
         # If this file exists, then we already applied all the patches.
         if os.path.isfile(good_file):
             tty.msg("Already patched %s" % self.name)
@@ -1073,7 +1072,8 @@ def do_patch(self):
         patched = False
         for patch in patches:
             try:
-                patch.apply(self.stage)
+                with working_dir(self.stage.source_path):
+                    patch.apply(self.stage)
                 tty.msg('Applied patch %s' % patch.path_or_url)
                 patched = True
             except:
@@ -1084,7 +1084,8 @@ def do_patch(self):
 
         if has_patch_fun:
             try:
-                self.patch()
+                with working_dir(self.stage.source_path):
+                    self.patch()
                 tty.msg("Ran patch() for %s" % self.name)
                 patched = True
             except spack.multimethod.NoSuchMethodError:
@@ -1384,23 +1385,23 @@ def build_process():
                 install_tree(self.stage.source_path, src_target)
 
             # Do the real install in the source directory.
-            self.stage.chdir_to_source()
+            with working_dir(self.stage.source_path):
+                # Save the build environment in a file before building.
+                dump_environment(self.env_path)
 
-            # Save the build environment in a file before building.
-            dump_environment(self.env_path)
+                # Spawn a daemon that reads from a pipe and redirects
+                # everything to log_path
+                with log_output(self.log_path, echo, True) as logger:
+                    for phase_name, phase_attr in zip(
+                            self.phases, self._InstallPhase_phases):
+                        with logger.force_echo():
+                            tty.msg(
+                                "Executing phase: '%s'" % phase_name)
 
-            # Spawn a daemon that reads from a pipe and redirects
-            # everything to log_path
-            with log_output(self.log_path, echo, True) as logger:
-                for phase_name, phase_attr in zip(
-                        self.phases, self._InstallPhase_phases):
-
-                    with logger.force_echo():
-                        tty.msg("Executing phase: '%s'" % phase_name)
-
-                    # Redirect stdout and stderr to daemon pipe
-                    phase = getattr(self, phase_attr)
-                    phase(self.spec, self.prefix)
+                        # Redirect stdout and stderr to daemon pipe
+                        phase = getattr(self, phase_attr)
+                        phase(self.spec, self.prefix)
 
             echo = logger.echo
             self.log()
@@ -366,18 +366,8 @@ def source_path(self):
                 return p
         return None
 
-    def chdir(self):
-        """Changes directory to the stage path. Or dies if it is not set
-           up."""
-        if os.path.isdir(self.path):
-            os.chdir(self.path)
-        else:
-            raise ChdirError("Setup failed: no such directory: " + self.path)
-
     def fetch(self, mirror_only=False):
         """Downloads an archive or checks out code from a repository."""
-        self.chdir()
-
         fetchers = []
         if not mirror_only:
             fetchers.append(self.default_fetcher)
@@ -478,18 +468,6 @@ def expand_archive(self):
         else:
             tty.msg("Already staged %s in %s" % (self.name, self.path))
 
-    def chdir_to_source(self):
-        """Changes directory to the expanded archive directory.
-           Dies with an error if there was no expanded archive.
-        """
-        path = self.source_path
-        if not path:
-            raise StageError("Attempt to chdir before expanding archive.")
-        else:
-            os.chdir(path)
-            if not os.listdir(path):
-                raise StageError("Archive was empty for %s" % self.name)
-
     def restage(self):
         """Removes the expanded archive path if it exists, then re-expands
            the archive.
@@ -613,9 +591,6 @@ def source_path(self):
     def path(self):
         return self[0].path
 
-    def chdir_to_source(self):
-        return self[0].chdir_to_source()
-
     @property
     def archive_file(self):
         return self[0].archive_file
@@ -634,12 +609,6 @@ def __init__(self, path):
         self.source_path = path
         self.created = True
 
-    def chdir(self):
-        if os.path.isdir(self.path):
-            os.chdir(self.path)
-        else:
-            raise ChdirError("Setup failed: no such directory: " + self.path)
-
     # DIY stages do nothing as context managers.
     def __enter__(self):
         pass
@@ -647,9 +616,6 @@ def __enter__(self):
     def __exit__(self, exc_type, exc_val, exc_tb):
         pass
 
-    def chdir_to_source(self):
-        self.chdir()
-
     def fetch(self, *args, **kwargs):
         tty.msg("No need to fetch for DIY.")
 
@@ -701,9 +667,5 @@ class RestageError(StageError):
     """"Error encountered during restaging."""
 
 
-class ChdirError(StageError):
-    """Raised when Spack can't change directories."""
-
-
 # Keep this in namespace for convenience
 FailedDownloadError = fs.FailedDownloadError
@@ -57,8 +57,8 @@ def test_install_package_and_dependency(
         tmpdir, builtin_mock, mock_archive, mock_fetch, config,
         install_mockery):
 
-    tmpdir.chdir()
-    install('--log-format=junit', '--log-file=test.xml', 'libdwarf')
+    with tmpdir.as_cwd():
+        install('--log-format=junit', '--log-file=test.xml', 'libdwarf')
 
     files = tmpdir.listdir()
     filename = tmpdir.join('test.xml')
@@ -104,9 +104,9 @@ def test_install_package_already_installed(
         tmpdir, builtin_mock, mock_archive, mock_fetch, config,
         install_mockery):
 
-    tmpdir.chdir()
-    install('libdwarf')
-    install('--log-format=junit', '--log-file=test.xml', 'libdwarf')
+    with tmpdir.as_cwd():
+        install('libdwarf')
+        install('--log-format=junit', '--log-file=test.xml', 'libdwarf')
 
    files = tmpdir.listdir()
    filename = tmpdir.join('test.xml')
@@ -49,9 +49,28 @@
 from spack.version import Version
 
 
-##########
-# Monkey-patching that is applied to all tests
-##########
+#
+# These fixtures are applied to all tests
+#
+@pytest.fixture(scope='function', autouse=True)
+def no_chdir():
+    """Ensure that no test changes Spack's working directory.
+
+    This prevents Spack tests (and therefore Spack commands) from
+    changing the working directory and causing other tests to fail
+    mysteriously. Tests should use ``working_dir`` or ``py.path``'s
+    ``.as_cwd()`` instead of ``os.chdir`` to avoid failing this check.
+
+    We assert that the working directory hasn't changed, unless the
+    original wd somehow ceased to exist.
+
+    """
+    original_wd = os.getcwd()
+    yield
+    if os.path.isdir(original_wd):
+        assert os.getcwd() == original_wd
+
+
 @pytest.fixture(autouse=True)
 def mock_fetch_cache(monkeypatch):
     """Substitutes spack.fetch_cache with a mock object that does nothing
@@ -200,12 +219,11 @@ def database(tmpdir_factory, builtin_mock, config):
 
     # Make a fake install directory
     install_path = tmpdir_factory.mktemp('install_for_database')
-    spack_install_path = py.path.local(spack.store.root)
-    spack.store.root = str(install_path)
+    spack_install_path = spack.store.root
+
+    spack.store.root = str(install_path)
     install_layout = spack.directory_layout.YamlDirectoryLayout(
-        str(install_path)
-    )
+        str(install_path))
     spack_install_layout = spack.store.layout
     spack.store.layout = install_layout
@@ -216,14 +234,12 @@ def database(tmpdir_factory, builtin_mock, config):
 
     Entry = collections.namedtuple('Entry', ['path', 'layout', 'db'])
     Database = collections.namedtuple(
-        'Database', ['real', 'mock', 'install', 'uninstall', 'refresh']
-    )
+        'Database', ['real', 'mock', 'install', 'uninstall', 'refresh'])
 
     real = Entry(
         path=spack_install_path,
         layout=spack_install_layout,
-        db=spack_install_db
-    )
+        db=spack_install_db)
     mock = Entry(path=install_path, layout=install_layout, db=install_db)
 
     def _install(spec):
@@ -249,8 +265,8 @@ def _refresh():
         mock=mock,
         install=_install,
         uninstall=_uninstall,
-        refresh=_refresh
-    )
+        refresh=_refresh)
 
     # Transaction used to avoid repeated writes.
     with spack.store.db.write_transaction():
         t.install('mpileaks ^mpich')
@@ -268,7 +284,7 @@ def _refresh():
         spack.store.db.remove(spec)
 
     install_path.remove(rec=1)
-    spack.store.root = str(spack_install_path)
+    spack.store.root = spack_install_path
     spack.store.layout = spack_install_layout
     spack.store.db = spack_install_db
 
@@ -285,11 +301,13 @@ def install_mockery(tmpdir, config, builtin_mock):
     """Hooks a fake install directory and a fake db into Spack."""
     layout = spack.store.layout
     db = spack.store.db
+    new_opt = str(tmpdir.join('opt'))
 
     # Use a fake install directory to avoid conflicts bt/w
     # installed pkgs and mock packages.
-    spack.store.layout = spack.directory_layout.YamlDirectoryLayout(
-        str(tmpdir))
-    spack.store.db = spack.database.Database(str(tmpdir))
+    spack.store.layout = spack.directory_layout.YamlDirectoryLayout(new_opt)
+    spack.store.db = spack.database.Database(new_opt)
 
     # We use a fake package, so skip the checksum.
     spack.do_checksum = False
     yield
@@ -328,6 +346,7 @@ def mock_archive():
     """
     tar = spack.util.executable.which('tar', required=True)
     stage = spack.stage.Stage('mock-archive-stage')
+
     tmpdir = py.path.local(stage.path)
     repo_name = 'mock-archive-repo'
     tmpdir.ensure(repo_name, dir=True)
@@ -350,10 +369,10 @@ def mock_archive():
     os.chmod(configure_path, 0o755)
 
     # Archive it
-    current = tmpdir.chdir()
-    archive_name = '{0}.tar.gz'.format(repo_name)
-    tar('-czf', archive_name, repo_name)
-    current.chdir()
+    with tmpdir.as_cwd():
+        archive_name = '{0}.tar.gz'.format(repo_name)
+        tar('-czf', archive_name, repo_name)
 
     Archive = collections.namedtuple('Archive',
                                      ['url', 'path', 'archive_file'])
     archive_file = str(tmpdir.join(archive_name))
@@ -379,46 +398,45 @@ def mock_git_repository():
     repodir = tmpdir.join(repo_name)
 
     # Initialize the repository
-    current = repodir.chdir()
-    git('init')
-    url = 'file://' + str(repodir)
+    with repodir.as_cwd():
+        git('init')
+        url = 'file://' + str(repodir)
 
-    # r0 is just the first commit
-    r0_file = 'r0_file'
-    repodir.ensure(r0_file)
-    git('add', r0_file)
-    git('commit', '-m', 'mock-git-repo r0')
+        # r0 is just the first commit
+        r0_file = 'r0_file'
+        repodir.ensure(r0_file)
+        git('add', r0_file)
+        git('commit', '-m', 'mock-git-repo r0')
 
-    branch = 'test-branch'
-    branch_file = 'branch_file'
-    git('branch', branch)
+        branch = 'test-branch'
+        branch_file = 'branch_file'
+        git('branch', branch)
 
-    tag_branch = 'tag-branch'
-    tag_file = 'tag_file'
-    git('branch', tag_branch)
+        tag_branch = 'tag-branch'
+        tag_file = 'tag_file'
+        git('branch', tag_branch)
 
-    # Check out first branch
-    git('checkout', branch)
-    repodir.ensure(branch_file)
-    git('add', branch_file)
-    git('commit', '-m' 'r1 test branch')
+        # Check out first branch
+        git('checkout', branch)
+        repodir.ensure(branch_file)
+        git('add', branch_file)
+        git('commit', '-m' 'r1 test branch')
 
-    # Check out a second branch and tag it
-    git('checkout', tag_branch)
-    repodir.ensure(tag_file)
-    git('add', tag_file)
-    git('commit', '-m' 'tag test branch')
+        # Check out a second branch and tag it
+        git('checkout', tag_branch)
+        repodir.ensure(tag_file)
+        git('add', tag_file)
+        git('commit', '-m' 'tag test branch')
 
-    tag = 'test-tag'
-    git('tag', tag)
+        tag = 'test-tag'
+        git('tag', tag)
 
-    git('checkout', 'master')
+        git('checkout', 'master')
 
-    # R1 test is the same as test for branch
-    rev_hash = lambda x: git('rev-parse', x, output=str).strip()
-    r1 = rev_hash(branch)
-    r1_file = branch_file
-    current.chdir()
+        # R1 test is the same as test for branch
+        rev_hash = lambda x: git('rev-parse', x, output=str).strip()
+        r1 = rev_hash(branch)
+        r1_file = branch_file
 
     Bunch = spack.util.pattern.Bunch
 
@@ -457,22 +475,23 @@ def mock_hg_repository():
     get_rev = lambda: hg('id', '-i', output=str).strip()
 
     # Initialize the repository
-    current = repodir.chdir()
-    url = 'file://' + str(repodir)
-    hg('init')
-    # Commit file r0
-    r0_file = 'r0_file'
-    repodir.ensure(r0_file)
-    hg('add', r0_file)
-    hg('commit', '-m', 'revision 0', '-u', 'test')
-    r0 = get_rev()
-    # Commit file r1
-    r1_file = 'r1_file'
-    repodir.ensure(r1_file)
-    hg('add', r1_file)
-    hg('commit', '-m' 'revision 1', '-u', 'test')
-    r1 = get_rev()
-    current.chdir()
+    with repodir.as_cwd():
+        url = 'file://' + str(repodir)
+        hg('init')
+
+        # Commit file r0
+        r0_file = 'r0_file'
+        repodir.ensure(r0_file)
+        hg('add', r0_file)
+        hg('commit', '-m', 'revision 0', '-u', 'test')
+        r0 = get_rev()
+
+        # Commit file r1
+        r1_file = 'r1_file'
+        repodir.ensure(r1_file)
+        hg('add', r1_file)
+        hg('commit', '-m' 'revision 1', '-u', 'test')
+        r1 = get_rev()
 
     Bunch = spack.util.pattern.Bunch
 
@@ -497,64 +516,64 @@ def mock_svn_repository():
     svn = spack.util.executable.which('svn', required=True)
     svnadmin = spack.util.executable.which('svnadmin', required=True)
     stage = spack.stage.Stage('mock-svn-stage')
 
     tmpdir = py.path.local(stage.path)
     repo_name = 'mock-svn-repo'
     tmpdir.ensure(repo_name, dir=True)
     repodir = tmpdir.join(repo_name)
     url = 'file://' + str(repodir)
 
     # Initialize the repository
-    current = repodir.chdir()
-    # NOTE: Adding --pre-1.5-compatible works for NERSC
-    # Unknown if this is also an issue at other sites.
-    svnadmin('create', '--pre-1.5-compatible', str(repodir))
+    with repodir.as_cwd():
+        # NOTE: Adding --pre-1.5-compatible works for NERSC
+        # Unknown if this is also an issue at other sites.
+        svnadmin('create', '--pre-1.5-compatible', str(repodir))
 
-    # Import a structure (first commit)
-    r0_file = 'r0_file'
-    tmpdir.ensure('tmp-path', r0_file)
-    svn(
-        'import',
-        str(tmpdir.join('tmp-path')),
-        url,
-        '-m',
-        'Initial import r0'
-    )
-    shutil.rmtree(str(tmpdir.join('tmp-path')))
-    # Second commit
-    r1_file = 'r1_file'
-    svn('checkout', url, str(tmpdir.join('tmp-path')))
-    tmpdir.ensure('tmp-path', r1_file)
-    tmpdir.join('tmp-path').chdir()
-    svn('add', str(tmpdir.ensure('tmp-path', r1_file)))
-    svn('ci', '-m', 'second revision r1')
-    repodir.chdir()
-    shutil.rmtree(str(tmpdir.join('tmp-path')))
-    r0 = '1'
-    r1 = '2'
+        # Import a structure (first commit)
+        r0_file = 'r0_file'
+        tmpdir.ensure('tmp-path', r0_file)
+        tmp_path = tmpdir.join('tmp-path')
+        svn('import',
+            str(tmp_path),
+            url,
+            '-m',
+            'Initial import r0')
+        tmp_path.remove()
 
-    Bunch = spack.util.pattern.Bunch
+        # Second commit
+        r1_file = 'r1_file'
+        svn('checkout', url, str(tmp_path))
+        tmpdir.ensure('tmp-path', r1_file)
 
-    checks = {
-        'default': Bunch(
-            revision=r1, file=r1_file, args={'svn': url}
-        ),
-        'rev0': Bunch(
-            revision=r0, file=r0_file, args={
-                'svn': url, 'revision': r0
-            }
-        )
-    }
+        with tmp_path.as_cwd():
+            svn('add', str(tmpdir.ensure('tmp-path', r1_file)))
+            svn('ci', '-m', 'second revision r1')
 
-    def get_rev():
-        output = svn('info', output=str)
-        assert "Revision" in output
-        for line in output.split('\n'):
-            match = re.match(r'Revision: (\d+)', line)
-            if match:
-                return match.group(1)
+        tmp_path.remove()
+        r0 = '1'
+        r1 = '2'
+
+        Bunch = spack.util.pattern.Bunch
+
+        checks = {
+            'default': Bunch(
+                revision=r1, file=r1_file, args={'svn': url}),
+            'rev0': Bunch(
+                revision=r0, file=r0_file, args={
+                    'svn': url, 'revision': r0})
+        }
+
+        def get_rev():
+            output = svn('info', output=str)
+            assert "Revision" in output
+            for line in output.split('\n'):
+                match = re.match(r'Revision: (\d+)', line)
+                if match:
+                    return match.group(1)
+
+        t = Bunch(checks=checks, url=url, hash=get_rev, path=str(repodir))
 
-    t = Bunch(checks=checks, url=url, hash=get_rev, path=str(repodir))
     yield t
-    current.chdir()
 
 
 ##########
@@ -47,7 +47,6 @@ def test_inspect_path(tmpdir):
         '': ['CMAKE_PREFIX_PATH']
     }
 
-    tmpdir.chdir()
     tmpdir.mkdir('bin')
     tmpdir.mkdir('lib')
     tmpdir.mkdir('include')
@@ -72,22 +72,23 @@ def test_fetch(
     finally:
         spack.insecure = False
 
-    assert h('HEAD') == h(t.revision)
+    with working_dir(pkg.stage.source_path):
+        assert h('HEAD') == h(t.revision)
 
-    file_path = join_path(pkg.stage.source_path, t.file)
-    assert os.path.isdir(pkg.stage.source_path)
-    assert os.path.isfile(file_path)
+        file_path = join_path(pkg.stage.source_path, t.file)
+        assert os.path.isdir(pkg.stage.source_path)
+        assert os.path.isfile(file_path)
 
-    os.unlink(file_path)
-    assert not os.path.isfile(file_path)
+        os.unlink(file_path)
+        assert not os.path.isfile(file_path)
 
-    untracked_file = 'foobarbaz'
-    touch(untracked_file)
-    assert os.path.isfile(untracked_file)
-    pkg.do_restage()
-    assert not os.path.isfile(untracked_file)
+        untracked_file = 'foobarbaz'
+        touch(untracked_file)
+        assert os.path.isfile(untracked_file)
+        pkg.do_restage()
+        assert not os.path.isfile(untracked_file)
 
-    assert os.path.isdir(pkg.stage.source_path)
-    assert os.path.isfile(file_path)
+        assert os.path.isdir(pkg.stage.source_path)
+        assert os.path.isfile(file_path)
 
-    assert h('HEAD') == h(t.revision)
+        assert h('HEAD') == h(t.revision)
@@ -72,22 +72,23 @@ def test_fetch(
     finally:
         spack.insecure = False
 
-    assert h() == t.revision
+    with working_dir(pkg.stage.source_path):
+        assert h() == t.revision
 
-    file_path = join_path(pkg.stage.source_path, t.file)
-    assert os.path.isdir(pkg.stage.source_path)
-    assert os.path.isfile(file_path)
+        file_path = join_path(pkg.stage.source_path, t.file)
+        assert os.path.isdir(pkg.stage.source_path)
+        assert os.path.isfile(file_path)
 
-    os.unlink(file_path)
-    assert not os.path.isfile(file_path)
+        os.unlink(file_path)
+        assert not os.path.isfile(file_path)
 
-    untracked_file = 'foobarbaz'
-    touch(untracked_file)
-    assert os.path.isfile(untracked_file)
-    pkg.do_restage()
-    assert not os.path.isfile(untracked_file)
+        untracked_file = 'foobarbaz'
+        touch(untracked_file)
+        assert os.path.isfile(untracked_file)
+        pkg.do_restage()
+        assert not os.path.isfile(untracked_file)
 
-    assert os.path.isdir(pkg.stage.source_path)
-    assert os.path.isfile(file_path)
+        assert os.path.isdir(pkg.stage.source_path)
+        assert os.path.isfile(file_path)
 
-    assert h() == t.revision
+        assert h() == t.revision
@@ -70,11 +70,7 @@ def check_mirror():
         # register mirror with spack config
         mirrors = {'spack-mirror-test': 'file://' + mirror_root}
         spack.config.update_config('mirrors', mirrors)
 
-        os.chdir(stage.path)
-        spack.mirror.create(
-            mirror_root, repos, no_checksum=True
-        )
+        spack.mirror.create(mirror_root, repos, no_checksum=True)
 
         # Stage directory exists
         assert os.path.isdir(mirror_root)
@@ -25,20 +25,22 @@
 """
 This test checks the binary packaging infrastructure
 """
-import pytest
-import spack
-import spack.store
-from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite
-from spack.spec import Spec
-import spack.binary_distribution as bindist
-from llnl.util.filesystem import join_path, mkdirp
-import argparse
-import spack.cmd.buildcache as buildcache
-from spack.relocate import *
 import os
 import stat
 import sys
 import shutil
+import pytest
+import argparse
+
+from llnl.util.filesystem import mkdirp
+
+import spack
+import spack.store
+import spack.binary_distribution as bindist
+import spack.cmd.buildcache as buildcache
+from spack.spec import Spec
+from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite
+from spack.relocate import *
+from spack.util.executable import ProcessError
 
 
@@ -72,8 +74,8 @@ def test_packaging(mock_archive, tmpdir):
     spec.concretize()
     pkg = spack.repo.get(spec)
     fake_fetchify(pkg.fetcher, pkg)
-    mkdirp(join_path(pkg.prefix, "bin"))
-    patchelfscr = join_path(pkg.prefix, "bin", "patchelf")
+    mkdirp(os.path.join(pkg.prefix, "bin"))
+    patchelfscr = os.path.join(pkg.prefix, "bin", "patchelf")
     f = open(patchelfscr, 'w')
     body = """#!/bin/bash
 echo $PATH"""
@@ -91,14 +93,14 @@ def test_packaging(mock_archive, tmpdir):
     pkg.do_install()
 
     # Put some non-relocatable file in there
-    filename = join_path(spec.prefix, "dummy.txt")
+    filename = os.path.join(spec.prefix, "dummy.txt")
     with open(filename, "w") as script:
         script.write(spec.prefix)
 
     # Create the build cache and
     # put it directly into the mirror
 
-    mirror_path = join_path(tmpdir, 'test-mirror')
+    mirror_path = os.path.join(str(tmpdir), 'test-mirror')
     specs = [spec]
     spack.mirror.create(
         mirror_path, specs, no_checksum=True
@@ -286,18 +288,25 @@ def test_relocate():
 
 @pytest.mark.skipif(sys.platform != 'darwin',
                     reason="only works with Mach-o objects")
-def test_relocate_macho():
-    get_patchelf()
-    assert (needs_binary_relocation('Mach-O') is True)
-    macho_get_paths('/bin/bash')
-    shutil.copyfile('/bin/bash', 'bash')
-    modify_macho_object('bash', '/bin/bash', '/usr', '/opt', False)
-    modify_macho_object('bash', '/bin/bash', '/usr', '/opt', True)
-    shutil.copyfile('/usr/lib/libncurses.5.4.dylib', 'libncurses.5.4.dylib')
-    modify_macho_object('libncurses.5.4.dylib',
-                        '/usr/lib/libncurses.5.4.dylib', '/usr', '/opt', False)
-    modify_macho_object('libncurses.5.4.dylib',
-                        '/usr/lib/libncurses.5.4.dylib', '/usr', '/opt', True)
+def test_relocate_macho(tmpdir):
+    with tmpdir.as_cwd():
+        get_patchelf()
+        assert (needs_binary_relocation('Mach-O') is True)
+
+        macho_get_paths('/bin/bash')
+        shutil.copyfile('/bin/bash', 'bash')
+
+        modify_macho_object('bash', '/bin/bash', '/usr', '/opt', False)
+        modify_macho_object('bash', '/bin/bash', '/usr', '/opt', True)
+
+        shutil.copyfile(
+            '/usr/lib/libncurses.5.4.dylib', 'libncurses.5.4.dylib')
+        modify_macho_object(
+            'libncurses.5.4.dylib',
+            '/usr/lib/libncurses.5.4.dylib', '/usr', '/opt', False)
+        modify_macho_object(
+            'libncurses.5.4.dylib',
+            '/usr/lib/libncurses.5.4.dylib', '/usr', '/opt', True)
 
 
 @pytest.mark.skipif(sys.platform != 'linux2',
@@ -30,18 +30,10 @@
 import spack
 import spack.stage
 import spack.util.executable
-from llnl.util.filesystem import join_path
+from llnl.util.filesystem import join_path, working_dir
 from spack.stage import Stage
 
 
-def check_chdir_to_source(stage, stage_name):
-    stage_path = get_stage_path(stage, stage_name)
-    archive_dir = 'test-files'
-    assert join_path(
-        os.path.realpath(stage_path), archive_dir
-    ) == os.getcwd()
-
-
 def check_expand_archive(stage, stage_name, mock_archive):
     stage_path = get_stage_path(stage, stage_name)
     archive_name = 'test-files.tar.gz'
@@ -64,11 +56,6 @@ def check_fetch(stage, stage_name):
     assert join_path(stage_path, archive_name) == stage.fetcher.archive_file
 
 
-def check_chdir(stage, stage_name):
-    stage_path = get_stage_path(stage, stage_name)
-    assert os.path.realpath(stage_path) == os.getcwd()
-
-
 def check_destroy(stage, stage_name):
     """Figure out whether a stage was destroyed correctly."""
     stage_path = get_stage_path(stage, stage_name)
@@ -162,10 +149,9 @@ def mock_archive(tmpdir, monkeypatch):
     test_tmp_path.ensure(dir=True)
     test_readme.write('hello world!\n')
 
-    current = tmpdir.chdir()
-    tar = spack.util.executable.which('tar', required=True)
-    tar('czf', str(archive_name), 'test-files')
-    current.chdir()
+    with tmpdir.as_cwd():
+        tar = spack.util.executable.which('tar', required=True)
+        tar('czf', str(archive_name), 'test-files')
 
     # Make spack use the test environment for tmp stuff.
     monkeypatch.setattr(spack.stage, '_tmp_root', None)
@@ -180,9 +166,6 @@ def mock_archive(tmpdir, monkeypatch):
         test_tmp_dir=test_tmp_path,
         archive_dir=archive_dir
     )
-    # record this since this test changes to directories that will
-    # be removed.
-    current.chdir()
 
 
 @pytest.fixture()
@@ -245,18 +228,10 @@ def test_setup_and_destroy_no_name_without_tmp(self, mock_archive):
             check_setup(stage, None, mock_archive)
         check_destroy(stage, None)
 
-    def test_chdir(self, mock_archive):
-        with Stage(mock_archive.url, name=self.stage_name) as stage:
-            stage.chdir()
-            check_setup(stage, self.stage_name, mock_archive)
-            check_chdir(stage, self.stage_name)
-        check_destroy(stage, self.stage_name)
-
     def test_fetch(self, mock_archive):
         with Stage(mock_archive.url, name=self.stage_name) as stage:
             stage.fetch()
             check_setup(stage, self.stage_name, mock_archive)
-            check_chdir(stage, self.stage_name)
             check_fetch(stage, self.stage_name)
         check_destroy(stage, self.stage_name)
@@ -307,37 +282,23 @@ def test_expand_archive(self, mock_archive):
             check_expand_archive(stage, self.stage_name, mock_archive)
         check_destroy(stage, self.stage_name)
 
-    def test_expand_archive_with_chdir(self, mock_archive):
-        with Stage(mock_archive.url, name=self.stage_name) as stage:
-            stage.fetch()
-            check_setup(stage, self.stage_name, mock_archive)
-            check_fetch(stage, self.stage_name)
-            stage.expand_archive()
-            stage.chdir_to_source()
-            check_expand_archive(stage, self.stage_name, mock_archive)
-            check_chdir_to_source(stage, self.stage_name)
-        check_destroy(stage, self.stage_name)
-
     def test_restage(self, mock_archive):
         with Stage(mock_archive.url, name=self.stage_name) as stage:
             stage.fetch()
             stage.expand_archive()
-            stage.chdir_to_source()
-            check_expand_archive(stage, self.stage_name, mock_archive)
-            check_chdir_to_source(stage, self.stage_name)
 
-            # Try to make a file in the old archive dir
-            with open('foobar', 'w') as file:
-                file.write("this file is to be destroyed.")
+            with working_dir(stage.source_path):
+                check_expand_archive(stage, self.stage_name, mock_archive)
+
+                # Try to make a file in the old archive dir
+                with open('foobar', 'w') as file:
+                    file.write("this file is to be destroyed.")
 
             assert 'foobar' in os.listdir(stage.source_path)
 
             # Make sure the file is not there after restage.
             stage.restage()
-            check_chdir(stage, self.stage_name)
             check_fetch(stage, self.stage_name)
-            stage.chdir_to_source()
-            check_chdir_to_source(stage, self.stage_name)
             assert 'foobar' not in os.listdir(stage.source_path)
         check_destroy(stage, self.stage_name)
@@ -72,22 +72,23 @@ def test_fetch(
     finally:
         spack.insecure = False
 
-    assert h() == t.revision
+    with working_dir(pkg.stage.source_path):
+        assert h() == t.revision
 
-    file_path = join_path(pkg.stage.source_path, t.file)
-    assert os.path.isdir(pkg.stage.source_path)
-    assert os.path.isfile(file_path)
+        file_path = join_path(pkg.stage.source_path, t.file)
+        assert os.path.isdir(pkg.stage.source_path)
+        assert os.path.isfile(file_path)
 
-    os.unlink(file_path)
-    assert not os.path.isfile(file_path)
+        os.unlink(file_path)
+        assert not os.path.isfile(file_path)
 
-    untracked_file = 'foobarbaz'
-    touch(untracked_file)
-    assert os.path.isfile(untracked_file)
-    pkg.do_restage()
-    assert not os.path.isfile(untracked_file)
+        untracked_file = 'foobarbaz'
+        touch(untracked_file)
+        assert os.path.isfile(untracked_file)
+        pkg.do_restage()
+        assert not os.path.isfile(untracked_file)
 
-    assert os.path.isdir(pkg.stage.source_path)
-    assert os.path.isfile(file_path)
+        assert os.path.isdir(pkg.stage.source_path)
+        assert os.path.isfile(file_path)
 
-    assert h() == t.revision
+        assert h() == t.revision
@@ -72,13 +72,14 @@ def test_fetch(
     finally:
         spack.insecure = False
 
-    assert os.path.exists('configure')
-    assert is_exe('configure')
+    with working_dir(pkg.stage.source_path):
+        assert os.path.exists('configure')
+        assert is_exe('configure')
 
-    with open('configure') as f:
-        contents = f.read()
-    assert contents.startswith('#!/bin/sh')
-    assert 'echo Building...' in contents
+        with open('configure') as f:
+            contents = f.read()
+        assert contents.startswith('#!/bin/sh')
+        assert 'echo Building...' in contents
 
 
 def test_hash_detection(checksum_type):