Spack core and tests no longer use os.chdir()
- Spack's core package interface was previously overly stateful: calling methods like `do_stage()` could change your working directory.
- This removes Stage's `chdir` and `chdir_to_source` methods and replaces their usages with `with working_dir(stage.path)` and `with working_dir(stage.source_path)`, respectively. These ensure the original working directory is restored when the block exits.
- This not only makes the API more usable, it makes the tests more deterministic: previously a test could leave the current working directory in a bad state and cause subsequent tests to fail mysteriously.
Parent: 890c5ad329
Commit: c14f2dc7b4
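For context, `working_dir` is a context manager from `llnl.util.filesystem` that changes the process working directory for the duration of a block and switches back when the block exits. A minimal sketch of the pattern follows; it is not necessarily Spack's exact implementation (the `try`/`finally` here is an assumption, added so the directory is restored even if the block raises):

import os
from contextlib import contextmanager

@contextmanager
def working_dir(dirname):
    """Temporarily change the working directory, then restore it."""
    orig_dir = os.getcwd()   # remember where we started
    os.chdir(dirname)        # enter the target directory
    try:
        yield                # run the caller's block
    finally:
        os.chdir(orig_dir)   # always switch back, even on exceptions

Used the way this commit uses it:

with working_dir(stage.source_path):
    patch.apply(stage)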
@@ -25,7 +25,7 @@
 import os

 import llnl.util.tty as tty
-from llnl.util.filesystem import join_path, mkdirp
+from llnl.util.filesystem import mkdirp, working_dir

 import spack
 from spack.util.executable import ProcessError, which
@@ -47,7 +47,7 @@ def setup_parser(subparser):


 def get_origin_info(remote):
-    git_dir = join_path(spack.prefix, '.git')
+    git_dir = os.path.join(spack.prefix, '.git')
     git = which('git', required=True)
     try:
         branch = git('symbolic-ref', '--short', 'HEAD', output=str)
@@ -81,7 +81,7 @@ def clone(parser, args):

     mkdirp(prefix)

-    if os.path.exists(join_path(prefix, '.git')):
+    if os.path.exists(os.path.join(prefix, '.git')):
         tty.die("There already seems to be a git repository in %s" % prefix)

     files_in_the_way = os.listdir(prefix)
@@ -94,14 +94,14 @@ def clone(parser, args):
             "%s/bin/spack" % prefix,
             "%s/lib/spack/..." % prefix)

-    os.chdir(prefix)
+    with working_dir(prefix):
         git = which('git', required=True)
         git('init', '--shared', '-q')
         git('remote', 'add', 'origin', origin_url)
         git('fetch', 'origin', '%s:refs/remotes/origin/%s' % (branch, branch),
             '-n', '-q')
         git('reset', '--hard', 'origin/%s' % branch, '-q')
         git('checkout', '-B', branch, 'origin/%s' % branch, '-q')

         tty.msg("Successfully created a new spack in %s" % prefix,
                 "Run %s/bin/spack to use this installation." % prefix)
@@ -153,16 +153,15 @@ def matches(cls, args):

 @pattern.composite(interface=FetchStrategy)
 class FetchStrategyComposite(object):
-    """
-    Composite for a FetchStrategy object. Implements the GoF composite pattern.
+    """Composite for a FetchStrategy object.
+
+    Implements the GoF composite pattern.
     """
     matches = FetchStrategy.matches
     set_stage = FetchStrategy.set_stage


 class URLFetchStrategy(FetchStrategy):

     """FetchStrategy that pulls source code from a URL for an archive,
     checks the archive against a checksum,and decompresses the archive.
     """
@@ -200,8 +199,6 @@ def curl(self):

     @_needs_stage
     def fetch(self):
-        self.stage.chdir()
-
         if self.archive_file:
             tty.msg("Already downloaded %s" % self.archive_file)
             return
@@ -242,7 +239,8 @@ def fetch(self):

         # Run curl but grab the mime type from the http headers
         curl = self.curl
-        headers = curl(*curl_args, output=str, fail_on_error=False)
+        with working_dir(self.stage.path):
+            headers = curl(*curl_args, output=str, fail_on_error=False)

         if curl.returncode != 0:
             # clean up archive on failure.
@@ -310,7 +308,6 @@ def expand(self):

         tty.msg("Staging archive: %s" % self.archive_file)

-        self.stage.chdir()
         if not self.archive_file:
             raise NoArchiveFileError(
                 "Couldn't find archive file",
@@ -318,15 +315,17 @@ def expand(self):

         if not self.extension:
             self.extension = extension(self.archive_file)

         decompress = decompressor_for(self.archive_file, self.extension)

         # Expand all tarballs in their own directory to contain
         # exploding tarballs.
         tarball_container = os.path.join(self.stage.path,
                                          "spack-expanded-archive")

         mkdirp(tarball_container)
-        os.chdir(tarball_container)
+        with working_dir(tarball_container):
             decompress(self.archive_file)

         # Check for an exploding tarball, i.e. one that doesn't expand
         # to a single directory. If the tarball *didn't* explode,
@@ -345,10 +344,9 @@ def expand(self):
                 shutil.move(os.path.join(tarball_container, f),
                             os.path.join(self.stage.path, f))
             os.rmdir(tarball_container)

         if not files:
             os.rmdir(tarball_container)
-        # Set the wd back to the stage when done.
-        self.stage.chdir()

     def archive(self, destination):
         """Just moves this archive to the destination."""
@@ -416,8 +414,6 @@ def fetch(self):
         if not os.path.isfile(path):
             raise NoCacheError('No cache of %s' % path)

-        self.stage.chdir()
-
         # remove old symlink if one is there.
         filename = self.stage.save_filename
         if os.path.exists(filename):
@@ -484,8 +480,8 @@ def archive(self, destination, **kwargs):
             for p in patterns:
                 tar.add_default_arg('--exclude=%s' % p)

-        self.stage.chdir()
+        with working_dir(self.stage.path):
             tar('-czf', destination, os.path.basename(self.stage.source_path))

     def __str__(self):
         return "VCS: %s" % self.url
@@ -495,9 +491,8 @@ def __repr__(self):


 class GoFetchStrategy(VCSFetchStrategy):
-    """
-    Fetch strategy that employs the `go get` infrastructure
+    """Fetch strategy that employs the `go get` infrastructure.

     Use like this in a package:

        version('name',
@@ -530,25 +525,24 @@ def go(self):

     @_needs_stage
     def fetch(self):
-        self.stage.chdir()
-
         tty.msg("Trying to get go resource:", self.url)

-        try:
-            os.mkdir('go')
-        except OSError:
-            pass
-        env = dict(os.environ)
-        env['GOPATH'] = os.path.join(os.getcwd(), 'go')
-        self.go('get', '-v', '-d', self.url, env=env)
+        with working_dir(self.stage.path):
+            try:
+                os.mkdir('go')
+            except OSError:
+                pass
+            env = dict(os.environ)
+            env['GOPATH'] = os.path.join(os.getcwd(), 'go')
+            self.go('get', '-v', '-d', self.url, env=env)

     def archive(self, destination):
         super(GoFetchStrategy, self).archive(destination, exclude='.git')

     @_needs_stage
     def reset(self):
-        self.stage.chdir_to_source()
+        with working_dir(self.stage.source_path):
             self.go('clean')

     def __str__(self):
         return "[go] %s" % self.url
@@ -607,10 +601,7 @@ def git(self):
     def cachable(self):
         return bool(self.commit or self.tag)

-    @_needs_stage
     def fetch(self):
-        self.stage.chdir()
-
         if self.stage.source_path:
             tty.msg("Already fetched %s" % self.stage.source_path)
             return
@@ -622,20 +613,24 @@ def fetch(self):
             args = 'at tag %s' % self.tag
         elif self.branch:
             args = 'on branch %s' % self.branch

         tty.msg("Trying to clone git repository: %s %s" % (self.url, args))

+        git = self.git
         if self.commit:
             # Need to do a regular clone and check out everything if
             # they asked for a particular commit.
-            if spack.debug:
-                self.git('clone', self.url)
-            else:
-                self.git('clone', '--quiet', self.url)
-            self.stage.chdir_to_source()
-            if spack.debug:
-                self.git('checkout', self.commit)
-            else:
-                self.git('checkout', '--quiet', self.commit)
+            with working_dir(self.stage.path):
+                if spack.debug:
+                    git('clone', self.url)
+                else:
+                    git('clone', '--quiet', self.url)
+
+            with working_dir(self.stage.source_path):
+                if spack.debug:
+                    git('checkout', self.commit)
+                else:
+                    git('checkout', '--quiet', self.commit)

         else:
             # Can be more efficient if not checking out a specific commit.
@@ -654,57 +649,58 @@ def fetch(self):
             if self.git_version > ver('1.7.10'):
                 args.append('--single-branch')

-            cloned = False
-            # Yet more efficiency, only download a 1-commit deep tree
-            if self.git_version >= ver('1.7.1'):
-                try:
-                    self.git(*(args + ['--depth', '1', self.url]))
-                    cloned = True
-                except spack.error.SpackError:
-                    # This will fail with the dumb HTTP transport
-                    # continue and try without depth, cleanup first
-                    pass
+            with working_dir(self.stage.path):
+                cloned = False
+                # Yet more efficiency, only download a 1-commit deep tree
+                if self.git_version >= ver('1.7.1'):
+                    try:
+                        git(*(args + ['--depth', '1', self.url]))
+                        cloned = True
+                    except spack.error.SpackError:
+                        # This will fail with the dumb HTTP transport
+                        # continue and try without depth, cleanup first
+                        pass

                 if not cloned:
                     args.append(self.url)
-                    self.git(*args)
+                    git(*args)

-            self.stage.chdir_to_source()
-
-            # For tags, be conservative and check them out AFTER
-            # cloning. Later git versions can do this with clone
-            # --branch, but older ones fail.
-            if self.tag and self.git_version < ver('1.8.5.2'):
-                # pull --tags returns a "special" error code of 1 in
-                # older versions that we have to ignore.
-                # see: https://github.com/git/git/commit/19d122b
-                if spack.debug:
-                    self.git('pull', '--tags', ignore_errors=1)
-                    self.git('checkout', self.tag)
-                else:
-                    self.git('pull', '--quiet', '--tags', ignore_errors=1)
-                    self.git('checkout', '--quiet', self.tag)
+            with working_dir(self.stage.source_path):
+                # For tags, be conservative and check them out AFTER
+                # cloning. Later git versions can do this with clone
+                # --branch, but older ones fail.
+                if self.tag and self.git_version < ver('1.8.5.2'):
+                    # pull --tags returns a "special" error code of 1 in
+                    # older versions that we have to ignore.
+                    # see: https://github.com/git/git/commit/19d122b
+                    if spack.debug:
+                        git('pull', '--tags', ignore_errors=1)
+                        git('checkout', self.tag)
+                    else:
+                        git('pull', '--quiet', '--tags', ignore_errors=1)
+                        git('checkout', '--quiet', self.tag)

-        # Init submodules if the user asked for them.
-        if self.submodules:
-            if spack.debug:
-                self.git('submodule', 'update', '--init', '--recursive')
-            else:
-                self.git('submodule', '--quiet', 'update', '--init',
-                         '--recursive')
+        with working_dir(self.stage.source_path):
+            # Init submodules if the user asked for them.
+            if self.submodules:
+                if spack.debug:
+                    git('submodule', 'update', '--init', '--recursive')
+                else:
+                    git('submodule', '--quiet', 'update', '--init',
+                        '--recursive')

     def archive(self, destination):
         super(GitFetchStrategy, self).archive(destination, exclude='.git')

     @_needs_stage
     def reset(self):
-        self.stage.chdir_to_source()
+        with working_dir(self.stage.source_path):
             if spack.debug:
                 self.git('checkout', '.')
                 self.git('clean', '-f')
             else:
                 self.git('checkout', '--quiet', '.')
                 self.git('clean', '--quiet', '-f')

     def __str__(self):
         return "[git] %s" % self.url
@@ -749,8 +745,6 @@ def cachable(self):

     @_needs_stage
     def fetch(self):
-        self.stage.chdir()
-
         if self.stage.source_path:
             tty.msg("Already fetched %s" % self.stage.source_path)
             return
@@ -762,30 +756,31 @@ def fetch(self):
             args += ['-r', self.revision]
         args.append(self.url)

-        self.svn(*args)
-        self.stage.chdir_to_source()
+        with working_dir(self.stage.path):
+            self.svn(*args)

     def _remove_untracked_files(self):
         """Removes untracked files in an svn repository."""
-        status = self.svn('status', '--no-ignore', output=str)
-        self.svn('status', '--no-ignore')
-        for line in status.split('\n'):
-            if not re.match('^[I?]', line):
-                continue
-            path = line[8:].strip()
-            if os.path.isfile(path):
-                os.unlink(path)
-            elif os.path.isdir(path):
-                shutil.rmtree(path, ignore_errors=True)
+        with working_dir(self.stage.source_path):
+            status = self.svn('status', '--no-ignore', output=str)
+            self.svn('status', '--no-ignore')
+            for line in status.split('\n'):
+                if not re.match('^[I?]', line):
+                    continue
+                path = line[8:].strip()
+                if os.path.isfile(path):
+                    os.unlink(path)
+                elif os.path.isdir(path):
+                    shutil.rmtree(path, ignore_errors=True)

     def archive(self, destination):
         super(SvnFetchStrategy, self).archive(destination, exclude='.svn')

     @_needs_stage
     def reset(self):
-        self.stage.chdir_to_source()
         self._remove_untracked_files()
-        self.svn('revert', '.', '-R')
+        with working_dir(self.stage.source_path):
+            self.svn('revert', '.', '-R')

     def __str__(self):
         return "[svn] %s" % self.url
@@ -845,8 +840,6 @@ def cachable(self):

     @_needs_stage
     def fetch(self):
-        self.stage.chdir()
-
         if self.stage.source_path:
             tty.msg("Already fetched %s" % self.stage.source_path)
             return
@@ -866,27 +859,26 @@ def fetch(self):
         if self.revision:
             args.extend(['-r', self.revision])

-        self.hg(*args)
+        with working_dir(self.stage.path):
+            self.hg(*args)

     def archive(self, destination):
         super(HgFetchStrategy, self).archive(destination, exclude='.hg')

     @_needs_stage
     def reset(self):
-        self.stage.chdir()
-
-        source_path = self.stage.source_path
-        scrubbed = "scrubbed-source-tmp"
-
-        args = ['clone']
-        if self.revision:
-            args += ['-r', self.revision]
-        args += [source_path, scrubbed]
-        self.hg(*args)
-
-        shutil.rmtree(source_path, ignore_errors=True)
-        shutil.move(scrubbed, source_path)
-        self.stage.chdir_to_source()
+        with working_dir(self.stage.path):
+            source_path = self.stage.source_path
+            scrubbed = "scrubbed-source-tmp"
+
+            args = ['clone']
+            if self.revision:
+                args += ['-r', self.revision]
+            args += [source_path, scrubbed]
+            self.hg(*args)
+
+            shutil.rmtree(source_path, ignore_errors=True)
+            shutil.move(scrubbed, source_path)

     def __str__(self):
         return "[hg] %s" % self.url
@@ -984,14 +984,15 @@ def do_fetch(self, mirror_only=False):
         self.stage.cache_local()

     def do_stage(self, mirror_only=False):
-        """Unpacks the fetched tarball, then changes into the expanded tarball
-           directory."""
+        """Unpacks and expands the fetched tarball."""
         if not self.spec.concrete:
             raise ValueError("Can only stage concrete packages.")

         self.do_fetch(mirror_only)
         self.stage.expand_archive()
-        self.stage.chdir_to_source()
+
+        if not os.listdir(self.stage.path):
+            raise FetchError("Archive was empty for %s" % self.name)

     @classmethod
     def lookup_patch(cls, sha256):
@@ -1059,8 +1060,6 @@ def do_patch(self):
             tty.msg("Patching failed last time. Restaging.")
             self.stage.restage()

-        self.stage.chdir_to_source()
-
         # If this file exists, then we already applied all the patches.
         if os.path.isfile(good_file):
             tty.msg("Already patched %s" % self.name)
@@ -1073,7 +1072,8 @@ def do_patch(self):
         patched = False
         for patch in patches:
             try:
-                patch.apply(self.stage)
+                with working_dir(self.stage.source_path):
+                    patch.apply(self.stage)
                 tty.msg('Applied patch %s' % patch.path_or_url)
                 patched = True
             except:
@@ -1084,7 +1084,8 @@ def do_patch(self):

         if has_patch_fun:
             try:
-                self.patch()
+                with working_dir(self.stage.source_path):
+                    self.patch()
                 tty.msg("Ran patch() for %s" % self.name)
                 patched = True
             except spack.multimethod.NoSuchMethodError:
@@ -1384,23 +1385,23 @@ def build_process():
             install_tree(self.stage.source_path, src_target)

         # Do the real install in the source directory.
-        self.stage.chdir_to_source()
-
-        # Save the build environment in a file before building.
-        dump_environment(self.env_path)
-
-        # Spawn a daemon that reads from a pipe and redirects
-        # everything to log_path
-        with log_output(self.log_path, echo, True) as logger:
-            for phase_name, phase_attr in zip(
-                    self.phases, self._InstallPhase_phases):
-
-                with logger.force_echo():
-                    tty.msg("Executing phase: '%s'" % phase_name)
-
-                # Redirect stdout and stderr to daemon pipe
-                phase = getattr(self, phase_attr)
-                phase(self.spec, self.prefix)
+        with working_dir(self.stage.source_path):
+            # Save the build environment in a file before building.
+            dump_environment(self.env_path)
+
+            # Spawn a daemon that reads from a pipe and redirects
+            # everything to log_path
+            with log_output(self.log_path, echo, True) as logger:
+                for phase_name, phase_attr in zip(
+                        self.phases, self._InstallPhase_phases):
+
+                    with logger.force_echo():
+                        tty.msg(
+                            "Executing phase: '%s'" % phase_name)
+
+                    # Redirect stdout and stderr to daemon pipe
+                    phase = getattr(self, phase_attr)
+                    phase(self.spec, self.prefix)

         echo = logger.echo
         self.log()
@@ -366,18 +366,8 @@ def source_path(self):
                 return p
         return None

-    def chdir(self):
-        """Changes directory to the stage path. Or dies if it is not set
-           up."""
-        if os.path.isdir(self.path):
-            os.chdir(self.path)
-        else:
-            raise ChdirError("Setup failed: no such directory: " + self.path)
-
     def fetch(self, mirror_only=False):
         """Downloads an archive or checks out code from a repository."""
-        self.chdir()
-
         fetchers = []
         if not mirror_only:
             fetchers.append(self.default_fetcher)
@@ -478,18 +468,6 @@ def expand_archive(self):
         else:
             tty.msg("Already staged %s in %s" % (self.name, self.path))

-    def chdir_to_source(self):
-        """Changes directory to the expanded archive directory.
-        Dies with an error if there was no expanded archive.
-        """
-        path = self.source_path
-        if not path:
-            raise StageError("Attempt to chdir before expanding archive.")
-        else:
-            os.chdir(path)
-            if not os.listdir(path):
-                raise StageError("Archive was empty for %s" % self.name)
-
     def restage(self):
         """Removes the expanded archive path if it exists, then re-expands
         the archive.
@@ -613,9 +591,6 @@ def source_path(self):
     def path(self):
         return self[0].path

-    def chdir_to_source(self):
-        return self[0].chdir_to_source()
-
     @property
     def archive_file(self):
         return self[0].archive_file
@@ -634,12 +609,6 @@ def __init__(self, path):
         self.source_path = path
         self.created = True

-    def chdir(self):
-        if os.path.isdir(self.path):
-            os.chdir(self.path)
-        else:
-            raise ChdirError("Setup failed: no such directory: " + self.path)
-
     # DIY stages do nothing as context managers.
     def __enter__(self):
         pass
@@ -647,9 +616,6 @@ def __enter__(self):
     def __exit__(self, exc_type, exc_val, exc_tb):
         pass

-    def chdir_to_source(self):
-        self.chdir()
-
     def fetch(self, *args, **kwargs):
         tty.msg("No need to fetch for DIY.")
@@ -701,9 +667,5 @@ class RestageError(StageError):
     """"Error encountered during restaging."""


-class ChdirError(StageError):
-    """Raised when Spack can't change directories."""
-
-
 # Keep this in namespace for convenience
 FailedDownloadError = fs.FailedDownloadError
@@ -57,8 +57,8 @@ def test_install_package_and_dependency(
         tmpdir, builtin_mock, mock_archive, mock_fetch, config,
         install_mockery):

-    tmpdir.chdir()
+    with tmpdir.as_cwd():
         install('--log-format=junit', '--log-file=test.xml', 'libdwarf')

     files = tmpdir.listdir()
     filename = tmpdir.join('test.xml')
@@ -104,9 +104,9 @@ def test_install_package_already_installed(
         tmpdir, builtin_mock, mock_archive, mock_fetch, config,
         install_mockery):

-    tmpdir.chdir()
+    with tmpdir.as_cwd():
         install('libdwarf')
         install('--log-format=junit', '--log-file=test.xml', 'libdwarf')

     files = tmpdir.listdir()
     filename = tmpdir.join('test.xml')
@@ -49,9 +49,28 @@
 from spack.version import Version


-##########
-# Monkey-patching that is applied to all tests
-##########
+#
+# These fixtures are applied to all tests
+#
+@pytest.fixture(scope='function', autouse=True)
+def no_chdir():
+    """Ensure that no test changes Spack's working dirctory.
+
+    This prevents Spack tests (and therefore Spack commands) from
+    changing the working directory and causing other tests to fail
+    mysteriously. Tests should use ``working_dir`` or ``py.path``'s
+    ``.as_cwd()`` instead of ``os.chdir`` to avoid failing this check.
+
+    We assert that the working directory hasn't changed, unless the
+    original wd somehow ceased to exist.
+
+    """
+    original_wd = os.getcwd()
+    yield
+    if os.path.isdir(original_wd):
+        assert os.getcwd() == original_wd
+
+
 @pytest.fixture(autouse=True)
 def mock_fetch_cache(monkeypatch):
     """Substitutes spack.fetch_cache with a mock object that does nothing
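The `no_chdir` fixture above is autouse, so any test that leaks a working-directory change now fails at teardown. As a hypothetical illustration (not part of the commit), the first test below would trip the assertion while the second passes:

import os

def test_leaks_cwd(tmpdir):
    os.chdir(str(tmpdir))    # never restored; no_chdir fails this test

def test_well_behaved(tmpdir):
    with tmpdir.as_cwd():    # py.path restores the old cwd on exit
        assert os.getcwd() == str(tmpdir)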
@@ -200,12 +219,11 @@ def database(tmpdir_factory, builtin_mock, config):

     # Make a fake install directory
     install_path = tmpdir_factory.mktemp('install_for_database')
-    spack_install_path = py.path.local(spack.store.root)
-    spack.store.root = str(install_path)
+    spack_install_path = spack.store.root

+    spack.store.root = str(install_path)
     install_layout = spack.directory_layout.YamlDirectoryLayout(
-        str(install_path)
-    )
+        str(install_path))
     spack_install_layout = spack.store.layout
     spack.store.layout = install_layout

@@ -216,14 +234,12 @@ def database(tmpdir_factory, builtin_mock, config):

     Entry = collections.namedtuple('Entry', ['path', 'layout', 'db'])
     Database = collections.namedtuple(
-        'Database', ['real', 'mock', 'install', 'uninstall', 'refresh']
-    )
+        'Database', ['real', 'mock', 'install', 'uninstall', 'refresh'])

     real = Entry(
         path=spack_install_path,
         layout=spack_install_layout,
-        db=spack_install_db
-    )
+        db=spack_install_db)
     mock = Entry(path=install_path, layout=install_layout, db=install_db)

     def _install(spec):
@@ -249,8 +265,8 @@ def _refresh():
         mock=mock,
         install=_install,
         uninstall=_uninstall,
-        refresh=_refresh
-    )
+        refresh=_refresh)

     # Transaction used to avoid repeated writes.
     with spack.store.db.write_transaction():
         t.install('mpileaks ^mpich')
@@ -268,7 +284,7 @@ def _refresh():
             spack.store.db.remove(spec)

     install_path.remove(rec=1)
-    spack.store.root = str(spack_install_path)
+    spack.store.root = spack_install_path
     spack.store.layout = spack_install_layout
     spack.store.db = spack_install_db

@@ -285,11 +301,13 @@ def install_mockery(tmpdir, config, builtin_mock):
     """Hooks a fake install directory and a fake db into Spack."""
     layout = spack.store.layout
     db = spack.store.db
+    new_opt = str(tmpdir.join('opt'))
+
     # Use a fake install directory to avoid conflicts bt/w
     # installed pkgs and mock packages.
-    spack.store.layout = spack.directory_layout.YamlDirectoryLayout(
-        str(tmpdir))
-    spack.store.db = spack.database.Database(str(tmpdir))
+    spack.store.layout = spack.directory_layout.YamlDirectoryLayout(new_opt)
+    spack.store.db = spack.database.Database(new_opt)
+
     # We use a fake package, so skip the checksum.
     spack.do_checksum = False
     yield
@@ -328,6 +346,7 @@ def mock_archive():
     """
     tar = spack.util.executable.which('tar', required=True)
     stage = spack.stage.Stage('mock-archive-stage')
+
     tmpdir = py.path.local(stage.path)
     repo_name = 'mock-archive-repo'
     tmpdir.ensure(repo_name, dir=True)
@@ -350,10 +369,10 @@ def mock_archive():
     os.chmod(configure_path, 0o755)

     # Archive it
-    current = tmpdir.chdir()
+    with tmpdir.as_cwd():
         archive_name = '{0}.tar.gz'.format(repo_name)
         tar('-czf', archive_name, repo_name)
-    current.chdir()

     Archive = collections.namedtuple('Archive',
                                      ['url', 'path', 'archive_file'])
     archive_file = str(tmpdir.join(archive_name))
@@ -379,46 +398,45 @@ def mock_git_repository():
     repodir = tmpdir.join(repo_name)

     # Initialize the repository
-    current = repodir.chdir()
+    with repodir.as_cwd():
         git('init')
         url = 'file://' + str(repodir)

         # r0 is just the first commit
         r0_file = 'r0_file'
         repodir.ensure(r0_file)
         git('add', r0_file)
         git('commit', '-m', 'mock-git-repo r0')

         branch = 'test-branch'
         branch_file = 'branch_file'
         git('branch', branch)

         tag_branch = 'tag-branch'
         tag_file = 'tag_file'
         git('branch', tag_branch)

         # Check out first branch
         git('checkout', branch)
         repodir.ensure(branch_file)
         git('add', branch_file)
         git('commit', '-m' 'r1 test branch')

         # Check out a second branch and tag it
         git('checkout', tag_branch)
         repodir.ensure(tag_file)
         git('add', tag_file)
         git('commit', '-m' 'tag test branch')

         tag = 'test-tag'
         git('tag', tag)

         git('checkout', 'master')

         # R1 test is the same as test for branch
         rev_hash = lambda x: git('rev-parse', x, output=str).strip()
         r1 = rev_hash(branch)
         r1_file = branch_file
-    current.chdir()

     Bunch = spack.util.pattern.Bunch

@@ -457,22 +475,23 @@ def mock_hg_repository():
     get_rev = lambda: hg('id', '-i', output=str).strip()

     # Initialize the repository
-    current = repodir.chdir()
-    url = 'file://' + str(repodir)
-    hg('init')
-    # Commit file r0
-    r0_file = 'r0_file'
-    repodir.ensure(r0_file)
-    hg('add', r0_file)
-    hg('commit', '-m', 'revision 0', '-u', 'test')
-    r0 = get_rev()
-    # Commit file r1
-    r1_file = 'r1_file'
-    repodir.ensure(r1_file)
-    hg('add', r1_file)
-    hg('commit', '-m' 'revision 1', '-u', 'test')
-    r1 = get_rev()
-    current.chdir()
+    with repodir.as_cwd():
+        url = 'file://' + str(repodir)
+        hg('init')
+
+        # Commit file r0
+        r0_file = 'r0_file'
+        repodir.ensure(r0_file)
+        hg('add', r0_file)
+        hg('commit', '-m', 'revision 0', '-u', 'test')
+        r0 = get_rev()
+
+        # Commit file r1
+        r1_file = 'r1_file'
+        repodir.ensure(r1_file)
+        hg('add', r1_file)
+        hg('commit', '-m' 'revision 1', '-u', 'test')
+        r1 = get_rev()

     Bunch = spack.util.pattern.Bunch

@@ -497,64 +516,64 @@ def mock_svn_repository():
     svn = spack.util.executable.which('svn', required=True)
     svnadmin = spack.util.executable.which('svnadmin', required=True)
     stage = spack.stage.Stage('mock-svn-stage')

     tmpdir = py.path.local(stage.path)
     repo_name = 'mock-svn-repo'
     tmpdir.ensure(repo_name, dir=True)
     repodir = tmpdir.join(repo_name)
     url = 'file://' + str(repodir)

     # Initialize the repository
-    current = repodir.chdir()
+    with repodir.as_cwd():
         # NOTE: Adding --pre-1.5-compatible works for NERSC
         # Unknown if this is also an issue at other sites.
         svnadmin('create', '--pre-1.5-compatible', str(repodir))

         # Import a structure (first commit)
         r0_file = 'r0_file'
         tmpdir.ensure('tmp-path', r0_file)
-    svn(
-        'import',
-        str(tmpdir.join('tmp-path')),
-        url,
-        '-m',
-        'Initial import r0'
-    )
-    shutil.rmtree(str(tmpdir.join('tmp-path')))
-    # Second commit
-    r1_file = 'r1_file'
-    svn('checkout', url, str(tmpdir.join('tmp-path')))
-    tmpdir.ensure('tmp-path', r1_file)
-    tmpdir.join('tmp-path').chdir()
-    svn('add', str(tmpdir.ensure('tmp-path', r1_file)))
-    svn('ci', '-m', 'second revision r1')
-    repodir.chdir()
-    shutil.rmtree(str(tmpdir.join('tmp-path')))
-    r0 = '1'
-    r1 = '2'
+        tmp_path = tmpdir.join('tmp-path')
+        svn('import',
+            str(tmp_path),
+            url,
+            '-m',
+            'Initial import r0')
+        tmp_path.remove()
+
+        # Second commit
+        r1_file = 'r1_file'
+        svn('checkout', url, str(tmp_path))
+        tmpdir.ensure('tmp-path', r1_file)
+
+        with tmp_path.as_cwd():
+            svn('add', str(tmpdir.ensure('tmp-path', r1_file)))
+            svn('ci', '-m', 'second revision r1')
+
+        tmp_path.remove()
+        r0 = '1'
+        r1 = '2'

     Bunch = spack.util.pattern.Bunch

     checks = {
         'default': Bunch(
-            revision=r1, file=r1_file, args={'svn': url}
-        ),
+            revision=r1, file=r1_file, args={'svn': url}),
         'rev0': Bunch(
             revision=r0, file=r0_file, args={
-                'svn': url, 'revision': r0
-            }
-        )
+                'svn': url, 'revision': r0})
     }

     def get_rev():
         output = svn('info', output=str)
         assert "Revision" in output
         for line in output.split('\n'):
             match = re.match(r'Revision: (\d+)', line)
             if match:
                 return match.group(1)

     t = Bunch(checks=checks, url=url, hash=get_rev, path=str(repodir))
+
     yield t
-    current.chdir()
@@ -47,7 +47,6 @@ def test_inspect_path(tmpdir):
         '': ['CMAKE_PREFIX_PATH']
     }

-    tmpdir.chdir()
     tmpdir.mkdir('bin')
     tmpdir.mkdir('lib')
     tmpdir.mkdir('include')
@@ -72,22 +72,23 @@ def test_fetch(
     finally:
         spack.insecure = False

-    assert h('HEAD') == h(t.revision)
+    with working_dir(pkg.stage.source_path):
+        assert h('HEAD') == h(t.revision)

         file_path = join_path(pkg.stage.source_path, t.file)
         assert os.path.isdir(pkg.stage.source_path)
         assert os.path.isfile(file_path)

         os.unlink(file_path)
         assert not os.path.isfile(file_path)

         untracked_file = 'foobarbaz'
         touch(untracked_file)
         assert os.path.isfile(untracked_file)
         pkg.do_restage()
         assert not os.path.isfile(untracked_file)

         assert os.path.isdir(pkg.stage.source_path)
         assert os.path.isfile(file_path)

         assert h('HEAD') == h(t.revision)
@@ -72,22 +72,23 @@ def test_fetch(
     finally:
         spack.insecure = False

-    assert h() == t.revision
+    with working_dir(pkg.stage.source_path):
+        assert h() == t.revision

         file_path = join_path(pkg.stage.source_path, t.file)
         assert os.path.isdir(pkg.stage.source_path)
         assert os.path.isfile(file_path)

         os.unlink(file_path)
         assert not os.path.isfile(file_path)

         untracked_file = 'foobarbaz'
         touch(untracked_file)
         assert os.path.isfile(untracked_file)
         pkg.do_restage()
         assert not os.path.isfile(untracked_file)

         assert os.path.isdir(pkg.stage.source_path)
         assert os.path.isfile(file_path)

         assert h() == t.revision
@@ -70,11 +70,7 @@ def check_mirror():
         # register mirror with spack config
         mirrors = {'spack-mirror-test': 'file://' + mirror_root}
         spack.config.update_config('mirrors', mirrors)
-
-        os.chdir(stage.path)
-        spack.mirror.create(
-            mirror_root, repos, no_checksum=True
-        )
+        spack.mirror.create(mirror_root, repos, no_checksum=True)

         # Stage directory exists
         assert os.path.isdir(mirror_root)
@@ -25,20 +25,22 @@
 """
 This test checks the binary packaging infrastructure
 """
-import pytest
-import spack
-import spack.store
-from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite
-from spack.spec import Spec
-import spack.binary_distribution as bindist
-from llnl.util.filesystem import join_path, mkdirp
-import argparse
-import spack.cmd.buildcache as buildcache
-from spack.relocate import *
 import os
 import stat
 import sys
 import shutil
+import pytest
+import argparse
+
+from llnl.util.filesystem import mkdirp
+
+import spack
+import spack.store
+import spack.binary_distribution as bindist
+import spack.cmd.buildcache as buildcache
+from spack.spec import Spec
+from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite
+from spack.relocate import *
 from spack.util.executable import ProcessError

@@ -72,8 +74,8 @@ def test_packaging(mock_archive, tmpdir):
     spec.concretize()
     pkg = spack.repo.get(spec)
     fake_fetchify(pkg.fetcher, pkg)
-    mkdirp(join_path(pkg.prefix, "bin"))
-    patchelfscr = join_path(pkg.prefix, "bin", "patchelf")
+    mkdirp(os.path.join(pkg.prefix, "bin"))
+    patchelfscr = os.path.join(pkg.prefix, "bin", "patchelf")
     f = open(patchelfscr, 'w')
     body = """#!/bin/bash
 echo $PATH"""
@@ -91,14 +93,14 @@ def test_packaging(mock_archive, tmpdir):
     pkg.do_install()

     # Put some non-relocatable file in there
-    filename = join_path(spec.prefix, "dummy.txt")
+    filename = os.path.join(spec.prefix, "dummy.txt")
     with open(filename, "w") as script:
         script.write(spec.prefix)

     # Create the build cache and
     # put it directly into the mirror

-    mirror_path = join_path(tmpdir, 'test-mirror')
+    mirror_path = os.path.join(str(tmpdir), 'test-mirror')
     specs = [spec]
     spack.mirror.create(
         mirror_path, specs, no_checksum=True
@@ -286,18 +288,25 @@ def test_relocate():

 @pytest.mark.skipif(sys.platform != 'darwin',
                     reason="only works with Mach-o objects")
-def test_relocate_macho():
-    get_patchelf()
-    assert (needs_binary_relocation('Mach-O') is True)
-    macho_get_paths('/bin/bash')
-    shutil.copyfile('/bin/bash', 'bash')
-    modify_macho_object('bash', '/bin/bash', '/usr', '/opt', False)
-    modify_macho_object('bash', '/bin/bash', '/usr', '/opt', True)
-    shutil.copyfile('/usr/lib/libncurses.5.4.dylib', 'libncurses.5.4.dylib')
-    modify_macho_object('libncurses.5.4.dylib',
-                        '/usr/lib/libncurses.5.4.dylib', '/usr', '/opt', False)
-    modify_macho_object('libncurses.5.4.dylib',
-                        '/usr/lib/libncurses.5.4.dylib', '/usr', '/opt', True)
+def test_relocate_macho(tmpdir):
+    with tmpdir.as_cwd():
+        get_patchelf()
+        assert (needs_binary_relocation('Mach-O') is True)
+
+        macho_get_paths('/bin/bash')
+        shutil.copyfile('/bin/bash', 'bash')
+
+        modify_macho_object('bash', '/bin/bash', '/usr', '/opt', False)
+        modify_macho_object('bash', '/bin/bash', '/usr', '/opt', True)
+
+        shutil.copyfile(
+            '/usr/lib/libncurses.5.4.dylib', 'libncurses.5.4.dylib')
+        modify_macho_object(
+            'libncurses.5.4.dylib',
+            '/usr/lib/libncurses.5.4.dylib', '/usr', '/opt', False)
+        modify_macho_object(
+            'libncurses.5.4.dylib',
+            '/usr/lib/libncurses.5.4.dylib', '/usr', '/opt', True)


 @pytest.mark.skipif(sys.platform != 'linux2',
@@ -30,18 +30,10 @@
 import spack
 import spack.stage
 import spack.util.executable
-from llnl.util.filesystem import join_path
+from llnl.util.filesystem import join_path, working_dir
 from spack.stage import Stage


-def check_chdir_to_source(stage, stage_name):
-    stage_path = get_stage_path(stage, stage_name)
-    archive_dir = 'test-files'
-    assert join_path(
-        os.path.realpath(stage_path), archive_dir
-    ) == os.getcwd()
-
-
 def check_expand_archive(stage, stage_name, mock_archive):
     stage_path = get_stage_path(stage, stage_name)
     archive_name = 'test-files.tar.gz'
@@ -64,11 +56,6 @@ def check_fetch(stage, stage_name):
     assert join_path(stage_path, archive_name) == stage.fetcher.archive_file


-def check_chdir(stage, stage_name):
-    stage_path = get_stage_path(stage, stage_name)
-    assert os.path.realpath(stage_path) == os.getcwd()
-
-
 def check_destroy(stage, stage_name):
     """Figure out whether a stage was destroyed correctly."""
     stage_path = get_stage_path(stage, stage_name)
@@ -162,10 +149,9 @@ def mock_archive(tmpdir, monkeypatch):
     test_tmp_path.ensure(dir=True)
     test_readme.write('hello world!\n')

-    current = tmpdir.chdir()
+    with tmpdir.as_cwd():
         tar = spack.util.executable.which('tar', required=True)
         tar('czf', str(archive_name), 'test-files')
-    current.chdir()

     # Make spack use the test environment for tmp stuff.
     monkeypatch.setattr(spack.stage, '_tmp_root', None)
@@ -180,9 +166,6 @@ def mock_archive(tmpdir, monkeypatch):
         test_tmp_dir=test_tmp_path,
         archive_dir=archive_dir
     )
-    # record this since this test changes to directories that will
-    # be removed.
-    current.chdir()


 @pytest.fixture()
@@ -245,18 +228,10 @@ def test_setup_and_destroy_no_name_without_tmp(self, mock_archive):
             check_setup(stage, None, mock_archive)
         check_destroy(stage, None)

-    def test_chdir(self, mock_archive):
-        with Stage(mock_archive.url, name=self.stage_name) as stage:
-            stage.chdir()
-            check_setup(stage, self.stage_name, mock_archive)
-            check_chdir(stage, self.stage_name)
-        check_destroy(stage, self.stage_name)
-
     def test_fetch(self, mock_archive):
         with Stage(mock_archive.url, name=self.stage_name) as stage:
             stage.fetch()
             check_setup(stage, self.stage_name, mock_archive)
-            check_chdir(stage, self.stage_name)
             check_fetch(stage, self.stage_name)
         check_destroy(stage, self.stage_name)

@@ -307,37 +282,23 @@ def test_expand_archive(self, mock_archive):
             check_expand_archive(stage, self.stage_name, mock_archive)
         check_destroy(stage, self.stage_name)

-    def test_expand_archive_with_chdir(self, mock_archive):
-        with Stage(mock_archive.url, name=self.stage_name) as stage:
-            stage.fetch()
-            check_setup(stage, self.stage_name, mock_archive)
-            check_fetch(stage, self.stage_name)
-            stage.expand_archive()
-            stage.chdir_to_source()
-            check_expand_archive(stage, self.stage_name, mock_archive)
-            check_chdir_to_source(stage, self.stage_name)
-        check_destroy(stage, self.stage_name)
-
     def test_restage(self, mock_archive):
         with Stage(mock_archive.url, name=self.stage_name) as stage:
             stage.fetch()
             stage.expand_archive()
-            stage.chdir_to_source()
-            check_expand_archive(stage, self.stage_name, mock_archive)
-            check_chdir_to_source(stage, self.stage_name)

-            # Try to make a file in the old archive dir
-            with open('foobar', 'w') as file:
-                file.write("this file is to be destroyed.")
+            with working_dir(stage.source_path):
+                check_expand_archive(stage, self.stage_name, mock_archive)
+
+                # Try to make a file in the old archive dir
+                with open('foobar', 'w') as file:
+                    file.write("this file is to be destroyed.")

             assert 'foobar' in os.listdir(stage.source_path)

             # Make sure the file is not there after restage.
             stage.restage()
-            check_chdir(stage, self.stage_name)
             check_fetch(stage, self.stage_name)
-            stage.chdir_to_source()
-            check_chdir_to_source(stage, self.stage_name)
             assert 'foobar' not in os.listdir(stage.source_path)
         check_destroy(stage, self.stage_name)

@@ -72,22 +72,23 @@ def test_fetch(
     finally:
         spack.insecure = False

-    assert h() == t.revision
+    with working_dir(pkg.stage.source_path):
+        assert h() == t.revision

         file_path = join_path(pkg.stage.source_path, t.file)
         assert os.path.isdir(pkg.stage.source_path)
         assert os.path.isfile(file_path)

         os.unlink(file_path)
         assert not os.path.isfile(file_path)

         untracked_file = 'foobarbaz'
         touch(untracked_file)
         assert os.path.isfile(untracked_file)
         pkg.do_restage()
         assert not os.path.isfile(untracked_file)

         assert os.path.isdir(pkg.stage.source_path)
         assert os.path.isfile(file_path)

         assert h() == t.revision
@@ -72,13 +72,14 @@ def test_fetch(
     finally:
         spack.insecure = False

-    assert os.path.exists('configure')
-    assert is_exe('configure')
+    with working_dir(pkg.stage.source_path):
+        assert os.path.exists('configure')
+        assert is_exe('configure')

         with open('configure') as f:
             contents = f.read()
             assert contents.startswith('#!/bin/sh')
             assert 'echo Building...' in contents


 def test_hash_detection(checksum_type):