stage: make source_path available before stage is built

- `stage.source_path` was previously overloaded: it returned `None` if the
  source directory did not exist yet, and client code relied on that
  - we want to be able to know the `source_path` before it is created
- make `stage.source_path` available before it exists
  - use a well-known stage source path name, `$stage_path/src`, that is
    available as soon as the `Stage` is instantiated but does not exist
    until the archive is "expanded"
  - client code can now use the path before the stage is created
  - client code can test whether the tarball is expanded with the new
    `stage.expanded` property instead of testing whether `source_path` is
    `None` (see the usage sketch below)
- add tests for the new `source_path` semantics
commit 1842873f85 (parent eb584d895b)
committed by Todd Gamblin
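The client-facing behavior described above boils down to the following. This is a minimal sketch, not part of the commit: it assumes a Spack checkout that includes this change, and the archive URL and stage name are placeholders.

```python
import os

from spack.stage import Stage

# Placeholder URL; any fetchable archive would do.
url = 'file:///path/to/archive.tar.gz'

with Stage(url, name='demo-stage') as stage:
    # The source path is now known as soon as the Stage exists, and it
    # always points at the well-known '<stage path>/src' directory...
    assert stage.source_path.endswith('src')
    # ...but nothing is on disk there until the archive is expanded.
    assert not stage.expanded
    assert not os.path.exists(stage.source_path)

    stage.fetch()
    stage.expand_archive()

    # Client code now checks `expanded` instead of testing whether
    # `source_path` is None (the old, overloaded behavior).
    assert stage.expanded
    assert os.path.isdir(stage.source_path)
```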
@@ -107,7 +107,7 @@ def location(parser, args):
                    print(pkg.stage.path)
                else:  # args.build_dir is the default.
                    if not pkg.stage.source_path:
                    if not pkg.stage.expanded:
                        tty.die("Build directory does not exist yet. "
                                "Run this to create it:",
                                "spack stage " + " ".join(args.spec))
@@ -60,6 +60,13 @@ def wrapper(self, *args, **kwargs):
    return wrapper


def _ensure_one_stage_entry(stage_path):
    """Ensure there is only one stage entry in the stage path."""
    stage_entries = os.listdir(stage_path)
    assert len(stage_entries) == 1
    return os.path.join(stage_path, stage_entries[0])


class FSMeta(type):
    """This metaclass registers all fetch strategies in a list."""
    def __init__(cls, name, bases, dict):

@@ -302,6 +309,7 @@ def fetch(self):
            raise FailedDownloadError(self.url)

    @property
    @_needs_stage
    def archive_file(self):
        """Path to the source archive within this stage directory."""
        return self.stage.archive_file

@@ -313,7 +321,13 @@ def cachable(self):
    @_needs_stage
    def expand(self):
        if not self.expand_archive:
            tty.msg("Skipping expand step for %s" % self.archive_file)
            tty.msg("Staging unexpanded archive %s in %s" % (
                    self.archive_file, self.stage.source_path))
            if not self.stage.expanded:
                mkdirp(self.stage.source_path)
            dest = os.path.join(self.stage.source_path,
                                os.path.basename(self.archive_file))
            shutil.move(self.archive_file, dest)
            return

        tty.msg("Staging archive: %s" % self.archive_file)

@@ -326,6 +340,10 @@ def expand(self):
        if not self.extension:
            self.extension = extension(self.archive_file)

        if self.stage.expanded:
            tty.debug('Source already staged to %s' % self.stage.source_path)
            return

        decompress = decompressor_for(self.archive_file, self.extension)

        # Expand all tarballs in their own directory to contain

@@ -337,26 +355,37 @@ def expand(self):
        with working_dir(tarball_container):
            decompress(self.archive_file)

        # Check for an exploding tarball, i.e. one that doesn't expand
        # to a single directory.  If the tarball *didn't* explode,
        # move contents up & remove the container directory.
        # Check for an exploding tarball, i.e. one that doesn't expand to
        # a single directory.  If the tarball *didn't* explode, move its
        # contents to the staging source directory & remove the container
        # directory.  If the tarball did explode, just rename the tarball
        # directory to the staging source directory.
        #
        # NOTE: The tar program on Mac OS X will encode HFS metadata
        # in hidden files, which can end up *alongside* a single
        # top-level directory.  We ignore hidden files to accomodate
        # these "semi-exploding" tarballs.
        # NOTE: The tar program on Mac OS X will encode HFS metadata in
        # hidden files, which can end up *alongside* a single top-level
        # directory.  We initially ignore presence of hidden files to
        # accomodate these "semi-exploding" tarballs but ensure the files
        # are copied to the source directory.
        files = os.listdir(tarball_container)
        non_hidden = [f for f in files if not f.startswith('.')]
        if len(non_hidden) == 1:
            expanded_dir = os.path.join(tarball_container, non_hidden[0])
            if os.path.isdir(expanded_dir):
                for f in files:
                    shutil.move(os.path.join(tarball_container, f),
                                os.path.join(self.stage.path, f))
            src = os.path.join(tarball_container, non_hidden[0])
            if os.path.isdir(src):
                shutil.move(src, self.stage.source_path)
                if len(files) > 1:
                    files.remove(non_hidden[0])
                    for f in files:
                        src = os.path.join(tarball_container, f)
                        dest = os.path.join(self.stage.path, f)
                        shutil.move(src, dest)
                os.rmdir(tarball_container)
            else:
                # This is a non-directory entry (e.g., a patch file) so simply
                # rename the tarball container to be the source path.
                shutil.move(tarball_container, self.stage.source_path)

        if not files:
            os.rmdir(tarball_container)
        else:
            shutil.move(tarball_container, self.stage.source_path)

    def archive(self, destination):
        """Just moves this archive to the destination."""

@@ -390,7 +419,7 @@ def reset(self):
                "Tried to reset URLFetchStrategy before fetching",
                "Failed on reset() for URL %s" % self.url)

        # Remove everythigng but the archive from the stage
        # Remove everything but the archive from the stage
        for filename in os.listdir(self.stage.path):
            abspath = os.path.join(self.stage.path, filename)
            if abspath != self.archive_file:
@@ -549,6 +578,15 @@ def fetch(self):
    def archive(self, destination):
        super(GoFetchStrategy, self).archive(destination, exclude='.git')

    @_needs_stage
    def expand(self):
        tty.debug(
            "Source fetched with %s is already expanded." % self.url_attr)

        # Move the directory to the well-known stage source path
        repo_root = _ensure_one_stage_entry(self.stage.path)
        shutil.move(repo_root, self.stage.source_path)

    @_needs_stage
    def reset(self):
        with working_dir(self.stage.source_path):

@@ -634,8 +672,9 @@ def _repo_info(self):

        return '{0}{1}'.format(self.url, args)

    @_needs_stage
    def fetch(self):
        if self.stage.source_path:
        if self.stage.expanded:
            tty.msg("Already fetched {0}".format(self.stage.source_path))
            return

@@ -645,17 +684,16 @@ def fetch(self):
        if self.commit:
            # Need to do a regular clone and check out everything if
            # they asked for a particular commit.
            with working_dir(self.stage.path):
                if spack.config.get('config:debug'):
                    git('clone', self.url)
                else:
                    git('clone', '--quiet', self.url)
            args = ['clone', self.url, self.stage.source_path]
            if not spack.config.get('config:debug'):
                args.insert(1, '--quiet')
            git(*args)

            with working_dir(self.stage.source_path):
                if spack.config.get('config:debug'):
                    git('checkout', self.commit)
                else:
                    git('checkout', '--quiet', self.commit)
                args = ['checkout', self.commit]
                if not spack.config.get('config:debug'):
                    args.insert(1, '--quiet')
                git(*args)

        else:
            # Can be more efficient if not checking out a specific commit.

@@ -674,45 +712,46 @@ def fetch(self):
            if self.git_version > ver('1.7.10'):
                args.append('--single-branch')

            with working_dir(self.stage.path):
                cloned = False
                # Yet more efficiency, only download a 1-commit deep tree
                if self.git_version >= ver('1.7.1'):
                    try:
                        git(*(args + ['--depth', '1', self.url]))
                        cloned = True
                    except spack.error.SpackError:
                        # This will fail with the dumb HTTP transport
                        # continue and try without depth, cleanup first
                        pass
            cloned = False
            # Yet more efficiency, only download a 1-commit deep tree
            if self.git_version >= ver('1.7.1'):
                try:
                    git(*(args + ['--depth', '1', self.url,
                                  self.stage.source_path]))
                    cloned = True
                except spack.error.SpackError as e:
                    # This will fail with the dumb HTTP transport
                    # continue and try without depth, cleanup first
                    tty.debug(e)

                if not cloned:
                    args.append(self.url)
                    git(*args)
            if not cloned:
                args.extend([self.url, self.stage.source_path])
                git(*args)

                with working_dir(self.stage.source_path):
                    # For tags, be conservative and check them out AFTER
                    # cloning.  Later git versions can do this with clone
                    # --branch, but older ones fail.
                    if self.tag and self.git_version < ver('1.8.5.2'):
                        # pull --tags returns a "special" error code of 1 in
                        # older versions that we have to ignore.
                        # see: https://github.com/git/git/commit/19d122b
                        if spack.config.get('config:debug'):
                            git('pull', '--tags', ignore_errors=1)
                            git('checkout', self.tag)
                        else:
                            git('pull', '--quiet', '--tags', ignore_errors=1)
                            git('checkout', '--quiet', self.tag)
            with working_dir(self.stage.source_path):
                # For tags, be conservative and check them out AFTER
                # cloning.  Later git versions can do this with clone
                # --branch, but older ones fail.
                if self.tag and self.git_version < ver('1.8.5.2'):
                    # pull --tags returns a "special" error code of 1 in
                    # older versions that we have to ignore.
                    # see: https://github.com/git/git/commit/19d122b
                    pull_args = ['pull', '--tags']
                    co_args = ['checkout', self.tag]
                    if not spack.config.get('config:debug'):
                        pull_args.insert(1, '--quiet')
                        co_args.insert(1, '--quiet')

        with working_dir(self.stage.source_path):
            # Init submodules if the user asked for them.
            if self.submodules:
                if spack.config.get('config:debug'):
                    git('submodule', 'update', '--init', '--recursive')
                else:
                    git('submodule', '--quiet', 'update', '--init',
                        '--recursive')
                    git(*pull_args, ignore_errors=1)
                    git(*co_args)

        # Init submodules if the user asked for them.
        if self.submodules:
            with working_dir(self.stage.source_path):
                args = ['submodule', 'update', '--init', '--recursive']
                if not spack.config.get('config:debug'):
                    args.insert(1, '--quiet')
                git(*args)

    def archive(self, destination):
        super(GitFetchStrategy, self).archive(destination, exclude='.git')

@@ -720,12 +759,14 @@ def archive(self, destination):
    @_needs_stage
    def reset(self):
        with working_dir(self.stage.source_path):
            co_args = ['checkout', '.']
            clean_args = ['clean', '-f']
            if spack.config.get('config:debug'):
                self.git('checkout', '.')
                self.git('clean', '-f')
            else:
                self.git('checkout', '--quiet', '.')
                self.git('clean', '--quiet', '-f')
                co_args.insert(1, '--quiet')
                clean_args.insert(1, '--quiet')

            self.git(*co_args)
            self.git(*clean_args)

    def __str__(self):
        return '[git] {0}'.format(self._repo_info())

@@ -778,7 +819,7 @@ def get_source_id(self):

    @_needs_stage
    def fetch(self):
        if self.stage.source_path:
        if self.stage.expanded:
            tty.msg("Already fetched %s" % self.stage.source_path)
            return

@@ -787,10 +828,8 @@ def fetch(self):
        args = ['checkout', '--force', '--quiet']
        if self.revision:
            args += ['-r', self.revision]
        args.append(self.url)

        with working_dir(self.stage.path):
            self.svn(*args)
        args.extend([self.url, self.stage.source_path])
        self.svn(*args)

    def _remove_untracked_files(self):
        """Removes untracked files in an svn repository."""

@@ -881,7 +920,7 @@ def get_source_id(self):

    @_needs_stage
    def fetch(self):
        if self.stage.source_path:
        if self.stage.expanded:
            tty.msg("Already fetched %s" % self.stage.source_path)
            return

@@ -895,13 +934,11 @@ def fetch(self):
        if not spack.config.get('config:verify_ssl'):
            args.append('--insecure')

        args.append(self.url)

        if self.revision:
            args.extend(['-r', self.revision])

        with working_dir(self.stage.path):
            self.hg(*args)
        args.extend([self.url, self.stage.source_path])
        self.hg(*args)

    def archive(self, destination):
        super(HgFetchStrategy, self).archive(destination, exclude='.hg')
@@ -519,12 +519,12 @@ def mock_archive(tmpdir_factory):
    tar = spack.util.executable.which('tar', required=True)

    tmpdir = tmpdir_factory.mktemp('mock-archive-dir')
    expanded_archive_basedir = 'mock-archive-repo'
    tmpdir.ensure(expanded_archive_basedir, dir=True)
    repodir = tmpdir.join(expanded_archive_basedir)
    tmpdir.ensure(spack.stage._source_path_subdir, dir=True)
    repodir = tmpdir.join(spack.stage._source_path_subdir)

    # Create the configure script
    configure_path = str(tmpdir.join(expanded_archive_basedir, 'configure'))
    configure_path = str(tmpdir.join(spack.stage._source_path_subdir,
                                     'configure'))
    with open(configure_path, 'w') as f:
        f.write(
            "#!/bin/sh\n"

@@ -541,8 +541,8 @@ def mock_archive(tmpdir_factory):

    # Archive it
    with tmpdir.as_cwd():
        archive_name = '{0}.tar.gz'.format(expanded_archive_basedir)
        tar('-czf', archive_name, expanded_archive_basedir)
        archive_name = '{0}.tar.gz'.format(spack.stage._source_path_subdir)
        tar('-czf', archive_name, spack.stage._source_path_subdir)

    Archive = collections.namedtuple('Archive',
                                     ['url', 'path', 'archive_file',

@@ -554,7 +554,7 @@ def mock_archive(tmpdir_factory):
        url=('file://' + archive_file),
        archive_file=archive_file,
        path=str(repodir),
        expanded_archive_basedir=expanded_archive_basedir)
        expanded_archive_basedir=spack.stage._source_path_subdir)


@pytest.fixture(scope='session')

@@ -565,9 +565,8 @@ def mock_git_repository(tmpdir_factory):
    git = spack.util.executable.which('git', required=True)

    tmpdir = tmpdir_factory.mktemp('mock-git-repo-dir')
    expanded_archive_basedir = 'mock-git-repo'
    tmpdir.ensure(expanded_archive_basedir, dir=True)
    repodir = tmpdir.join(expanded_archive_basedir)
    tmpdir.ensure(spack.stage._source_path_subdir, dir=True)
    repodir = tmpdir.join(spack.stage._source_path_subdir)

    # Initialize the repository
    with repodir.as_cwd():

@@ -639,9 +638,8 @@ def mock_hg_repository(tmpdir_factory):
    hg = spack.util.executable.which('hg', required=True)

    tmpdir = tmpdir_factory.mktemp('mock-hg-repo-dir')
    expanded_archive_basedir = 'mock-hg-repo'
    tmpdir.ensure(expanded_archive_basedir, dir=True)
    repodir = tmpdir.join(expanded_archive_basedir)
    tmpdir.ensure(spack.stage._source_path_subdir, dir=True)
    repodir = tmpdir.join(spack.stage._source_path_subdir)

    get_rev = lambda: hg('id', '-i', output=str).strip()

@@ -685,9 +683,8 @@ def mock_svn_repository(tmpdir_factory):
    svnadmin = spack.util.executable.which('svnadmin', required=True)

    tmpdir = tmpdir_factory.mktemp('mock-svn-stage')
    expanded_archive_basedir = 'mock-svn-repo'
    tmpdir.ensure(expanded_archive_basedir, dir=True)
    repodir = tmpdir.join(expanded_archive_basedir)
    tmpdir.ensure(spack.stage._source_path_subdir, dir=True)
    repodir = tmpdir.join(spack.stage._source_path_subdir)
    url = 'file://' + str(repodir)

    # Initialize the repository
@@ -155,7 +155,8 @@ def successful_fetch(_class):
            pass

    def successful_expand(_class):
        expanded_path = os.path.join(_class.stage.path, 'expanded-dir')
        expanded_path = os.path.join(_class.stage.path,
                                     spack.stage._source_path_subdir)
        os.mkdir(expanded_path)
        with open(os.path.join(expanded_path, 'test.patch'), 'w'):
            pass
@@ -63,9 +63,9 @@ def test_url_patch(mock_stage, filename, sha256, archive_sha256):
        # TODO: there is probably a better way to mock this.
        stage.mirror_path = mock_stage  # don't disrupt the spack install

        # fake a source path
        # Fake a source path and ensure the directory exists
        with working_dir(stage.path):
            mkdirp('spack-expanded-archive')
            mkdirp(spack.stage._source_path_subdir)

        with working_dir(stage.source_path):
            # write a file to be patched
@@ -6,6 +6,9 @@
"""Test that the Stage class works correctly."""
import os
import collections
import shutil
import tempfile
import getpass

import pytest

@@ -18,27 +21,108 @@
from spack.resource import Resource
from spack.stage import Stage, StageComposite, ResourceStage

# The following values are used for common fetch and stage mocking fixtures:
_archive_base = 'test-files'
_archive_fn = '%s.tar.gz' % _archive_base
_extra_fn = 'extra.sh'
_hidden_fn = '.hidden'
_readme_fn = 'README.txt'

def check_expand_archive(stage, stage_name, mock_archive):
_extra_contents = '#!/bin/sh\n'
_hidden_contents = ''
_readme_contents = 'hello world!\n'

# TODO: Replace the following with an enum once guarantee supported (or
# include enum34 for python versions < 3.4.
_include_readme = 1
_include_hidden = 2
_include_extra = 3

# Mock fetch directories are expected to appear as follows:
#
# TMPDIR/
#     _archive_fn     archive_url = file:///path/to/_archive_fn
#
# Mock expanded stage directories are expected to have one of two forms,
# depending on how the tarball expands.  Non-exploding tarballs are expected
# to have the following structure:
#
# TMPDIR/                 temp stage dir
#     src/                well-known stage source directory
#         _readme_fn      Optional test_readme (contains _readme_contents)
#     _hidden_fn          Optional hidden file (contains _hidden_contents)
#     _archive_fn         archive_url = file:///path/to/_archive_fn
#
# while exploding tarball directories are expected to be structured as follows:
#
# TMPDIR/                 temp stage dir
#     src/                well-known stage source directory
#         archive_name/   archive dir
#             _readme_fn  test_readme (contains _readme_contents)
#         _extra_fn       test_extra file (contains _extra_contents)
#     _archive_fn         archive_url = file:///path/to/_archive_fn
#


def check_expand_archive(stage, stage_name, expected_file_list):
    """
    Ensure the expanded archive directory contains the expected structure and
    files as described in the module-level comments above.
    """
    stage_path = get_stage_path(stage, stage_name)
    archive_name = 'test-files.tar.gz'
    archive_dir = 'test-files'
    assert archive_name in os.listdir(stage_path)
    assert archive_dir in os.listdir(stage_path)
    archive_dir = spack.stage._source_path_subdir

    assert os.path.join(stage_path, archive_dir) == stage.source_path
    stage_contents = os.listdir(stage_path)
    assert _archive_fn in stage_contents
    assert archive_dir in stage_contents

    readme = os.path.join(stage_path, archive_dir, 'README.txt')
    assert os.path.isfile(readme)
    with open(readme) as file:
        'hello world!\n' == file.read()
    source_path = os.path.join(stage_path, archive_dir)
    assert source_path == stage.source_path

    source_contents = os.listdir(source_path)

    for _include in expected_file_list:
        if _include == _include_hidden:
            # The hidden file represent the HFS metadata associated with Mac
            # OS X tar files so is expected to be in the same directory as
            # the archive directory.
            assert _hidden_fn in stage_contents

            fn = os.path.join(stage_path, _hidden_fn)
            contents = _hidden_contents

        elif _include == _include_readme:
            # The standard README.txt file will be in the source directory if
            # the tarball didn't explode; otherwise, it will be in the
            # original archive subdirectory of it.
            if _archive_base in source_contents:
                fn = os.path.join(source_path, _archive_base, _readme_fn)
            else:
                fn = os.path.join(source_path, _readme_fn)
            contents = _readme_contents

        elif _include == _include_extra:
            assert _extra_fn in source_contents

            fn = os.path.join(source_path, _extra_fn)
            contents = _extra_contents

        else:
            assert False

        assert os.path.isfile(fn)
        with open(fn) as _file:
            _file.read() == contents


def check_fetch(stage, stage_name):
    archive_name = 'test-files.tar.gz'
    """
    Ensure the fetch resulted in a properly placed archive file as described in
    the module-level comments.
    """
    stage_path = get_stage_path(stage, stage_name)
    assert archive_name in os.listdir(stage_path)
    assert os.path.join(stage_path, archive_name) == stage.fetcher.archive_file
    assert _archive_fn in os.listdir(stage_path)
    assert os.path.join(stage_path, _archive_fn) == stage.fetcher.archive_file


def check_destroy(stage, stage_name):
@@ -76,7 +160,7 @@ def check_setup(stage, stage_name, archive):

    else:
        # Make sure the stage path is NOT a link for a non-tmp stage
        assert os.path.islink(stage_path)
        assert not os.path.islink(stage_path)


def get_stage_path(stage, stage_name):
@@ -93,71 +177,187 @@ def get_stage_path(stage, stage_name):
        return stage.path


@pytest.fixture()
def tmpdir_for_stage(mock_archive):
    """Uses a temporary directory for staging"""
    current = spack.paths.stage_path
    spack.config.set(
        'config',
        {'build_stage': [str(mock_archive.test_tmp_dir)]},
        scope='user')
@pytest.fixture
def no_tmp_root_stage(monkeypatch):
    """
    Disable use of a temporary root for staging.

    Note that it can be important for other tests that the previous settings be
    restored when the test case is over.
    """
    monkeypatch.setattr(spack.stage, '_tmp_root', None)
    monkeypatch.setattr(spack.stage, '_use_tmp_stage', False)
    yield
    spack.config.set('config', {'build_stage': [current]}, scope='user')


@pytest.fixture()
def mock_archive(tmpdir, monkeypatch):
    """Creates a mock archive with the structure expected by the tests"""
    # Mock up a stage area that looks like this:
    #
    # TMPDIR/                    test_files_dir
    #     tmp/                   test_tmp_path (where stage should be)
    #     test-files/            archive_dir_path
    #         README.txt         test_readme (contains "hello world!\n")
    #     test-files.tar.gz      archive_url = file:///path/to/this
    #
    test_tmp_path = tmpdir.join('tmp')
    # set _test_tmp_path as the default test directory to use for stages.
    spack.config.set(
        'config', {'build_stage': [str(test_tmp_path)]}, scope='user')
@pytest.fixture
def non_user_path_for_stage():
    """
    Use a non-user path for staging.

    archive_dir = tmpdir.join('test-files')
    archive_name = 'test-files.tar.gz'
    archive = tmpdir.join(archive_name)
    archive_url = 'file://' + str(archive)
    test_readme = archive_dir.join('README.txt')
    archive_dir.ensure(dir=True)
    test_tmp_path.ensure(dir=True)
    test_readme.write('hello world!\n')
    Note that it can be important for other tests that the previous settings be
    restored when the test case is over.
    """
    current = spack.config.get('config:build_stage')
    spack.config.set('config', {'build_stage': ['/var/spack/non-path']},
                     scope='user')
    yield
    spack.config.set('config', {'build_stage': current}, scope='user')

    with tmpdir.as_cwd():
        tar = spack.util.executable.which('tar', required=True)
        tar('czf', str(archive_name), 'test-files')

    # Make spack use the test environment for tmp stuff.
@pytest.fixture
def stage_path_for_stage():
    """
    Use the basic stage_path for staging.

    Note that it can be important for other tests that the previous settings be
    restored when the test case is over.
    """
    current = spack.config.get('config:build_stage')
    spack.config.set('config',
                     {'build_stage': spack.paths.stage_path}, scope='user')
    yield
    spack.config.set('config', {'build_stage': current}, scope='user')


@pytest.fixture
def tmp_path_for_stage(tmpdir):
    """
    Use a built-in, temporary, test directory for staging.

    Note that it can be important for other tests that the previous settings be
    restored when the test case is over.
    """
    current = spack.config.get('config:build_stage')
    spack.config.set('config', {'build_stage': [str(tmpdir)]}, scope='user')
    yield
    spack.config.set('config', {'build_stage': current}, scope='user')


@pytest.fixture
def tmp_root_stage(monkeypatch):
    """
    Enable use of a temporary root for staging.

    Note that it can be important for other tests that the previous settings be
    restored when the test case is over.
    """
    monkeypatch.setattr(spack.stage, '_tmp_root', None)
    monkeypatch.setattr(spack.stage, '_use_tmp_stage', True)

    Archive = collections.namedtuple(
        'Archive', ['url', 'tmpdir', 'test_tmp_dir', 'archive_dir']
    )
    yield Archive(
        url=archive_url,
        tmpdir=tmpdir,
        test_tmp_dir=test_tmp_path,
        archive_dir=archive_dir
    )
    yield


@pytest.fixture()
@pytest.fixture
def tmpdir_for_stage(mock_stage_archive):
    """
    Use the mock_stage_archive's temporary directory for staging.

    Note that it can be important for other tests that the previous settings be
    restored when the test case is over.
    """
    archive = mock_stage_archive()
    current = spack.config.get('config:build_stage')
    spack.config.set(
        'config',
        {'build_stage': [str(archive.test_tmp_dir)]},
        scope='user')
    yield
    spack.config.set('config', {'build_stage': current}, scope='user')


@pytest.fixture
def tmp_build_stage_dir(tmpdir):
    """Establish the temporary build_stage for the mock archive."""
    test_tmp_path = tmpdir.join('tmp')

    # Set test_tmp_path as the default test directory to use for stages.
    current = spack.config.get('config:build_stage')
    spack.config.set('config',
                     {'build_stage': [str(test_tmp_path)]}, scope='user')

    yield (tmpdir, test_tmp_path)

    spack.config.set('config', {'build_stage': current}, scope='user')


@pytest.fixture
def mock_stage_archive(tmp_build_stage_dir, tmp_root_stage, request):
    """
    Create the directories and files for the staged mock archive.

    Note that it can be important for other tests that the previous settings be
    restored when the test case is over.
    """
    # Mock up a stage area that looks like this:
    #
    # tmpdir/                test_files_dir
    #     tmp/               test_tmp_path (where stage should be)
    #     <_archive_base>/   archive_dir_path
    #         <_readme_fn>   Optional test_readme (contains _readme_contents)
    #     <_extra_fn>        Optional extra file (contains _extra_contents)
    #     <_hidden_fn>       Optional hidden file (contains _hidden_contents)
    #     <_archive_fn>      archive_url = file:///path/to/<_archive_fn>
    #
    def create_stage_archive(expected_file_list=[_include_readme]):
        tmpdir, test_tmp_path = tmp_build_stage_dir
        test_tmp_path.ensure(dir=True)

        # Create the archive directory and associated file
        archive_dir = tmpdir.join(_archive_base)
        archive = tmpdir.join(_archive_fn)
        archive_url = 'file://' + str(archive)
        archive_dir.ensure(dir=True)

        # Create the optional files as requested and make sure expanded
        # archive peers are included.
        tar_args = ['czf', str(_archive_fn), _archive_base]
        for _include in expected_file_list:
            if _include == _include_hidden:
                # The hidden file case stands in for the way Mac OS X tar files
                # represent HFS metadata.  Locate in the same directory as the
                # archive file.
                tar_args.append(_hidden_fn)
                fn, contents = (tmpdir.join(_hidden_fn), _hidden_contents)

            elif _include == _include_readme:
                # The usual README.txt file is contained in the archive dir.
                fn, contents = (archive_dir.join(_readme_fn), _readme_contents)

            elif _include == _include_extra:
                # The extra file stands in for exploding tar files so needs
                # to be in the same directory as the archive file.
                tar_args.append(_extra_fn)
                fn, contents = (tmpdir.join(_extra_fn), _extra_contents)
            else:
                break

            fn.write(contents)

        # Create the archive file
        with tmpdir.as_cwd():
            tar = spack.util.executable.which('tar', required=True)
            tar(*tar_args)

        Archive = collections.namedtuple(
            'Archive', ['url', 'tmpdir', 'test_tmp_dir', 'archive_dir']
        )
        return Archive(url=archive_url, tmpdir=tmpdir,
                       test_tmp_dir=test_tmp_path, archive_dir=archive_dir)

    return create_stage_archive


@pytest.fixture
def mock_noexpand_resource(tmpdir):
    """Set up a non-expandable resource in the tmpdir prior to staging."""
    test_resource = tmpdir.join('resource-no-expand.sh')
    test_resource.write("an example resource")
    return str(test_resource)


@pytest.fixture()
@pytest.fixture
def mock_expand_resource(tmpdir):
    """Sets up an expandable resource in tmpdir prior to staging."""
    resource_dir = tmpdir.join('resource-expand')
    archive_name = 'resource.tar.gz'
    archive = tmpdir.join(archive_name)
@@ -165,10 +365,10 @@ def mock_expand_resource(tmpdir):
    test_file = resource_dir.join('resource-file.txt')
    resource_dir.ensure(dir=True)
    test_file.write('test content\n')
    current = tmpdir.chdir()
    tar = spack.util.executable.which('tar', required=True)
    tar('czf', str(archive_name), 'resource-expand')
    current.chdir()

    with tmpdir.as_cwd():
        tar = spack.util.executable.which('tar', required=True)
        tar('czf', str(archive_name), 'resource-expand')

    MockResource = collections.namedtuple(
        'MockResource', ['url', 'files'])

@@ -176,11 +376,13 @@ def mock_expand_resource(tmpdir):
    return MockResource(archive_url, ['resource-file.txt'])


@pytest.fixture()
@pytest.fixture
def composite_stage_with_expanding_resource(
        mock_archive, mock_expand_resource):
        mock_stage_archive, mock_expand_resource):
    """Sets up a composite for expanding resources prior to staging."""
    composite_stage = StageComposite()
    root_stage = Stage(mock_archive.url)
    archive = mock_stage_archive()
    root_stage = Stage(archive.url)
    composite_stage.append(root_stage)

    test_resource_fetcher = spack.fetch_strategy.from_kwargs(

@@ -195,7 +397,7 @@ def composite_stage_with_expanding_resource(
    return composite_stage, root_stage, resource_stage


@pytest.fixture()
@pytest.fixture
def failing_search_fn():
    """Returns a search function that fails! Always!"""
    def _mock():

@@ -203,7 +405,7 @@ def _mock():
    return _mock


@pytest.fixture()
@pytest.fixture
def failing_fetch_strategy():
    """Returns a fetch strategy that fails."""
    class FailingFetchStrategy(spack.fetch_strategy.FetchStrategy):

@@ -215,7 +417,7 @@ def fetch(self):
    return FailingFetchStrategy()


@pytest.fixture()
@pytest.fixture
def search_fn():
    """Returns a search function that always succeeds."""
    class _Mock(object):
@@ -234,28 +436,32 @@ class TestStage(object):
    stage_name = 'spack-test-stage'

    @pytest.mark.usefixtures('tmpdir_for_stage')
    def test_setup_and_destroy_name_with_tmp(self, mock_archive):
        with Stage(mock_archive.url, name=self.stage_name) as stage:
            check_setup(stage, self.stage_name, mock_archive)
    def test_setup_and_destroy_name_with_tmp(self, mock_stage_archive):
        archive = mock_stage_archive()
        with Stage(archive.url, name=self.stage_name) as stage:
            check_setup(stage, self.stage_name, archive)
        check_destroy(stage, self.stage_name)

    def test_setup_and_destroy_name_without_tmp(self, mock_archive):
        with Stage(mock_archive.url, name=self.stage_name) as stage:
            check_setup(stage, self.stage_name, mock_archive)
    def test_setup_and_destroy_name_without_tmp(self, mock_stage_archive):
        archive = mock_stage_archive()
        with Stage(archive.url, name=self.stage_name) as stage:
            check_setup(stage, self.stage_name, archive)
        check_destroy(stage, self.stage_name)

    @pytest.mark.usefixtures('tmpdir_for_stage')
    def test_setup_and_destroy_no_name_with_tmp(self, mock_archive):
        with Stage(mock_archive.url) as stage:
            check_setup(stage, None, mock_archive)
    def test_setup_and_destroy_no_name_with_tmp(self, mock_stage_archive):
        archive = mock_stage_archive()
        with Stage(archive.url) as stage:
            check_setup(stage, None, archive)
        check_destroy(stage, None)

    @pytest.mark.disable_clean_stage_check
    @pytest.mark.usefixtures('tmpdir_for_stage')
    def test_composite_stage_with_noexpand_resource(
            self, mock_archive, mock_noexpand_resource):
            self, mock_stage_archive, mock_noexpand_resource):
        archive = mock_stage_archive()
        composite_stage = StageComposite()
        root_stage = Stage(mock_archive.url)
        root_stage = Stage(archive.url)
        composite_stage.append(root_stage)

        resource_dst_name = 'resource-dst-name.sh'

@@ -276,7 +482,7 @@ def test_composite_stage_with_noexpand_resource(
    @pytest.mark.disable_clean_stage_check
    @pytest.mark.usefixtures('tmpdir_for_stage')
    def test_composite_stage_with_expand_resource(
            self, mock_archive, mock_expand_resource,
            self, mock_stage_archive, mock_expand_resource,
            composite_stage_with_expanding_resource):

        composite_stage, root_stage, resource_stage = (

@@ -291,22 +497,26 @@ def test_composite_stage_with_expand_resource(
                root_stage.source_path, 'resource-dir', fname)
            assert os.path.exists(file_path)

    def test_setup_and_destroy_no_name_without_tmp(self, mock_archive):
        with Stage(mock_archive.url) as stage:
            check_setup(stage, None, mock_archive)
    def test_setup_and_destroy_no_name_without_tmp(self, mock_stage_archive):
        archive = mock_stage_archive()
        with Stage(archive.url) as stage:
            check_setup(stage, None, archive)
        check_destroy(stage, None)

    def test_fetch(self, mock_archive):
        with Stage(mock_archive.url, name=self.stage_name) as stage:
            stage.fetch()
            check_setup(stage, self.stage_name, mock_archive)
            check_fetch(stage, self.stage_name)
        check_destroy(stage, self.stage_name)
    @pytest.mark.parametrize('debug', [False, True])
    def test_fetch(self, mock_stage_archive, debug):
        archive = mock_stage_archive()
        with spack.config.override('config:debug', debug):
            with Stage(archive.url, name=self.stage_name) as stage:
                stage.fetch()
                check_setup(stage, self.stage_name, archive)
                check_fetch(stage, self.stage_name)
            check_destroy(stage, self.stage_name)

    def test_no_search_if_default_succeeds(
            self, mock_archive, failing_search_fn):
        stage = Stage(mock_archive.url,
                      name=self.stage_name,
            self, mock_stage_archive, failing_search_fn):
        archive = mock_stage_archive()
        stage = Stage(archive.url, name=self.stage_name,
                      search_fn=failing_search_fn)
        with stage:
            stage.fetch()
@@ -336,22 +546,49 @@ def test_search_if_default_fails(self, failing_fetch_strategy, search_fn):
        check_destroy(stage, self.stage_name)
        assert search_fn.performed_search

    def test_expand_archive(self, mock_archive):
        with Stage(mock_archive.url, name=self.stage_name) as stage:
    def test_ensure_one_stage_entry(self, mock_stage_archive):
        archive = mock_stage_archive()
        with Stage(archive.url, name=self.stage_name) as stage:
            stage.fetch()
            check_setup(stage, self.stage_name, mock_archive)
            check_fetch(stage, self.stage_name)
            stage.expand_archive()
            check_expand_archive(stage, self.stage_name, mock_archive)
            stage_path = get_stage_path(stage, self.stage_name)
            spack.fetch_strategy._ensure_one_stage_entry(stage_path)
        check_destroy(stage, self.stage_name)

    def test_restage(self, mock_archive):
        with Stage(mock_archive.url, name=self.stage_name) as stage:
    @pytest.mark.parametrize("expected_file_list", [
                             [],
                             [_include_readme],
                             [_include_extra, _include_readme],
                             [_include_hidden, _include_readme]])
    def test_expand_archive(self, expected_file_list, mock_stage_archive):
        archive = mock_stage_archive(expected_file_list)
        with Stage(archive.url, name=self.stage_name) as stage:
            stage.fetch()
            check_setup(stage, self.stage_name, archive)
            check_fetch(stage, self.stage_name)
            stage.expand_archive()
            check_expand_archive(stage, self.stage_name, expected_file_list)
        check_destroy(stage, self.stage_name)

    def test_expand_archive_extra_expand(self, mock_stage_archive):
        """Test expand with an extra expand after expand (i.e., no-op)."""
        archive = mock_stage_archive()
        with Stage(archive.url, name=self.stage_name) as stage:
            stage.fetch()
            check_setup(stage, self.stage_name, archive)
            check_fetch(stage, self.stage_name)
            stage.expand_archive()
            stage.fetcher.expand()
            check_expand_archive(stage, self.stage_name, [_include_readme])
        check_destroy(stage, self.stage_name)

    def test_restage(self, mock_stage_archive):
        archive = mock_stage_archive()
        with Stage(archive.url, name=self.stage_name) as stage:
            stage.fetch()
            stage.expand_archive()

            with working_dir(stage.source_path):
                check_expand_archive(stage, self.stage_name, mock_archive)
                check_expand_archive(stage, self.stage_name, [_include_readme])

                # Try to make a file in the old archive dir
                with open('foobar', 'w') as file:

@@ -365,26 +602,29 @@ def test_restage(self, mock_archive):
            assert 'foobar' not in os.listdir(stage.source_path)
        check_destroy(stage, self.stage_name)

    def test_no_keep_without_exceptions(self, mock_archive):
        stage = Stage(mock_archive.url, name=self.stage_name, keep=False)
    def test_no_keep_without_exceptions(self, mock_stage_archive):
        archive = mock_stage_archive()
        stage = Stage(archive.url, name=self.stage_name, keep=False)
        with stage:
            pass
        check_destroy(stage, self.stage_name)

    @pytest.mark.disable_clean_stage_check
    def test_keep_without_exceptions(self, mock_archive):
        stage = Stage(mock_archive.url, name=self.stage_name, keep=True)
    def test_keep_without_exceptions(self, mock_stage_archive):
        archive = mock_stage_archive()
        stage = Stage(archive.url, name=self.stage_name, keep=True)
        with stage:
            pass
        path = get_stage_path(stage, self.stage_name)
        assert os.path.isdir(path)

    @pytest.mark.disable_clean_stage_check
    def test_no_keep_with_exceptions(self, mock_archive):
    def test_no_keep_with_exceptions(self, mock_stage_archive):
        class ThisMustFailHere(Exception):
            pass

        stage = Stage(mock_archive.url, name=self.stage_name, keep=False)
        archive = mock_stage_archive()
        stage = Stage(archive.url, name=self.stage_name, keep=False)
        try:
            with stage:
                raise ThisMustFailHere()

@@ -394,11 +634,12 @@ class ThisMustFailHere(Exception):
            assert os.path.isdir(path)

    @pytest.mark.disable_clean_stage_check
    def test_keep_exceptions(self, mock_archive):
    def test_keep_exceptions(self, mock_stage_archive):
        class ThisMustFailHere(Exception):
            pass

        stage = Stage(mock_archive.url, name=self.stage_name, keep=True)
        archive = mock_stage_archive()
        stage = Stage(archive.url, name=self.stage_name, keep=True)
        try:
            with stage:
                raise ThisMustFailHere()

@@ -406,3 +647,55 @@ class ThisMustFailHere(Exception):
        except ThisMustFailHere:
            path = get_stage_path(stage, self.stage_name)
            assert os.path.isdir(path)

    def test_source_path_available(self, mock_stage_archive):
        """Ensure source path available but does not exist on instantiation."""
        archive = mock_stage_archive()
        stage = Stage(archive.url, name=self.stage_name)

        source_path = stage.source_path
        assert source_path
        assert source_path.endswith(spack.stage._source_path_subdir)
        assert not os.path.exists(source_path)

    def test_first_accessible_path_error(self):
        """Test _first_accessible_path handling of an OSError."""
        with tempfile.NamedTemporaryFile() as _file:
            assert spack.stage._first_accessible_path([_file.name]) is None

    def test_get_tmp_root_no_use(self, no_tmp_root_stage):
        """Ensure not using tmp root results in no path."""
        assert spack.stage.get_tmp_root() is None

    def test_get_tmp_root_non_user_path(self, tmp_root_stage,
                                        non_user_path_for_stage):
        """Ensure build_stage of tmp root with non-user path includes user."""
        path = spack.stage.get_tmp_root()
        assert path.endswith(os.path.join(getpass.getuser(), 'spack-stage'))

    def test_get_tmp_root_use(self, tmp_root_stage, tmp_path_for_stage):
        """Ensure build_stage of tmp root provides has right ancestors."""
        path = spack.stage.get_tmp_root()
        shutil.rmtree(path)
        assert path.rfind('test_get_tmp_root_use') > 0
        assert path.endswith('spack-stage')

    def test_get_tmp_root_stage_path(self, tmp_root_stage,
                                     stage_path_for_stage):
        """
        Ensure build_stage of tmp root with stage_path means use local path.
        """
        assert spack.stage.get_tmp_root() is None
        assert not spack.stage._use_tmp_stage

    def test_stage_constructor_no_fetcher(self):
        """Ensure Stage constructor with no URL or fetch strategy fails."""
        with pytest.raises(ValueError):
            with Stage(None):
                pass

    def test_stage_constructor_with_path(self, tmpdir):
        """Ensure Stage constructor with a path uses it."""
        testpath = str(tmpdir)
        with Stage('file:///does-not-exist', path=testpath) as stage:
            assert stage.path == testpath