SPACK-1: Multi-version installation now works front to back with specs.

Todd Gamblin
2013-12-20 14:30:45 -08:00
parent af639dca16
commit e645bb065a
40 changed files with 618 additions and 236 deletions

View File

@@ -7,6 +7,9 @@
import spack.tty as tty
from spack.util.lang import attr_setdefault
# cmd has a submodule called "list" so preserve the python list module
python_list = list
# Patterns to ignore in the commands directory when looking for commands.
ignore_files = r'^\.|^__init__.py$|^#'
@@ -50,15 +53,25 @@ def get_command(name):
return getattr(get_module(name), get_cmd_function_name(name))
def parse_specs(args):
def parse_specs(args, **kwargs):
"""Convenience function for parsing arguments from specs. Handles common
exceptions and dies if there are errors.
"""
if type(args) == list:
concretize = kwargs.get('concretize', False)
normalize = kwargs.get('normalize', False)
if isinstance(args, (python_list, tuple)):
args = " ".join(args)
try:
return spack.spec.parse(" ".join(args))
specs = spack.spec.parse(args)
for spec in specs:
if concretize:
spec.concretize() # implies normalize
elif normalize:
spec.normalize()
return specs
except spack.parse.ParseError, e:
tty.error(e.message, e.string, e.pos * " " + "^")
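A minimal sketch of how a command might call the reworked parse_specs with the new keyword arguments, assuming the spack modules above are importable; the spec strings below are illustrative, not values from this commit:

import spack.cmd

# Parse and concretize everything the user typed on the command line.
specs = spack.cmd.parse_specs(["libelf@0.8.13", "libdwarf"], concretize=True)

# Or just normalize, leaving versions and other choices open.
specs = spack.cmd.parse_specs("mpich@3:", normalize=True)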

View File

@@ -1,6 +1,7 @@
import os
import re
import argparse
import hashlib
from pprint import pprint
from subprocess import CalledProcessError
@@ -8,17 +9,20 @@
import spack.packages as packages
from spack.stage import Stage
from spack.colify import colify
from spack.util.crypto import md5
from spack.util.crypto import checksum
from spack.version import *
group='foo'
description ="Checksum available versions of a package, print out checksums for addition to a package file."
def setup_parser(subparser):
subparser.add_argument('package', metavar='PACKAGE', help='Package to list versions for')
subparser.add_argument('versions', nargs=argparse.REMAINDER, help='Versions to generate checksums for')
subparser.add_argument('-n', '--number', dest='number', type=int,
default=10, help='Number of versions to list')
subparser.add_argument(
'package', metavar='PACKAGE', help='Package to list versions for')
subparser.add_argument(
'versions', nargs=argparse.REMAINDER, help='Versions to generate checksums for')
subparser.add_argument(
'-n', '--number', dest='number', type=int,
default=10, help='Number of versions to list')
def checksum(parser, args):
@@ -50,7 +54,7 @@ def checksum(parser, args):
stage = Stage(url)
try:
stage.fetch()
hashes.append(md5(stage.archive_file))
hashes.append(checksum(hashlib.md5, stage.archive_file))
finally:
stage.destroy()

View File

@@ -22,8 +22,9 @@ def clean(parser, args):
if not args.packages:
tty.die("spack clean requires at least one package argument")
specs = spack.cmd.parse_specs(args.packages)
specs = spack.cmd.parse_specs(args.packages, concretize=True)
for spec in specs:
tty.message("Cleaning for spec:", spec)
package = packages.get(spec.name)
if args.dist:
package.do_clean_dist()

View File

@@ -1,18 +1,26 @@
import argparse
import spack.cmd
import spack.packages as packages
description = "Fetch archives for packages"
def setup_parser(subparser):
subparser.add_argument('packages', nargs=argparse.REMAINDER, help="specs of packages to fetch")
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check packages against checksum")
subparser.add_argument(
'packages', nargs=argparse.REMAINDER, help="specs of packages to fetch")
def fetch(parser, args):
if not args.packages:
tty.die("fetch requires at least one package argument")
specs = spack.cmd.parse_specs(args.packages)
if args.no_checksum:
spack.do_checksum = False
specs = spack.cmd.parse_specs(args.packages, concretize=True)
for spec in specs:
package = packages.get(spec.name)
package = packages.get(spec)
package.do_fetch()

View File

@@ -0,0 +1,87 @@
import collections
import argparse
import spack
import spack.packages as packages
import spack.colify
from spack.colify import colify
description ="Find installed spack packages"
def setup_parser(subparser):
subparser.add_argument(
'-p', '--paths', action='store_true', dest='paths',
help='Show paths to package install directories')
subparser.add_argument(
'-l', '--long', action='store_true', dest='full_specs',
help='Show full-length specs of installed packages')
subparser.add_argument(
'query_specs', nargs=argparse.REMAINDER,
help='optional specs to filter results')
# TODO: move this and colify to tty.
def hline(label, char):
max_width = 64
cols, rows = spack.colify.get_terminal_size()
if not cols:
cols = max_width
else:
cols -= 2
cols = min(max_width, cols)
label = str(label)
out = char * 2 + " " + label + " "
out += (cols - len(out)) * char
return out
def find(parser, args):
def hasher():
return collections.defaultdict(hasher)
query_specs = []
if args.query_specs:
query_specs = spack.cmd.parse_specs(args.query_specs, normalize=True)
# Make a dict with specs keyed by architecture and compiler.
index = hasher()
for spec in packages.installed_package_specs():
if query_specs and not any(spec.satisfies(q) for q in query_specs):
continue
if spec.compiler not in index[spec.architecture]:
index[spec.architecture][spec.compiler] = []
index[spec.architecture][spec.compiler].append(spec)
# Traverse the index and print out each package
for architecture in index:
print hline(architecture, "=")
for compiler in index[architecture]:
print hline(compiler, "-")
specs = index[architecture][compiler]
specs.sort()
abbreviated = []
for s in specs:
abbrv = "%s@%s%s" % (s.name, s.version, s.variants)
if s.dependencies:
abbrv += '-' + s.dependencies.sha1()[:6]
abbreviated.append(abbrv)
if args.paths:
# Print one spec per line along with prefix path
width = max(len(s) for s in abbreviated)
width += 2
format = " %-{}s%s".format(width)
for abbrv, spec in zip(abbreviated, specs):
print format % (abbrv, spec.package.prefix)
elif args.full_specs:
for spec in specs:
print spec.tree(indent=4),
else:
for abbrv in abbreviated:
print " %s" % abbrv

View File

@@ -5,7 +5,7 @@
import spack.packages as packages
from spack.colify import colify
description = "Build and install packages"
description = "Get detailed information on a particular package"
def setup_parser(subparser):
subparser.add_argument('name', metavar="PACKAGE", help="name of packages to get info on")

View File

@@ -8,21 +8,30 @@
description = "Build and install packages"
def setup_parser(subparser):
subparser.add_argument('-i', '--ignore-dependencies',
action='store_true', dest='ignore_dependencies',
help="Do not try to install dependencies of requested packages.")
subparser.add_argument('-d', '--dirty', action='store_true', dest='dirty',
help="Don't clean up staging area when install completes.")
subparser.add_argument('packages', nargs=argparse.REMAINDER, help="specs of packages to install")
subparser.add_argument(
'-i', '--ignore-dependencies', action='store_true', dest='ignore_dependencies',
help="Do not try to install dependencies of requested packages.")
subparser.add_argument(
'-d', '--dirty', action='store_true', dest='dirty',
help="Don't clean up staging area when install completes.")
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check packages against checksum")
subparser.add_argument(
'packages', nargs=argparse.REMAINDER, help="specs of packages to install")
def install(parser, args):
if not args.packages:
tty.die("install requires at least one package argument")
if args.no_checksum:
spack.do_checksum = False
spack.ignore_dependencies = args.ignore_dependencies
specs = spack.cmd.parse_specs(args.packages)
specs = spack.cmd.parse_specs(args.packages, concretize=True)
for spec in specs:
package = packages.get(spec.name)
package = packages.get(spec)
package.dirty = args.dirty
package.do_install()

View File

@@ -1,23 +1,13 @@
import os
import re
from subprocess import CalledProcessError
import spack
import spack.packages as packages
from spack.version import ver
from spack.colify import colify
import spack.url as url
import spack.tty as tty
description ="List spack packages"
description ="List available spack packages"
def setup_parser(subparser):
subparser.add_argument('-i', '--installed', action='store_true', dest='installed',
help='List installed packages for each platform along with versions.')
pass
def list(parser, args):
if args.installed:
colify(str(pkg) for pkg in packages.installed_packages())
else:
colify(packages.all_package_names())
# Print all the package names in columns
colify(packages.all_package_names())

View File

@@ -1,10 +1,24 @@
import argparse
import spack.packages as packages
description="Expand downloaded archive in preparation for install"
def setup_parser(subparser):
subparser.add_argument('name', help="name of package to stage")
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check packages against checksum")
subparser.add_argument(
'packages', nargs=argparse.REMAINDER, help="specs of packages to stage")
def stage(parser, args):
package = packages.get(args.name)
package.do_stage()
if not args.packages:
tty.die("stage requires at least one package argument")
if args.no_checksum:
spack.do_checksum = False
specs = spack.cmd.parse_specs(args.packages, concretize=True)
for spec in specs:
package = packages.get(spec)
package.do_stage()

View File

@@ -1,13 +1,18 @@
import spack.cmd
import spack.packages as packages
import argparse
import spack.cmd
import spack.tty as tty
import spack.packages as packages
description="Remove an installed package"
def setup_parser(subparser):
subparser.add_argument('-f', '--force', action='store_true', dest='force',
help="Ignore installed packages that depend on this one and remove it anyway.")
subparser.add_argument('packages', nargs=argparse.REMAINDER, help="specs of packages to uninstall")
subparser.add_argument(
'-f', '--force', action='store_true', dest='force',
help="Remove regardless of whether other packages depend on this one.")
subparser.add_argument(
'packages', nargs=argparse.REMAINDER, help="specs of packages to uninstall")
def uninstall(parser, args):
if not args.packages:
@@ -15,8 +20,20 @@ def uninstall(parser, args):
specs = spack.cmd.parse_specs(args.packages)
# get packages to uninstall as a list.
pkgs = [packages.get(spec.name) for spec in specs]
# For each spec provided, make sure it refers to only one package.
# Fail and ask user to be unambiguous if it doesn't
pkgs = []
for spec in specs:
matching_specs = packages.get_installed(spec)
if len(matching_specs) > 1:
tty.die("%s matches multiple packages. Which one did you mean?"
% spec, *matching_specs)
elif len(matching_specs) == 0:
tty.die("%s does not match any installed packages." % spec)
installed_spec = matching_specs[0]
pkgs.append(packages.get(installed_spec))
# Sort packages to be uninstalled by the number of installed dependents
# This ensures we do things in the right order

View File

@@ -39,7 +39,7 @@ def concretize_version(self, spec):
if valid_versions:
spec.versions = ver([valid_versions[-1]])
else:
spec.versions = ver([pkg.version])
spec.versions = ver([pkg.default_version])
def concretize_architecture(self, spec):

View File

@@ -1,9 +1,11 @@
import exceptions
import re
import os
import os.path
import exceptions
import hashlib
import spack.spec as spec
from spack.util import *
from spack.spec import Spec
from spack.util.filesystem import *
from spack.error import SpackError
@@ -30,11 +32,14 @@ def relative_path_for_spec(self, spec):
raise NotImplementedError()
def make_path_for_spec(self, spec):
"""Creates the installation directory for a spec."""
raise NotImplementedError()
def path_for_spec(self, spec):
"""Return an absolute path from the root to a directory for the spec."""
if not spec.concrete:
raise ValueError("path_for_spec requires a concrete spec.")
assert(spec.concrete)
path = self.relative_path_for_spec(spec)
assert(not path.startswith(self.root))
return os.path.join(self.root, path)
@@ -70,22 +75,90 @@ def traverse_dirs_at_depth(root, depth, path_tuple=(), curdepth=0):
yield tup
class DefaultDirectoryLayout(DirectoryLayout):
def __init__(self, root):
super(DefaultDirectoryLayout, self).__init__(root)
class SpecHashDirectoryLayout(DirectoryLayout):
"""Lays out installation directories like this::
<install_root>/
<architecture>/
<compiler>/
name@version+variant-<dependency_hash>
Where dependency_hash is a SHA-1 hash prefix for the full package spec.
This accounts for dependencies.
If there is ever a hash collision, you won't be able to install a new
package unless you use a larger prefix. However, the full spec is stored
in a file called .spec in each directory, so you can migrate an entire
install directory to a new hash size pretty easily.
TODO: make a tool to migrate install directories to different hash sizes.
"""
def __init__(self, root, **kwargs):
"""Prefix size is number of characters in the SHA-1 prefix to use
to make each hash unique.
"""
prefix_size = kwargs.get('prefix_size', 8)
spec_file = kwargs.get('spec_file', '.spec')
super(SpecHashDirectoryLayout, self).__init__(root)
self.prefix_size = prefix_size
self.spec_file = spec_file
def relative_path_for_spec(self, spec):
if not spec.concrete:
raise ValueError("relative_path_for_spec requires a concrete spec.")
assert(spec.concrete)
return new_path(
path = new_path(
spec.architecture,
spec.compiler,
"%s@%s%s%s" % (spec.name,
spec.version,
spec.variants,
spec.dependencies))
"%s@%s%s" % (spec.name, spec.version, spec.variants))
if spec.dependencies:
path += "-"
sha1 = spec.dependencies.sha1()
path += sha1[:self.prefix_size]
return path
def write_spec(self, spec, path):
"""Write a spec out to a file."""
with closing(open(path, 'w')) as spec_file:
spec_file.write(spec.tree(ids=False, cover='nodes'))
def read_spec(self, path):
"""Read the contents of a file and parse them as a spec"""
with closing(open(path)) as spec_file:
string = spec_file.read().replace('\n', '')
return Spec(string)
def make_path_for_spec(self, spec):
assert(spec.concrete)
path = self.path_for_spec(spec)
spec_file_path = new_path(path, self.spec_file)
if os.path.isdir(path):
if not os.path.isfile(spec_file_path):
raise InconsistentInstallDirectoryError(
'No spec file found at path %s' % spec_file_path)
installed_spec = self.read_spec(spec_file_path)
if installed_spec == spec:
raise InstallDirectoryAlreadyExistsError(path)
spec_hash = spec.sha1()
installed_hash = installed_spec.sha1()
if installed_hash == spec_hash:
raise SpecHashCollisionError(installed_spec, spec, self.prefix_size)
else:
raise InconsistentInstallDirectoryError(
'Spec file in %s does not match SHA-1 hash!'
% spec_file_path)
mkdirp(path)
self.write_spec(spec, spec_file_path)
def all_specs(self):
@@ -94,5 +167,37 @@ def all_specs(self):
for path in traverse_dirs_at_depth(self.root, 3):
arch, compiler, last_dir = path
spec_str = "%s%%%s=%s" % (last_dir, compiler, arch)
yield spec.parse(spec_str)
spec_file_path = new_path(
self.root, arch, compiler, last_dir, self.spec_file)
if os.path.exists(spec_file_path):
spec = self.read_spec(spec_file_path)
yield spec
class DirectoryLayoutError(SpackError):
"""Superclass for directory layout errors."""
def __init__(self, message):
super(DirectoryLayoutError, self).__init__(message)
class SpecHashCollisionError(DirectoryLayoutError):
"""Raised when there is a hash collision in an SpecHashDirectoryLayout."""
def __init__(self, installed_spec, new_spec, prefix_size):
super(SpecHashCollisionError, self).__init__(
'Specs %s and %s have the same %d character SHA-1 prefix!'
% (installed_spec, new_spec, prefix_size))
class InconsistentInstallDirectoryError(DirectoryLayoutError):
"""Raised when a package seems to be installed to the wrong place."""
def __init__(self, message):
super(InconsistentInstallDirectoryError, self).__init__(message)
class InstallDirectoryAlreadyExistsError(DirectoryLayoutError):
"""Raised when make_path_for_sec is called unnecessarily."""
def __init__(self, path):
super(InstallDirectoryAlreadyExistsError, self).__init__(
"Install path %s already exists!")

View File

@@ -3,7 +3,7 @@
from spack.version import Version
from spack.util.filesystem import *
from spack.util.executable import *
from spack.directory_layout import DefaultDirectoryLayout
from spack.directory_layout import SpecHashDirectoryLayout
from spack.concretize import DefaultConcretizer
# This lives in $prefix/lib/spack/spack/__file__
@@ -29,7 +29,7 @@
# This controls how spack lays out install prefixes and
# stage directories.
#
install_layout = DefaultDirectoryLayout(install_path)
install_layout = SpecHashDirectoryLayout(install_path, prefix_size=6)
#
# This controls how things are concretized in spack.
@@ -39,7 +39,7 @@
concretizer = DefaultConcretizer()
# Version information
spack_version = Version("0.5")
spack_version = Version("1.0")
# User's editor from the environment
editor = Executable(os.environ.get("EDITOR", ""))
@@ -60,6 +60,11 @@
'/var/tmp/%u/spack-stage',
'/tmp/%u/spack-stage']
# Whether spack should allow installation of unsafe versions of
# software. "Unsafe" versions are ones it doesn't have a checksum
# for.
do_checksum = True
#
# SYS_TYPE to use for the spack installation.
# Value of this determines what platform spack thinks it is by

View File

@@ -22,14 +22,16 @@
import packages
import tty
import validate
import multiprocessing
import url
from spack.multi_function import platform
import spack.util.crypto as crypto
from spack.version import *
from spack.stage import Stage
from spack.util.lang import *
from spack.util.crypto import md5
from spack.util.web import get_pages
from spack.util.environment import *
class Package(object):
@@ -256,7 +258,7 @@ class SomePackage(Package):
p.do_clean() # runs make clean
p.do_clean_work() # removes the build directory and
# re-expands the archive.
# re-expands the archive.
p.do_clean_dist() # removes the stage directory entirely
The convention used here is that a do_* function is intended to be called
@@ -297,9 +299,8 @@ class SomePackage(Package):
def __init__(self, spec):
# These attributes are required for all packages.
attr_required(self, 'homepage')
attr_required(self, 'url')
attr_required(self, 'md5')
attr_required(self.__class__, 'homepage')
attr_required(self.__class__, 'url')
# this determines how the package should be built.
self.spec = spec
@@ -307,39 +308,34 @@ def __init__(self, spec):
# Name of package is the name of its module (the file that contains it)
self.name = inspect.getmodulename(self.module.__file__)
# Don't allow the default homepage.
if re.search(r'example.com', self.homepage):
tty.die("Bad homepage in %s: %s" % (self.name, self.homepage))
# Make sure URL is an allowed type
validate.url(self.url)
# Set up version
# TODO: get rid of version attr and use spec
# TODO: roll this into available_versions
if not hasattr(self, 'version'):
try:
self.version = url.parse_version(self.url)
except UndetectableVersionError:
tty.die("Couldn't extract a default version from %s. You " +
"must specify it explicitly in the package." % self.url)
elif not isinstance(self.version, Version):
self.version = Version(self.version)
# patch up the URL with a new version if the spec version is concrete
if self.spec.versions.concrete:
self.url = self.url_for_version(self.spec.version)
# This is set by scraping a web page.
self._available_versions = None
# This list overrides available_versions if set by the user.
attr_setdefault(self, 'versions', None)
if self.versions and not isinstance(self.versions, VersionList):
self.versions = VersionList(self.versions)
# versions should be a dict from version to checksum, for safe versions
# of this package. If it's not present, make it an empty dict.
if not hasattr(self, 'versions'):
self.versions = {}
# Empty at first; only compute dependent packages if necessary
self._dependents = None
if not isinstance(self.versions, dict):
raise ValueError("versions attribute of package %s must be a dict!"
% self.name)
# Version-ize the keys in versions dict
try:
self.versions = { Version(v):h for v,h in self.versions.items() }
except ValueError:
raise ValueError("Keys of versions dict in package %s must be versions!"
% self.name)
# stage used to build this package.
# TODO: hash the concrete spec and use that as the stage name.
self.stage = Stage(self.url, "%s-%s" % (self.name, self.version))
self._stage = None
# Set a default list URL (place to find available versions)
if not hasattr(self, 'list_url'):
@@ -349,6 +345,34 @@ def __init__(self, spec):
self.list_depth = 1
@property
def default_version(self):
"""Get the version in the default URL for this package,
or fails."""
try:
return url.parse_version(self.__class__.url)
except UndetectableVersionError:
tty.die("Couldn't extract a default version from %s. You " +
"must specify it explicitly in the package." % self.url)
@property
def version(self):
if not self.spec.concrete:
raise ValueError("Can only get version of concrete package.")
return self.spec.versions[0]
@property
def stage(self):
if not self.spec.concrete:
raise ValueError("Can only get a stage for a concrete package.")
if self._stage is None:
self._stage = Stage(self.url, str(self.spec))
return self._stage
def add_commands_to_module(self):
"""Populate the module scope of install() with some useful functions.
This makes things easier for package writers.
@@ -406,13 +430,6 @@ def add_commands_to_module(self):
m.man7 = new_path(m.man, 'man7')
m.man8 = new_path(m.man, 'man8')
@property
def dependents(self):
"""List of names of packages that depend on this one."""
if self._dependents is None:
packages.compute_dependents()
return tuple(self._dependents)
def preorder_traversal(self, visited=None, **kwargs):
"""This does a preorder traversal of the package's dependence DAG."""
@@ -499,20 +516,15 @@ def installed(self):
@property
def installed_dependents(self):
installed = [d for d in self.dependents if packages.get(d).installed]
all_deps = []
for d in installed:
all_deps.append(d)
all_deps.extend(packages.get(d).installed_dependents)
return tuple(all_deps)
@property
def all_dependents(self):
all_deps = list(self.dependents)
for pkg in self.dependents:
all_deps.extend(packages.get(pkg).all_dependents)
return tuple(all_deps)
"""Return a list of the specs of all installed packages that depend
on this one."""
dependents = []
for spec in packages.installed_package_specs():
if self.name in spec.dependencies:
dep_spec = spec.dependencies[self.name]
if self.spec == dep_spec:
dependents.append(spec)
return dependents
@property
@@ -533,7 +545,7 @@ def url_version(self, version):
def url_for_version(self, version):
"""Gives a URL that you can download a new version of this package from."""
return url.substitute_version(self.url, self.url_version(version))
return url.substitute_version(self.__class__.url, self.url_version(version))
def remove_prefix(self):
@@ -547,28 +559,38 @@ def do_fetch(self):
"""Creates a stage directory and downloads the taball for this package.
Working directory will be set to the stage directory.
"""
stage = self.stage
stage.setup()
stage.fetch()
self.stage.setup()
archive_md5 = md5(stage.archive_file)
if archive_md5 != self.md5:
tty.die("MD5 Checksum failed for %s. Expected %s but got %s."
% (self.name, self.md5, archive_md5))
if spack.do_checksum and not self.version in self.versions:
tty.die("Cannot fetch %s@%s safely; there is no checksum on file for this "
"version." % (self.name, self.version),
"Add a checksum to the package file, or use --no-checksum to "
"skip this check.")
self.stage.fetch()
if self.version in self.versions:
digest = self.versions[self.version]
checker = crypto.Checker(digest)
if checker.check(self.stage.archive_file):
tty.msg("Checksum passed for %s" % self.name)
else:
tty.die("%s checksum failed for %s. Expected %s but got %s."
% (checker.hash_name, self.name, digest, checker.sum))
def do_stage(self):
"""Unpacks the fetched tarball, then changes into the expanded tarball directory."""
"""Unpacks the fetched tarball, then changes into the expanded tarball
directory."""
self.do_fetch()
stage = self.stage
archive_dir = stage.expanded_archive_path
archive_dir = self.stage.expanded_archive_path
if not archive_dir:
tty.msg("Staging archive: %s" % stage.archive_file)
stage.expand_archive()
tty.msg("Staging archive: %s" % self.stage.archive_file)
self.stage.expand_archive()
else:
tty.msg("Already staged %s" % self.name)
stage.chdir_to_archive()
self.stage.chdir_to_archive()
def do_install(self):
@@ -595,18 +617,14 @@ def do_install(self):
tty.msg("Building %s." % self.name)
try:
# create the install directory (allow the layout to handle this in
# case it needs to add extra files)
spack.install_layout.make_path_for_spec(self.spec)
self.install(self.prefix)
if not os.path.isdir(self.prefix):
tty.die("Install failed for %s. No install dir created." % self.name)
except subprocess.CalledProcessError, e:
self.remove_prefix()
tty.die("Install failed for %s" % self.name, e.message)
except KeyboardInterrupt, e:
self.remove_prefix()
raise
except Exception, e:
if not self.dirty:
self.remove_prefix()
@@ -640,8 +658,9 @@ def setup_install_environment(self):
path_set(SPACK_ENV_PATH, env_paths)
# Pass along prefixes of dependencies here
path_set(SPACK_DEPENDENCIES,
[dep.package.prefix for dep in self.dependencies.values()])
path_set(
SPACK_DEPENDENCIES,
[dep.package.prefix for dep in self.spec.dependencies.values()])
# Install location
os.environ[SPACK_PREFIX] = self.prefix
@@ -652,7 +671,7 @@ def setup_install_environment(self):
def do_install_dependencies(self):
# Pass along paths of dependencies here
for dep in self.dependencies.values():
for dep in self.spec.dependencies.values():
dep.package.do_install()
@@ -717,7 +736,7 @@ def fetch_available_versions(self):
if not self._available_versions:
self._available_versions = VersionList()
url_regex = os.path.basename(url.wildcard_version(self.url))
wildcard = self.version.wildcard()
wildcard = self.default_version.wildcard()
try:
page_map = get_pages(self.list_url, depth=self.list_depth)
@@ -748,7 +767,7 @@ def fetch_available_versions(self):
def available_versions(self):
# If the package overrode available_versions, then use that.
if self.versions is not None:
return self.versions
return VersionList(self.versions.keys())
else:
vlist = self.fetch_available_versions()
if not vlist:

View File

@@ -21,6 +21,16 @@
instances = {}
def autospec(function):
"""Decorator that automatically converts the argument of a single-arg
function to a Spec."""
def converter(arg):
if not isinstance(arg, spack.spec.Spec):
arg = spack.spec.Spec(arg)
return function(arg)
return converter
class ProviderIndex(object):
"""This is a dict of dicts used for finding providers of particular
virtual dependencies. The dict of dicts looks like:
@@ -87,23 +97,32 @@ def providers_for(self, *vpkg_specs):
return sorted(providers)
def get(pkg_name):
if not pkg_name in instances:
package_class = get_class_for_package_name(pkg_name)
instances[pkg_name] = package_class(pkg_name)
@autospec
def get(spec):
if spec.virtual:
raise UnknownPackageError(spec.name)
return instances[pkg_name]
if not spec in instances:
package_class = get_class_for_package_name(spec.name)
instances[spec.name] = package_class(spec)
return instances[spec.name]
@autospec
def get_installed(spec):
return [s for s in installed_package_specs() if s.satisfies(spec)]
@autospec
def providers_for(vpkg_spec):
if providers_for.index is None:
if not hasattr(providers_for, 'index'):
providers_for.index = ProviderIndex(all_package_names())
providers = providers_for.index.providers_for(vpkg_spec)
if not providers:
raise UnknownPackageError("No such virtual package: %s" % vpkg_spec)
return providers
providers_for.index = None
def valid_package_name(pkg_name):
@@ -122,7 +141,7 @@ def filename_for_package_name(pkg_name):
return new_path(spack.packages_path, "%s.py" % pkg_name)
def installed_packages():
def installed_package_specs():
return spack.install_layout.all_specs()
@@ -198,6 +217,9 @@ def compute_dependents():
"""Reads in all package files and sets dependence information on
Package objects in memory.
"""
if not hasattr(compute_dependents, 'index'):
compute_dependents.index = {}
for pkg in all_packages():
if pkg._dependents is None:
pkg._dependents = []
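The autospec decorator added above lets packages.get and get_installed accept either a Spec object or a plain spec string. A self-contained sketch of the same pattern, using a stand-in Spec class rather than Spack's actual API:

class Spec(object):
    """Stand-in for spack.spec.Spec, for illustration only."""
    def __init__(self, name):
        self.name = name

def autospec(function):
    """Convert a single string argument to a Spec before calling function."""
    def converter(arg):
        if not isinstance(arg, Spec):
            arg = Spec(arg)
        return function(arg)
    return converter

@autospec
def get(spec):
    return "package for %s" % spec.name

get("libelf")        # string is coerced to Spec("libelf")
get(Spec("libelf"))  # a Spec passes through unchanged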

View File

@@ -3,7 +3,6 @@
class Callpath(Package):
homepage = "https://github.com/tgamblin/callpath"
url = "http://github.com/tgamblin/callpath-0.2.tar.gz"
md5 = "foobarbaz"
depends_on("dyninst")
depends_on("mpich")

View File

@@ -3,7 +3,7 @@
class Cmake(Package):
homepage = 'https://www.cmake.org'
url = 'http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz'
md5 = '097278785da7182ec0aea8769d06860c'
versions = { '2.8.10.2' : '097278785da7182ec0aea8769d06860c' }
def install(self, prefix):
configure('--prefix=%s' % prefix,

View File

@@ -3,9 +3,11 @@
class Dyninst(Package):
homepage = "https://paradyn.org"
url = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz"
md5 = "bf03b33375afa66fe0efa46ce3f4b17a"
list_url = "http://www.dyninst.org/downloads/dyninst-8.x"
versions = {'8.1.2' : 'bf03b33375afa66fe0efa46ce3f4b17a',
'8.1.1' : '1f8743e3a5662b25ce64a7edf647e77d' }
depends_on("libelf")
depends_on("libdwarf")

View File

@@ -9,7 +9,9 @@ class Libdwarf(Package):
url = "http://www.prevanders.net/libdwarf-20130729.tar.gz"
list_url = homepage
md5 = "64b42692e947d5180e162e46c689dfbf"
versions = { '20130729' : '4cc5e48693f7b93b7aa0261e63c0e21d',
'20130207' : '64b42692e947d5180e162e46c689dfbf',
'20130126' : 'ded74a5e90edb5a12aac3c29d260c5db' }
depends_on("libelf")
@@ -49,7 +51,7 @@ def install(self, prefix):
install('dwarfdump.1', man1)
@platform('macosx_10.8_x86_64')
def install(self, prefix):
raise UnsupportedPlatformError(
"libdwarf doesn't currently build on Mac OS X.")
# @platform('macosx_10.8_x86_64')
# def install(self, prefix):
# raise UnsupportedPlatformError(
# "libdwarf doesn't currently build on Mac OS X.")

View File

@@ -3,7 +3,8 @@
class Libelf(Package):
homepage = "http://www.mr511.de/software/english.html"
url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
md5 = "4136d7b4c04df68b686570afa26988ac"
versions = { '0.8.13' : '4136d7b4c04df68b686570afa26988ac' }
def install(self, prefix):
configure("--prefix=%s" % prefix,

View File

@@ -3,7 +3,8 @@
class Libunwind(Package):
homepage = "http://www.nongnu.org/libunwind/"
url = "http://download.savannah.gnu.org/releases/libunwind/libunwind-1.1.tar.gz"
md5 = "fb4ea2f6fbbe45bf032cd36e586883ce"
versions = { '1.1' : 'fb4ea2f6fbbe45bf032cd36e586883ce' }
def install(self, prefix):
configure("--prefix=%s" % prefix)

View File

@@ -3,15 +3,12 @@
class Mpich(Package):
"""MPICH is a high performance and widely portable implementation of
the Message Passing Interface (MPI) standard."""
homepage = "http://www.mpich.org"
url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz"
md5 = "9c5d5d4fe1e17dd12153f40bc5b6dbc0"
list_url = "http://www.mpich.org/static/downloads/"
list_depth = 2
versions = ['3.0.4', '3.0.3', '3.0.2', '3.0.1', '3.0']
versions = { '3.0.4' : '9c5d5d4fe1e17dd12153f40bc5b6dbc0' }
provides('mpi@:3', when='@3:')
provides('mpi@:1', when='@1:')

View File

@@ -3,7 +3,6 @@
class Mpileaks(Package):
homepage = "http://www.llnl.gov"
url = "http://www.llnl.gov/mpileaks-1.0.tar.gz"
md5 = "foobarbaz"
depends_on("mpich")
depends_on("callpath")

View File

@@ -70,6 +70,7 @@
from StringIO import StringIO
import tty
import hashlib
import spack.parse
import spack.error
import spack.compilers
@@ -238,6 +239,12 @@ def satisfies(self, other):
if name in other)
def sha1(self):
sha = hashlib.sha1()
sha.update(str(self))
return sha.hexdigest()
def __str__(self):
sorted_dep_names = sorted(self.keys())
return ''.join(
@@ -341,10 +348,7 @@ def root(self):
@property
def package(self):
if self.virtual:
raise TypeError("Cannot get package for virtual spec '" +
self.name + "'")
return packages.get(self.name)
return packages.get(self)
@property
@@ -766,8 +770,8 @@ def copy(self, **kwargs):
@property
def version(self):
if not self.concrete:
raise SpecError("Spec is not concrete: " + str(self))
if not self.versions.concrete:
raise SpecError("Spec version is not concrete: " + str(self))
return self.versions[0]
@@ -825,25 +829,37 @@ def str_no_deps(self, **kwargs):
def tree(self, **kwargs):
"""Prints out this spec and its dependencies, tree-formatted
with indentation."""
color = kwargs.get('color', False)
depth = kwargs.get('depth', False)
cover = kwargs.get('cover', 'paths')
color = kwargs.get('color', False)
depth = kwargs.get('depth', False)
showid = kwargs.get('ids', False)
cover = kwargs.get('cover', 'nodes')
indent = kwargs.get('indent', 0)
out = ""
cur_id = 0
ids = {}
for d, node in self.preorder_traversal(cover=cover, depth=True):
out += " " * indent
if depth:
out += "%-4d" % d
if not id(node) in ids:
cur_id += 1
ids[id(node)] = cur_id
out += "%-4d" % ids[id(node)]
if showid:
out += "%-4d" % ids[id(node)]
out += (" " * d)
if d > 0:
out += "^"
out += node.str_no_deps(color=color) + "\n"
return out
def sha1(self):
sha = hashlib.sha1()
sha.update(str(self))
return sha.hexdigest()
def __repr__(self):
return str(self)

View File

@@ -269,11 +269,6 @@ def destroy(self):
def can_access(file=spack.stage_path):
"""True if we have read/write access to the file."""
return os.access(file, os.R_OK|os.W_OK)
def ensure_access(file=spack.stage_path):
"""Ensure we can access a directory and die with an error if we can't."""
if not can_access(file):
@@ -305,10 +300,18 @@ def find_tmp_root():
if spack.use_tmp_stage:
for tmp in spack.tmp_dirs:
try:
mkdirp(expand_user(tmp))
return tmp
# Replace %u with username
expanded = expand_user(tmp)
# try to create a directory for spack stuff
mkdirp(expanded)
# return it if successful.
return expanded
except OSError:
continue
return None

View File

@@ -1 +0,0 @@
skip_test = True

View File

@@ -2,12 +2,11 @@
class Callpath(Package):
homepage = "https://github.com/tgamblin/callpath"
url = "http://github.com/tgamblin/callpath-0.2.tar.gz"
md5 = "foobarbaz"
url = "http://github.com/tgamblin/callpath-1.0.tar.gz"
versions = { 0.8 : 'bf03b33375afa66fe0efa46ce3f4b17a',
0.9 : 'bf03b33375afa66fe0efa46ce3f4b17a',
1.0 : 'bf03b33375afa66fe0efa46ce3f4b17a' }
versions = { 0.8 : 'foobarbaz',
0.9 : 'foobarbaz',
1.0 : 'foobarbaz' }
depends_on("dyninst")
depends_on("mpi")

View File

@@ -3,8 +3,6 @@
class Dyninst(Package):
homepage = "https://paradyn.org"
url = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz"
md5 = "bf03b33375afa66fe0efa46ce3f4b17a"
list_url = "http://www.dyninst.org/downloads/dyninst-8.x"
versions = {

View File

@@ -3,9 +3,7 @@
class Fake(Package):
homepage = "http://www.fake-spack-example.org"
url = "http://www.fake-spack-example.org/downloads/fake-1.0.tar.gz"
md5 = "foobarbaz"
versions = '1.0'
versions = { '1.0' : 'foobarbaz' }
def install(self, prefix):
configure("--prefix=%s" % prefix)

View File

@@ -9,13 +9,13 @@ class Libdwarf(Package):
url = "http://www.prevanders.net/libdwarf-20130729.tar.gz"
list_url = homepage
md5 = "64b42692e947d5180e162e46c689dfbf"
versions = [20070703, 20111030, 20130207]
versions = { 20130729 : "64b42692e947d5180e162e46c689dfbf",
20130207 : 'foobarbaz',
20111030 : 'foobarbaz',
20070703 : 'foobarbaz' }
depends_on("libelf")
def clean(self):
for dir in dwarf_dirs:
with working_dir(dir):
@@ -49,9 +49,3 @@ def install(self, prefix):
install('dwarfdump', bin)
install('dwarfdump.conf', lib)
install('dwarfdump.1', man1)
@platform('macosx_10.8_x86_64')
def install(self, prefix):
raise UnsupportedPlatformError(
"libdwarf doesn't currently build on Mac OS X.")

View File

@@ -3,12 +3,10 @@
class Libelf(Package):
homepage = "http://www.mr511.de/software/english.html"
url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
md5 = "4136d7b4c04df68b686570afa26988ac"
versions = {
'0.8.13' : '4136d7b4c04df68b686570afa26988ac',
'0.8.12' : 'e21f8273d9f5f6d43a59878dc274fec7',
'0.8.10' : '9db4d36c283d9790d8fa7df1f4d7b4d9' }
versions = {'0.8.13' : '4136d7b4c04df68b686570afa26988ac',
'0.8.12' : 'e21f8273d9f5f6d43a59878dc274fec7',
'0.8.10' : '9db4d36c283d9790d8fa7df1f4d7b4d9' }
def install(self, prefix):
configure("--prefix=%s" % prefix,

View File

@@ -1,14 +1,16 @@
from spack import *
class Mpich(Package):
homepage = "http://www.mpich.org"
url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz"
md5 = "9c5d5d4fe1e17dd12153f40bc5b6dbc0"
homepage = "http://www.mpich.org"
url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz"
list_url = "http://www.mpich.org/static/downloads/"
list_depth = 2
versions = '3.0.4, 3.0.3, 3.0.2, 3.0.1, 3.0'
versions = { '3.0.4' : '9c5d5d4fe1e17dd12153f40bc5b6dbc0',
'3.0.3' : 'foobarbaz',
'3.0.2' : 'foobarbaz',
'3.0.1' : 'foobarbaz',
'3.0' : 'foobarbaz' }
provides('mpi@:3', when='@3:')
provides('mpi@:1', when='@1:')

View File

@@ -1,14 +1,17 @@
from spack import *
class Mpich2(Package):
homepage = "http://www.mpich.org"
url = "http://www.mpich.org/static/downloads/1.5/mpich2-1.5.tar.gz"
md5 = "9c5d5d4fe1e17dd12153f40bc5b6dbc0"
homepage = "http://www.mpich.org"
url = "http://www.mpich.org/static/downloads/1.5/mpich2-1.5.tar.gz"
list_url = "http://www.mpich.org/static/downloads/"
list_depth = 2
versions = '1.5, 1.4, 1.3, 1.2, 1.1, 1.0'
versions = { '1.5' : '9c5d5d4fe1e17dd12153f40bc5b6dbc0',
'1.4' : 'foobarbaz',
'1.3' : 'foobarbaz',
'1.2' : 'foobarbaz',
'1.1' : 'foobarbaz',
'1.0' : 'foobarbaz' }
provides('mpi@:2.0')
provides('mpi@:2.1', when='@1.1:')

View File

@@ -3,12 +3,11 @@
class Mpileaks(Package):
homepage = "http://www.llnl.gov"
url = "http://www.llnl.gov/mpileaks-1.0.tar.gz"
md5 = "foobarbaz"
versions = { 1.0 : None,
2.1 : None,
2.2 : None,
2.3 : None }
versions = { 1.0 : 'foobarbaz',
2.1 : 'foobarbaz',
2.2 : 'foobarbaz',
2.3 : 'foobarbaz' }
depends_on("mpi")
depends_on("callpath")

View File

@@ -5,9 +5,8 @@ class Zmpi(Package):
with dependencies."""
homepage = "http://www.spack-fake-zmpi.org"
url = "http://www.spack-fake-zmpi.org/downloads/zmpi-1.0.tar.gz"
md5 = "foobarbaz"
versions = '1.0'
versions = { '1.0' : 'foobarbaz' }
provides('mpi@10.0:')
depends_on('fake')

View File

@@ -47,7 +47,7 @@ def pkg(message):
mac_ver = platform.mac_ver()[0]
if mac_ver and Version(mac_ver) >= Version('10.7'):
print u"\U0001F4E6" + indent,
print u"\U0001F4E6" + indent
else:
cprint('@*g{[+]} ')
cwrite('@*g{[+]} ')
print message

View File

@@ -1,13 +1,81 @@
import hashlib
from contextlib import closing
def md5(filename, block_size=2**20):
"""Computes the md5 hash of a file."""
md5 = hashlib.md5()
"""Set of acceptable hashes that Spack will use."""
_acceptable_hashes = [
hashlib.md5,
hashlib.sha1,
hashlib.sha224,
hashlib.sha256,
hashlib.sha384,
hashlib.sha512 ]
"""Index for looking up hasher for a digest."""
_size_to_hash = { h().digest_size : h for h in _acceptable_hashes }
def checksum(hashlib_algo, filename, **kwargs):
"""Returns a hex digest of the filename generated using an
algorithm from hashlib.
"""
block_size = kwargs.get('block_size', 2**20)
hasher = hashlib_algo()
with closing(open(filename)) as file:
while True:
data = file.read(block_size)
if not data:
break
md5.update(data)
return md5.hexdigest()
hasher.update(data)
return hasher.hexdigest()
class Checker(object):
"""A checker checks files against one particular hex digest.
It will automatically determine which hashing algorithm
to use based on the length of the digest it's initialized
with. For example, if the digest is 32 hex characters long,
it will use md5.
Example: you know your tarball should hash to 'abc123' and you want
to check files against it. You would use this class like so::
hexdigest = 'abc123'
checker = Checker(hexdigest)
success = checker.check('downloaded.tar.gz')
After the call to check, the actual checksum is available in
checker.sum, in case it's needed for error output.
You can trade off read performance against memory usage by
adjusting the optional block_size arg. By default it's
a 1MB (2**20 bytes) buffer.
"""
def __init__(self, hexdigest, **kwargs):
self.block_size = kwargs.get('block_size', 2**20)
self.hexdigest = hexdigest
self.sum = None
bytes = len(hexdigest) / 2
if not bytes in _size_to_hash:
raise ValueError(
'Spack knows no hash algorithm for this digest: %s' % hexdigest)
self.hash_fun = _size_to_hash[bytes]
@property
def hash_name(self):
"""Get the name of the hash function this Checker is using."""
return self.hash_fun().name
def check(self, filename):
"""Read the file with the specified name and check its checksum
against self.hexdigest. Return True if they match, False
otherwise. Actual checksum is stored in self.sum.
"""
self.sum = checksum(
self.hash_fun, filename, block_size=self.block_size)
return self.sum == self.hexdigest
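A hedged usage sketch of the new checksum helper and Checker class, assuming spack is on the Python path; 'archive.tar.gz' is a placeholder path, not a file from this commit:

import hashlib
from spack.util.crypto import checksum, Checker

# Compute a digest for a local file with any hashlib algorithm.
digest = checksum(hashlib.md5, 'archive.tar.gz')

# Checker infers md5 from the 32-character digest and re-verifies the file.
checker = Checker(digest)
assert checker.check('archive.tar.gz')
print checker.hash_name, checker.sum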

View File

@@ -1,3 +1,4 @@
import os
def env_flag(name):
if name in os.environ:

View File

@@ -68,3 +68,10 @@ def stem(path):
if re.search(suffix, path):
return re.sub(suffix, "", path)
return path
def can_access(file_name):
"""True if we have read/write access to the file."""
return os.access(file_name, os.R_OK|os.W_OK)

View File

@@ -23,7 +23,7 @@
import sys
import re
from bisect import bisect_left
from functools import total_ordering
from functools import total_ordering, wraps
import spack.util.none_high as none_high
import spack.util.none_low as none_low
@@ -71,6 +71,7 @@ def check_type(t):
def coerced(method):
"""Decorator that ensures that argument types of a method are coerced."""
@wraps(method)
def coercing_method(a, b):
if type(a) == type(b) or a is None or b is None:
return method(a, b)
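The functional change in this hunk is decorating the wrapper with functools.wraps so coerced comparison methods keep their original names and docstrings. A tiny standalone illustration; the function below is a stand-in, not Spack code:

from functools import wraps

def coerced(method):
    @wraps(method)   # copies __name__, __doc__, etc. onto the wrapper
    def coercing_method(a, b):
        return method(a, b)
    return coercing_method

@coerced
def is_equal(a, b):
    """Compare two coerced values."""
    return a == b

print is_equal.__name__   # 'is_equal' rather than 'coercing_method'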
@@ -84,6 +85,8 @@ def coercing_method(a, b):
class Version(object):
"""Class to represent versions"""
def __init__(self, string):
string = str(string)
if not re.match(VALID_VERSION, string):
raise ValueError("Bad characters in version string: %s" % string)