SPACK-1: Multi-version installation now works front to back with specs.
This commit is contained in:
@@ -7,6 +7,9 @@
|
|||||||
import spack.tty as tty
|
import spack.tty as tty
|
||||||
from spack.util.lang import attr_setdefault
|
from spack.util.lang import attr_setdefault
|
||||||
|
|
||||||
|
# cmd has a submodule called "list" so preserve the python list module
|
||||||
|
python_list = list
|
||||||
|
|
||||||
# Patterns to ignore in the commands directory when looking for commands.
|
# Patterns to ignore in the commands directory when looking for commands.
|
||||||
ignore_files = r'^\.|^__init__.py$|^#'
|
ignore_files = r'^\.|^__init__.py$|^#'
|
||||||
|
|
||||||
@@ -50,15 +53,25 @@ def get_command(name):
|
|||||||
return getattr(get_module(name), get_cmd_function_name(name))
|
return getattr(get_module(name), get_cmd_function_name(name))
|
||||||
|
|
||||||
|
|
||||||
def parse_specs(args):
|
def parse_specs(args, **kwargs):
|
||||||
"""Convenience function for parsing arguments from specs. Handles common
|
"""Convenience function for parsing arguments from specs. Handles common
|
||||||
exceptions and dies if there are errors.
|
exceptions and dies if there are errors.
|
||||||
"""
|
"""
|
||||||
if type(args) == list:
|
concretize = kwargs.get('concretize', False)
|
||||||
|
normalize = kwargs.get('normalize', False)
|
||||||
|
|
||||||
|
if isinstance(args, (python_list, tuple)):
|
||||||
args = " ".join(args)
|
args = " ".join(args)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return spack.spec.parse(" ".join(args))
|
specs = spack.spec.parse(args)
|
||||||
|
for spec in specs:
|
||||||
|
if concretize:
|
||||||
|
spec.concretize() # implies normalize
|
||||||
|
elif normalize:
|
||||||
|
spec.normalize()
|
||||||
|
|
||||||
|
return specs
|
||||||
|
|
||||||
except spack.parse.ParseError, e:
|
except spack.parse.ParseError, e:
|
||||||
tty.error(e.message, e.string, e.pos * " " + "^")
|
tty.error(e.message, e.string, e.pos * " " + "^")
|
||||||
|
@@ -1,6 +1,7 @@
|
|||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import argparse
|
import argparse
|
||||||
|
import hashlib
|
||||||
from pprint import pprint
|
from pprint import pprint
|
||||||
from subprocess import CalledProcessError
|
from subprocess import CalledProcessError
|
||||||
|
|
||||||
@@ -8,16 +9,19 @@
|
|||||||
import spack.packages as packages
|
import spack.packages as packages
|
||||||
from spack.stage import Stage
|
from spack.stage import Stage
|
||||||
from spack.colify import colify
|
from spack.colify import colify
|
||||||
from spack.util.crypto import md5
|
from spack.util.crypto import checksum
|
||||||
from spack.version import *
|
from spack.version import *
|
||||||
|
|
||||||
group='foo'
|
group='foo'
|
||||||
description ="Checksum available versions of a package, print out checksums for addition to a package file."
|
description ="Checksum available versions of a package, print out checksums for addition to a package file."
|
||||||
|
|
||||||
def setup_parser(subparser):
|
def setup_parser(subparser):
|
||||||
subparser.add_argument('package', metavar='PACKAGE', help='Package to list versions for')
|
subparser.add_argument(
|
||||||
subparser.add_argument('versions', nargs=argparse.REMAINDER, help='Versions to generate checksums for')
|
'package', metavar='PACKAGE', help='Package to list versions for')
|
||||||
subparser.add_argument('-n', '--number', dest='number', type=int,
|
subparser.add_argument(
|
||||||
|
'versions', nargs=argparse.REMAINDER, help='Versions to generate checksums for')
|
||||||
|
subparser.add_argument(
|
||||||
|
'-n', '--number', dest='number', type=int,
|
||||||
default=10, help='Number of versions to list')
|
default=10, help='Number of versions to list')
|
||||||
|
|
||||||
|
|
||||||
@@ -50,7 +54,7 @@ def checksum(parser, args):
|
|||||||
stage = Stage(url)
|
stage = Stage(url)
|
||||||
try:
|
try:
|
||||||
stage.fetch()
|
stage.fetch()
|
||||||
hashes.append(md5(stage.archive_file))
|
hashes.append(checksum(hashlib.md5, stage.archive_file))
|
||||||
finally:
|
finally:
|
||||||
stage.destroy()
|
stage.destroy()
|
||||||
|
|
||||||
|
@@ -22,8 +22,9 @@ def clean(parser, args):
|
|||||||
if not args.packages:
|
if not args.packages:
|
||||||
tty.die("spack clean requires at least one package argument")
|
tty.die("spack clean requires at least one package argument")
|
||||||
|
|
||||||
specs = spack.cmd.parse_specs(args.packages)
|
specs = spack.cmd.parse_specs(args.packages, concretize=True)
|
||||||
for spec in specs:
|
for spec in specs:
|
||||||
|
tty.message("Cleaning for spec:", spec)
|
||||||
package = packages.get(spec.name)
|
package = packages.get(spec.name)
|
||||||
if args.dist:
|
if args.dist:
|
||||||
package.do_clean_dist()
|
package.do_clean_dist()
|
||||||
|
@@ -1,18 +1,26 @@
|
|||||||
import argparse
|
import argparse
|
||||||
|
|
||||||
import spack.cmd
|
import spack.cmd
|
||||||
import spack.packages as packages
|
import spack.packages as packages
|
||||||
|
|
||||||
description = "Fetch archives for packages"
|
description = "Fetch archives for packages"
|
||||||
|
|
||||||
def setup_parser(subparser):
|
def setup_parser(subparser):
|
||||||
subparser.add_argument('packages', nargs=argparse.REMAINDER, help="specs of packages to fetch")
|
subparser.add_argument(
|
||||||
|
'-n', '--no-checksum', action='store_true', dest='no_checksum',
|
||||||
|
help="Do not check packages against checksum")
|
||||||
|
subparser.add_argument(
|
||||||
|
'packages', nargs=argparse.REMAINDER, help="specs of packages to fetch")
|
||||||
|
|
||||||
|
|
||||||
def fetch(parser, args):
|
def fetch(parser, args):
|
||||||
if not args.packages:
|
if not args.packages:
|
||||||
tty.die("fetch requires at least one package argument")
|
tty.die("fetch requires at least one package argument")
|
||||||
|
|
||||||
specs = spack.cmd.parse_specs(args.packages)
|
if args.no_checksum:
|
||||||
|
spack.do_checksum = False
|
||||||
|
|
||||||
|
specs = spack.cmd.parse_specs(args.packages, concretize=True)
|
||||||
for spec in specs:
|
for spec in specs:
|
||||||
package = packages.get(spec.name)
|
package = packages.get(spec)
|
||||||
package.do_fetch()
|
package.do_fetch()
|
||||||
|
87
lib/spack/spack/cmd/find.py
Normal file
87
lib/spack/spack/cmd/find.py
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
import collections
|
||||||
|
import argparse
|
||||||
|
|
||||||
|
import spack
|
||||||
|
import spack.packages as packages
|
||||||
|
import spack.colify
|
||||||
|
from spack.colify import colify
|
||||||
|
|
||||||
|
description ="Find installed spack packages"
|
||||||
|
|
||||||
|
def setup_parser(subparser):
|
||||||
|
subparser.add_argument(
|
||||||
|
'-p', '--paths', action='store_true', dest='paths',
|
||||||
|
help='Show paths to package install directories')
|
||||||
|
subparser.add_argument(
|
||||||
|
'-l', '--long', action='store_true', dest='full_specs',
|
||||||
|
help='Show full-length specs of installed packages')
|
||||||
|
subparser.add_argument(
|
||||||
|
'query_specs', nargs=argparse.REMAINDER,
|
||||||
|
help='optional specs to filter results')
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: move this and colify to tty.
|
||||||
|
def hline(label, char):
|
||||||
|
max_width = 64
|
||||||
|
cols, rows = spack.colify.get_terminal_size()
|
||||||
|
if not cols:
|
||||||
|
cols = max_width
|
||||||
|
else:
|
||||||
|
cols -= 2
|
||||||
|
cols = min(max_width, cols)
|
||||||
|
|
||||||
|
label = str(label)
|
||||||
|
out = char * 2 + " " + label + " "
|
||||||
|
out += (cols - len(out)) * char
|
||||||
|
return out
|
||||||
|
|
||||||
|
|
||||||
|
def find(parser, args):
|
||||||
|
def hasher():
|
||||||
|
return collections.defaultdict(hasher)
|
||||||
|
|
||||||
|
query_specs = []
|
||||||
|
if args.query_specs:
|
||||||
|
query_specs = spack.cmd.parse_specs(args.query_specs, normalize=True)
|
||||||
|
|
||||||
|
# Make a dict with specs keyed by architecture and compiler.
|
||||||
|
index = hasher()
|
||||||
|
for spec in packages.installed_package_specs():
|
||||||
|
if query_specs and not any(spec.satisfies(q) for q in query_specs):
|
||||||
|
continue
|
||||||
|
|
||||||
|
if spec.compiler not in index[spec.architecture]:
|
||||||
|
index[spec.architecture][spec.compiler] = []
|
||||||
|
index[spec.architecture][spec.compiler].append(spec)
|
||||||
|
|
||||||
|
# Traverse the index and print out each package
|
||||||
|
for architecture in index:
|
||||||
|
print hline(architecture, "=")
|
||||||
|
for compiler in index[architecture]:
|
||||||
|
print hline(compiler, "-")
|
||||||
|
|
||||||
|
specs = index[architecture][compiler]
|
||||||
|
specs.sort()
|
||||||
|
|
||||||
|
abbreviated = []
|
||||||
|
for s in specs:
|
||||||
|
abbrv = "%s@%s%s" % (s.name, s.version, s.variants)
|
||||||
|
if s.dependencies:
|
||||||
|
abbrv += '-' + s.dependencies.sha1()[:6]
|
||||||
|
abbreviated.append(abbrv)
|
||||||
|
|
||||||
|
if args.paths:
|
||||||
|
# Print one spec per line along with prefix path
|
||||||
|
width = max(len(s) for s in abbreviated)
|
||||||
|
width += 2
|
||||||
|
format = " %-{}s%s".format(width)
|
||||||
|
|
||||||
|
for abbrv, spec in zip(abbreviated, specs):
|
||||||
|
print format % (abbrv, spec.package.prefix)
|
||||||
|
|
||||||
|
elif args.full_specs:
|
||||||
|
for spec in specs:
|
||||||
|
print spec.tree(indent=4),
|
||||||
|
else:
|
||||||
|
for abbrv in abbreviated:
|
||||||
|
print " %s" % abbrv
|
@@ -5,7 +5,7 @@
|
|||||||
import spack.packages as packages
|
import spack.packages as packages
|
||||||
from spack.colify import colify
|
from spack.colify import colify
|
||||||
|
|
||||||
description = "Build and install packages"
|
description = "Get detailed information on a particular package"
|
||||||
|
|
||||||
def setup_parser(subparser):
|
def setup_parser(subparser):
|
||||||
subparser.add_argument('name', metavar="PACKAGE", help="name of packages to get info on")
|
subparser.add_argument('name', metavar="PACKAGE", help="name of packages to get info on")
|
||||||
|
@@ -8,21 +8,30 @@
|
|||||||
description = "Build and install packages"
|
description = "Build and install packages"
|
||||||
|
|
||||||
def setup_parser(subparser):
|
def setup_parser(subparser):
|
||||||
subparser.add_argument('-i', '--ignore-dependencies',
|
subparser.add_argument(
|
||||||
action='store_true', dest='ignore_dependencies',
|
'-i', '--ignore-dependencies', action='store_true', dest='ignore_dependencies',
|
||||||
help="Do not try to install dependencies of requested packages.")
|
help="Do not try to install dependencies of requested packages.")
|
||||||
subparser.add_argument('-d', '--dirty', action='store_true', dest='dirty',
|
subparser.add_argument(
|
||||||
|
'-d', '--dirty', action='store_true', dest='dirty',
|
||||||
help="Don't clean up staging area when install completes.")
|
help="Don't clean up staging area when install completes.")
|
||||||
subparser.add_argument('packages', nargs=argparse.REMAINDER, help="specs of packages to install")
|
subparser.add_argument(
|
||||||
|
'-n', '--no-checksum', action='store_true', dest='no_checksum',
|
||||||
|
help="Do not check packages against checksum")
|
||||||
|
subparser.add_argument(
|
||||||
|
'packages', nargs=argparse.REMAINDER, help="specs of packages to install")
|
||||||
|
|
||||||
|
|
||||||
def install(parser, args):
|
def install(parser, args):
|
||||||
if not args.packages:
|
if not args.packages:
|
||||||
tty.die("install requires at least one package argument")
|
tty.die("install requires at least one package argument")
|
||||||
|
|
||||||
|
if args.no_checksum:
|
||||||
|
spack.do_checksum = False
|
||||||
|
|
||||||
spack.ignore_dependencies = args.ignore_dependencies
|
spack.ignore_dependencies = args.ignore_dependencies
|
||||||
specs = spack.cmd.parse_specs(args.packages)
|
specs = spack.cmd.parse_specs(args.packages, concretize=True)
|
||||||
|
|
||||||
for spec in specs:
|
for spec in specs:
|
||||||
package = packages.get(spec.name)
|
package = packages.get(spec)
|
||||||
package.dirty = args.dirty
|
package.dirty = args.dirty
|
||||||
package.do_install()
|
package.do_install()
|
||||||
|
@@ -1,23 +1,13 @@
|
|||||||
import os
|
|
||||||
import re
|
|
||||||
from subprocess import CalledProcessError
|
|
||||||
|
|
||||||
import spack
|
|
||||||
import spack.packages as packages
|
import spack.packages as packages
|
||||||
from spack.version import ver
|
|
||||||
from spack.colify import colify
|
from spack.colify import colify
|
||||||
import spack.url as url
|
|
||||||
import spack.tty as tty
|
|
||||||
|
|
||||||
description ="List spack packages"
|
description ="List available spack packages"
|
||||||
|
|
||||||
def setup_parser(subparser):
|
def setup_parser(subparser):
|
||||||
subparser.add_argument('-i', '--installed', action='store_true', dest='installed',
|
pass
|
||||||
help='List installed packages for each platform along with versions.')
|
|
||||||
|
|
||||||
|
|
||||||
def list(parser, args):
|
def list(parser, args):
|
||||||
if args.installed:
|
# Print all the package names in columns
|
||||||
colify(str(pkg) for pkg in packages.installed_packages())
|
|
||||||
else:
|
|
||||||
colify(packages.all_package_names())
|
colify(packages.all_package_names())
|
||||||
|
|
||||||
|
@@ -1,10 +1,24 @@
|
|||||||
|
import argparse
|
||||||
import spack.packages as packages
|
import spack.packages as packages
|
||||||
|
|
||||||
description="Expand downloaded archive in preparation for install"
|
description="Expand downloaded archive in preparation for install"
|
||||||
|
|
||||||
def setup_parser(subparser):
|
def setup_parser(subparser):
|
||||||
subparser.add_argument('name', help="name of package to stage")
|
subparser.add_argument(
|
||||||
|
'-n', '--no-checksum', action='store_true', dest='no_checksum',
|
||||||
|
help="Do not check packages against checksum")
|
||||||
|
subparser.add_argument(
|
||||||
|
'packages', nargs=argparse.REMAINDER, help="specs of packages to stage")
|
||||||
|
|
||||||
|
|
||||||
def stage(parser, args):
|
def stage(parser, args):
|
||||||
package = packages.get(args.name)
|
if not args.packages:
|
||||||
|
tty.die("stage requires at least one package argument")
|
||||||
|
|
||||||
|
if args.no_checksum:
|
||||||
|
spack.do_checksum = False
|
||||||
|
|
||||||
|
specs = spack.cmd.parse_specs(args.packages, concretize=True)
|
||||||
|
for spec in specs:
|
||||||
|
package = packages.get(spec)
|
||||||
package.do_stage()
|
package.do_stage()
|
||||||
|
@@ -1,13 +1,18 @@
|
|||||||
import spack.cmd
|
|
||||||
import spack.packages as packages
|
|
||||||
import argparse
|
import argparse
|
||||||
|
|
||||||
|
import spack.cmd
|
||||||
|
import spack.tty as tty
|
||||||
|
import spack.packages as packages
|
||||||
|
|
||||||
description="Remove an installed package"
|
description="Remove an installed package"
|
||||||
|
|
||||||
def setup_parser(subparser):
|
def setup_parser(subparser):
|
||||||
subparser.add_argument('-f', '--force', action='store_true', dest='force',
|
subparser.add_argument(
|
||||||
help="Ignore installed packages that depend on this one and remove it anyway.")
|
'-f', '--force', action='store_true', dest='force',
|
||||||
subparser.add_argument('packages', nargs=argparse.REMAINDER, help="specs of packages to uninstall")
|
help="Remove regardless of whether other packages depend on this one.")
|
||||||
|
subparser.add_argument(
|
||||||
|
'packages', nargs=argparse.REMAINDER, help="specs of packages to uninstall")
|
||||||
|
|
||||||
|
|
||||||
def uninstall(parser, args):
|
def uninstall(parser, args):
|
||||||
if not args.packages:
|
if not args.packages:
|
||||||
@@ -15,8 +20,20 @@ def uninstall(parser, args):
|
|||||||
|
|
||||||
specs = spack.cmd.parse_specs(args.packages)
|
specs = spack.cmd.parse_specs(args.packages)
|
||||||
|
|
||||||
# get packages to uninstall as a list.
|
# For each spec provided, make sure it refers to only one package.
|
||||||
pkgs = [packages.get(spec.name) for spec in specs]
|
# Fail and ask user to be unambiguous if it doesn't
|
||||||
|
pkgs = []
|
||||||
|
for spec in specs:
|
||||||
|
matching_specs = packages.get_installed(spec)
|
||||||
|
if len(matching_specs) > 1:
|
||||||
|
tty.die("%s matches multiple packages. Which one did you mean?"
|
||||||
|
% spec, *matching_specs)
|
||||||
|
|
||||||
|
elif len(matching_specs) == 0:
|
||||||
|
tty.die("%s does not match any installed packages." % spec)
|
||||||
|
|
||||||
|
installed_spec = matching_specs[0]
|
||||||
|
pkgs.append(packages.get(installed_spec))
|
||||||
|
|
||||||
# Sort packages to be uninstalled by the number of installed dependents
|
# Sort packages to be uninstalled by the number of installed dependents
|
||||||
# This ensures we do things in the right order
|
# This ensures we do things in the right order
|
||||||
|
@@ -39,7 +39,7 @@ def concretize_version(self, spec):
|
|||||||
if valid_versions:
|
if valid_versions:
|
||||||
spec.versions = ver([valid_versions[-1]])
|
spec.versions = ver([valid_versions[-1]])
|
||||||
else:
|
else:
|
||||||
spec.versions = ver([pkg.version])
|
spec.versions = ver([pkg.default_version])
|
||||||
|
|
||||||
|
|
||||||
def concretize_architecture(self, spec):
|
def concretize_architecture(self, spec):
|
||||||
|
@@ -1,9 +1,11 @@
|
|||||||
import exceptions
|
|
||||||
import re
|
import re
|
||||||
import os
|
import os
|
||||||
|
import os.path
|
||||||
|
import exceptions
|
||||||
|
import hashlib
|
||||||
|
|
||||||
import spack.spec as spec
|
from spack.spec import Spec
|
||||||
from spack.util import *
|
from spack.util.filesystem import *
|
||||||
from spack.error import SpackError
|
from spack.error import SpackError
|
||||||
|
|
||||||
|
|
||||||
@@ -30,11 +32,14 @@ def relative_path_for_spec(self, spec):
|
|||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
|
||||||
|
def make_path_for_spec(self, spec):
|
||||||
|
"""Creates the installation directory for a spec."""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
|
||||||
def path_for_spec(self, spec):
|
def path_for_spec(self, spec):
|
||||||
"""Return an absolute path from the root to a directory for the spec."""
|
"""Return an absolute path from the root to a directory for the spec."""
|
||||||
if not spec.concrete:
|
assert(spec.concrete)
|
||||||
raise ValueError("path_for_spec requires a concrete spec.")
|
|
||||||
|
|
||||||
path = self.relative_path_for_spec(spec)
|
path = self.relative_path_for_spec(spec)
|
||||||
assert(not path.startswith(self.root))
|
assert(not path.startswith(self.root))
|
||||||
return os.path.join(self.root, path)
|
return os.path.join(self.root, path)
|
||||||
@@ -70,22 +75,90 @@ def traverse_dirs_at_depth(root, depth, path_tuple=(), curdepth=0):
|
|||||||
yield tup
|
yield tup
|
||||||
|
|
||||||
|
|
||||||
class DefaultDirectoryLayout(DirectoryLayout):
|
class SpecHashDirectoryLayout(DirectoryLayout):
|
||||||
def __init__(self, root):
|
"""Lays out installation directories like this::
|
||||||
super(DefaultDirectoryLayout, self).__init__(root)
|
<install_root>/
|
||||||
|
<architecture>/
|
||||||
|
<compiler>/
|
||||||
|
name@version+variant-<dependency_hash>
|
||||||
|
|
||||||
|
Where dependency_hash is a SHA-1 hash prefix for the full package spec.
|
||||||
|
This accounts for dependencies.
|
||||||
|
|
||||||
|
If there is ever a hash collision, you won't be able to install a new
|
||||||
|
package unless you use a larger prefix. However, the full spec is stored
|
||||||
|
in a file called .spec in each directory, so you can migrate an entire
|
||||||
|
install directory to a new hash size pretty easily.
|
||||||
|
|
||||||
|
TODO: make a tool to migrate install directories to different hash sizes.
|
||||||
|
"""
|
||||||
|
def __init__(self, root, **kwargs):
|
||||||
|
"""Prefix size is number of characters in the SHA-1 prefix to use
|
||||||
|
to make each hash unique.
|
||||||
|
"""
|
||||||
|
prefix_size = kwargs.get('prefix_size', 8)
|
||||||
|
spec_file = kwargs.get('spec_file', '.spec')
|
||||||
|
|
||||||
|
super(SpecHashDirectoryLayout, self).__init__(root)
|
||||||
|
self.prefix_size = prefix_size
|
||||||
|
self.spec_file = spec_file
|
||||||
|
|
||||||
|
|
||||||
def relative_path_for_spec(self, spec):
|
def relative_path_for_spec(self, spec):
|
||||||
if not spec.concrete:
|
assert(spec.concrete)
|
||||||
raise ValueError("relative_path_for_spec requires a concrete spec.")
|
|
||||||
|
|
||||||
return new_path(
|
path = new_path(
|
||||||
spec.architecture,
|
spec.architecture,
|
||||||
spec.compiler,
|
spec.compiler,
|
||||||
"%s@%s%s%s" % (spec.name,
|
"%s@%s%s" % (spec.name, spec.version, spec.variants))
|
||||||
spec.version,
|
|
||||||
spec.variants,
|
if spec.dependencies:
|
||||||
spec.dependencies))
|
path += "-"
|
||||||
|
sha1 = spec.dependencies.sha1()
|
||||||
|
path += sha1[:self.prefix_size]
|
||||||
|
|
||||||
|
return path
|
||||||
|
|
||||||
|
|
||||||
|
def write_spec(self, spec, path):
|
||||||
|
"""Write a spec out to a file."""
|
||||||
|
with closing(open(path, 'w')) as spec_file:
|
||||||
|
spec_file.write(spec.tree(ids=False, cover='nodes'))
|
||||||
|
|
||||||
|
|
||||||
|
def read_spec(self, path):
|
||||||
|
"""Read the contents of a file and parse them as a spec"""
|
||||||
|
with closing(open(path)) as spec_file:
|
||||||
|
string = spec_file.read().replace('\n', '')
|
||||||
|
return Spec(string)
|
||||||
|
|
||||||
|
|
||||||
|
def make_path_for_spec(self, spec):
|
||||||
|
assert(spec.concrete)
|
||||||
|
|
||||||
|
path = self.path_for_spec(spec)
|
||||||
|
spec_file_path = new_path(path, self.spec_file)
|
||||||
|
|
||||||
|
if os.path.isdir(path):
|
||||||
|
if not os.path.isfile(spec_file_path):
|
||||||
|
raise InconsistentInstallDirectoryError(
|
||||||
|
'No spec file found at path %s' % spec_file_path)
|
||||||
|
|
||||||
|
installed_spec = self.read_spec(spec_file_path)
|
||||||
|
if installed_spec == self.spec:
|
||||||
|
raise InstallDirectoryAlreadyExistsError(path)
|
||||||
|
|
||||||
|
spec_hash = self.hash_spec(spec)
|
||||||
|
installed_hash = self.hash_spec(installed_spec)
|
||||||
|
if installed_spec == spec_hash:
|
||||||
|
raise SpecHashCollisionError(installed_hash, spec_hash)
|
||||||
|
else:
|
||||||
|
raise InconsistentInstallDirectoryError(
|
||||||
|
'Spec file in %s does not match SHA-1 hash!'
|
||||||
|
% (installed_spec, spec_file_path))
|
||||||
|
|
||||||
|
mkdirp(path)
|
||||||
|
self.write_spec(spec, spec_file_path)
|
||||||
|
|
||||||
|
|
||||||
def all_specs(self):
|
def all_specs(self):
|
||||||
@@ -94,5 +167,37 @@ def all_specs(self):
|
|||||||
|
|
||||||
for path in traverse_dirs_at_depth(self.root, 3):
|
for path in traverse_dirs_at_depth(self.root, 3):
|
||||||
arch, compiler, last_dir = path
|
arch, compiler, last_dir = path
|
||||||
spec_str = "%s%%%s=%s" % (last_dir, compiler, arch)
|
spec_file_path = new_path(
|
||||||
yield spec.parse(spec_str)
|
self.root, arch, compiler, last_dir, self.spec_file)
|
||||||
|
if os.path.exists(spec_file_path):
|
||||||
|
spec = self.read_spec(spec_file_path)
|
||||||
|
yield spec
|
||||||
|
|
||||||
|
|
||||||
|
class DirectoryLayoutError(SpackError):
|
||||||
|
"""Superclass for directory layout errors."""
|
||||||
|
def __init__(self, message):
|
||||||
|
super(DirectoryLayoutError, self).__init__(message)
|
||||||
|
|
||||||
|
|
||||||
|
class SpecHashCollisionError(DirectoryLayoutError):
|
||||||
|
"""Raised when there is a hash collision in an SpecHashDirectoryLayout."""
|
||||||
|
def __init__(self, installed_spec, new_spec, prefix_size):
|
||||||
|
super(SpecHashDirectoryLayout, self).__init__(
|
||||||
|
'Specs %s and %s have the same %d character SHA-1 prefix!'
|
||||||
|
% prefix_size, installed_spec, new_spec)
|
||||||
|
|
||||||
|
|
||||||
|
class InconsistentInstallDirectoryError(DirectoryLayoutError):
|
||||||
|
"""Raised when a package seems to be installed to the wrong place."""
|
||||||
|
def __init__(self, message):
|
||||||
|
super(InconsistentInstallDirectoryError, self).__init__(message)
|
||||||
|
|
||||||
|
|
||||||
|
class InstallDirectoryAlreadyExistsError(DirectoryLayoutError):
|
||||||
|
"""Raised when make_path_for_sec is called unnecessarily."""
|
||||||
|
def __init__(self, path):
|
||||||
|
super(InstallDirectoryAlreadyExistsError, self).__init__(
|
||||||
|
"Install path %s already exists!")
|
||||||
|
|
||||||
|
|
||||||
|
@@ -3,7 +3,7 @@
|
|||||||
from spack.version import Version
|
from spack.version import Version
|
||||||
from spack.util.filesystem import *
|
from spack.util.filesystem import *
|
||||||
from spack.util.executable import *
|
from spack.util.executable import *
|
||||||
from spack.directory_layout import DefaultDirectoryLayout
|
from spack.directory_layout import SpecHashDirectoryLayout
|
||||||
from spack.concretize import DefaultConcretizer
|
from spack.concretize import DefaultConcretizer
|
||||||
|
|
||||||
# This lives in $prefix/lib/spac/spack/__file__
|
# This lives in $prefix/lib/spac/spack/__file__
|
||||||
@@ -29,7 +29,7 @@
|
|||||||
# This controls how spack lays out install prefixes and
|
# This controls how spack lays out install prefixes and
|
||||||
# stage directories.
|
# stage directories.
|
||||||
#
|
#
|
||||||
install_layout = DefaultDirectoryLayout(install_path)
|
install_layout = SpecHashDirectoryLayout(install_path, prefix_size=6)
|
||||||
|
|
||||||
#
|
#
|
||||||
# This controls how things are concretized in spack.
|
# This controls how things are concretized in spack.
|
||||||
@@ -39,7 +39,7 @@
|
|||||||
concretizer = DefaultConcretizer()
|
concretizer = DefaultConcretizer()
|
||||||
|
|
||||||
# Version information
|
# Version information
|
||||||
spack_version = Version("0.5")
|
spack_version = Version("1.0")
|
||||||
|
|
||||||
# User's editor from the environment
|
# User's editor from the environment
|
||||||
editor = Executable(os.environ.get("EDITOR", ""))
|
editor = Executable(os.environ.get("EDITOR", ""))
|
||||||
@@ -60,6 +60,11 @@
|
|||||||
'/var/tmp/%u/spcak-stage',
|
'/var/tmp/%u/spcak-stage',
|
||||||
'/tmp/%u/spack-stage']
|
'/tmp/%u/spack-stage']
|
||||||
|
|
||||||
|
# Whether spack should allow installation of unsafe versions of
|
||||||
|
# software. "Unsafe" versions are ones it doesn't have a checksum
|
||||||
|
# for.
|
||||||
|
do_checksum = True
|
||||||
|
|
||||||
#
|
#
|
||||||
# SYS_TYPE to use for the spack installation.
|
# SYS_TYPE to use for the spack installation.
|
||||||
# Value of this determines what platform spack thinks it is by
|
# Value of this determines what platform spack thinks it is by
|
||||||
|
@@ -22,14 +22,16 @@
|
|||||||
import packages
|
import packages
|
||||||
import tty
|
import tty
|
||||||
import validate
|
import validate
|
||||||
|
import multiprocessing
|
||||||
import url
|
import url
|
||||||
|
|
||||||
from spack.multi_function import platform
|
from spack.multi_function import platform
|
||||||
|
import spack.util.crypto as crypto
|
||||||
from spack.version import *
|
from spack.version import *
|
||||||
from spack.stage import Stage
|
from spack.stage import Stage
|
||||||
from spack.util.lang import *
|
from spack.util.lang import *
|
||||||
from spack.util.crypto import md5
|
|
||||||
from spack.util.web import get_pages
|
from spack.util.web import get_pages
|
||||||
|
from spack.util.environment import *
|
||||||
|
|
||||||
|
|
||||||
class Package(object):
|
class Package(object):
|
||||||
@@ -297,9 +299,8 @@ class SomePackage(Package):
|
|||||||
|
|
||||||
def __init__(self, spec):
|
def __init__(self, spec):
|
||||||
# These attributes are required for all packages.
|
# These attributes are required for all packages.
|
||||||
attr_required(self, 'homepage')
|
attr_required(self.__class__, 'homepage')
|
||||||
attr_required(self, 'url')
|
attr_required(self.__class__, 'url')
|
||||||
attr_required(self, 'md5')
|
|
||||||
|
|
||||||
# this determines how the package should be built.
|
# this determines how the package should be built.
|
||||||
self.spec = spec
|
self.spec = spec
|
||||||
@@ -307,39 +308,34 @@ def __init__(self, spec):
|
|||||||
# Name of package is the name of its module (the file that contains it)
|
# Name of package is the name of its module (the file that contains it)
|
||||||
self.name = inspect.getmodulename(self.module.__file__)
|
self.name = inspect.getmodulename(self.module.__file__)
|
||||||
|
|
||||||
# Don't allow the default homepage.
|
|
||||||
if re.search(r'example.com', self.homepage):
|
|
||||||
tty.die("Bad homepage in %s: %s" % (self.name, self.homepage))
|
|
||||||
|
|
||||||
# Make sure URL is an allowed type
|
# Make sure URL is an allowed type
|
||||||
validate.url(self.url)
|
validate.url(self.url)
|
||||||
|
|
||||||
# Set up version
|
# patch up the URL with a new version if the spec version is concrete
|
||||||
# TODO: get rid of version attr and use spec
|
if self.spec.versions.concrete:
|
||||||
# TODO: roll this into available_versions
|
self.url = self.url_for_version(self.spec.version)
|
||||||
if not hasattr(self, 'version'):
|
|
||||||
try:
|
|
||||||
self.version = url.parse_version(self.url)
|
|
||||||
except UndetectableVersionError:
|
|
||||||
tty.die("Couldn't extract a default version from %s. You " +
|
|
||||||
"must specify it explicitly in the package." % self.url)
|
|
||||||
elif not isinstance(self.version, Version):
|
|
||||||
self.version = Version(self.version)
|
|
||||||
|
|
||||||
# This is set by scraping a web page.
|
# This is set by scraping a web page.
|
||||||
self._available_versions = None
|
self._available_versions = None
|
||||||
|
|
||||||
# This list overrides available_versions if set by the user.
|
# versions should be a dict from version to checksum, for safe versions
|
||||||
attr_setdefault(self, 'versions', None)
|
# of this package. If it's not present, make it an empty dict.
|
||||||
if self.versions and not isinstance(self.versions, VersionList):
|
if not hasattr(self, 'versions'):
|
||||||
self.versions = VersionList(self.versions)
|
self.versions = {}
|
||||||
|
|
||||||
# Empty at first; only compute dependent packages if necessary
|
if not isinstance(self.versions, dict):
|
||||||
self._dependents = None
|
raise ValueError("versions attribute of package %s must be a dict!"
|
||||||
|
% self.name)
|
||||||
|
|
||||||
|
# Version-ize the keys in versions dict
|
||||||
|
try:
|
||||||
|
self.versions = { Version(v):h for v,h in self.versions.items() }
|
||||||
|
except ValueError:
|
||||||
|
raise ValueError("Keys of versions dict in package %s must be versions!"
|
||||||
|
% self.name)
|
||||||
|
|
||||||
# stage used to build this package.
|
# stage used to build this package.
|
||||||
# TODO: hash the concrete spec and use that as the stage name.
|
self._stage = None
|
||||||
self.stage = Stage(self.url, "%s-%s" % (self.name, self.version))
|
|
||||||
|
|
||||||
# Set a default list URL (place to find available versions)
|
# Set a default list URL (place to find available versions)
|
||||||
if not hasattr(self, 'list_url'):
|
if not hasattr(self, 'list_url'):
|
||||||
@@ -349,6 +345,34 @@ def __init__(self, spec):
|
|||||||
self.list_depth = 1
|
self.list_depth = 1
|
||||||
|
|
||||||
|
|
||||||
|
@property
|
||||||
|
def default_version(self):
|
||||||
|
"""Get the version in the default URL for this package,
|
||||||
|
or fails."""
|
||||||
|
try:
|
||||||
|
return url.parse_version(self.__class__.url)
|
||||||
|
except UndetectableVersionError:
|
||||||
|
tty.die("Couldn't extract a default version from %s. You " +
|
||||||
|
"must specify it explicitly in the package." % self.url)
|
||||||
|
|
||||||
|
|
||||||
|
@property
|
||||||
|
def version(self):
|
||||||
|
if not self.spec.concrete:
|
||||||
|
raise ValueError("Can only get version of concrete package.")
|
||||||
|
return self.spec.versions[0]
|
||||||
|
|
||||||
|
|
||||||
|
@property
|
||||||
|
def stage(self):
|
||||||
|
if not self.spec.concrete:
|
||||||
|
raise ValueError("Can only get a stage for a concrete package.")
|
||||||
|
|
||||||
|
if self._stage is None:
|
||||||
|
self._stage = Stage(self.url, str(self.spec))
|
||||||
|
return self._stage
|
||||||
|
|
||||||
|
|
||||||
def add_commands_to_module(self):
|
def add_commands_to_module(self):
|
||||||
"""Populate the module scope of install() with some useful functions.
|
"""Populate the module scope of install() with some useful functions.
|
||||||
This makes things easier for package writers.
|
This makes things easier for package writers.
|
||||||
@@ -406,13 +430,6 @@ def add_commands_to_module(self):
|
|||||||
m.man7 = new_path(m.man, 'man7')
|
m.man7 = new_path(m.man, 'man7')
|
||||||
m.man8 = new_path(m.man, 'man8')
|
m.man8 = new_path(m.man, 'man8')
|
||||||
|
|
||||||
@property
|
|
||||||
def dependents(self):
|
|
||||||
"""List of names of packages that depend on this one."""
|
|
||||||
if self._dependents is None:
|
|
||||||
packages.compute_dependents()
|
|
||||||
return tuple(self._dependents)
|
|
||||||
|
|
||||||
|
|
||||||
def preorder_traversal(self, visited=None, **kwargs):
|
def preorder_traversal(self, visited=None, **kwargs):
|
||||||
"""This does a preorder traversal of the package's dependence DAG."""
|
"""This does a preorder traversal of the package's dependence DAG."""
|
||||||
@@ -499,20 +516,15 @@ def installed(self):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def installed_dependents(self):
|
def installed_dependents(self):
|
||||||
installed = [d for d in self.dependents if packages.get(d).installed]
|
"""Return a list of the specs of all installed packages that depend
|
||||||
all_deps = []
|
on this one."""
|
||||||
for d in installed:
|
dependents = []
|
||||||
all_deps.append(d)
|
for spec in packages.installed_package_specs():
|
||||||
all_deps.extend(packages.get(d).installed_dependents)
|
if self.name in spec.dependencies:
|
||||||
return tuple(all_deps)
|
dep_spec = spec.dependencies[self.name]
|
||||||
|
if self.spec == dep_spec:
|
||||||
|
dependents.append(dep_spec)
|
||||||
@property
|
return dependents
|
||||||
def all_dependents(self):
|
|
||||||
all_deps = list(self.dependents)
|
|
||||||
for pkg in self.dependents:
|
|
||||||
all_deps.extend(packages.get(pkg).all_dependents)
|
|
||||||
return tuple(all_deps)
|
|
||||||
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -533,7 +545,7 @@ def url_version(self, version):
|
|||||||
|
|
||||||
def url_for_version(self, version):
|
def url_for_version(self, version):
|
||||||
"""Gives a URL that you can download a new version of this package from."""
|
"""Gives a URL that you can download a new version of this package from."""
|
||||||
return url.substitute_version(self.url, self.url_version(version))
|
return url.substitute_version(self.__class__.url, self.url_version(version))
|
||||||
|
|
||||||
|
|
||||||
def remove_prefix(self):
|
def remove_prefix(self):
|
||||||
@@ -547,28 +559,38 @@ def do_fetch(self):
|
|||||||
"""Creates a stage directory and downloads the taball for this package.
|
"""Creates a stage directory and downloads the taball for this package.
|
||||||
Working directory will be set to the stage directory.
|
Working directory will be set to the stage directory.
|
||||||
"""
|
"""
|
||||||
stage = self.stage
|
self.stage.setup()
|
||||||
stage.setup()
|
|
||||||
stage.fetch()
|
|
||||||
|
|
||||||
archive_md5 = md5(stage.archive_file)
|
if spack.do_checksum and not self.version in self.versions:
|
||||||
if archive_md5 != self.md5:
|
tty.die("Cannot fetch %s@%s safely; there is no checksum on file for this "
|
||||||
tty.die("MD5 Checksum failed for %s. Expected %s but got %s."
|
"version." % (self.name, self.version),
|
||||||
% (self.name, self.md5, archive_md5))
|
"Add a checksum to the package file, or use --no-checksum to "
|
||||||
|
"skip this check.")
|
||||||
|
|
||||||
|
self.stage.fetch()
|
||||||
|
|
||||||
|
if self.version in self.versions:
|
||||||
|
digest = self.versions[self.version]
|
||||||
|
checker = crypto.Checker(digest)
|
||||||
|
if checker.check(self.stage.archive_file):
|
||||||
|
tty.msg("Checksum passed for %s" % self.name)
|
||||||
|
else:
|
||||||
|
tty.die("%s checksum failed for %s. Expected %s but got %s."
|
||||||
|
% (checker.hash_name, self.name, digest, checker.sum))
|
||||||
|
|
||||||
|
|
||||||
def do_stage(self):
|
def do_stage(self):
|
||||||
"""Unpacks the fetched tarball, then changes into the expanded tarball directory."""
|
"""Unpacks the fetched tarball, then changes into the expanded tarball
|
||||||
|
directory."""
|
||||||
self.do_fetch()
|
self.do_fetch()
|
||||||
stage = self.stage
|
|
||||||
|
|
||||||
archive_dir = stage.expanded_archive_path
|
archive_dir = self.stage.expanded_archive_path
|
||||||
if not archive_dir:
|
if not archive_dir:
|
||||||
tty.msg("Staging archive: %s" % stage.archive_file)
|
tty.msg("Staging archive: %s" % self.stage.archive_file)
|
||||||
stage.expand_archive()
|
self.stage.expand_archive()
|
||||||
else:
|
else:
|
||||||
tty.msg("Already staged %s" % self.name)
|
tty.msg("Already staged %s" % self.name)
|
||||||
stage.chdir_to_archive()
|
self.stage.chdir_to_archive()
|
||||||
|
|
||||||
|
|
||||||
def do_install(self):
|
def do_install(self):
|
||||||
@@ -595,18 +617,14 @@ def do_install(self):
|
|||||||
|
|
||||||
tty.msg("Building %s." % self.name)
|
tty.msg("Building %s." % self.name)
|
||||||
try:
|
try:
|
||||||
|
# create the install directory (allow the layout to handle this in
|
||||||
|
# case it needs to add extra files)
|
||||||
|
spack.install_layout.make_path_for_spec(self.spec)
|
||||||
|
|
||||||
self.install(self.prefix)
|
self.install(self.prefix)
|
||||||
if not os.path.isdir(self.prefix):
|
if not os.path.isdir(self.prefix):
|
||||||
tty.die("Install failed for %s. No install dir created." % self.name)
|
tty.die("Install failed for %s. No install dir created." % self.name)
|
||||||
|
|
||||||
except subprocess.CalledProcessError, e:
|
|
||||||
self.remove_prefix()
|
|
||||||
tty.die("Install failed for %s" % self.name, e.message)
|
|
||||||
|
|
||||||
except KeyboardInterrupt, e:
|
|
||||||
self.remove_prefix()
|
|
||||||
raise
|
|
||||||
|
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
if not self.dirty:
|
if not self.dirty:
|
||||||
self.remove_prefix()
|
self.remove_prefix()
|
||||||
@@ -640,8 +658,9 @@ def setup_install_environment(self):
|
|||||||
path_set(SPACK_ENV_PATH, env_paths)
|
path_set(SPACK_ENV_PATH, env_paths)
|
||||||
|
|
||||||
# Pass along prefixes of dependencies here
|
# Pass along prefixes of dependencies here
|
||||||
path_set(SPACK_DEPENDENCIES,
|
path_set(
|
||||||
[dep.package.prefix for dep in self.dependencies.values()])
|
SPACK_DEPENDENCIES,
|
||||||
|
[dep.package.prefix for dep in self.spec.dependencies.values()])
|
||||||
|
|
||||||
# Install location
|
# Install location
|
||||||
os.environ[SPACK_PREFIX] = self.prefix
|
os.environ[SPACK_PREFIX] = self.prefix
|
||||||
@@ -652,7 +671,7 @@ def setup_install_environment(self):
|
|||||||
|
|
||||||
def do_install_dependencies(self):
|
def do_install_dependencies(self):
|
||||||
# Pass along paths of dependencies here
|
# Pass along paths of dependencies here
|
||||||
for dep in self.dependencies.values():
|
for dep in self.spec.dependencies.values():
|
||||||
dep.package.do_install()
|
dep.package.do_install()
|
||||||
|
|
||||||
|
|
||||||
@@ -717,7 +736,7 @@ def fetch_available_versions(self):
|
|||||||
if not self._available_versions:
|
if not self._available_versions:
|
||||||
self._available_versions = VersionList()
|
self._available_versions = VersionList()
|
||||||
url_regex = os.path.basename(url.wildcard_version(self.url))
|
url_regex = os.path.basename(url.wildcard_version(self.url))
|
||||||
wildcard = self.version.wildcard()
|
wildcard = self.default_version.wildcard()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
page_map = get_pages(self.list_url, depth=self.list_depth)
|
page_map = get_pages(self.list_url, depth=self.list_depth)
|
||||||
@@ -748,7 +767,7 @@ def fetch_available_versions(self):
|
|||||||
def available_versions(self):
|
def available_versions(self):
|
||||||
# If the package overrode available_versions, then use that.
|
# If the package overrode available_versions, then use that.
|
||||||
if self.versions is not None:
|
if self.versions is not None:
|
||||||
return self.versions
|
return VersionList(self.versions.keys())
|
||||||
else:
|
else:
|
||||||
vlist = self.fetch_available_versions()
|
vlist = self.fetch_available_versions()
|
||||||
if not vlist:
|
if not vlist:
|
||||||
|
@@ -21,6 +21,16 @@
|
|||||||
instances = {}
|
instances = {}
|
||||||
|
|
||||||
|
|
||||||
|
def autospec(function):
|
||||||
|
"""Decorator that automatically converts the argument of a single-arg
|
||||||
|
function to a Spec."""
|
||||||
|
def converter(arg):
|
||||||
|
if not isinstance(arg, spack.spec.Spec):
|
||||||
|
arg = spack.spec.Spec(arg)
|
||||||
|
return function(arg)
|
||||||
|
return converter
|
||||||
|
|
||||||
|
|
||||||
class ProviderIndex(object):
|
class ProviderIndex(object):
|
||||||
"""This is a dict of dicts used for finding providers of particular
|
"""This is a dict of dicts used for finding providers of particular
|
||||||
virtual dependencies. The dict of dicts looks like:
|
virtual dependencies. The dict of dicts looks like:
|
||||||
@@ -87,23 +97,32 @@ def providers_for(self, *vpkg_specs):
|
|||||||
return sorted(providers)
|
return sorted(providers)
|
||||||
|
|
||||||
|
|
||||||
def get(pkg_name):
|
@autospec
|
||||||
if not pkg_name in instances:
|
def get(spec):
|
||||||
package_class = get_class_for_package_name(pkg_name)
|
if spec.virtual:
|
||||||
instances[pkg_name] = package_class(pkg_name)
|
raise UnknownPackageError(spec.name)
|
||||||
|
|
||||||
return instances[pkg_name]
|
if not spec in instances:
|
||||||
|
package_class = get_class_for_package_name(spec.name)
|
||||||
|
instances[spec.name] = package_class(spec)
|
||||||
|
|
||||||
|
return instances[spec.name]
|
||||||
|
|
||||||
|
|
||||||
|
@autospec
|
||||||
|
def get_installed(spec):
|
||||||
|
return [s for s in installed_package_specs() if s.satisfies(spec)]
|
||||||
|
|
||||||
|
|
||||||
|
@autospec
|
||||||
def providers_for(vpkg_spec):
|
def providers_for(vpkg_spec):
|
||||||
if providers_for.index is None:
|
if not hasattr(providers_for, 'index'):
|
||||||
providers_for.index = ProviderIndex(all_package_names())
|
providers_for.index = ProviderIndex(all_package_names())
|
||||||
|
|
||||||
providers = providers_for.index.providers_for(vpkg_spec)
|
providers = providers_for.index.providers_for(vpkg_spec)
|
||||||
if not providers:
|
if not providers:
|
||||||
raise UnknownPackageError("No such virtual package: %s" % vpkg_spec)
|
raise UnknownPackageError("No such virtual package: %s" % vpkg_spec)
|
||||||
return providers
|
return providers
|
||||||
providers_for.index = None
|
|
||||||
|
|
||||||
|
|
||||||
def valid_package_name(pkg_name):
|
def valid_package_name(pkg_name):
|
||||||
@@ -122,7 +141,7 @@ def filename_for_package_name(pkg_name):
|
|||||||
return new_path(spack.packages_path, "%s.py" % pkg_name)
|
return new_path(spack.packages_path, "%s.py" % pkg_name)
|
||||||
|
|
||||||
|
|
||||||
def installed_packages():
|
def installed_package_specs():
|
||||||
return spack.install_layout.all_specs()
|
return spack.install_layout.all_specs()
|
||||||
|
|
||||||
|
|
||||||
@@ -198,6 +217,9 @@ def compute_dependents():
|
|||||||
"""Reads in all package files and sets dependence information on
|
"""Reads in all package files and sets dependence information on
|
||||||
Package objects in memory.
|
Package objects in memory.
|
||||||
"""
|
"""
|
||||||
|
if not hasattr(compute_dependents, index):
|
||||||
|
compute_dependents.index = {}
|
||||||
|
|
||||||
for pkg in all_packages():
|
for pkg in all_packages():
|
||||||
if pkg._dependents is None:
|
if pkg._dependents is None:
|
||||||
pkg._dependents = []
|
pkg._dependents = []
|
||||||
|
@@ -3,7 +3,6 @@
|
|||||||
class Callpath(Package):
|
class Callpath(Package):
|
||||||
homepage = "https://github.com/tgamblin/callpath"
|
homepage = "https://github.com/tgamblin/callpath"
|
||||||
url = "http://github.com/tgamblin/callpath-0.2.tar.gz"
|
url = "http://github.com/tgamblin/callpath-0.2.tar.gz"
|
||||||
md5 = "foobarbaz"
|
|
||||||
|
|
||||||
depends_on("dyninst")
|
depends_on("dyninst")
|
||||||
depends_on("mpich")
|
depends_on("mpich")
|
||||||
|
@@ -3,7 +3,7 @@
|
|||||||
class Cmake(Package):
|
class Cmake(Package):
|
||||||
homepage = 'https://www.cmake.org'
|
homepage = 'https://www.cmake.org'
|
||||||
url = 'http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz'
|
url = 'http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz'
|
||||||
md5 = '097278785da7182ec0aea8769d06860c'
|
versions = { '2.8.10.2' : '097278785da7182ec0aea8769d06860c' }
|
||||||
|
|
||||||
def install(self, prefix):
|
def install(self, prefix):
|
||||||
configure('--prefix=%s' % prefix,
|
configure('--prefix=%s' % prefix,
|
||||||
|
@@ -3,9 +3,11 @@
|
|||||||
class Dyninst(Package):
|
class Dyninst(Package):
|
||||||
homepage = "https://paradyn.org"
|
homepage = "https://paradyn.org"
|
||||||
url = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz"
|
url = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz"
|
||||||
md5 = "bf03b33375afa66fe0efa46ce3f4b17a"
|
|
||||||
list_url = "http://www.dyninst.org/downloads/dyninst-8.x"
|
list_url = "http://www.dyninst.org/downloads/dyninst-8.x"
|
||||||
|
|
||||||
|
versions = {'8.1.2' : 'bf03b33375afa66fe0efa46ce3f4b17a',
|
||||||
|
'8.1.1' : '1f8743e3a5662b25ce64a7edf647e77d' }
|
||||||
|
|
||||||
depends_on("libelf")
|
depends_on("libelf")
|
||||||
depends_on("libdwarf")
|
depends_on("libdwarf")
|
||||||
|
|
||||||
|
@@ -9,7 +9,9 @@ class Libdwarf(Package):
|
|||||||
url = "http://www.prevanders.net/libdwarf-20130729.tar.gz"
|
url = "http://www.prevanders.net/libdwarf-20130729.tar.gz"
|
||||||
list_url = homepage
|
list_url = homepage
|
||||||
|
|
||||||
md5 = "64b42692e947d5180e162e46c689dfbf"
|
versions = { '20130729' : '4cc5e48693f7b93b7aa0261e63c0e21d',
|
||||||
|
'20130207' : '64b42692e947d5180e162e46c689dfbf',
|
||||||
|
'20130126' : 'ded74a5e90edb5a12aac3c29d260c5db' }
|
||||||
|
|
||||||
depends_on("libelf")
|
depends_on("libelf")
|
||||||
|
|
||||||
@@ -49,7 +51,7 @@ def install(self, prefix):
|
|||||||
install('dwarfdump.1', man1)
|
install('dwarfdump.1', man1)
|
||||||
|
|
||||||
|
|
||||||
@platform('macosx_10.8_x86_64')
|
# @platform('macosx_10.8_x86_64')
|
||||||
def install(self, prefix):
|
# def install(self, prefix):
|
||||||
raise UnsupportedPlatformError(
|
# raise UnsupportedPlatformError(
|
||||||
"libdwarf doesn't currently build on Mac OS X.")
|
# "libdwarf doesn't currently build on Mac OS X.")
|
||||||
|
@@ -3,7 +3,8 @@
|
|||||||
class Libelf(Package):
|
class Libelf(Package):
|
||||||
homepage = "http://www.mr511.de/software/english.html"
|
homepage = "http://www.mr511.de/software/english.html"
|
||||||
url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
|
url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
|
||||||
md5 = "4136d7b4c04df68b686570afa26988ac"
|
|
||||||
|
versions = { '0.8.13' : '4136d7b4c04df68b686570afa26988ac' }
|
||||||
|
|
||||||
def install(self, prefix):
|
def install(self, prefix):
|
||||||
configure("--prefix=%s" % prefix,
|
configure("--prefix=%s" % prefix,
|
||||||
|
@@ -3,7 +3,8 @@
|
|||||||
class Libunwind(Package):
|
class Libunwind(Package):
|
||||||
homepage = "http://www.nongnu.org/libunwind/"
|
homepage = "http://www.nongnu.org/libunwind/"
|
||||||
url = "http://download.savannah.gnu.org/releases/libunwind/libunwind-1.1.tar.gz"
|
url = "http://download.savannah.gnu.org/releases/libunwind/libunwind-1.1.tar.gz"
|
||||||
md5 = "fb4ea2f6fbbe45bf032cd36e586883ce"
|
|
||||||
|
versions = { '1.1' : 'fb4ea2f6fbbe45bf032cd36e586883ce' }
|
||||||
|
|
||||||
def install(self, prefix):
|
def install(self, prefix):
|
||||||
configure("--prefix=%s" % prefix)
|
configure("--prefix=%s" % prefix)
|
||||||
|
@@ -3,15 +3,12 @@
|
|||||||
class Mpich(Package):
|
class Mpich(Package):
|
||||||
"""MPICH is a high performance and widely portable implementation of
|
"""MPICH is a high performance and widely portable implementation of
|
||||||
the Message Passing Interface (MPI) standard."""
|
the Message Passing Interface (MPI) standard."""
|
||||||
|
|
||||||
homepage = "http://www.mpich.org"
|
homepage = "http://www.mpich.org"
|
||||||
url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz"
|
url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz"
|
||||||
md5 = "9c5d5d4fe1e17dd12153f40bc5b6dbc0"
|
|
||||||
|
|
||||||
list_url = "http://www.mpich.org/static/downloads/"
|
list_url = "http://www.mpich.org/static/downloads/"
|
||||||
list_depth = 2
|
list_depth = 2
|
||||||
|
|
||||||
versions = ['3.0.4', '3.0.3', '3.0.2', '3.0.1', '3.0']
|
versions = { '3.0.4' : '9c5d5d4fe1e17dd12153f40bc5b6dbc0' }
|
||||||
|
|
||||||
provides('mpi@:3', when='@3:')
|
provides('mpi@:3', when='@3:')
|
||||||
provides('mpi@:1', when='@1:')
|
provides('mpi@:1', when='@1:')
|
||||||
|
@@ -3,7 +3,6 @@
|
|||||||
class Mpileaks(Package):
|
class Mpileaks(Package):
|
||||||
homepage = "http://www.llnl.gov"
|
homepage = "http://www.llnl.gov"
|
||||||
url = "http://www.llnl.gov/mpileaks-1.0.tar.gz"
|
url = "http://www.llnl.gov/mpileaks-1.0.tar.gz"
|
||||||
md5 = "foobarbaz"
|
|
||||||
|
|
||||||
depends_on("mpich")
|
depends_on("mpich")
|
||||||
depends_on("callpath")
|
depends_on("callpath")
|
||||||
|
@@ -70,6 +70,7 @@
|
|||||||
from StringIO import StringIO
|
from StringIO import StringIO
|
||||||
|
|
||||||
import tty
|
import tty
|
||||||
|
import hashlib
|
||||||
import spack.parse
|
import spack.parse
|
||||||
import spack.error
|
import spack.error
|
||||||
import spack.compilers
|
import spack.compilers
|
||||||
@@ -238,6 +239,12 @@ def satisfies(self, other):
|
|||||||
if name in other)
|
if name in other)
|
||||||
|
|
||||||
|
|
||||||
|
def sha1(self):
|
||||||
|
sha = hashlib.sha1()
|
||||||
|
sha.update(str(self))
|
||||||
|
return sha.hexdigest()
|
||||||
|
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
sorted_dep_names = sorted(self.keys())
|
sorted_dep_names = sorted(self.keys())
|
||||||
return ''.join(
|
return ''.join(
|
||||||
@@ -341,10 +348,7 @@ def root(self):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def package(self):
|
def package(self):
|
||||||
if self.virtual:
|
return packages.get(self)
|
||||||
raise TypeError("Cannot get package for virtual spec '" +
|
|
||||||
self.name + "'")
|
|
||||||
return packages.get(self.name)
|
|
||||||
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -766,8 +770,8 @@ def copy(self, **kwargs):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def version(self):
|
def version(self):
|
||||||
if not self.concrete:
|
if not self.versions.concrete:
|
||||||
raise SpecError("Spec is not concrete: " + str(self))
|
raise SpecError("Spec version is not concrete: " + str(self))
|
||||||
return self.versions[0]
|
return self.versions[0]
|
||||||
|
|
||||||
|
|
||||||
@@ -827,23 +831,35 @@ def tree(self, **kwargs):
|
|||||||
with indentation."""
|
with indentation."""
|
||||||
color = kwargs.get('color', False)
|
color = kwargs.get('color', False)
|
||||||
depth = kwargs.get('depth', False)
|
depth = kwargs.get('depth', False)
|
||||||
cover = kwargs.get('cover', 'paths')
|
showid = kwargs.get('ids', False)
|
||||||
|
cover = kwargs.get('cover', 'nodes')
|
||||||
|
indent = kwargs.get('indent', 0)
|
||||||
|
|
||||||
out = ""
|
out = ""
|
||||||
cur_id = 0
|
cur_id = 0
|
||||||
ids = {}
|
ids = {}
|
||||||
for d, node in self.preorder_traversal(cover=cover, depth=True):
|
for d, node in self.preorder_traversal(cover=cover, depth=True):
|
||||||
|
out += " " * indent
|
||||||
if depth:
|
if depth:
|
||||||
out += "%-4d" % d
|
out += "%-4d" % d
|
||||||
if not id(node) in ids:
|
if not id(node) in ids:
|
||||||
cur_id += 1
|
cur_id += 1
|
||||||
ids[id(node)] = cur_id
|
ids[id(node)] = cur_id
|
||||||
|
if showid:
|
||||||
out += "%-4d" % ids[id(node)]
|
out += "%-4d" % ids[id(node)]
|
||||||
out += (" " * d)
|
out += (" " * d)
|
||||||
|
if d > 0:
|
||||||
|
out += "^"
|
||||||
out += node.str_no_deps(color=color) + "\n"
|
out += node.str_no_deps(color=color) + "\n"
|
||||||
return out
|
return out
|
||||||
|
|
||||||
|
|
||||||
|
def sha1(self):
|
||||||
|
sha = hashlib.sha1()
|
||||||
|
sha.update(str(self))
|
||||||
|
return sha.hexdigest()
|
||||||
|
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return str(self)
|
return str(self)
|
||||||
|
|
||||||
|
@@ -269,11 +269,6 @@ def destroy(self):
|
|||||||
|
|
||||||
|
|
||||||
|
|
||||||
def can_access(file=spack.stage_path):
|
|
||||||
"""True if we have read/write access to the file."""
|
|
||||||
return os.access(file, os.R_OK|os.W_OK)
|
|
||||||
|
|
||||||
|
|
||||||
def ensure_access(file=spack.stage_path):
|
def ensure_access(file=spack.stage_path):
|
||||||
"""Ensure we can access a directory and die with an error if we can't."""
|
"""Ensure we can access a directory and die with an error if we can't."""
|
||||||
if not can_access(file):
|
if not can_access(file):
|
||||||
@@ -305,10 +300,18 @@ def find_tmp_root():
|
|||||||
if spack.use_tmp_stage:
|
if spack.use_tmp_stage:
|
||||||
for tmp in spack.tmp_dirs:
|
for tmp in spack.tmp_dirs:
|
||||||
try:
|
try:
|
||||||
mkdirp(expand_user(tmp))
|
# Replace %u with username
|
||||||
return tmp
|
expanded = expand_user(tmp)
|
||||||
|
|
||||||
|
# try to create a directory for spack stuff
|
||||||
|
mkdirp(expanded)
|
||||||
|
|
||||||
|
# return it if successful.
|
||||||
|
return expanded
|
||||||
|
|
||||||
except OSError:
|
except OSError:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
@@ -1 +0,0 @@
|
|||||||
skip_test = True
|
|
||||||
|
@@ -2,12 +2,11 @@
|
|||||||
|
|
||||||
class Callpath(Package):
|
class Callpath(Package):
|
||||||
homepage = "https://github.com/tgamblin/callpath"
|
homepage = "https://github.com/tgamblin/callpath"
|
||||||
url = "http://github.com/tgamblin/callpath-0.2.tar.gz"
|
url = "http://github.com/tgamblin/callpath-1.0.tar.gz"
|
||||||
md5 = "foobarbaz"
|
|
||||||
|
|
||||||
versions = { 0.8 : 'bf03b33375afa66fe0efa46ce3f4b17a',
|
versions = { 0.8 : 'foobarbaz',
|
||||||
0.9 : 'bf03b33375afa66fe0efa46ce3f4b17a',
|
0.9 : 'foobarbaz',
|
||||||
1.0 : 'bf03b33375afa66fe0efa46ce3f4b17a' }
|
1.0 : 'foobarbaz' }
|
||||||
|
|
||||||
depends_on("dyninst")
|
depends_on("dyninst")
|
||||||
depends_on("mpi")
|
depends_on("mpi")
|
||||||
|
@@ -3,8 +3,6 @@
|
|||||||
class Dyninst(Package):
|
class Dyninst(Package):
|
||||||
homepage = "https://paradyn.org"
|
homepage = "https://paradyn.org"
|
||||||
url = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz"
|
url = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz"
|
||||||
md5 = "bf03b33375afa66fe0efa46ce3f4b17a"
|
|
||||||
|
|
||||||
list_url = "http://www.dyninst.org/downloads/dyninst-8.x"
|
list_url = "http://www.dyninst.org/downloads/dyninst-8.x"
|
||||||
|
|
||||||
versions = {
|
versions = {
|
||||||
|
@@ -3,9 +3,7 @@
|
|||||||
class Fake(Package):
|
class Fake(Package):
|
||||||
homepage = "http://www.fake-spack-example.org"
|
homepage = "http://www.fake-spack-example.org"
|
||||||
url = "http://www.fake-spack-example.org/downloads/fake-1.0.tar.gz"
|
url = "http://www.fake-spack-example.org/downloads/fake-1.0.tar.gz"
|
||||||
md5 = "foobarbaz"
|
versions = { '1.0' : 'foobarbaz' }
|
||||||
|
|
||||||
versions = '1.0'
|
|
||||||
|
|
||||||
def install(self, prefix):
|
def install(self, prefix):
|
||||||
configure("--prefix=%s" % prefix)
|
configure("--prefix=%s" % prefix)
|
||||||
|
@@ -9,13 +9,13 @@ class Libdwarf(Package):
|
|||||||
url = "http://www.prevanders.net/libdwarf-20130729.tar.gz"
|
url = "http://www.prevanders.net/libdwarf-20130729.tar.gz"
|
||||||
list_url = homepage
|
list_url = homepage
|
||||||
|
|
||||||
md5 = "64b42692e947d5180e162e46c689dfbf"
|
versions = { 20130729 : "64b42692e947d5180e162e46c689dfbf",
|
||||||
|
20130207 : 'foobarbaz',
|
||||||
versions = [20070703, 20111030, 20130207]
|
20111030 : 'foobarbaz',
|
||||||
|
20070703 : 'foobarbaz' }
|
||||||
|
|
||||||
depends_on("libelf")
|
depends_on("libelf")
|
||||||
|
|
||||||
|
|
||||||
def clean(self):
|
def clean(self):
|
||||||
for dir in dwarf_dirs:
|
for dir in dwarf_dirs:
|
||||||
with working_dir(dir):
|
with working_dir(dir):
|
||||||
@@ -49,9 +49,3 @@ def install(self, prefix):
|
|||||||
install('dwarfdump', bin)
|
install('dwarfdump', bin)
|
||||||
install('dwarfdump.conf', lib)
|
install('dwarfdump.conf', lib)
|
||||||
install('dwarfdump.1', man1)
|
install('dwarfdump.1', man1)
|
||||||
|
|
||||||
|
|
||||||
@platform('macosx_10.8_x86_64')
|
|
||||||
def install(self, prefix):
|
|
||||||
raise UnsupportedPlatformError(
|
|
||||||
"libdwarf doesn't currently build on Mac OS X.")
|
|
||||||
|
@@ -3,10 +3,8 @@
|
|||||||
class Libelf(Package):
|
class Libelf(Package):
|
||||||
homepage = "http://www.mr511.de/software/english.html"
|
homepage = "http://www.mr511.de/software/english.html"
|
||||||
url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
|
url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
|
||||||
md5 = "4136d7b4c04df68b686570afa26988ac"
|
|
||||||
|
|
||||||
versions = {
|
versions = {'0.8.13' : '4136d7b4c04df68b686570afa26988ac',
|
||||||
'0.8.13' : '4136d7b4c04df68b686570afa26988ac',
|
|
||||||
'0.8.12' : 'e21f8273d9f5f6d43a59878dc274fec7',
|
'0.8.12' : 'e21f8273d9f5f6d43a59878dc274fec7',
|
||||||
'0.8.10' : '9db4d36c283d9790d8fa7df1f4d7b4d9' }
|
'0.8.10' : '9db4d36c283d9790d8fa7df1f4d7b4d9' }
|
||||||
|
|
||||||
|
@@ -3,12 +3,14 @@
|
|||||||
class Mpich(Package):
|
class Mpich(Package):
|
||||||
homepage = "http://www.mpich.org"
|
homepage = "http://www.mpich.org"
|
||||||
url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz"
|
url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz"
|
||||||
md5 = "9c5d5d4fe1e17dd12153f40bc5b6dbc0"
|
|
||||||
|
|
||||||
list_url = "http://www.mpich.org/static/downloads/"
|
list_url = "http://www.mpich.org/static/downloads/"
|
||||||
list_depth = 2
|
list_depth = 2
|
||||||
|
|
||||||
versions = '3.0.4, 3.0.3, 3.0.2, 3.0.1, 3.0'
|
versions = { '3.0.4' : '9c5d5d4fe1e17dd12153f40bc5b6dbc0',
|
||||||
|
'3.0.3' : 'foobarbaz',
|
||||||
|
'3.0.2' : 'foobarbaz',
|
||||||
|
'3.0.1' : 'foobarbaz',
|
||||||
|
'3.0' : 'foobarbaz' }
|
||||||
|
|
||||||
provides('mpi@:3', when='@3:')
|
provides('mpi@:3', when='@3:')
|
||||||
provides('mpi@:1', when='@1:')
|
provides('mpi@:1', when='@1:')
|
||||||
|
@@ -3,12 +3,15 @@
|
|||||||
class Mpich2(Package):
|
class Mpich2(Package):
|
||||||
homepage = "http://www.mpich.org"
|
homepage = "http://www.mpich.org"
|
||||||
url = "http://www.mpich.org/static/downloads/1.5/mpich2-1.5.tar.gz"
|
url = "http://www.mpich.org/static/downloads/1.5/mpich2-1.5.tar.gz"
|
||||||
md5 = "9c5d5d4fe1e17dd12153f40bc5b6dbc0"
|
|
||||||
|
|
||||||
list_url = "http://www.mpich.org/static/downloads/"
|
list_url = "http://www.mpich.org/static/downloads/"
|
||||||
list_depth = 2
|
list_depth = 2
|
||||||
|
|
||||||
versions = '1.5, 1.4, 1.3, 1.2, 1.1, 1.0'
|
versions = { '1.5' : '9c5d5d4fe1e17dd12153f40bc5b6dbc0',
|
||||||
|
'1.4' : 'foobarbaz',
|
||||||
|
'1.3' : 'foobarbaz',
|
||||||
|
'1.2' : 'foobarbaz',
|
||||||
|
'1.1' : 'foobarbaz',
|
||||||
|
'1.0' : 'foobarbaz' }
|
||||||
|
|
||||||
provides('mpi@:2.0')
|
provides('mpi@:2.0')
|
||||||
provides('mpi@:2.1', when='@1.1:')
|
provides('mpi@:2.1', when='@1.1:')
|
||||||
|
@@ -3,12 +3,11 @@
|
|||||||
class Mpileaks(Package):
|
class Mpileaks(Package):
|
||||||
homepage = "http://www.llnl.gov"
|
homepage = "http://www.llnl.gov"
|
||||||
url = "http://www.llnl.gov/mpileaks-1.0.tar.gz"
|
url = "http://www.llnl.gov/mpileaks-1.0.tar.gz"
|
||||||
md5 = "foobarbaz"
|
|
||||||
|
|
||||||
versions = { 1.0 : None,
|
versions = { 1.0 : 'foobarbaz',
|
||||||
2.1 : None,
|
2.1 : 'foobarbaz',
|
||||||
2.2 : None,
|
2.2 : 'foobarbaz',
|
||||||
2.3 : None }
|
2.3 : 'foobarbaz' }
|
||||||
|
|
||||||
depends_on("mpi")
|
depends_on("mpi")
|
||||||
depends_on("callpath")
|
depends_on("callpath")
|
||||||
|
@@ -5,9 +5,8 @@ class Zmpi(Package):
|
|||||||
with dependencies."""
|
with dependencies."""
|
||||||
homepage = "http://www.spack-fake-zmpi.org"
|
homepage = "http://www.spack-fake-zmpi.org"
|
||||||
url = "http://www.spack-fake-zmpi.org/downloads/zmpi-1.0.tar.gz"
|
url = "http://www.spack-fake-zmpi.org/downloads/zmpi-1.0.tar.gz"
|
||||||
md5 = "foobarbaz"
|
|
||||||
|
|
||||||
versions = '1.0'
|
versions = { '1.0' : 'foobarbaz' }
|
||||||
|
|
||||||
provides('mpi@10.0:')
|
provides('mpi@10.0:')
|
||||||
depends_on('fake')
|
depends_on('fake')
|
||||||
|
@@ -47,7 +47,7 @@ def pkg(message):
|
|||||||
|
|
||||||
mac_ver = platform.mac_ver()[0]
|
mac_ver = platform.mac_ver()[0]
|
||||||
if mac_ver and Version(mac_ver) >= Version('10.7'):
|
if mac_ver and Version(mac_ver) >= Version('10.7'):
|
||||||
print u"\U0001F4E6" + indent,
|
print u"\U0001F4E6" + indent
|
||||||
else:
|
else:
|
||||||
cprint('@*g{[+]} ')
|
cwrite('@*g{[+]} ')
|
||||||
print message
|
print message
|
||||||
|
@@ -1,13 +1,81 @@
|
|||||||
import hashlib
|
import hashlib
|
||||||
from contextlib import closing
|
from contextlib import closing
|
||||||
|
|
||||||
def md5(filename, block_size=2**20):
|
"""Set of acceptable hashes that Spack will use."""
|
||||||
"""Computes the md5 hash of a file."""
|
_acceptable_hashes = [
|
||||||
md5 = hashlib.md5()
|
hashlib.md5,
|
||||||
|
hashlib.sha1,
|
||||||
|
hashlib.sha224,
|
||||||
|
hashlib.sha256,
|
||||||
|
hashlib.sha384,
|
||||||
|
hashlib.sha512 ]
|
||||||
|
|
||||||
|
"""Index for looking up hasher for a digest."""
|
||||||
|
_size_to_hash = { h().digest_size : h for h in _acceptable_hashes }
|
||||||
|
|
||||||
|
|
||||||
|
def checksum(hashlib_algo, filename, **kwargs):
|
||||||
|
"""Returns a hex digest of the filename generated using an
|
||||||
|
algorithm from hashlib.
|
||||||
|
"""
|
||||||
|
block_size = kwargs.get('block_size', 2**20)
|
||||||
|
hasher = hashlib_algo()
|
||||||
with closing(open(filename)) as file:
|
with closing(open(filename)) as file:
|
||||||
while True:
|
while True:
|
||||||
data = file.read(block_size)
|
data = file.read(block_size)
|
||||||
if not data:
|
if not data:
|
||||||
break
|
break
|
||||||
md5.update(data)
|
hasher.update(data)
|
||||||
return md5.hexdigest()
|
return hasher.hexdigest()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class Checker(object):
    """A checker checks files against one particular hex digest.

    It will automatically determine what hashing algorithm to use
    based on the length of the digest it's initialized with.  e.g.,
    if the digest is 32 hex characters long this will use md5.

    Example: know your tarball should hash to 'abc123'.  You want
    to check files against this.  You would use this class like so::

       hexdigest = 'abc123'
       checker = Checker(hexdigest)
       success = checker.check('downloaded.tar.gz')

    After the call to check, the actual checksum is available in
    checker.sum, in case it's needed for error output.

    You can trade read performance and memory usage by adjusting
    the block_size optional arg.  By default it's a 1MB (2**20
    bytes) buffer.
    """

    def __init__(self, hexdigest, **kwargs):
        self.block_size = kwargs.get('block_size', 2**20)
        self.hexdigest = hexdigest
        self.sum = None

        # Two hex characters encode one byte.  Use integer division:
        # the original used '/', which under true division yields a
        # float that can never match the integer keys of
        # _size_to_hash (it also shadowed the 'bytes' builtin).
        digest_bytes = len(hexdigest) // 2
        if len(hexdigest) % 2 != 0 or digest_bytes not in _size_to_hash:
            raise ValueError(
                'Spack knows no hash algorithm for this digest: %s' % hexdigest)

        self.hash_fun = _size_to_hash[digest_bytes]


    @property
    def hash_name(self):
        """Get the name of the hash function this Checker is using."""
        return self.hash_fun().name


    def check(self, filename):
        """Read the file with the specified name and check its checksum
           against self.hexdigest.  Return True if they match, False
           otherwise.  Actual checksum is stored in self.sum.
        """
        self.sum = checksum(
            self.hash_fun, filename, block_size=self.block_size)
        return self.sum == self.hexdigest
||||||
|
@@ -1,3 +1,4 @@
|
|||||||
|
import os
|
||||||
|
|
||||||
def env_flag(name):
|
def env_flag(name):
|
||||||
if name in os.environ:
|
if name in os.environ:
|
||||||
|
@@ -68,3 +68,10 @@ def stem(path):
|
|||||||
if re.search(suffix, path):
|
if re.search(suffix, path):
|
||||||
return re.sub(suffix, "", path)
|
return re.sub(suffix, "", path)
|
||||||
return path
|
return path
|
||||||
|
|
||||||
|
|
||||||
|
def can_access(file_name):
    """True if we have read/write access to the file."""
    required_mode = os.R_OK | os.W_OK
    return os.access(file_name, required_mode)
|
||||||
|
|
||||||
|
|
||||||
|
@@ -23,7 +23,7 @@
|
|||||||
import sys
|
import sys
|
||||||
import re
|
import re
|
||||||
from bisect import bisect_left
|
from bisect import bisect_left
|
||||||
from functools import total_ordering
|
from functools import total_ordering, wraps
|
||||||
|
|
||||||
import spack.util.none_high as none_high
|
import spack.util.none_high as none_high
|
||||||
import spack.util.none_low as none_low
|
import spack.util.none_low as none_low
|
||||||
@@ -71,6 +71,7 @@ def check_type(t):
|
|||||||
|
|
||||||
def coerced(method):
|
def coerced(method):
|
||||||
"""Decorator that ensures that argument types of a method are coerced."""
|
"""Decorator that ensures that argument types of a method are coerced."""
|
||||||
|
@wraps(method)
|
||||||
def coercing_method(a, b):
|
def coercing_method(a, b):
|
||||||
if type(a) == type(b) or a is None or b is None:
|
if type(a) == type(b) or a is None or b is None:
|
||||||
return method(a, b)
|
return method(a, b)
|
||||||
@@ -84,6 +85,8 @@ def coercing_method(a, b):
|
|||||||
class Version(object):
|
class Version(object):
|
||||||
"""Class to represent versions"""
|
"""Class to represent versions"""
|
||||||
def __init__(self, string):
|
def __init__(self, string):
|
||||||
|
string = str(string)
|
||||||
|
|
||||||
if not re.match(VALID_VERSION, string):
|
if not re.match(VALID_VERSION, string):
|
||||||
raise ValueError("Bad characters in version string: %s" % string)
|
raise ValueError("Bad characters in version string: %s" % string)
|
||||||
|
|
||||||
|
Reference in New Issue
Block a user