Initial ability to swap compilers.

Fixes SPACK-16 and forces compiler script to build using compiler wrappers.

- Works with gcc and clang on a laptop.
Todd Gamblin 2014-05-19 16:07:42 -07:00
parent ed6454fe78
commit f784757113
19 changed files with 235 additions and 122 deletions
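
For context, the change works by pointing the generic CC/CXX/F77/FC variables at Spack's wrapper script (lib/spack/env/cc) and telling the wrapper which real compiler to run through SPACK_CC, SPACK_CXX, SPACK_F77, and SPACK_FC. Below is a minimal sketch of that dispatch, illustrative only and not the actual script: the real wrapper also rewrites include/library paths, handles the cpp/cc/ccld/ld modes, and does debug logging.

    import os
    import sys

    # The wrapper is invoked under a generic name such as 'cc', 'c++', 'f77', or 'fc'.
    command = os.path.basename(sys.argv[0])

    if command in ('cc', 'gcc', 'c89', 'c99', 'clang'):
        real = os.environ['SPACK_CC']        # real C compiler chosen for this spec
    elif command in ('c++', 'CC', 'g++', 'clang++'):
        real = os.environ['SPACK_CXX']
    elif command == 'f77':
        real = os.environ['SPACK_F77']
    elif command == 'fc':
        real = os.environ['SPACK_FC']
    else:
        real = command                       # ld and cpp pass through unchanged

    # Re-exec the real compiler with the original arguments.
    os.execvp(real, [real] + sys.argv[1:])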


@@ -41,7 +41,7 @@ sys.path.insert(0, SPACK_LIB_PATH)
 # If there is no working directory, use the spack prefix.
 try:
-    os.getcwd()
+    working_dir = os.getcwd()
 except OSError:
     os.chdir(SPACK_PREFIX)
@@ -79,6 +79,7 @@ args = parser.parse_args()
 # Set up environment based on args.
 spack.verbose = args.verbose
 spack.debug = args.debug
+spack.spack_working_dir = working_dir
 if args.mock:
     from spack.packages import PackageDB
     spack.db = PackageDB(spack.mock_packages_path)

lib/spack/env/cc

@@ -10,7 +10,7 @@ import argparse
 from contextlib import closing

 # Import spack parameters through the build environment.
 spack_lib = os.environ.get("SPACK_LIB")
 if not spack_lib:
     print "Spack compiler must be run from spack!"
     sys.exit(1)
@@ -20,24 +20,23 @@ sys.path.append(spack_lib)
 from spack.compilation import *
 import llnl.util.tty as tty

 spack_prefix = get_env_var("SPACK_PREFIX")
-spack_build_root = get_env_var("SPACK_BUILD_ROOT")
-spack_debug = get_env_flag("SPACK_DEBUG")
-spack_deps = get_path("SPACK_DEPENDENCIES")
-spack_env_path = get_path("SPACK_ENV_PATH")
+spack_debug = get_env_flag("SPACK_DEBUG")
+spack_deps = get_path("SPACK_DEPENDENCIES")
+spack_env_path = get_path("SPACK_ENV_PATH")
+spack_debug_log_dir = get_env_var("SPACK_DEBUG_LOG_DIR")
+spack_spec = get_env_var("SPACK_SPEC")
+spack_cc = get_env_var("SPACK_CC")
+spack_cxx = get_env_var("SPACK_CXX")
+spack_f77 = get_env_var("SPACK_F77")
+spack_fc = get_env_var("SPACK_FC")

 # Figure out what type of operation we're doing
 command = os.path.basename(sys.argv[0])
 cpp, cc, ccld, ld, version_check = range(5)

-########################################################################
-# TODO: this can to be removed once JIRA issue SPACK-16 is resolved
-#
-if command == 'CC':
-    command = 'c++'
-########################################################################

 if command == 'cpp':
     mode = cpp
 elif command == 'ld':
@@ -49,7 +48,23 @@ elif '-c' in sys.argv:
 else:
     mode = ccld

-if '-V' in sys.argv or '-v' in sys.argv or '--version' in sys.argv:
+if command in ('cc', 'gcc', 'c89', 'c99', 'clang'):
+    command = spack_cc
+elif command in ('c++', 'CC', 'g++', 'clang++'):
+    command = spack_cxx
+elif command in ('f77'):
+    command = spack_f77
+elif command in ('fc'):
+    command = spack_fc
+elif command in ('ld', 'cpp'):
+    pass # leave it the same. TODO: what's the right thing?
+else:
+    raise Exception("Unknown compiler: %s" % command)
+
+version_args = ['-V', '-v', '--version', '-dumpversion']
+if any(arg in sys.argv for arg in version_args):
     mode = version_check

 # Parse out the includes, libs, etc. so we can adjust them if need be.
@@ -104,8 +119,8 @@ os.environ["PATH"] = ":".join(path)
 full_command = [command] + arguments

 if spack_debug:
-    input_log = os.path.join(spack_build_root, 'spack_cc_in.log')
-    output_log = os.path.join(spack_build_root, 'spack_cc_out.log')
+    input_log = os.path.join(spack_debug_log_dir, 'spack-cc-%s.in.log' % spack_spec)
+    output_log = os.path.join(spack_debug_log_dir, 'spack-cc-%s.out.log' % spack_spec)
     with closing(open(input_log, 'a')) as log:
         args = [os.path.basename(sys.argv[0])] + sys.argv[1:]
         log.write("%s\n" % " ".join(arg.replace(' ', r'\ ') for arg in args))


@@ -34,7 +34,7 @@
 from llnl.util.filesystem import *
 import spack
-from spack.compilers import compiler_for_spec
+import spack.compilers as compilers
 from spack.util.executable import Executable, which
 from spack.util.environment import *
@@ -52,7 +52,9 @@
 SPACK_ENV_PATH = 'SPACK_ENV_PATH'
 SPACK_DEPENDENCIES = 'SPACK_DEPENDENCIES'
 SPACK_PREFIX = 'SPACK_PREFIX'
-SPACK_BUILD_ROOT = 'SPACK_BUILD_ROOT'
+SPACK_DEBUG = 'SPACK_DEBUG'
+SPACK_SPEC = 'SPACK_SPEC'
+SPACK_DEBUG_LOG_DIR = 'SPACK_DEBUG_LOG_DIR'

 class MakeExecutable(Executable):
@@ -82,7 +84,19 @@ def __call__(self, *args, **kwargs):
 def set_compiler_environment_variables(pkg):
     assert(pkg.spec.concrete)
-    compiler = compiler_for_spec(pkg.spec.compiler)
+    compiler = compilers.compiler_for_spec(pkg.spec.compiler)
+
+    # Set compiler variables used by CMake and autotools
+    os.environ['CC'] = 'cc'
+    os.environ['CXX'] = 'c++'
+    os.environ['F77'] = 'f77'
+    os.environ['FC'] = 'fc'
+
+    # Set SPACK compiler variables so that our wrapper knows what to call
+    os.environ['SPACK_CC'] = compiler.cc.command
+    os.environ['SPACK_CXX'] = compiler.cxx.command
+    os.environ['SPACK_F77'] = compiler.f77.command
+    os.environ['SPACK_FC'] = compiler.fc.command

 def set_build_environment_variables(pkg):
@@ -108,9 +122,6 @@ def set_build_environment_variables(pkg):
     # Install prefix
     os.environ[SPACK_PREFIX] = pkg.prefix

-    # Build root for logging.
-    os.environ[SPACK_BUILD_ROOT] = pkg.stage.expanded_archive_path

     # Remove these vars from the environment during build becaus they
     # can affect how some packages find libraries. We want to make
     # sure that builds never pull in unintended external dependencies.
@@ -120,6 +131,12 @@ def set_build_environment_variables(pkg):
     bin_dirs = ['%s/bin' % prefix for prefix in dep_prefixes]
     path_put_first('PATH', [bin for bin in bin_dirs if os.path.isdir(bin)])

+    # Working directory for the spack command itself, for debug logs.
+    if spack.debug:
+        os.environ[SPACK_DEBUG] = "TRUE"
+        os.environ[SPACK_SPEC] = str(pkg.spec)
+        os.environ[SPACK_DEBUG_LOG_DIR] = spack.spack_working_dir

 def set_module_variables_for_package(pkg):
     """Populate the module scope of install() with some useful functions.


@@ -34,7 +34,7 @@
 def compilers(parser, args):
     tty.msg("Available compilers")
-    index = index_by(spack.compilers.available_compilers(), 'name')
+    index = index_by(spack.compilers.all_compilers(), 'name')
     for name, compilers in index.items():
         tty.hline(name, char='-', color=spack.spec.compiler_color)
         colify(compilers, indent=4)


@@ -62,7 +62,7 @@ def find(parser, args):
     # Make a dict with specs keyed by architecture and compiler.
     specs = [s for s in spack.db.installed_package_specs()
-             if not query_specs or any(spec.satisfies(q) for q in query_specs)]
+             if not query_specs or any(s.satisfies(q) for q in query_specs)]
     index = index_by(specs, 'architecture', 'compiler')

     # Traverse the index and print out each package


@@ -36,7 +36,10 @@ def setup_parser(subparser):
         help="Do not try to install dependencies of requested packages.")
     subparser.add_argument(
         '--keep-prefix', action='store_true', dest='keep_prefix',
-        help="Don't clean up staging area when install completes.")
+        help="Don't remove the install prefix if installation fails.")
+    subparser.add_argument(
+        '--keep-stage', action='store_true', dest='keep_stage',
+        help="Don't remove the build stage if installation succeeds.")
     subparser.add_argument(
         '-n', '--no-checksum', action='store_true', dest='no_checksum',
         help="Do not check packages against checksum")
@@ -55,4 +58,5 @@ def install(parser, args):
     for spec in specs:
         package = spack.db.get(spec)
         package.do_install(keep_prefix=args.keep_prefix,
+                           keep_stage=args.keep_stage,
                            ignore_deps=args.ignore_deps)


@@ -35,6 +35,11 @@ def setup_parser(subparser):
     subparser.add_argument(
         '-f', '--force', action='store_true', dest='force',
         help="Remove regardless of whether other packages depend on this one.")
+    subparser.add_argument(
+        '-a', '--all', action='store_true', dest='all',
+        help="USE CAREFULLY. Remove ALL installed packages that match each supplied spec. " +
+             "i.e., if you say uninstall libelf, ALL versions of libelf are uninstalled. " +
+             "This is both useful and dangerous, like rm -r.")
     subparser.add_argument(
         'packages', nargs=argparse.REMAINDER, help="specs of packages to uninstall")
@@ -50,15 +55,17 @@ def uninstall(parser, args):
     pkgs = []
     for spec in specs:
         matching_specs = spack.db.get_installed(spec)
-        if len(matching_specs) > 1:
-            tty.die("%s matches multiple packages. Which one did you mean?"
-                    % spec, *matching_specs)
+        if not args.all and len(matching_specs) > 1:
+            tty.die("%s matches multiple packages." % spec,
+                    "You can either:",
+                    " a) Use spack uninstall -a to uninstall ALL matching specs, or",
+                    " b) use a more specific spec.",
+                    "Matching packages:", *(" " + str(s) for s in matching_specs))

-        elif len(matching_specs) == 0:
+        if len(matching_specs) == 0:
             tty.die("%s does not match any installed packages." % spec)

-        installed_spec = matching_specs[0]
-        pkgs.append(spack.db.get(installed_spec))
+        pkgs.extend(spack.db.get(s) for s in matching_specs)

     # Sort packages to be uninstalled by the number of installed dependents
     # This ensures we do things in the right order


@@ -16,7 +16,7 @@ def _verify_executables(*paths):
 class Compiler(object):
     """This class encapsulates a Spack "compiler", which includes C,
-       C++, Fortran, and F90 compilers. Subclasses should implement
+       C++, and Fortran compilers. Subclasses should implement
        support for specific compilers, their possible names, arguments,
        and how to identify the particular type of compiler."""
@@ -30,20 +30,20 @@ class Compiler(object):
     f77_names = []

     # Subclasses use possible names of Fortran 90 compiler
-    f90_names = []
+    fc_names = []

     # Names of generic arguments used by this compiler
     arg_version = '-dumpversion'
     arg_rpath = '-Wl,-rpath,%s'

-    def __init__(self, cc, cxx, f77, f90):
-        _verify_executables(cc, cxx, f77, f90)
+    def __init__(self, cc, cxx, f77, fc):
+        _verify_executables(cc, cxx, f77, fc)

         self.cc = Executable(cc)
         self.cxx = Executable(cxx)
         self.f77 = Executable(f77)
-        self.f90 = Executable(f90)
+        self.fc = Executable(fc)

     @property
     @memoized


@@ -41,49 +41,69 @@
 _imported_versions_module = 'spack.compilers'

+def _auto_compiler_spec(function):
+    def converter(cspec_like):
+        if not isinstance(cspec_like, spack.spec.CompilerSpec):
+            cspec_like = spack.spec.CompilerSpec(cspec_like)
+        return function(cspec_like)
+    return converter

 @memoized
 def supported_compilers():
-    """Return a list of names of compilers supported by Spack.
+    """Return a set of names of compilers supported by Spack.
        See available_compilers() to get a list of all the available
        versions of supported compilers.
     """
-    return sorted(c for c in list_modules(spack.compilers_path))
+    return sorted(name for name in list_modules(spack.compilers_path))

+@_auto_compiler_spec
 def supported(compiler_spec):
     """Test if a particular compiler is supported."""
-    if not isinstance(compiler_spec, spack.spec.CompilerSpec):
-        compiler_spec = spack.spec.CompilerSpec(compiler_spec)
     return compiler_spec.name in supported_compilers()

-def available_compilers():
-    """Return a list of specs for all the compiler versions currently
-       available to build with.  These are instances of
-       CompilerSpec.
+@memoized
+def all_compilers():
+    """Return a set of specs for all the compiler versions currently
+       available to build with.  These are instances of CompilerSpec.
     """
-    return [spack.spec.CompilerSpec(c)
-            for c in list_modules(spack.compiler_version_path)]
+    return set(spack.spec.CompilerSpec(c)
+               for c in list_modules(spack.compiler_version_path))

+@_auto_compiler_spec
+def find(compiler_spec):
+    """Return specs of available compilers that match the supplied
+       compiler spec.  Return an list if nothing found."""
+    return [c for c in all_compilers() if c.satisfies(compiler_spec)]

+@_auto_compiler_spec
+def compilers_for_spec(compiler_spec):
+    """This gets all compilers that satisfy the supplied CompilerSpec.
+       Returns an empty list if none are found.
+    """
+    matches = find(compiler_spec)
+    compilers = []
+    for cspec in matches:
+        path = join_path(spack.compiler_version_path, "%s.py" % cspec)
+        mod = imp.load_source(_imported_versions_module, path)
+        cls = class_for_compiler_name(cspec.name)
+        compilers.append(cls(mod.cc, mod.cxx, mod.f77, mod.fc))
+    return compilers

+@_auto_compiler_spec
 def compiler_for_spec(compiler_spec):
-    """This gets an instance of an actual spack.compiler.Compiler object
-       from a compiler spec.  The spec needs to be concrete for this to
-       work; it will raise an error if passed an abstract compiler.
-    """
-    matches = [c for c in available_compilers() if c.satisfies(compiler_spec)]
-    # TODO: do something when there are zero matches.
-    assert(len(matches) >= 1)
-    compiler = matches[0]
-    file_path = join_path(spack.compiler_version_path, "%s.py" % compiler)
-    mod = imp.load_source(_imported_versions_module, file_path)
-    compiler_class = class_for_compiler_name(compiler.name)
-    return compiler_class(mod.cc, mod.cxx, mod.f77, mod.f90)
+    assert(compiler_spec.concrete)
+    compilers = compilers_for_spec(compiler_spec)
+    assert(len(compilers) == 1)
+    return compilers[0]

 def class_for_compiler_name(compiler_name):
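
A brief usage sketch of the reworked module API above (function and attribute names come from the diff; the compiler version and path shown in comments are assumptions):

    import spack.compilers as compilers

    # Strings are coerced to CompilerSpec by the _auto_compiler_spec decorator.
    compilers.supported('gcc')                 # True if a gcc compiler module exists
    matches = compilers.find('gcc@4.7')        # e.g. [CompilerSpec('gcc@4.7.3')]

    # compiler_for_spec() requires a concrete spec and returns a Compiler
    # whose cc/cxx/f77/fc attributes are Executables.
    gcc = compilers.compiler_for_spec(matches[0])
    print gcc.cc.command                       # e.g. /usr/bin/gcc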


@@ -35,7 +35,7 @@ class Clang(Compiler):
     f77_names = []

     # Subclasses use possible names of Fortran 90 compiler
-    f90_names = []
+    fc_names = []

-    def __init__(self, cc, cxx, f77, f90):
-        super(Gcc, self).__init__(cc, cxx, f77, f90)
+    def __init__(self, cc, cxx, f77, fc):
+        super(Clang, self).__init__(cc, cxx, f77, fc)


@@ -35,7 +35,7 @@ class Gcc(Compiler):
     f77_names = ['gfortran']

     # Subclasses use possible names of Fortran 90 compiler
-    f90_names = ['gfortran']
+    fc_names = ['gfortran']

-    def __init__(self, cc, cxx, f77, f90):
-        super(Gcc, self).__init__(cc, cxx, f77, f90)
+    def __init__(self, cc, cxx, f77, fc):
+        super(Gcc, self).__init__(cc, cxx, f77, fc)


@@ -35,7 +35,7 @@ class Intel(Compiler):
     f77_names = ['ifort']

     # Subclasses use possible names of Fortran 90 compiler
-    f90_names = ['ifort']
+    fc_names = ['ifort']

-    def __init__(self, cc, cxx, f77, f90):
-        super(Gcc, self).__init__(cc, cxx, f77, f90)
+    def __init__(self, cc, cxx, f77, fc):
+        super(Intel, self).__init__(cc, cxx, f77, fc)


@@ -33,9 +33,10 @@
    TODO: make this customizable and allow users to configure
         concretization policies.
 """
-import spack.architecture
-import spack.compilers
 import spack.spec
+import spack.compilers
+import spack.architecture
+import spack.error
 from spack.version import *
@@ -117,9 +118,13 @@ def concretize_compiler(self, spec):
                        if p.compiler is not None).compiler

         if not nearest.concrete:
-            matches = [c for c in spack.compilers.available_compilers()
-                       if c.name == nearest.name]
-            nearest.versions = sorted(matches)[-1].versions.copy()
+            # Take the newest compiler that saisfies the spec
+            matches = sorted(spack.compilers.find(nearest))
+            if not matches:
+                raise UnavailableCompilerVersionError(nearest)
+
+            # copy concrete version into nearest spec
+            nearest.versions = matches[-1].versions.copy()

         assert(nearest.concrete)
         spec.compiler = nearest.copy()
@@ -140,3 +145,12 @@ def choose_provider(self, spec, providers):
         first_key = sorted(index.keys())[0]
         latest_version = sorted(index[first_key])[-1]
         return latest_version

+class UnavailableCompilerVersionError(spack.error.SpackError):
+    """Raised when there is no available compiler that satisfies a
+       compiler spec."""
+    def __init__(self, compiler_spec):
+        super(UnavailableCompilerVersionError, self).__init__(
+            "No available compiler version matches '%s'" % compiler_spec,
+            "Run 'spack compilers' to see available compiler Options.")


@@ -626,6 +626,7 @@ def do_install(self, **kwargs):
         """
         # whether to keep the prefix on failure. Default is to destroy it.
         keep_prefix = kwargs.get('keep_prefix', False)
+        keep_stage = kwargs.get('keep_stage', False)
         ignore_deps = kwargs.get('ignore_deps', False)

         if not self.spec.concrete:
@@ -671,9 +672,10 @@ def do_install(self, **kwargs):
                 "Install failed for %s. Nothing was installed!"
                 % self.name)

-        # On successful install, remove the stage.
-        # Leave if there is an error
-        self.stage.destroy()
+        if not keep_stage:
+            # On successful install, remove the stage.
+            # Leave it if there is an error
+            self.stage.destroy()

         tty.msg("Successfully installed %s" % self.name)
         print_pkg(self.prefix)
@@ -725,16 +727,16 @@ def do_uninstall(self, **kwargs):
         force = kwargs.get('force', False)

         if not self.installed:
-            raise InstallError(self.name + " is not installed.")
+            raise InstallError(self.spec + " is not installed.")

         if not force:
             deps = self.installed_dependents
             if deps: raise InstallError(
                 "Cannot uninstall %s. The following installed packages depend on it: %s"
-                % (self.name, deps))
+                % (self.spec, deps))

         self.remove_prefix()
-        tty.msg("Successfully uninstalled %s." % self.name)
+        tty.msg("Successfully uninstalled %s." % self.spec)

     def do_clean(self):


@@ -102,7 +102,7 @@
 import spack
 import spack.parse
 import spack.error
-from spack.compilers import supported as supported_compiler
+import spack.compilers as compilers
 from spack.version import *
 from spack.util.string import *
@@ -231,8 +231,9 @@ def constrain(self, other):
     @property
     def concrete(self):
-        """A CompilerSpec is concrete if its versions are concrete."""
-        return self.versions.concrete
+        """A CompilerSpec is concrete if its versions are concrete and there
+           is an available compiler with the right version."""
+        return self.versions.concrete and self in compilers.all_compilers()

     @property
@@ -260,6 +261,9 @@ def __str__(self):
             out += "@%s" % vlist
         return out

+    def __repr__(self):
+        return str(self)
+

 @key_ordering
 class Variant(object):
@@ -821,12 +825,13 @@ def validate_names(self):
             # validate compiler in addition to the package name.
             if spec.compiler:
-                if not supported_compiler(spec.compiler):
+                if not compilers.supported(spec.compiler):
                     raise UnsupportedCompilerError(spec.compiler.name)

     def constrain(self, other, **kwargs):
         other = self._autospec(other)
+        constrain_deps = kwargs.get('deps', True)

         if not self.name == other.name:
             raise UnsatisfiableSpecNameError(self.name, other.name)
@@ -854,7 +859,7 @@ def constrain(self, other, **kwargs):
         self.variants.update(other.variants)
         self.architecture = self.architecture or other.architecture

-        if kwargs.get('deps', True):
+        if constrain_deps:
             self._constrain_dependencies(other)
@@ -911,28 +916,28 @@ def _autospec(self, spec_like):
     def satisfies(self, other, **kwargs):
         other = self._autospec(other)
+        satisfy_deps = kwargs.get('deps', True)

         # First thing we care about is whether the name matches
         if self.name != other.name:
             return False

-        # This function simplifies null checking below
-        def check(attribute, op):
-            s = getattr(self, attribute)
-            o = getattr(other, attribute)
-            return not s or not o or op(s,o)
-
-        # All these attrs have satisfies criteria of their own
-        for attr in ('versions', 'variants', 'compiler'):
-            if not check(attr, lambda s, o: s.satisfies(o)):
+        # All these attrs have satisfies criteria of their own,
+        # but can be None to indicate no constraints.
+        for s, o in ((self.versions, other.versions),
+                     (self.variants, other.variants),
+                     (self.compiler, other.compiler)):
+            if s and o and not s.satisfies(o):
                 return False

-        # Architecture is just a string
-        # TODO: inviestigate making an Architecture class for symmetry
-        if not check('architecture', lambda s,o: s == o):
+        # Architecture satisfaction is currently just string equality.
+        # Can be None for unconstrained, though.
+        if (self.architecture and other.architecture and
+            self.architecture != other.architecture):
             return False

-        if kwargs.get('deps', True):
+        # If we need to descend into dependencies, do it, otherwise we're done.
+        if satisfy_deps:
             return self.satisfies_dependencies(other)
         else:
             return True


@@ -37,16 +37,13 @@ def check_satisfies(self, spec, anon_spec):
         left = Spec(spec)
         right = parse_anonymous_spec(anon_spec, left.name)

+        # Satisfies is one-directional.
         self.assertTrue(left.satisfies(right))
         self.assertTrue(left.satisfies(anon_spec))
-        self.assertTrue(right.satisfies(left))

-        try:
-            left.copy().constrain(right)
-            left.copy().constrain(anon_spec)
-            right.copy().constrain(left)
-        except SpecError, e:
-            self.fail("Got a SpecError in constrain! " + e.message)
+        # if left satisfies right, then we should be able to consrain
+        # right by left.  Reverse is not always true.
+        right.copy().constrain(left)

     def check_unsatisfiable(self, spec, anon_spec):
@@ -56,25 +53,21 @@ def check_unsatisfiable(self, spec, anon_spec):
         self.assertFalse(left.satisfies(right))
         self.assertFalse(left.satisfies(anon_spec))
-        self.assertFalse(right.satisfies(left))

-        self.assertRaises(UnsatisfiableSpecError, left.constrain, right)
-        self.assertRaises(UnsatisfiableSpecError, left.constrain, anon_spec)
-        self.assertRaises(UnsatisfiableSpecError, right.constrain, left)
+        self.assertRaises(UnsatisfiableSpecError, right.copy().constrain, left)

-    def check_constrain(self, expected, constrained, constraint):
+    def check_constrain(self, expected, spec, constraint):
         exp = Spec(expected)
-        constrained = Spec(constrained)
+        spec = Spec(spec)
         constraint = Spec(constraint)
-        constrained.constrain(constraint)
-        self.assertEqual(exp, constrained)
+        spec.constrain(constraint)
+        self.assertEqual(exp, spec)

-    def check_invalid_constraint(self, constrained, constraint):
-        constrained = Spec(constrained)
+    def check_invalid_constraint(self, spec, constraint):
+        spec = Spec(spec)
         constraint = Spec(constraint)
-        self.assertRaises(UnsatisfiableSpecError, constrained.constrain, constraint)
+        self.assertRaises(UnsatisfiableSpecError, spec.constrain, constraint)

     # ================================================================================
@@ -177,3 +170,8 @@ def test_invalid_constraint(self):
         self.check_invalid_constraint('libelf+debug~foo', 'libelf+debug+foo')
         self.check_invalid_constraint('libelf=bgqos_0', 'libelf=x86_54')

+    def test_compiler_satisfies(self):
+        self.check_satisfies('foo %gcc@4.7.3', '%gcc@4.7')
+        self.check_unsatisfiable('foo %gcc@4.7', '%gcc@4.7.3')


@@ -311,7 +311,7 @@ def test_intersection(self):
         self.check_intersection(['0:1'], [':'], ['0:1'])

-    def test_satisfaction(self):
+    def test_basic_version_satisfaction(self):
         self.assert_satisfies('4.7.3', '4.7.3')
         self.assert_satisfies('4.7.3', '4.7')
@@ -326,6 +326,22 @@ def test_satisfaction(self):
         self.assert_does_not_satisfy('4.8', '4.9')
         self.assert_does_not_satisfy('4', '4.9')

+    def test_basic_version_satisfaction_in_lists(self):
+        self.assert_satisfies(['4.7.3'], ['4.7.3'])
+
+        self.assert_satisfies(['4.7.3'], ['4.7'])
+        self.assert_satisfies(['4.7.3b2'], ['4.7'])
+        self.assert_satisfies(['4.7b6'], ['4.7'])
+
+        self.assert_satisfies(['4.7.3'], ['4'])
+        self.assert_satisfies(['4.7.3b2'], ['4'])
+        self.assert_satisfies(['4.7b6'], ['4'])
+
+        self.assert_does_not_satisfy(['4.8.0'], ['4.9'])
+        self.assert_does_not_satisfy(['4.8'], ['4.9'])
+        self.assert_does_not_satisfy(['4'], ['4.9'])
+
+    def test_version_range_satisfaction(self):
         self.assert_satisfies('4.7b6', '4.3:4.7')
         self.assert_satisfies('4.3.0', '4.3:4.7')
         self.assert_satisfies('4.3.2', '4.3:4.7')
@@ -336,6 +352,18 @@ def test_satisfaction(self):
         self.assert_satisfies('4.7b6', '4.3:4.7')
         self.assert_does_not_satisfy('4.8.0', '4.3:4.7')

+    def test_version_range_satisfaction_in_lists(self):
+        self.assert_satisfies(['4.7b6'], ['4.3:4.7'])
+        self.assert_satisfies(['4.3.0'], ['4.3:4.7'])
+        self.assert_satisfies(['4.3.2'], ['4.3:4.7'])
+
+        self.assert_does_not_satisfy(['4.8.0'], ['4.3:4.7'])
+        self.assert_does_not_satisfy(['4.3'], ['4.4:4.7'])
+
+        self.assert_satisfies(['4.7b6'], ['4.3:4.7'])
+        self.assert_does_not_satisfy(['4.8.0'], ['4.3:4.7'])
+
+    def test_satisfaction_with_lists(self):
         self.assert_satisfies('4.7', '4.3, 4.6, 4.7')
         self.assert_satisfies('4.7.3', '4.3, 4.6, 4.7')
         self.assert_satisfies('4.6.5', '4.3, 4.6, 4.7')


@@ -43,7 +43,7 @@ def add_default_arg(self, arg):
     @property
     def command(self):
-        return self.exe[0]
+        return ' '.join(self.exe)

     def __call__(self, *args, **kwargs):


@@ -39,6 +39,8 @@ class Libdwarf(Package):
     depends_on("libelf")

+    parallel = False
+
     def clean(self):
         for dir in dwarf_dirs: