Merge branch 'features/cflags' into develop

Conflicts:
    lib/spack/spack/cmd/find.py
    var/spack/repos/builtin/packages/ghostscript/package.py

commit 502420ceff

53  lib/spack/env/cc  (vendored)
@@ -55,7 +55,10 @@ parameters=(
# The compiler input variables are checked for sanity later:
#   SPACK_CC, SPACK_CXX, SPACK_F77, SPACK_FC
# Debug flag is optional; set to "TRUE" for debug logging:
# The default compiler flags are passed from these variables:
#   SPACK_CFLAGS, SPACK_CXXFLAGS, SPACK_FCFLAGS, SPACK_FFLAGS,
#   SPACK_LDFLAGS, SPACK_LDLIBS
# Debug env var is optional; set to true for debug logging:
#   SPACK_DEBUG
# Test command is used to unit test the compiler script.
#   SPACK_TEST_COMMAND
@@ -99,21 +102,25 @@ case "$command" in
        command="$SPACK_CC"
        language="C"
        comp="CC"
        lang_flags=C
        ;;
    c++|CC|g++|clang++|icpc|pgc++|xlc++)
        command="$SPACK_CXX"
        language="C++"
        comp="CXX"
        lang_flags=CXX
        ;;
    f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor)
        command="$SPACK_FC"
        language="Fortran 90"
        comp="FC"
        lang_flags=F
        ;;
    f77|gfortran|ifort|pgfortran|xlf|nagfor)
        command="$SPACK_F77"
        language="Fortran 77"
        comp="F77"
        lang_flags=F
        ;;
    ld)
        mode=ld
@@ -131,7 +138,7 @@ if [[ -z $mode ]]; then
        if [[ $arg == -v || $arg == -V || $arg == --version || $arg == -dumpversion ]]; then
            mode=vcheck
            break
        fi
    fi
done
fi

@@ -188,6 +195,42 @@ fi
input_command="$@"
args=("$@")

# Prepend cppflags, cflags, cxxflags, fcflags, fflags, and ldflags

# Add ldflags
case "$mode" in
    ld|ccld)
        args=(${SPACK_LDFLAGS[@]} "${args[@]}") ;;
esac

# Add compiler flags.
case "$mode" in
    cc|ccld)
        # Add c, cxx, fc, and f flags
        case $lang_flags in
            C)
                args=(${SPACK_CFLAGS[@]} "${args[@]}") ;;
            CXX)
                args=(${SPACK_CXXFLAGS[@]} "${args[@]}") ;;
        esac
        ;;
esac

# Add cppflags
case "$mode" in
    cpp|as|cc|ccld)
        args=(${SPACK_CPPFLAGS[@]} "${args[@]}") ;;
esac

case "$mode" in cc|ccld)
    # Add fortran flags
    case $lang_flags in
        F)
            args=(${SPACK_FFLAGS[@]} "${args[@]}") ;;
    esac
    ;;
esac

# Read spack dependencies from the path environment variable
IFS=':' read -ra deps <<< "$SPACK_DEPENDENCIES"
for dep in "${deps[@]}"; do
@@ -230,6 +273,12 @@ elif [[ $mode == ld ]]; then
    $add_rpaths && args=("-rpath" "$SPACK_PREFIX/lib" "${args[@]}")
fi

# Add SPACK_LDLIBS to args
case "$mode" in
    ld|ccld)
        args=("${args[@]}" ${SPACK_LDLIBS[@]}) ;;
esac

#
# Unset pesky environment variables that could affect build sanity.
#
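Note: the net effect of the case blocks above is a fixed ordering of injected flags around the original command line. Below is a minimal, standalone Python sketch of that ordering rule (not the wrapper itself); the mode names and flag keys mirror the SPACK_* variables above, and the example values are invented.

```python
# Sketch of the flag-ordering rule implemented by the wrapper above:
# each block prepends in front of the previous result, and ldlibs is
# appended at the end.  Flag values are illustrative only.
def order_args(mode, lang_flags, args, flags):
    """Return the argument list the wrapper would hand to the real tool."""
    args = list(args)
    if mode in ('ld', 'ccld'):                          # Add ldflags
        args = flags.get('ldflags', []) + args
    if mode in ('cc', 'ccld') and lang_flags == 'C':    # Add C flags
        args = flags.get('cflags', []) + args
    if mode in ('cc', 'ccld') and lang_flags == 'CXX':  # Add C++ flags
        args = flags.get('cxxflags', []) + args
    if mode in ('cpp', 'as', 'cc', 'ccld'):             # Add cppflags
        args = flags.get('cppflags', []) + args
    if mode in ('cc', 'ccld') and lang_flags == 'F':    # Fortran flags go outermost
        args = flags.get('fflags', []) + args
    if mode in ('ld', 'ccld'):                          # Append ldlibs
        args = args + flags.get('ldlibs', [])
    return args

if __name__ == '__main__':
    flags = {'cppflags': ['-g', '-O1'], 'cflags': ['-Wall'],
             'ldflags': ['-L', 'foo'], 'ldlibs': ['-lfoo']}
    print(order_args('ccld', 'C', ['main.c', '-o', 'main'], flags))
    # -> ['-g', '-O1', '-Wall', '-L', 'foo', 'main.c', '-o', 'main', '-lfoo']
```

This is the same ordering the compiler-wrapper test later in this commit asserts (cppflags, then language flags, then ldflags, then the original command, then ldlibs), minus the rpath arguments.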
@@ -51,15 +51,16 @@
Skimming this module is a nice way to get acquainted with the types of
calls you can make from within the install() function.
"""
import multiprocessing
import os
import platform
import shutil
import sys
import shutil
import multiprocessing
import platform

import spack
import llnl.util.tty as tty
from llnl.util.filesystem import *

import spack
from spack.environment import EnvironmentModifications, validate
from spack.util.environment import *
from spack.util.executable import Executable, which
@@ -115,22 +116,24 @@ def __call__(self, *args, **kwargs):

def set_compiler_environment_variables(pkg, env):
    assert pkg.spec.concrete
    compiler = pkg.compiler
    flags = pkg.spec.compiler_flags

    # Set compiler variables used by CMake and autotools
    assert all(key in pkg.compiler.link_paths for key in ('cc', 'cxx', 'f77', 'fc'))
    assert all(key in compiler.link_paths for key in ('cc', 'cxx', 'f77', 'fc'))

    # Populate an object with the list of environment modifications
    # and return it
    # TODO : add additional kwargs for better diagnostics, like requestor, ttyout, ttyerr, etc.
    link_dir = spack.build_env_path
    env.set('CC', join_path(link_dir, pkg.compiler.link_paths['cc']))
    env.set('CXX', join_path(link_dir, pkg.compiler.link_paths['cxx']))
    env.set('F77', join_path(link_dir, pkg.compiler.link_paths['f77']))
    env.set('FC', join_path(link_dir, pkg.compiler.link_paths['fc']))
    env.set('CC', join_path(link_dir, compiler.link_paths['cc']))
    env.set('CXX', join_path(link_dir, compiler.link_paths['cxx']))
    env.set('F77', join_path(link_dir, compiler.link_paths['f77']))
    env.set('FC', join_path(link_dir, compiler.link_paths['fc']))

    # Set SPACK compiler variables so that our wrapper knows what to call
    compiler = pkg.compiler
    if compiler.cc:
        env.set('SPACK_CC', compiler.cc)
        env.set('SPACK_CC', compiler.cc)
    if compiler.cxx:
        env.set('SPACK_CXX', compiler.cxx)
    if compiler.f77:
@@ -144,6 +147,12 @@ def set_compiler_environment_variables(pkg, env):
    env.set('SPACK_F77_RPATH_ARG', compiler.f77_rpath_arg)
    env.set('SPACK_FC_RPATH_ARG', compiler.fc_rpath_arg)

    # Add every valid compiler flag to the environment, prefixed with "SPACK_"
    for flag in spack.spec.FlagMap.valid_compiler_flags():
        # Concreteness guarantees key safety here
        if flags[flag] != []:
            env.set('SPACK_' + flag.upper(), ' '.join(f for f in flags[flag]))

    env.set('SPACK_COMPILER_SPEC', str(pkg.spec.compiler))
    return env
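Note: for a spec that carries flags, the loop above turns each non-empty FlagMap entry into a SPACK_<FLAG> environment variable that the wrapper script reads. A rough, self-contained sketch of that mapping, assuming the six flag names defined in spack.spec and using invented flag values:

```python
# Sketch of the SPACK_* variables produced by the flag loop above for a
# hypothetical concrete spec.  The naming rule ('SPACK_' + flag.upper())
# comes from the diff; the flag values below are made up.
valid_compiler_flags = ['cflags', 'cxxflags', 'fflags', 'ldflags', 'ldlibs', 'cppflags']

def flag_environment(compiler_flags):
    env = {}
    for flag in valid_compiler_flags:
        values = compiler_flags.get(flag, [])
        if values:                       # empty flag lists are skipped
            env['SPACK_' + flag.upper()] = ' '.join(values)
    return env

print(flag_environment({'cppflags': ['-O3', '-g'], 'ldlibs': ['-lm']}))
# -> {'SPACK_CPPFLAGS': '-O3 -g', 'SPACK_LDLIBS': '-lm'}
```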
@@ -51,11 +51,14 @@ def setup_parser(subparser):
        help='Show full dependency DAG of installed packages')

    subparser.add_argument(
        '-l', '--long', action='store_true',
        '-l', '--long', action='store_true', dest='long',
        help='Show dependency hashes as well as versions.')
    subparser.add_argument(
        '-L', '--very-long', action='store_true',
        '-L', '--very-long', action='store_true', dest='very_long',
        help='Show dependency hashes as well as versions.')
    subparser.add_argument(
        '-f', '--show-flags', action='store_true', dest='show_flags',
        help='Show spec compiler flags.')

    subparser.add_argument(
        '-e', '--explicit', action='store_true',
@@ -64,13 +67,13 @@ def setup_parser(subparser):
        '-E', '--implicit', action='store_true',
        help='Show only specs that were installed as dependencies')
    subparser.add_argument(
        '-u', '--unknown', action='store_true',
        '-u', '--unknown', action='store_true', dest='unknown',
        help='Show only specs Spack does not have a package for.')
    subparser.add_argument(
        '-m', '--missing', action='store_true',
        '-m', '--missing', action='store_true', dest='missing',
        help='Show missing dependencies as well as installed specs.')
    subparser.add_argument(
        '-M', '--only-missing', action='store_true',
        '-M', '--only-missing', action='store_true', dest='only_missing',
        help='Show only missing dependencies.')
    subparser.add_argument(
        '-N', '--namespace', action='store_true',
@@ -95,6 +98,12 @@ def display_specs(specs, **kwargs):
        hashes = True
        hlen = None

    nfmt = '.' if namespace else '_'
    format_string = '$%s$@$+' % nfmt
    flags = kwargs.get('show_flags', False)
    if flags:
        format_string = '$%s$@$%%+$+' % nfmt

    # Make a dict with specs keyed by architecture and compiler.
    index = index_by(specs, ('architecture', 'compiler'))

@@ -110,8 +119,7 @@ def display_specs(specs, **kwargs):
        specs = index[(architecture,compiler)]
        specs.sort()

        nfmt = '.' if namespace else '_'
        abbreviated = [s.format('$%s$@$+' % nfmt, color=True) for s in specs]
        abbreviated = [s.format(format_string, color=True) for s in specs]
        if mode == 'paths':
            # Print one spec per line along with prefix path
            width = max(len(s) for s in abbreviated)
@@ -126,20 +134,28 @@ def display_specs(specs, **kwargs):
        elif mode == 'deps':
            for spec in specs:
                print spec.tree(
                    format='$%s$@$+' % nfmt,
                    format=format_string,
                    color=True,
                    indent=4,
                    prefix=(lambda s: gray_hash(s, hlen)) if hashes else None)

        elif mode == 'short':
            def fmt(s):
                string = ""
                if hashes:
                    string += gray_hash(s, hlen) + ' '
                string += s.format('$-%s$@$+' % nfmt, color=True)
            # Print columns of output if not printing flags
            if not flags:
                def fmt(s):
                    string = ""
                    if hashes:
                        string += gray_hash(s, hlen) + ' '
                    string += s.format('$-%s$@$+' % nfmt, color=True)

                return string
            colify(fmt(s) for s in specs)
                    return string
                colify(fmt(s) for s in specs)
            # Print one entry per line if including flags
            else:
                for spec in specs:
                    # Print the hash if necessary
                    hsh = gray_hash(spec, hlen) + ' ' if hashes else ''
                    print hsh + spec.format(format_string, color=True) + '\n'

        else:
            raise ValueError(
@@ -151,7 +167,7 @@ def find(parser, args):
    # Filter out specs that don't exist.
    query_specs = spack.cmd.parse_specs(args.query_specs)
    query_specs, nonexisting = partition_list(
        query_specs, lambda s: spack.repo.exists(s.name))
        query_specs, lambda s: spack.repo.exists(s.name) or not s.name)

    if nonexisting:
        msg = "No such package%s: " % ('s' if len(nonexisting) > 1 else '')
@@ -193,4 +209,4 @@ def find(parser, args):
    display_specs(specs, mode=args.mode,
                  long=args.long,
                  very_long=args.very_long,
                  namespace=args.namespace)
                  show_flags=args.show_flags)
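Note: one detail worth calling out in display_specs() is the format-string construction. Because the string is first %-interpolated to splice in the namespace separator, the compiler-with-flags token has to be written as $%%+ so that a literal $%+ survives the interpolation. A small sketch using only the string logic from the diff:

```python
# Sketch of how 'spack find' builds its spec format string.  '$%+' is the
# format token for "compiler with flags"; it is escaped as '$%%+' because
# the string is %-formatted to insert the namespace separator first.
def build_format_string(namespace=False, show_flags=False):
    nfmt = '.' if namespace else '_'
    format_string = '$%s$@$+' % nfmt
    if show_flags:
        format_string = '$%s$@$%%+$+' % nfmt
    return format_string

print(build_format_string())                 # '$_$@$+'
print(build_format_string(show_flags=True))  # '$_$@$%+$+'
```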
@@ -92,7 +92,7 @@ def concretize_specs(specs, allow_multiple_matches=False, force=False):
        if not allow_multiple_matches and len(matching) > 1:
            tty.error("%s matches multiple packages:" % spec)
            print()
            display_specs(matching, long=True)
            display_specs(matching, long=True, show_flags=True)
            print()
            has_errors = True

@@ -186,7 +186,7 @@ def uninstall(parser, args):
    if not args.yes_to_all:
        tty.msg("The following packages will be uninstalled : ")
        print('')
        display_specs(uninstall_list, long=True)
        display_specs(uninstall_list, long=True, show_flags=True)
        print('')
        ask_for_confirmation('Do you want to proceed ? ')
@@ -109,7 +109,7 @@ def fc_rpath_arg(self):
        return '-Wl,-rpath,'


    def __init__(self, cspec, cc, cxx, f77, fc):
    def __init__(self, cspec, cc, cxx, f77, fc, **kwargs):
        def check(exe):
            if exe is None:
                return None
@@ -121,6 +121,15 @@ def check(exe):
        self.f77 = check(f77)
        self.fc = check(fc)

        # Unfortunately have to make sure these params are accepted
        # in the same order they are returned by sorted(flags)
        # in compilers/__init__.py
        self.flags = {}
        for flag in spack.spec.FlagMap.valid_compiler_flags():
            value = kwargs.get(flag, None)
            if value is not None:
                self.flags[flag] = value.split()

        self.spec = cspec


@@ -188,7 +197,6 @@ def f77_version(cls, f77):
    def fc_version(cls, fc):
        return cls.default_version(fc)


    @classmethod
    def _find_matches_in_path(cls, compiler_names, detect_version, *path):
        """Finds compilers in the paths supplied.

@@ -255,7 +255,11 @@ def get_compiler(cspec):
        else:
            compiler_paths.append(None)

        return cls(cspec, *compiler_paths)
        flags = {}
        for f in spack.spec.FlagMap.valid_compiler_flags():
            if f in items:
                flags[f] = items[f]
        return cls(cspec, *compiler_paths, **flags)

    matches = find(compiler_spec, arch, scope)
    return [get_compiler(cspec) for cspec in matches]
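Note: get_compiler() above now forwards any flag entries found in the compiler's config record as keyword arguments, and Compiler.__init__ splits each value into a list. The exact compilers.yaml layout is not shown in this diff, so the config record below is invented; this is only a sketch of the round trip.

```python
# Sketch: flag strings recorded on a compiler's config entry become lists
# on compiler.flags.  'items' stands in for the per-compiler config dict
# used by get_compiler(); its contents are hypothetical.
valid_compiler_flags = ['cflags', 'cxxflags', 'fflags', 'ldflags', 'ldlibs', 'cppflags']

items = {'cc': '/usr/bin/gcc', 'cxx': '/usr/bin/g++', 'cflags': '-O2 -pipe'}

# What get_compiler() collects and passes along as **flags:
flags = dict((f, items[f]) for f in valid_compiler_flags if f in items)

# What Compiler.__init__ does with each keyword it receives:
compiler_flags = {}
for flag in valid_compiler_flags:
    value = flags.get(flag)
    if value is not None:
        compiler_flags[flag] = value.split()

print(compiler_flags)   # {'cflags': ['-O2', '-pipe']}
```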
@@ -44,6 +44,7 @@
from itertools import chain
from spack.config import *


class DefaultConcretizer(object):
    """This class doesn't have any state, it just provides some methods for
       concretization. You can subclass it to override just some of the
@@ -269,6 +270,59 @@ def concretize_compiler(self, spec):
        return True  # things changed.


    def concretize_compiler_flags(self, spec):
        """
        The compiler flags are updated to match those of the spec whose
        compiler is used, defaulting to no compiler flags in the spec.
        Default specs set at the compiler level will still be added later.
        """
        ret = False
        for flag in spack.spec.FlagMap.valid_compiler_flags():
            try:
                nearest = next(p for p in spec.traverse(direction='parents')
                               if ((p.compiler == spec.compiler and p is not spec)
                                   and flag in p.compiler_flags))
                if ((not flag in spec.compiler_flags) or
                        sorted(spec.compiler_flags[flag]) != sorted(nearest.compiler_flags[flag])):
                    if flag in spec.compiler_flags:
                        spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) |
                                                         set(nearest.compiler_flags[flag]))
                    else:
                        spec.compiler_flags[flag] = nearest.compiler_flags[flag]
                    ret = True

            except StopIteration:
                if (flag in spec.root.compiler_flags and ((not flag in spec.compiler_flags) or
                        sorted(spec.compiler_flags[flag]) != sorted(spec.root.compiler_flags[flag]))):
                    if flag in spec.compiler_flags:
                        spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) |
                                                         set(spec.root.compiler_flags[flag]))
                    else:
                        spec.compiler_flags[flag] = spec.root.compiler_flags[flag]
                    ret = True
                else:
                    if not flag in spec.compiler_flags:
                        spec.compiler_flags[flag] = []

        # Include the compiler flag defaults from the config files
        # This ensures that spack will detect conflicts that stem from a change
        # in default compiler flags.
        compiler = spack.compilers.compiler_for_spec(spec.compiler)
        for flag in compiler.flags:
            if flag not in spec.compiler_flags:
                spec.compiler_flags[flag] = compiler.flags[flag]
                if compiler.flags[flag] != []:
                    ret = True
            else:
                if ((sorted(spec.compiler_flags[flag]) != sorted(compiler.flags[flag])) and
                        (not set(spec.compiler_flags[flag]) >= set(compiler.flags[flag]))):
                    ret = True
                    spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) |
                                                     set(compiler.flags[flag]))

        return ret


def find_spec(spec, condition):
    """Searches the dag from spec in an intelligent order and looks
       for a spec that matches a condition"""
@@ -330,7 +384,6 @@ def cmp_specs(lhs, rhs):
    return 0



class UnavailableCompilerVersionError(spack.error.SpackError):
    """Raised when there is no available compiler that satisfies a
       compiler spec."""
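Note: the method above implements a simple propagation rule: a spec inherits flags from the nearest ancestor that uses the same compiler (falling back to the root spec), and then unions in the compiler's own default flags from configuration. The merge is a set union, so flags already on the spec are never dropped. A toy illustration of that union step, with invented flag lists:

```python
# Toy illustration of the union-merge used by concretize_compiler_flags():
# whatever is already on the spec stays, and inherited/default flags are
# added on top.  The flag values are invented.
def merge_flags(spec_flags, inherited_flags):
    if not spec_flags:
        return list(inherited_flags)
    return list(set(spec_flags) | set(inherited_flags))

print(sorted(merge_flags(['-O2'], ['-O2', '-g'])))   # ['-O2', '-g']
print(merge_flags([], ['-fopenmp']))                  # ['-fopenmp']
```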
@@ -60,7 +60,7 @@
_db_dirname = '.spack-db'

# DB version.  This is stuck in the DB file to track changes in format.
_db_version = Version('0.9')
_db_version = Version('0.9.1')

# Default timeout for spack database locks is 5 min.
_db_lock_timeout = 60
@@ -205,6 +205,11 @@ def _read_spec_from_yaml(self, hash_key, installs, parent_key=None):

        spec_dict = installs[hash_key]['spec']

        # Install records don't include hash with spec, so we add it in here
        # to ensure it is read properly.
        for name in spec_dict:
            spec_dict[name]['hash'] = hash_key

        # Build spec from dict first.
        spec = Spec.from_node_dict(spec_dict)

@@ -250,13 +255,18 @@ def check(cond, msg):
        check('installs' in db, "No 'installs' in YAML DB.")
        check('version' in db, "No 'version' in YAML DB.")


        installs = db['installs']

        # TODO: better version checking semantics.
        version = Version(db['version'])
        if version != _db_version:
        if version > _db_version:
            raise InvalidDatabaseVersionError(_db_version, version)
        elif version < _db_version:
            self.reindex(spack.install_layout)
            installs = dict((k, v.to_dict()) for k, v in self._data.items())

        # Iterate through database and check each record.
        installs = db['installs']
        data = {}
        for hash_key, rec in installs.items():
            try:
@@ -146,12 +146,12 @@ class SomePackage(Package):
       def install(self, prefix):
           # Do default install

       @when('=chaos_5_x86_64_ib')
       @when('arch=chaos_5_x86_64_ib')
       def install(self, prefix):
           # This will be executed instead of the default install if
           # the package's sys_type() is chaos_5_x86_64_ib.

       @when('=bgqos_0")
       @when('arch=bgqos_0")
       def install(self, prefix):
           # This will be executed if the package's sys_type is bgqos_0
@@ -72,7 +72,9 @@
  dep_list     = { ^ spec }
  spec         = id [ options ]
  options      = { @version-list | +variant | -variant | ~variant |
                   %compiler | =architecture }
                   %compiler | arch=architecture | [ flag ]=value }
  flag         = { cflags | cxxflags | fcflags | fflags | cppflags |
                   ldflags | ldlibs }
  variant      = id
  architecture = id
  compiler     = id [ version-list ]
@@ -80,6 +82,9 @@
  version      = id | id: | :id | id:id
  id           = [A-Za-z0-9_][A-Za-z0-9_.-]*

  Identifiers using the <name>=<value> syntax, such as architectures and
  compiler flags, require a space before the name.

  There is one context-sensitive part: ids in versions may contain '.', while
  other ids may not.

@@ -108,6 +113,8 @@
import spack.error
import spack.compilers as compilers

# TODO: move display_specs to some other location.
from spack.cmd.find import display_specs
from spack.version import *
from spack.util.string import *
from spack.util.prefix import Prefix
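Note: with the grammar extension above, compiler flags and the architecture are written as space-separated name=value pairs on the spec string. A few example spec strings in the new syntax; the package names are illustrative, while the flag/arch forms follow the grammar above and the tests later in this commit.

```python
# Example spec strings accepted by the extended grammar.
examples = [
    'mpich @3.0.4 %gcc@4.9 cppflags="-O3 -g"',  # quoted multi-word flag value
    'mpileaks arch=x86-linux',                  # architecture as name=value
    'mpich debug=2',                            # free-form variant value
    'cflags="-O3"',                             # anonymous (nameless) spec
]
for s in examples:
    print(s)
```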
@ -143,7 +150,6 @@
|
||||
every time we call str()"""
|
||||
_any_version = VersionList([':'])
|
||||
|
||||
|
||||
def index_specs(specs):
|
||||
"""Take a list of specs and return a dict of lists. Dict is
|
||||
keyed by spec name and lists include all specs with the
|
||||
@ -296,22 +302,25 @@ class VariantSpec(object):
|
||||
on the particular package being built, and each named variant can
|
||||
be enabled or disabled.
|
||||
"""
|
||||
def __init__(self, name, enabled):
|
||||
def __init__(self, name, value):
|
||||
self.name = name
|
||||
self.enabled = enabled
|
||||
self.value = value
|
||||
|
||||
|
||||
def _cmp_key(self):
|
||||
return (self.name, self.enabled)
|
||||
return (self.name, self.value)
|
||||
|
||||
|
||||
def copy(self):
|
||||
return VariantSpec(self.name, self.enabled)
|
||||
return VariantSpec(self.name, self.value)
|
||||
|
||||
|
||||
def __str__(self):
|
||||
out = '+' if self.enabled else '~'
|
||||
return out + self.name
|
||||
if self.value in [True,False]:
|
||||
out = '+' if self.value else '~'
|
||||
return out + self.name
|
||||
else:
|
||||
return ' ' + self.name + "=" + self.value
|
||||
|
||||
|
||||
class VariantMap(HashableMap):
|
||||
@ -322,10 +331,10 @@ def __init__(self, spec):
|
||||
|
||||
def satisfies(self, other, strict=False):
|
||||
if strict or self.spec._concrete:
|
||||
return all(k in self and self[k].enabled == other[k].enabled
|
||||
return all(k in self and self[k].value == other[k].value
|
||||
for k in other)
|
||||
else:
|
||||
return all(self[k].enabled == other[k].enabled
|
||||
return all(self[k].value == other[k].value
|
||||
for k in other if k in self)
|
||||
|
||||
|
||||
@ -343,7 +352,7 @@ def constrain(self, other):
|
||||
changed = False
|
||||
for k in other:
|
||||
if k in self:
|
||||
if self[k].enabled != other[k].enabled:
|
||||
if self[k].value != other[k].value:
|
||||
raise UnsatisfiableVariantSpecError(self[k], other[k])
|
||||
else:
|
||||
self[k] = other[k].copy()
|
||||
@@ -368,6 +377,70 @@ def __str__(self):
        return ''.join(str(self[key]) for key in sorted_keys)


_valid_compiler_flags = [
    'cflags', 'cxxflags', 'fflags', 'ldflags', 'ldlibs', 'cppflags']

class FlagMap(HashableMap):
    def __init__(self, spec):
        super(FlagMap, self).__init__()
        self.spec = spec


    def satisfies(self, other, strict=False):
        if strict or (self.spec and self.spec._concrete):
            return all(f in self and set(self[f]) <= set(other[f])
                       for f in other)
        else:
            return all(set(self[f]) <= set(other[f])
                       for f in other if (other[f] != [] and f in self))


    def constrain(self, other):
        """Add all flags in other that aren't in self to self.

        Return whether the spec changed.
        """
        if other.spec and other.spec._concrete:
            for k in self:
                if k not in other:
                    raise UnsatisfiableCompilerFlagSpecError(self[k], '<absent>')

        changed = False
        for k in other:
            if k in self and not set(self[k]) <= set(other[k]):
                raise UnsatisfiableCompilerFlagSpecError(
                    ' '.join(f for f in self[k]), ' '.join(f for f in other[k]))
            elif k not in self:
                self[k] = other[k]
                changed = True
        return changed

    @staticmethod
    def valid_compiler_flags():
        return _valid_compiler_flags

    @property
    def concrete(self):
        return all(flag in self for flag in _valid_compiler_flags)


    def copy(self):
        clone = FlagMap(None)
        for name, value in self.items():
            clone[name] = value
        return clone


    def _cmp_key(self):
        return ''.join(str(key) + ' '.join(str(v) for v in value) for key, value in sorted(self.items()))


    def __str__(self):
        sorted_keys = filter(lambda flag: self[flag] != [], sorted(self.keys()))
        cond_symbol = ' ' if len(sorted_keys) > 0 else ''
        return cond_symbol + ' '.join(str(key) + '=\"' + ' '.join(str(f) for f in self[key]) + '\"' for key in sorted_keys)

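Note: FlagMap.satisfies() treats each flag list as a set. For a concrete spec, every flag key in the constraint must be present and the spec's list must be a subset of the constraint's list; for abstract specs only the keys both sides mention are compared. constrain() only ever adds flags and raises when existing flags would have to be removed. A standalone sketch of the satisfies rule using plain dicts, with invented flag values:

```python
# Standalone sketch of FlagMap.satisfies() using plain dicts of flag lists.
# 'concrete' stands in for self.spec._concrete; the flag values are invented.
def flags_satisfy(mine, other, concrete=False, strict=False):
    if strict or concrete:
        return all(f in mine and set(mine[f]) <= set(other[f]) for f in other)
    return all(set(mine[f]) <= set(other[f])
               for f in other if other[f] and f in mine)

# Abstract spec: flags it does not mention are ignored.
print(flags_satisfy({'cflags': ['-O2']}, {'cflags': ['-O2', '-g']}))             # True
# Concrete spec: every constrained flag must be present and a subset.
print(flags_satisfy({'cflags': ['-O2']}, {'cppflags': ['-g']}, concrete=True))   # False
```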
class DependencyMap(HashableMap):
|
||||
"""Each spec has a DependencyMap containing specs for its dependencies.
|
||||
The DependencyMap is keyed by name. """
|
||||
@ -409,10 +482,13 @@ def __init__(self, spec_like, *dep_like, **kwargs):
|
||||
self.versions = other.versions
|
||||
self.architecture = other.architecture
|
||||
self.compiler = other.compiler
|
||||
self.compiler_flags = other.compiler_flags
|
||||
self.compiler_flags.spec = self
|
||||
self.dependencies = other.dependencies
|
||||
self.variants = other.variants
|
||||
self.variants.spec = self
|
||||
self.namespace = other.namespace
|
||||
self._hash = other._hash
|
||||
|
||||
# Specs are by default not assumed to be normal, but in some
|
||||
# cases we've read them from a file want to assume normal.
|
||||
@ -440,13 +516,30 @@ def _add_version(self, version):
|
||||
self.versions.add(version)
|
||||
|
||||
|
||||
def _add_variant(self, name, enabled):
|
||||
def _add_variant(self, name, value):
|
||||
"""Called by the parser to add a variant."""
|
||||
if name in self.variants: raise DuplicateVariantError(
|
||||
"Cannot specify variant '%s' twice" % name)
|
||||
self.variants[name] = VariantSpec(name, enabled)
|
||||
if isinstance(value, basestring) and value.upper() == 'TRUE':
|
||||
value = True
|
||||
elif isinstance(value, basestring) and value.upper() == 'FALSE':
|
||||
value = False
|
||||
self.variants[name] = VariantSpec(name, value)
|
||||
|
||||
|
||||
def _add_flag(self, name, value):
|
||||
"""Called by the parser to add a known flag.
|
||||
Known flags currently include "arch"
|
||||
"""
|
||||
valid_flags = FlagMap.valid_compiler_flags()
|
||||
if name == 'arch':
|
||||
self._set_architecture(value)
|
||||
elif name in valid_flags:
|
||||
assert(self.compiler_flags is not None)
|
||||
self.compiler_flags[name] = value.split()
|
||||
else:
|
||||
self._add_variant(name,value)
|
||||
|
||||
def _set_compiler(self, compiler):
|
||||
"""Called by the parser to set the compiler."""
|
||||
if self.compiler: raise DuplicateCompilerSpecError(
|
||||
@ -473,7 +566,7 @@ def _add_dependency(self, spec):
|
||||
#
|
||||
@property
|
||||
def fullname(self):
|
||||
return '%s.%s' % (self.namespace, self.name) if self.namespace else self.name
|
||||
return '%s.%s' % (self.namespace, self.name) if self.namespace else (self.name if self.name else '')
|
||||
|
||||
|
||||
@property
|
||||
@ -523,7 +616,7 @@ def virtual(self):
|
||||
@staticmethod
|
||||
def is_virtual(name):
|
||||
"""Test if a name is virtual without requiring a Spec."""
|
||||
return not spack.repo.exists(name)
|
||||
return (not name is None) and ( not spack.repo.exists(name) )
|
||||
|
||||
|
||||
@property
|
||||
@ -541,8 +634,8 @@ def concrete(self):
|
||||
and self.variants.concrete
|
||||
and self.architecture
|
||||
and self.compiler and self.compiler.concrete
|
||||
and self.compiler_flags.concrete
|
||||
and self.dependencies.concrete)
|
||||
|
||||
return self._concrete
|
||||
|
||||
|
||||
@ -664,19 +757,25 @@ def dag_hash(self, length=None):
|
||||
"""
|
||||
Return a hash of the entire spec DAG, including connectivity.
|
||||
"""
|
||||
yaml_text = yaml.dump(
|
||||
self.to_node_dict(), default_flow_style=True, width=sys.maxint)
|
||||
sha = hashlib.sha1(yaml_text)
|
||||
return base64.b32encode(sha.digest()).lower()[:length]
|
||||
|
||||
if self._hash:
|
||||
return self._hash[:length]
|
||||
else:
|
||||
yaml_text = yaml.dump(
|
||||
self.to_node_dict(), default_flow_style=True, width=sys.maxint)
|
||||
sha = hashlib.sha1(yaml_text)
|
||||
b32_hash = base64.b32encode(sha.digest()).lower()[:length]
|
||||
if self.concrete:
|
||||
self._hash = b32_hash
|
||||
return b32_hash
|
||||
|
||||
def to_node_dict(self):
|
||||
params = dict( (name, v.value) for name, v in self.variants.items() )
|
||||
params.update( dict( (name, value) for name, value in self.compiler_flags.items()) )
|
||||
d = {
|
||||
'variants' : dict(
|
||||
(name,v.enabled) for name, v in self.variants.items()),
|
||||
'parameters' : params,
|
||||
'arch' : self.architecture,
|
||||
'dependencies' : dict((d, self.dependencies[d].dag_hash())
|
||||
for d in sorted(self.dependencies))
|
||||
for d in sorted(self.dependencies)),
|
||||
}
|
||||
|
||||
# Older concrete specs do not have a namespace. Omit for
|
||||
@ -689,6 +788,7 @@ def to_node_dict(self):
|
||||
else:
|
||||
d['compiler'] = None
|
||||
d.update(self.versions.to_dict())
|
||||
|
||||
return { self.name : d }
|
||||
|
||||
|
||||
@ -712,13 +812,27 @@ def from_node_dict(node):
|
||||
spec.versions = VersionList.from_dict(node)
|
||||
spec.architecture = node['arch']
|
||||
|
||||
if 'hash' in node:
|
||||
spec._hash = node['hash']
|
||||
|
||||
if node['compiler'] is None:
|
||||
spec.compiler = None
|
||||
else:
|
||||
spec.compiler = CompilerSpec.from_dict(node)
|
||||
|
||||
for name, enabled in node['variants'].items():
|
||||
spec.variants[name] = VariantSpec(name, enabled)
|
||||
if 'parameters' in node:
|
||||
for name, value in node['parameters'].items():
|
||||
if name in _valid_compiler_flags:
|
||||
spec.compiler_flags[name] = value
|
||||
else:
|
||||
spec.variants[name] = VariantSpec(name, value)
|
||||
elif 'variants' in node:
|
||||
for name, value in node['variants'].items():
|
||||
spec.variants[name] = VariantSpec(name, value)
|
||||
for name in FlagMap.valid_compiler_flags():
|
||||
spec.compiler_flags[name] = []
|
||||
else:
|
||||
raise SpackRecordError("Did not find a valid format for variants in YAML file")
|
||||
|
||||
return spec
|
||||
|
||||
@ -781,11 +895,13 @@ def _concretize_helper(self, presets=None, visited=None):
|
||||
# Concretize virtual dependencies last. Because they're added
|
||||
# to presets below, their constraints will all be merged, but we'll
|
||||
# still need to select a concrete package later.
|
||||
changed |= any(
|
||||
(spack.concretizer.concretize_architecture(self),
|
||||
spack.concretizer.concretize_compiler(self),
|
||||
spack.concretizer.concretize_version(self),
|
||||
spack.concretizer.concretize_variants(self)))
|
||||
if not self.virtual:
|
||||
changed |= any(
|
||||
(spack.concretizer.concretize_architecture(self),
|
||||
spack.concretizer.concretize_compiler(self),
|
||||
spack.concretizer.concretize_compiler_flags(self),#has to be concretized after compiler
|
||||
spack.concretizer.concretize_version(self),
|
||||
spack.concretizer.concretize_variants(self)))
|
||||
presets[self.name] = self
|
||||
|
||||
visited.add(self.name)
|
||||
@ -928,6 +1044,8 @@ def concretize(self):
|
||||
with requirements of its pacakges. See flatten() and normalize() for
|
||||
more details on this.
|
||||
"""
|
||||
if not self.name:
|
||||
raise SpecError("Attempting to concretize anonymous spec")
|
||||
|
||||
if self._concrete:
|
||||
return
|
||||
@ -1128,13 +1246,11 @@ def _merge_dependency(self, dep, visited, spec_deps, provider_index):
|
||||
if required:
|
||||
raise UnsatisfiableProviderSpecError(required[0], dep)
|
||||
provider_index.update(dep)
|
||||
|
||||
# If the spec isn't already in the set of dependencies, clone
|
||||
# it from the package description.
|
||||
if dep.name not in spec_deps:
|
||||
spec_deps[dep.name] = dep.copy()
|
||||
changed = True
|
||||
|
||||
# Constrain package information with spec info
|
||||
try:
|
||||
changed |= spec_deps[dep.name].constrain(dep)
|
||||
@ -1177,7 +1293,6 @@ def _normalize_helper(self, visited, spec_deps, provider_index):
|
||||
for dep_name in pkg.dependencies:
|
||||
# Do we depend on dep_name? If so pkg_dep is not None.
|
||||
pkg_dep = self._evaluate_dependency_conditions(dep_name)
|
||||
|
||||
# If pkg_dep is a dependency, merge it.
|
||||
if pkg_dep:
|
||||
changed |= self._merge_dependency(
|
||||
@ -1202,8 +1317,10 @@ def normalize(self, force=False):
|
||||
|
||||
TODO: normalize should probably implement some form of cycle detection,
|
||||
to ensure that the spec is actually a DAG.
|
||||
|
||||
"""
|
||||
if not self.name:
|
||||
raise SpecError("Attempting to normalize anonymous spec")
|
||||
|
||||
if self._normal and not force:
|
||||
return False
|
||||
|
||||
@ -1248,7 +1365,7 @@ def validate_names(self):
|
||||
"""
|
||||
for spec in self.traverse():
|
||||
# Don't get a package for a virtual name.
|
||||
if not spec.virtual:
|
||||
if (not spec.virtual) and spec.name:
|
||||
spack.repo.get(spec.fullname)
|
||||
|
||||
# validate compiler in addition to the package name.
|
||||
@ -1269,7 +1386,7 @@ def constrain(self, other, deps=True):
|
||||
"""
|
||||
other = self._autospec(other)
|
||||
|
||||
if not self.name == other.name:
|
||||
if not (self.name == other.name or (not self.name) or (not other.name) ):
|
||||
raise UnsatisfiableSpecNameError(self.name, other.name)
|
||||
|
||||
if other.namespace is not None:
|
||||
@ -1281,7 +1398,7 @@ def constrain(self, other, deps=True):
|
||||
|
||||
for v in other.variants:
|
||||
if (v in self.variants and
|
||||
self.variants[v].enabled != other.variants[v].enabled):
|
||||
self.variants[v].value != other.variants[v].value):
|
||||
raise UnsatisfiableVariantSpecError(self.variants[v],
|
||||
other.variants[v])
|
||||
|
||||
@ -1300,6 +1417,8 @@ def constrain(self, other, deps=True):
|
||||
changed |= self.versions.intersect(other.versions)
|
||||
changed |= self.variants.constrain(other.variants)
|
||||
|
||||
changed |= self.compiler_flags.constrain(other.compiler_flags)
|
||||
|
||||
old = self.architecture
|
||||
self.architecture = self.architecture or other.architecture
|
||||
changed |= (self.architecture != old)
|
||||
@ -1370,7 +1489,10 @@ def _autospec(self, spec_like):
|
||||
return spec_like
|
||||
|
||||
try:
|
||||
return spack.spec.Spec(spec_like)
|
||||
spec = spack.spec.Spec(spec_like)
|
||||
if not spec.name:
|
||||
raise SpecError("anonymous package -- this will always be handled")
|
||||
return spec
|
||||
except SpecError:
|
||||
return parse_anonymous_spec(spec_like, self.name)
|
||||
|
||||
@ -1390,7 +1512,7 @@ def satisfies(self, other, deps=True, strict=False):
|
||||
"""
|
||||
other = self._autospec(other)
|
||||
|
||||
# A concrete provider can satisfy a virtual dependency.
|
||||
# A concrete provider can satisfy a virtual dependency.
|
||||
if not self.virtual and other.virtual:
|
||||
pkg = spack.repo.get(self.fullname)
|
||||
if pkg.provides(other.name):
|
||||
@ -1401,7 +1523,7 @@ def satisfies(self, other, deps=True, strict=False):
|
||||
return False
|
||||
|
||||
# Otherwise, first thing we care about is whether the name matches
|
||||
if self.name != other.name:
|
||||
if self.name != other.name and self.name and other.name:
|
||||
return False
|
||||
|
||||
# namespaces either match, or other doesn't require one.
|
||||
@ -1422,7 +1544,10 @@ def satisfies(self, other, deps=True, strict=False):
|
||||
elif strict and (other.compiler and not self.compiler):
|
||||
return False
|
||||
|
||||
if not self.variants.satisfies(other.variants, strict=strict):
|
||||
var_strict = strict
|
||||
if (not self.name) or (not other.name):
|
||||
var_strict = True
|
||||
if not self.variants.satisfies(other.variants, strict=var_strict):
|
||||
return False
|
||||
|
||||
# Architecture satisfaction is currently just string equality.
|
||||
@ -1433,9 +1558,15 @@ def satisfies(self, other, deps=True, strict=False):
|
||||
elif strict and (other.architecture and not self.architecture):
|
||||
return False
|
||||
|
||||
if not self.compiler_flags.satisfies(other.compiler_flags, strict=strict):
|
||||
return False
|
||||
|
||||
# If we need to descend into dependencies, do it, otherwise we're done.
|
||||
if deps:
|
||||
return self.satisfies_dependencies(other, strict=strict)
|
||||
deps_strict = strict
|
||||
if not (self.name and other.name):
|
||||
deps_strict=True
|
||||
return self.satisfies_dependencies(other, strict=deps_strict)
|
||||
else:
|
||||
return True
|
||||
|
||||
@ -1515,10 +1646,12 @@ def _dup(self, other, **kwargs):
|
||||
if kwargs.get('cleardeps', True):
|
||||
self.dependents = DependencyMap()
|
||||
self.dependencies = DependencyMap()
|
||||
self.compiler_flags = other.compiler_flags.copy()
|
||||
self.variants = other.variants.copy()
|
||||
self.variants.spec = self
|
||||
self.external = other.external
|
||||
self.namespace = other.namespace
|
||||
self._hash = other._hash
|
||||
|
||||
# If we copy dependencies, preserve DAG structure in the new spec
|
||||
if kwargs.get('deps', True):
|
||||
@ -1643,7 +1776,9 @@ def _cmp_node(self):
|
||||
self.versions,
|
||||
self.variants,
|
||||
self.architecture,
|
||||
self.compiler)
|
||||
self.compiler,
|
||||
self.compiler_flags,
|
||||
self.dag_hash())
|
||||
|
||||
|
||||
def eq_node(self, other):
|
||||
@ -1672,7 +1807,7 @@ def colorized(self):
|
||||
return colorize_spec(self)
|
||||
|
||||
|
||||
def format(self, format_string='$_$@$%@$+$=', **kwargs):
|
||||
def format(self, format_string='$_$@$%@+$+$=', **kwargs):
|
||||
"""Prints out particular pieces of a spec, depending on what is
|
||||
in the format string. The format strings you can provide are::
|
||||
|
||||
@ -1681,8 +1816,10 @@ def format(self, format_string='$_$@$%@$+$=', **kwargs):
|
||||
$@ Version with '@' prefix
|
||||
$% Compiler with '%' prefix
|
||||
$%@ Compiler with '%' prefix & compiler version with '@' prefix
|
||||
$%+ Compiler with '%' prefix & compiler flags prefixed by name
|
||||
$%@+ Compiler, compiler version, and compiler flags with same prefixes as above
|
||||
$+ Options
|
||||
$= Architecture with '=' prefix
|
||||
$= Architecture prefixed by 'arch='
|
||||
$# 7-char prefix of DAG hash with '-' prefix
|
||||
$$ $
|
||||
|
||||
@ -1693,6 +1830,7 @@ def format(self, format_string='$_$@$%@$+$=', **kwargs):
|
||||
${COMPILER} Full compiler string
|
||||
${COMPILERNAME} Compiler name
|
||||
${COMPILERVER} Compiler version
|
||||
${COMPILERFLAGS} Compiler flags
|
||||
${OPTIONS} Options
|
||||
${ARCHITECTURE} Architecture
|
||||
${SHA1} Dependencies 8-char sha1 prefix
|
||||
@ -1739,7 +1877,8 @@ def write(s, c):
|
||||
fmt += 's'
|
||||
|
||||
if c == '_':
|
||||
out.write(fmt % self.name)
|
||||
name = self.name if self.name else ''
|
||||
out.write(fmt % name)
|
||||
elif c == '.':
|
||||
out.write(fmt % self.fullname)
|
||||
elif c == '@':
|
||||
@ -1754,7 +1893,7 @@ def write(s, c):
|
||||
write(fmt % str(self.variants), c)
|
||||
elif c == '=':
|
||||
if self.architecture:
|
||||
write(fmt % (c + str(self.architecture)), c)
|
||||
write(fmt % (' arch' + c + str(self.architecture)), c)
|
||||
elif c == '#':
|
||||
out.write('-' + fmt % (self.dag_hash(7)))
|
||||
elif c == '$':
|
||||
@ -1771,11 +1910,16 @@ def write(s, c):
|
||||
if (self.compiler and self.compiler.versions and
|
||||
self.compiler.versions != _any_version):
|
||||
write(c + str(self.compiler.versions), '%')
|
||||
elif c == '+':
|
||||
if self.compiler_flags:
|
||||
write(fmt % str(self.compiler_flags), '%')
|
||||
compiler = False
|
||||
elif c == '$':
|
||||
escape = True
|
||||
compiler = False
|
||||
else:
|
||||
out.write(c)
|
||||
compiler = False
|
||||
compiler = False
|
||||
|
||||
elif named:
|
||||
if not c == '}':
|
||||
@ -1785,6 +1929,7 @@ def write(s, c):
|
||||
named_str += c
|
||||
continue;
|
||||
if named_str == 'PACKAGE':
|
||||
name = self.name if self.name else ''
|
||||
write(fmt % self.name, '@')
|
||||
if named_str == 'VERSION':
|
||||
if self.versions and self.versions != _any_version:
|
||||
@ -1798,6 +1943,9 @@ def write(s, c):
|
||||
elif named_str == 'COMPILERVER':
|
||||
if self.compiler:
|
||||
write(fmt % self.compiler.versions, '%')
|
||||
elif named_str == 'COMPILERFLAGS':
|
||||
if self.compiler:
|
||||
write(fmt % str(self.compiler_flags), '%')
|
||||
elif named_str == 'OPTIONS':
|
||||
if self.variants:
|
||||
write(fmt % str(self.variants), '+')
|
||||
@ -1842,7 +1990,7 @@ def tree(self, **kwargs):
|
||||
showid = kwargs.pop('ids', False)
|
||||
cover = kwargs.pop('cover', 'nodes')
|
||||
indent = kwargs.pop('indent', 0)
|
||||
fmt = kwargs.pop('format', '$_$@$%@$+$=')
|
||||
fmt = kwargs.pop('format', '$_$@$%@+$+$=')
|
||||
prefix = kwargs.pop('prefix', None)
|
||||
check_kwargs(kwargs, self.tree)
|
||||
|
||||
@ -1874,12 +2022,13 @@ def __repr__(self):
|
||||
#
|
||||
# These are possible token types in the spec grammar.
|
||||
#
|
||||
DEP, AT, COLON, COMMA, ON, OFF, PCT, EQ, ID = range(9)
|
||||
HASH, DEP, AT, COLON, COMMA, ON, OFF, PCT, EQ, QT, ID = range(11)
|
||||
|
||||
class SpecLexer(spack.parse.Lexer):
|
||||
"""Parses tokens that make up spack specs."""
|
||||
def __init__(self):
|
||||
super(SpecLexer, self).__init__([
|
||||
(r'/', lambda scanner, val: self.token(HASH, val)),
|
||||
(r'\^', lambda scanner, val: self.token(DEP, val)),
|
||||
(r'\@', lambda scanner, val: self.token(AT, val)),
|
||||
(r'\:', lambda scanner, val: self.token(COLON, val)),
|
||||
@ -1891,6 +2040,7 @@ def __init__(self):
|
||||
(r'\=', lambda scanner, val: self.token(EQ, val)),
|
||||
# This is more liberal than identifier_re (see above).
|
||||
# Checked by check_identifier() for better error messages.
|
||||
(r'([\"\'])(?:(?=(\\?))\2.)*?\1',lambda scanner, val: self.token(QT, val)),
|
||||
(r'\w[\w.-]*', lambda scanner, val: self.token(ID, val)),
|
||||
(r'\s+', lambda scanner, val: None)])
|
||||
|
||||
@ -1898,27 +2048,52 @@ def __init__(self):
|
||||
class SpecParser(spack.parse.Parser):
|
||||
def __init__(self):
|
||||
super(SpecParser, self).__init__(SpecLexer())
|
||||
|
||||
self.previous = None
|
||||
|
||||
def do_parse(self):
|
||||
specs = []
|
||||
|
||||
try:
|
||||
while self.next:
|
||||
# TODO: clean this parsing up a bit
|
||||
if self.previous:
|
||||
specs.append(self.spec(self.previous.value))
|
||||
if self.accept(ID):
|
||||
specs.append(self.spec())
|
||||
self.previous = self.token
|
||||
if self.accept(EQ):
|
||||
if not specs:
|
||||
specs.append(self.spec(None))
|
||||
if self.accept(QT):
|
||||
self.token.value = self.token.value[1:-1]
|
||||
else:
|
||||
self.expect(ID)
|
||||
specs[-1]._add_flag(self.previous.value, self.token.value)
|
||||
else:
|
||||
specs.append(self.spec(self.previous.value))
|
||||
self.previous = None
|
||||
elif self.accept(HASH):
|
||||
specs.append(self.spec_by_hash())
|
||||
|
||||
elif self.accept(DEP):
|
||||
if not specs:
|
||||
self.last_token_error("Dependency has no package")
|
||||
self.expect(ID)
|
||||
specs[-1]._add_dependency(self.spec())
|
||||
self.previous = self.token
|
||||
specs.append(self.spec(None))
|
||||
self.previous = None
|
||||
if self.accept(HASH):
|
||||
specs[-1]._add_dependency(self.spec_by_hash())
|
||||
else:
|
||||
self.expect(ID)
|
||||
specs[-1]._add_dependency(self.spec(self.token.value))
|
||||
|
||||
else:
|
||||
self.unexpected_token()
|
||||
# Attempt to construct an anonymous spec, but check that the first token is valid
|
||||
# TODO: Is this check even necessary, or will it all be Lex errors now?
|
||||
specs.append(self.spec(None,True))
|
||||
|
||||
except spack.parse.ParseError, e:
|
||||
raise SpecParseError(e)
|
||||
|
||||
|
||||
return specs
|
||||
|
||||
|
||||
@ -1927,15 +2102,35 @@ def parse_compiler(self, text):
|
||||
return self.compiler()
|
||||
|
||||
|
||||
def spec(self):
|
||||
def spec_by_hash(self):
|
||||
self.expect(ID)
|
||||
|
||||
specs = spack.installed_db.query()
|
||||
matches = [spec for spec in specs if
|
||||
spec.dag_hash()[:len(self.token.value)] == self.token.value]
|
||||
|
||||
if not matches:
|
||||
tty.die("%s does not match any installed packages." %self.token.value)
|
||||
|
||||
if len(matches) != 1:
|
||||
raise AmbiguousHashError("Multiple packages specify hash %s." % self.token.value, *matches)
|
||||
|
||||
return matches[0]
|
||||
|
||||
|
||||
def spec(self, name, check_valid_token = False):
|
||||
"""Parse a spec out of the input. If a spec is supplied, then initialize
|
||||
and return it instead of creating a new one."""
|
||||
|
||||
spec_namespace, dot, spec_name = self.token.value.rpartition('.')
|
||||
if not spec_namespace:
|
||||
if name:
|
||||
spec_namespace, dot, spec_name = name.rpartition('.')
|
||||
if not spec_namespace:
|
||||
spec_namespace = None
|
||||
self.check_identifier(spec_name)
|
||||
else:
|
||||
spec_namespace = None
|
||||
spec_name = None
|
||||
|
||||
|
||||
self.check_identifier(spec_name)
|
||||
|
||||
# This will init the spec without calling __init__.
|
||||
spec = Spec.__new__(Spec)
|
||||
@ -1945,9 +2140,11 @@ def spec(self):
|
||||
spec.architecture = None
|
||||
spec.compiler = None
|
||||
spec.external = None
|
||||
spec.compiler_flags = FlagMap(spec)
|
||||
spec.dependents = DependencyMap()
|
||||
spec.dependencies = DependencyMap()
|
||||
spec.namespace = spec_namespace
|
||||
spec._hash = None
|
||||
|
||||
spec._normal = False
|
||||
spec._concrete = False
|
||||
@ -1956,26 +2153,50 @@ def spec(self):
|
||||
# unspecified or not.
|
||||
added_version = False
|
||||
|
||||
if self.previous and self.previous.value == DEP:
|
||||
if self.accept(HASH):
|
||||
spec.add_dependency(self.spec_by_hash())
|
||||
else:
|
||||
self.expect(ID)
|
||||
if self.accept(EQ):
|
||||
raise SpecParseError(spack.parse.ParseError("","","Expected dependency received anonymous spec"))
|
||||
spec.add_dependency(self.spec(self.token.value))
|
||||
|
||||
while self.next:
|
||||
if self.accept(AT):
|
||||
vlist = self.version_list()
|
||||
for version in vlist:
|
||||
spec._add_version(version)
|
||||
added_version = True
|
||||
check_valid_token = False
|
||||
|
||||
elif self.accept(ON):
|
||||
spec._add_variant(self.variant(), True)
|
||||
check_valid_token = False
|
||||
|
||||
elif self.accept(OFF):
|
||||
spec._add_variant(self.variant(), False)
|
||||
spec._add_variant(self.variant(),False)
|
||||
check_valid_token = False
|
||||
|
||||
elif self.accept(PCT):
|
||||
spec._set_compiler(self.compiler())
|
||||
check_valid_token = False
|
||||
|
||||
elif self.accept(EQ):
|
||||
spec._set_architecture(self.architecture())
|
||||
elif self.accept(ID):
|
||||
self.previous = self.token
|
||||
if self.accept(EQ):
|
||||
if self.accept(QT):
|
||||
self.token.value = self.token.value[1:-1]
|
||||
else:
|
||||
self.expect(ID)
|
||||
spec._add_flag(self.previous.value, self.token.value)
|
||||
self.previous = None
|
||||
else:
|
||||
return spec
|
||||
|
||||
else:
|
||||
if check_valid_token:
|
||||
self.unexpected_token()
|
||||
break
|
||||
|
||||
# If there was no version in the spec, consier it an open range
|
||||
@ -1985,13 +2206,17 @@ def spec(self):
|
||||
return spec
|
||||
|
||||
|
||||
def variant(self):
|
||||
self.expect(ID)
|
||||
self.check_identifier()
|
||||
return self.token.value
|
||||
|
||||
def variant(self,name=None):
|
||||
#TODO: Make generalized variants possible
|
||||
if name:
|
||||
return name
|
||||
else:
|
||||
self.expect(ID)
|
||||
self.check_identifier()
|
||||
return self.token.value
|
||||
|
||||
def architecture(self):
|
||||
#TODO: Make this work properly as a subcase of variant (includes adding names to grammar)
|
||||
self.expect(ID)
|
||||
return self.token.value
|
||||
|
||||
@ -2073,8 +2298,10 @@ def parse_anonymous_spec(spec_like, pkg_name):
|
||||
if isinstance(spec_like, str):
|
||||
try:
|
||||
anon_spec = Spec(spec_like)
|
||||
if anon_spec.name != pkg_name:
|
||||
raise SpecParseError(spack.parse.ParseError("","","Expected anonymous spec for package %s but found spec for package %s" % (pkg_name, anon_spec.name) ))
|
||||
except SpecParseError:
|
||||
anon_spec = Spec(pkg_name + spec_like)
|
||||
anon_spec = Spec(pkg_name + ' ' + spec_like)
|
||||
if anon_spec.name != pkg_name: raise ValueError(
|
||||
"Invalid spec for package %s: %s" % (pkg_name, spec_like))
|
||||
else:
|
||||
@ -2175,7 +2402,6 @@ def __init__(self, vpkg, providers):
|
||||
self.vpkg = vpkg
|
||||
self.providers = providers
|
||||
|
||||
|
||||
class UnsatisfiableSpecError(SpecError):
|
||||
"""Raised when a spec conflicts with package constraints.
|
||||
Provide the requirement that was violated when raising."""
|
||||
@ -2214,6 +2440,11 @@ def __init__(self, provided, required):
|
||||
super(UnsatisfiableVariantSpecError, self).__init__(
|
||||
provided, required, "variant")
|
||||
|
||||
class UnsatisfiableCompilerFlagSpecError(UnsatisfiableSpecError):
|
||||
"""Raised when a spec variant conflicts with package constraints."""
|
||||
def __init__(self, provided, required):
|
||||
super(UnsatisfiableCompilerFlagSpecError, self).__init__(
|
||||
provided, required, "compiler_flags")
|
||||
|
||||
class UnsatisfiableArchitectureSpecError(UnsatisfiableSpecError):
|
||||
"""Raised when a spec architecture conflicts with package constraints."""
|
||||
@ -2240,3 +2471,13 @@ def __init__(self, provided, required):
|
||||
class SpackYAMLError(spack.error.SpackError):
|
||||
def __init__(self, msg, yaml_error):
|
||||
super(SpackYAMLError, self).__init__(msg, str(yaml_error))
|
||||
|
||||
class SpackRecordError(spack.error.SpackError):
|
||||
def __init__(self, msg):
|
||||
super(SpackRecordError, self).__init__(msg)
|
||||
|
||||
class AmbiguousHashError(SpecError):
|
||||
def __init__(self, msg, *specs):
|
||||
super(AmbiguousHashError, self).__init__(msg)
|
||||
for spec in specs:
|
||||
print ' ', spec.format('$.$@$%@+$+$=$#')
|
||||
|
@ -56,11 +56,16 @@ def setUp(self):
|
||||
self.cc = Executable(join_path(spack.build_env_path, "cc"))
|
||||
self.ld = Executable(join_path(spack.build_env_path, "ld"))
|
||||
self.cpp = Executable(join_path(spack.build_env_path, "cpp"))
|
||||
self.cxx = Executable(join_path(spack.build_env_path, "c++"))
|
||||
self.fc = Executable(join_path(spack.build_env_path, "fc"))
|
||||
|
||||
self.realcc = "/bin/mycc"
|
||||
self.prefix = "/spack-test-prefix"
|
||||
|
||||
os.environ['SPACK_CC'] = self.realcc
|
||||
os.environ['SPACK_CXX'] = self.realcc
|
||||
os.environ['SPACK_FC'] = self.realcc
|
||||
|
||||
os.environ['SPACK_PREFIX'] = self.prefix
|
||||
os.environ['SPACK_ENV_PATH']="test"
|
||||
os.environ['SPACK_DEBUG_LOG_DIR'] = "."
|
||||
@ -102,6 +107,15 @@ def check_cc(self, command, args, expected):
|
||||
self.assertEqual(self.cc(*args, output=str).strip(), expected)
|
||||
|
||||
|
||||
def check_cxx(self, command, args, expected):
|
||||
os.environ['SPACK_TEST_COMMAND'] = command
|
||||
self.assertEqual(self.cxx(*args, output=str).strip(), expected)
|
||||
|
||||
def check_fc(self, command, args, expected):
|
||||
os.environ['SPACK_TEST_COMMAND'] = command
|
||||
self.assertEqual(self.fc(*args, output=str).strip(), expected)
|
||||
|
||||
|
||||
def check_ld(self, command, args, expected):
|
||||
os.environ['SPACK_TEST_COMMAND'] = command
|
||||
self.assertEqual(self.ld(*args, output=str).strip(), expected)
|
||||
@ -142,6 +156,64 @@ def test_ld_mode(self):
|
||||
self.check_ld('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'], "ld")
|
||||
|
||||
|
||||
def test_flags(self):
|
||||
os.environ['SPACK_LDFLAGS'] = '-L foo'
|
||||
os.environ['SPACK_LDLIBS'] = '-lfoo'
|
||||
os.environ['SPACK_CPPFLAGS'] = '-g -O1'
|
||||
os.environ['SPACK_CFLAGS'] = '-Wall'
|
||||
os.environ['SPACK_CXXFLAGS'] = '-Werror'
|
||||
os.environ['SPACK_FFLAGS'] = '-w'
|
||||
|
||||
# Test ldflags added properly in ld mode
|
||||
self.check_ld('dump-args', test_command,
|
||||
"ld " +
|
||||
'-rpath ' + self.prefix + '/lib ' +
|
||||
'-rpath ' + self.prefix + '/lib64 ' +
|
||||
'-L foo ' +
|
||||
' '.join(test_command) + ' ' +
|
||||
'-lfoo')
|
||||
|
||||
# Test cppflags added properly in cpp mode
|
||||
self.check_cpp('dump-args', test_command,
|
||||
"cpp " +
|
||||
'-g -O1 ' +
|
||||
' '.join(test_command))
|
||||
|
||||
# Test ldflags, cppflags, and language specific flags are added in proper order
|
||||
self.check_cc('dump-args', test_command,
|
||||
self.realcc + ' ' +
|
||||
'-Wl,-rpath,' + self.prefix + '/lib ' +
|
||||
'-Wl,-rpath,' + self.prefix + '/lib64 ' +
|
||||
'-g -O1 ' +
|
||||
'-Wall ' +
|
||||
'-L foo ' +
|
||||
' '.join(test_command) + ' ' +
|
||||
'-lfoo')
|
||||
|
||||
self.check_cxx('dump-args', test_command,
|
||||
self.realcc + ' ' +
|
||||
'-Wl,-rpath,' + self.prefix + '/lib ' +
|
||||
'-Wl,-rpath,' + self.prefix + '/lib64 ' +
|
||||
'-g -O1 ' +
|
||||
'-Werror ' +
|
||||
'-L foo ' +
|
||||
' '.join(test_command) + ' ' +
|
||||
'-lfoo')
|
||||
|
||||
self.check_fc('dump-args', test_command,
|
||||
self.realcc + ' ' +
|
||||
'-Wl,-rpath,' + self.prefix + '/lib ' +
|
||||
'-Wl,-rpath,' + self.prefix + '/lib64 ' +
|
||||
'-w ' +
|
||||
'-g -O1 ' +
|
||||
'-L foo ' +
|
||||
' '.join(test_command) + ' ' +
|
||||
'-lfoo')
|
||||
|
||||
os.environ['SPACK_LDFLAGS']=''
|
||||
os.environ['SPACK_LDLIBS']=''
|
||||
|
||||
|
||||
def test_dep_rpath(self):
|
||||
"""Ensure RPATHs for root package are added."""
|
||||
self.check_cc('dump-args', test_command,
|
||||
|
@ -38,11 +38,20 @@ def check_spec(self, abstract, concrete):
|
||||
for name in abstract.variants:
|
||||
avariant = abstract.variants[name]
|
||||
cvariant = concrete.variants[name]
|
||||
self.assertEqual(avariant.enabled, cvariant.enabled)
|
||||
self.assertEqual(avariant.value, cvariant.value)
|
||||
|
||||
if abstract.compiler_flags:
|
||||
for flag in abstract.compiler_flags:
|
||||
aflag = abstract.compiler_flags[flag]
|
||||
cflag = concrete.compiler_flags[flag]
|
||||
self.assertTrue(set(aflag) <= set(cflag))
|
||||
|
||||
for name in abstract.package.variants:
|
||||
self.assertTrue(name in concrete.variants)
|
||||
|
||||
for flag in concrete.compiler_flags.valid_compiler_flags():
|
||||
self.assertTrue(flag in concrete.compiler_flags)
|
||||
|
||||
if abstract.compiler and abstract.compiler.concrete:
|
||||
self.assertEqual(abstract.compiler, concrete.compiler)
|
||||
|
||||
@ -75,9 +84,14 @@ def test_concretize_dag(self):
|
||||
def test_concretize_variant(self):
|
||||
self.check_concretize('mpich+debug')
|
||||
self.check_concretize('mpich~debug')
|
||||
self.check_concretize('mpich debug=2')
|
||||
self.check_concretize('mpich')
|
||||
|
||||
|
||||
def test_conretize_compiler_flags(self):
|
||||
self.check_concretize('mpich cppflags="-O3"')
|
||||
|
||||
|
||||
def test_concretize_preferred_version(self):
|
||||
spec = self.check_concretize('python')
|
||||
self.assertEqual(spec.versions, ver('2.7.11'))
|
||||
@ -231,7 +245,7 @@ def test_compiler_inheritance(self):
|
||||
|
||||
|
||||
def test_external_package(self):
|
||||
spec = Spec('externaltool')
|
||||
spec = Spec('externaltool%gcc')
|
||||
spec.concretize()
|
||||
|
||||
self.assertEqual(spec['externaltool'].external, '/path/to/external_tool')
|
||||
|
@ -73,7 +73,7 @@ def mock_open(filename, mode):
|
||||
'all': {
|
||||
'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']}
|
||||
},
|
||||
'=x86-linux': {
|
||||
'arch=x86-linux': {
|
||||
'environment': {'set': {'FOO': 'foo'},
|
||||
'unset': ['BAR']}
|
||||
}
|
||||
@ -123,26 +123,26 @@ def get_modulefile_content(self, spec):
|
||||
|
||||
def test_simple_case(self):
|
||||
spack.modules.CONFIGURATION = configuration_autoload_direct
|
||||
spec = spack.spec.Spec('mpich@3.0.4=x86-linux')
|
||||
spec = spack.spec.Spec('mpich@3.0.4 arch=x86-linux')
|
||||
content = self.get_modulefile_content(spec)
|
||||
self.assertTrue('module-whatis "mpich @3.0.4"' in content)
|
||||
|
||||
def test_autoload(self):
|
||||
spack.modules.CONFIGURATION = configuration_autoload_direct
|
||||
spec = spack.spec.Spec('mpileaks=x86-linux')
|
||||
spec = spack.spec.Spec('mpileaks arch=x86-linux')
|
||||
content = self.get_modulefile_content(spec)
|
||||
self.assertEqual(len([x for x in content if 'is-loaded' in x]), 2)
|
||||
self.assertEqual(len([x for x in content if 'module load ' in x]), 2)
|
||||
|
||||
spack.modules.CONFIGURATION = configuration_autoload_all
|
||||
spec = spack.spec.Spec('mpileaks=x86-linux')
|
||||
spec = spack.spec.Spec('mpileaks arch=x86-linux')
|
||||
content = self.get_modulefile_content(spec)
|
||||
self.assertEqual(len([x for x in content if 'is-loaded' in x]), 5)
|
||||
self.assertEqual(len([x for x in content if 'module load ' in x]), 5)
|
||||
|
||||
def test_alter_environment(self):
|
||||
spack.modules.CONFIGURATION = configuration_alter_environment
|
||||
spec = spack.spec.Spec('mpileaks=x86-linux')
|
||||
spec = spack.spec.Spec('mpileaks arch=x86-linux')
|
||||
content = self.get_modulefile_content(spec)
|
||||
self.assertEqual(
|
||||
len([x
|
||||
@ -152,7 +152,7 @@ def test_alter_environment(self):
|
||||
len([x for x in content if 'setenv FOO "foo"' in x]), 1)
|
||||
self.assertEqual(len([x for x in content if 'unsetenv BAR' in x]), 1)
|
||||
|
||||
spec = spack.spec.Spec('libdwarf=x64-linux')
|
||||
spec = spack.spec.Spec('libdwarf arch=x64-linux')
|
||||
content = self.get_modulefile_content(spec)
|
||||
self.assertEqual(
|
||||
len([x
|
||||
@ -164,14 +164,14 @@ def test_alter_environment(self):
|
||||
|
||||
def test_blacklist(self):
|
||||
spack.modules.CONFIGURATION = configuration_blacklist
|
||||
spec = spack.spec.Spec('mpileaks=x86-linux')
|
||||
spec = spack.spec.Spec('mpileaks arch=x86-linux')
|
||||
content = self.get_modulefile_content(spec)
|
||||
self.assertEqual(len([x for x in content if 'is-loaded' in x]), 1)
|
||||
self.assertEqual(len([x for x in content if 'module load ' in x]), 1)
|
||||
|
||||
def test_conflicts(self):
|
||||
spack.modules.CONFIGURATION = configuration_conflicts
|
||||
spec = spack.spec.Spec('mpileaks=x86-linux')
|
||||
spec = spack.spec.Spec('mpileaks arch=x86-linux')
|
||||
content = self.get_modulefile_content(spec)
|
||||
self.assertEqual(
|
||||
len([x for x in content if x.startswith('conflict')]), 2)
|
||||
|
@ -25,9 +25,13 @@
|
||||
"""
|
||||
Test for multi_method dispatch.
|
||||
"""
|
||||
import unittest
|
||||
|
||||
import spack
|
||||
from spack.multimethod import *
|
||||
from spack.version import *
|
||||
from spack.spec import Spec
|
||||
from spack.multimethod import when
|
||||
from spack.test.mock_packages_test import *
|
||||
from spack.version import *
|
||||
|
||||
@ -89,19 +93,19 @@ def test_default_works(self):

    def test_architecture_match(self):
        pkg = spack.repo.get('multimethod=x86_64')
        pkg = spack.repo.get('multimethod arch=x86_64')
        self.assertEqual(pkg.different_by_architecture(), 'x86_64')

        pkg = spack.repo.get('multimethod=ppc64')
        pkg = spack.repo.get('multimethod arch=ppc64')
        self.assertEqual(pkg.different_by_architecture(), 'ppc64')

        pkg = spack.repo.get('multimethod=ppc32')
        pkg = spack.repo.get('multimethod arch=ppc32')
        self.assertEqual(pkg.different_by_architecture(), 'ppc32')

        pkg = spack.repo.get('multimethod=arm64')
        pkg = spack.repo.get('multimethod arch=arm64')
        self.assertEqual(pkg.different_by_architecture(), 'arm64')

        pkg = spack.repo.get('multimethod=macos')
        pkg = spack.repo.get('multimethod arch=macos')
        self.assertRaises(NoSuchMethodError, pkg.different_by_architecture)
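The test above drives architecture-based multimethod dispatch through the new syntax. A rough sketch of the package side (illustrative only, not part of the commit; the package name, URL, and checksum are hypothetical, and only the `@when('arch=...')` decorators come from this branch):

    from spack import *

    class Archdemo(Package):
        """Hypothetical package showing @when dispatch on the new arch= syntax."""
        homepage = "http://www.example.com"
        url      = "http://www.example.com/archdemo-1.0.tar.gz"

        version('1.0', '0123456789abcdef0123456789abcdef')

        @when('arch=x86_64')
        def arch_name(self):
            return 'x86_64'

        @when('arch=ppc64')
        def arch_name(self):
            return 'ppc64'

        def install(self, spec, prefix):
            # Trivial install body: just create the prefix layout.
            mkdir(prefix.lib)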
@ -42,6 +42,13 @@ def test_normalize_simple_conditionals(self):
        self.check_normalize('optional-dep-test+a',
                             Spec('optional-dep-test+a', Spec('a')))

        self.check_normalize('optional-dep-test a=true',
                             Spec('optional-dep-test a=true', Spec('a')))

        self.check_normalize('optional-dep-test a=true',
                             Spec('optional-dep-test+a', Spec('a')))

        self.check_normalize('optional-dep-test@1.1',
                             Spec('optional-dep-test@1.1', Spec('b')))
@ -31,6 +31,8 @@
import spack
import spack.package

from llnl.util.lang import list_modules

from spack.spec import Spec
from spack.test.mock_packages_test import *

@ -239,8 +241,8 @@ def test_unsatisfiable_compiler_version(self):

    def test_unsatisfiable_architecture(self):
        self.set_pkg_dep('mpileaks', 'mpich=bgqos_0')
        spec = Spec('mpileaks ^mpich=sles_10_ppc64 ^callpath ^dyninst ^libelf ^libdwarf')
        self.set_pkg_dep('mpileaks', 'mpich arch=bgqos_0')
        spec = Spec('mpileaks ^mpich arch=sles_10_ppc64 ^callpath ^dyninst ^libelf ^libdwarf')
        self.assertRaises(spack.spec.UnsatisfiableArchitectureSpecError, spec.normalize)
@ -22,6 +22,7 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import unittest
from spack.spec import *
from spack.test.mock_packages_test import *
@ -138,11 +139,11 @@ def test_satisfies_compiler_version(self):

    def test_satisfies_architecture(self):
        self.check_satisfies('foo=chaos_5_x86_64_ib', '=chaos_5_x86_64_ib')
        self.check_satisfies('foo=bgqos_0', '=bgqos_0')
        self.check_satisfies('foo arch=chaos_5_x86_64_ib', ' arch=chaos_5_x86_64_ib')
        self.check_satisfies('foo arch=bgqos_0', ' arch=bgqos_0')

        self.check_unsatisfiable('foo=bgqos_0', '=chaos_5_x86_64_ib')
        self.check_unsatisfiable('foo=chaos_5_x86_64_ib', '=bgqos_0')
        self.check_unsatisfiable('foo arch=bgqos_0', ' arch=chaos_5_x86_64_ib')
        self.check_unsatisfiable('foo arch=chaos_5_x86_64_ib', ' arch=bgqos_0')

    def test_satisfies_dependencies(self):
@ -190,12 +191,20 @@ def test_satisfies_virtual_dependency_versions(self):
    def test_satisfies_matching_variant(self):
        self.check_satisfies('mpich+foo', 'mpich+foo')
        self.check_satisfies('mpich~foo', 'mpich~foo')
        self.check_satisfies('mpich foo=1', 'mpich foo=1')

        #confirm that synonymous syntax works correctly
        self.check_satisfies('mpich+foo', 'mpich foo=True')
        self.check_satisfies('mpich foo=true', 'mpich+foo')
        self.check_satisfies('mpich~foo', 'mpich foo=FALSE')
        self.check_satisfies('mpich foo=False', 'mpich~foo')

    def test_satisfies_unconstrained_variant(self):
        # only asked for mpich, no constraints. Either will do.
        self.check_satisfies('mpich+foo', 'mpich')
        self.check_satisfies('mpich~foo', 'mpich')
        self.check_satisfies('mpich foo=1', 'mpich')

    def test_unsatisfiable_variants(self):
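The new "synonymous syntax" checks encode a small equivalence rule: the `+foo` / `~foo` sigils and the explicit `foo=True` / `foo=False` spellings are interchangeable when specs are compared. A quick illustrative sketch of what the test asserts (not part of the commit, and only meaningful inside Spack's mock-package test harness where `mpich` exists):

    from spack.spec import Spec

    a = Spec('mpich+foo')
    b = Spec('mpich foo=True')

    # Either spelling satisfies the other; both parse to the same
    # boolean variant value.
    assert a.satisfies(b)
    assert b.satisfies(a)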
@ -204,16 +213,44 @@ def test_unsatisfiable_variants(self):
        # 'mpich' is not concrete:
        self.check_satisfies('mpich', 'mpich+foo', False)
        self.check_satisfies('mpich', 'mpich~foo', False)
        self.check_satisfies('mpich', 'mpich foo=1', False)

        # 'mpich' is concrete:
        self.check_unsatisfiable('mpich', 'mpich+foo', True)
        self.check_unsatisfiable('mpich', 'mpich~foo', True)
        self.check_unsatisfiable('mpich', 'mpich foo=1', True)

    def test_unsatisfiable_variant_mismatch(self):
        # No matchi in specs
        self.check_unsatisfiable('mpich~foo', 'mpich+foo')
        self.check_unsatisfiable('mpich+foo', 'mpich~foo')
        self.check_unsatisfiable('mpich foo=1', 'mpich foo=2')

    def test_satisfies_matching_compiler_flag(self):
        self.check_satisfies('mpich cppflags="-O3"', 'mpich cppflags="-O3"')
        self.check_satisfies('mpich cppflags="-O3 -Wall"', 'mpich cppflags="-O3 -Wall"')

    def test_satisfies_unconstrained_compiler_flag(self):
        # only asked for mpich, no constraints. Any will do.
        self.check_satisfies('mpich cppflags="-O3"', 'mpich')

    def test_unsatisfiable_compiler_flag(self):
        # This case is different depending on whether the specs are concrete.

        # 'mpich' is not concrete:
        self.check_satisfies('mpich', 'mpich cppflags="-O3"', False)

        # 'mpich' is concrete:
        self.check_unsatisfiable('mpich', 'mpich cppflags="-O3"', True)

    def test_unsatisfiable_compiler_flag_mismatch(self):
        # No matchi in specs
        self.check_unsatisfiable('mpich cppflags="-O3"', 'mpich cppflags="-O2"')

    def test_satisfies_virtual(self):
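These tests cover the other half of the branch: compiler flags are now spec attributes and take part in satisfies checks. A rough illustrative sketch of the behavior the hunk describes (not part of the commit, again only meaningful against the mock `mpich` package; the flag values are arbitrary):

    from spack.spec import Spec

    tuned = Spec('mpich cppflags="-O3 -Wall"')
    plain = Spec('mpich')

    # A flag-constrained spec satisfies an unconstrained request, and an
    # abstract (non-concrete) 'mpich' could still end up with those flags,
    # so the abstract check passes in both directions. Once the specs are
    # concrete, the unconstrained one no longer satisfies the flag request.
    assert tuned.satisfies(plain)
    assert plain.satisfies(tuned)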
@ -301,18 +338,26 @@ def test_constrain_variants(self):
        self.check_constrain('libelf+debug+foo', 'libelf+debug', 'libelf+foo')
        self.check_constrain('libelf+debug+foo', 'libelf+debug', 'libelf+debug+foo')

        self.check_constrain('libelf debug=2 foo=1', 'libelf debug=2', 'libelf foo=1')
        self.check_constrain('libelf debug=2 foo=1', 'libelf debug=2', 'libelf debug=2 foo=1')

        self.check_constrain('libelf+debug~foo', 'libelf+debug', 'libelf~foo')
        self.check_constrain('libelf+debug~foo', 'libelf+debug', 'libelf+debug~foo')

    def test_constrain_compiler_flags(self):
        self.check_constrain('libelf cflags="-O3" cppflags="-Wall"', 'libelf cflags="-O3"', 'libelf cppflags="-Wall"')
        self.check_constrain('libelf cflags="-O3" cppflags="-Wall"', 'libelf cflags="-O3"', 'libelf cflags="-O3" cppflags="-Wall"')

    def test_constrain_arch(self):
        self.check_constrain('libelf=bgqos_0', 'libelf=bgqos_0', 'libelf=bgqos_0')
        self.check_constrain('libelf=bgqos_0', 'libelf', 'libelf=bgqos_0')
        self.check_constrain('libelf arch=bgqos_0', 'libelf arch=bgqos_0', 'libelf arch=bgqos_0')
        self.check_constrain('libelf arch=bgqos_0', 'libelf', 'libelf arch=bgqos_0')

    def test_constrain_compiler(self):
        self.check_constrain('libelf=bgqos_0', 'libelf=bgqos_0', 'libelf=bgqos_0')
        self.check_constrain('libelf=bgqos_0', 'libelf', 'libelf=bgqos_0')
        self.check_constrain('libelf %gcc@4.4.7', 'libelf %gcc@4.4.7', 'libelf %gcc@4.4.7')
        self.check_constrain('libelf %gcc@4.4.7', 'libelf', 'libelf %gcc@4.4.7')

    def test_invalid_constraint(self):
@ -321,8 +366,11 @@ def test_invalid_constraint(self):

        self.check_invalid_constraint('libelf+debug', 'libelf~debug')
        self.check_invalid_constraint('libelf+debug~foo', 'libelf+debug+foo')
        self.check_invalid_constraint('libelf debug=2', 'libelf debug=1')

        self.check_invalid_constraint('libelf=bgqos_0', 'libelf=x86_54')
        self.check_invalid_constraint('libelf cppflags="-O3"', 'libelf cppflags="-O2"')

        self.check_invalid_constraint('libelf arch=bgqos_0', 'libelf arch=x86_54')

    def test_constrain_changed(self):
@ -332,7 +380,9 @@ def test_constrain_changed(self):
        self.check_constrain_changed('libelf%gcc', '%gcc@4.5')
        self.check_constrain_changed('libelf', '+debug')
        self.check_constrain_changed('libelf', '~debug')
        self.check_constrain_changed('libelf', '=bgqos_0')
        self.check_constrain_changed('libelf', 'debug=2')
        self.check_constrain_changed('libelf', 'cppflags="-O3"')
        self.check_constrain_changed('libelf', ' arch=bgqos_0')

    def test_constrain_not_changed(self):
@ -343,7 +393,9 @@ def test_constrain_not_changed(self):
        self.check_constrain_not_changed('libelf%gcc@4.5', '%gcc@4.5')
        self.check_constrain_not_changed('libelf+debug', '+debug')
        self.check_constrain_not_changed('libelf~debug', '~debug')
        self.check_constrain_not_changed('libelf=bgqos_0', '=bgqos_0')
        self.check_constrain_not_changed('libelf debug=2', 'debug=2')
        self.check_constrain_not_changed('libelf cppflags="-O3"', 'cppflags="-O3"')
        self.check_constrain_not_changed('libelf arch=bgqos_0', ' arch=bgqos_0')
        self.check_constrain_not_changed('libelf^foo', 'libelf^foo')
        self.check_constrain_not_changed('libelf^foo^bar', 'libelf^foo^bar')

@ -355,7 +407,8 @@ def test_constrain_dependency_changed(self):
        self.check_constrain_changed('libelf^foo%gcc', 'libelf^foo%gcc@4.5')
        self.check_constrain_changed('libelf^foo', 'libelf^foo+debug')
        self.check_constrain_changed('libelf^foo', 'libelf^foo~debug')
        self.check_constrain_changed('libelf^foo', 'libelf^foo=bgqos_0')
        self.check_constrain_changed('libelf^foo', 'libelf^foo cppflags="-O3"')
        self.check_constrain_changed('libelf^foo', 'libelf^foo arch=bgqos_0')

    def test_constrain_dependency_not_changed(self):
@ -365,4 +418,6 @@ def test_constrain_dependency_not_changed(self):
        self.check_constrain_not_changed('libelf^foo%gcc@4.5', 'libelf^foo%gcc@4.5')
        self.check_constrain_not_changed('libelf^foo+debug', 'libelf^foo+debug')
        self.check_constrain_not_changed('libelf^foo~debug', 'libelf^foo~debug')
        self.check_constrain_not_changed('libelf^foo=bgqos_0', 'libelf^foo=bgqos_0')
        self.check_constrain_not_changed('libelf^foo cppflags="-O3"', 'libelf^foo cppflags="-O3"')
        self.check_constrain_not_changed('libelf^foo arch=bgqos_0', 'libelf^foo arch=bgqos_0')
@ -104,6 +104,8 @@ def test_dependencies_with_versions(self):

    def test_full_specs(self):
        self.check_parse("mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1+debug~qt_4^stackwalker@8.1_1e")
        self.check_parse("mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1 debug=2~qt_4^stackwalker@8.1_1e")
        self.check_parse('mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1 cppflags="-O3"+debug~qt_4^stackwalker@8.1_1e')

    def test_canonicalize(self):
        self.check_parse(
@ -128,7 +130,10 @@ def test_parse_errors(self):

    def test_duplicate_variant(self):
        self.assertRaises(DuplicateVariantError, self.check_parse, "x@1.2+debug+debug")
        self.assertRaises(DuplicateVariantError, self.check_parse, "x ^y@1.2+debug+debug")
        self.assertRaises(DuplicateVariantError, self.check_parse, "x ^y@1.2+debug debug=true")
        self.assertRaises(DuplicateVariantError, self.check_parse, "x ^y@1.2 debug=false debug=true")
        self.assertRaises(DuplicateVariantError, self.check_parse, "x ^y@1.2 debug=false~debug")

    def test_duplicate_depdendence(self):
        self.assertRaises(DuplicateDependencyError, self.check_parse, "x ^y ^y")
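The parser tests show how the pieces compose in a single spec string. For readers new to the syntax, here is one of the strings from test_full_specs read piece by piece (illustrative annotation only, not part of the commit):

    from spack.spec import Spec

    # mvapich_foo                    root package
    # ^_openmpi@1.2:1.4,1.6          dependency with a version range/list
    # %intel@12.1                    compiler and compiler version
    # cppflags="-O3"                 quoted compiler-flag value
    # +debug~qt_4                    boolean variants
    # ^stackwalker@8.1_1e            second dependency
    s = Spec('mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1 cppflags="-O3"+debug~qt_4^stackwalker@8.1_1e')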
@ -32,5 +32,5 @@
class Variant(object):
    """Represents a variant on a build. Can be either on or off."""
    def __init__(self, default, description):
        self.default = bool(default)
        self.default = default
        self.description = str(description)
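Dropping the bool() coercion is what lets non-boolean variant values such as debug=2 and foo=1 elsewhere in this diff survive: the declared default is stored as given instead of collapsing to True/False. A tiny illustrative sketch against the class as changed here (not part of the commit):

    from spack.variant import Variant

    # Before this change, the default '2' would have been coerced to True.
    v = Variant('2', 'Debug level for the build')
    assert v.default == '2'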
@ -67,10 +67,15 @@ def update(self, spec):
        if type(spec) != spack.spec.Spec:
            spec = spack.spec.Spec(spec)

        if not spec.name:
            # Empty specs do not have a package
            return

        assert(not spec.virtual)

        pkg = spec.package
        for provided_spec, provider_spec in pkg.provided.iteritems():
            provider_spec.compiler_flags = spec.compiler_flags.copy()#We want satisfaction other than flags
            if provider_spec.satisfies(spec, deps=False):
                provided_name = provided_spec.name
@ -103,19 +103,19 @@ def has_a_default(self):
    #
    # Make sure we can switch methods on different architectures
    #
    @when('=x86_64')
    @when('arch=x86_64')
    def different_by_architecture(self):
        return 'x86_64'

    @when('=ppc64')
    @when('arch=ppc64')
    def different_by_architecture(self):
        return 'ppc64'

    @when('=ppc32')
    @when('arch=ppc32')
    def different_by_architecture(self):
        return 'ppc32'

    @when('=arm64')
    @when('arch=arm64')
    def different_by_architecture(self):
        return 'arm64'
@ -127,7 +127,7 @@ def url_for_version(self, version):
                    dots, underscores)

    def determine_toolset(self, spec):
        if spec.satisfies("=darwin-x86_64"):
        if spec.satisfies("arch=darwin-x86_64"):
            return 'darwin'

        toolsets = {'g++': 'gcc',
@ -28,9 +28,9 @@
class Ghostscript(Package):
    """an interpreter for the PostScript language and for PDF. """
    homepage = "http://ghostscript.com/"
    url = "http://downloads.ghostscript.com/public/old-gs-releases/ghostscript-9.16.tar.gz"
    url = "http://downloads.ghostscript.com/public/old-gs-releases/ghostscript-9.18.tar.gz"

    version('9.16', '829319325bbdb83f5c81379a8f86f38f')
    version('9.18', '33a47567d7a591c00a253caddd12a88a')

    parallel = False
@ -37,7 +37,7 @@ class Libpciaccess(Package):

    def install(self, spec, prefix):
        # libpciaccess does not support OS X
        if spec.satisfies('=darwin-x86_64'):
        if spec.satisfies('arch=darwin-x86_64'):
            # create a dummy directory
            mkdir(prefix.lib)
            return
@ -57,7 +57,7 @@ class Lua(Package):
                placement='luarocks')

    def install(self, spec, prefix):
        if spec.satisfies("=darwin-i686") or spec.satisfies("=darwin-x86_64"):
        if spec.satisfies("arch=darwin-i686") or spec.satisfies("arch=darwin-x86_64"):
            target = 'macosx'
        else:
            target = 'linux'
@ -35,7 +35,7 @@ class Openssl(Package):
       Transport Layer Security (TLS v1) protocols as well as a
       full-strength general purpose cryptography library."""
    homepage = "http://www.openssl.org"
    url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz"
    url = "https://www.openssl.org/source/openssl-1.0.1h.tar.gz"

    version('1.0.1h', '8d6d684a9430d5cc98a62a5d8fbda8cf')
    version('1.0.1r', '1abd905e079542ccae948af37e393d28')
@ -100,7 +100,7 @@ def install(self, spec, prefix):
        # in the environment, then this will override what is set in the
        # Makefile, leading to build errors.
        env.pop('APPS', None)
        if spec.satisfies("=darwin-x86_64") or spec.satisfies("=ppc64"):
        if spec.satisfies("arch=darwin-x86_64") or spec.satisfies("arch=ppc64"):
            # This needs to be done for all 64-bit architectures (except Linux,
            # where it happens automatically?)
            env['KERNEL_BITS'] = '64'
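The package edits above (boost, libpciaccess, lua, openssl) are one mechanical rewrite: platform checks inside install() move from the bare `=platform` shorthand to `spec.satisfies('arch=...')`. A condensed illustrative sketch of the pattern with a hypothetical package body (not part of the commit):

    def install(self, spec, prefix):
        # Platform-specific handling now keys off the arch= syntax.
        if spec.satisfies('arch=darwin-x86_64'):
            target = 'macosx'
        else:
            target = 'linux'
        make(target)
        make('install')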