Merge branch 'develop' of https://github.com/LLNL/spack into features/install_with_phases_rebase

Conflicts:
	lib/spack/spack/cmd/setup.py
	lib/spack/spack/package.py
	var/spack/repos/builtin/packages/gmp/package.py
This commit is contained in:
alalazo 2016-10-05 09:33:59 +02:00
commit 7a26c60dbd
211 changed files with 5586 additions and 1230 deletions

2
.gitignore vendored
View File

@ -1,3 +1,4 @@
/db
/var/spack/stage
/var/spack/cache
/var/spack/repos/*/index.yaml
@ -12,6 +13,7 @@
/etc/spackconfig
/share/spack/dotkit
/share/spack/modules
/share/spack/lmod
/TAGS
/htmlcov
.coverage

View File

@ -20,7 +20,7 @@ written in pure Python, and specs allow package authors to write a
single build script for many different builds of the same package.
See the
[Feature Overview](http://spack.readthedocs.io/latest/features.html)
[Feature Overview](http://spack.readthedocs.io/en/latest/features.html)
for examples and highlights.
To install spack and install your first package:

View File

@ -111,8 +111,12 @@ while read line && ((lines < 2)) ; do
done < "$script"
# Invoke any interpreter found, or raise an error if none was found.
if [ -n "$interpreter" ]; then
exec $interpreter "$@"
if [[ -n "$interpreter" ]]; then
if [[ "${interpreter##*/}" = "perl" ]]; then
exec $interpreter -x "$@"
else
exec $interpreter "$@"
fi
else
echo "error: sbang found no interpreter in $script"
exit 1

View File

@ -25,9 +25,9 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
if not sys.version_info[:2] >= (2, 6):
if (sys.version_info[0] > 2) or (sys.version_info[:2] < (2, 6)):
v_info = sys.version_info[:3]
sys.exit("Spack requires Python 2.6 or higher. "
sys.exit("Spack requires Python 2.6 or 2.7. "
"This is Python %d.%d.%d." % v_info)
import os

View File

@ -19,3 +19,4 @@ packages:
mpi: [openmpi, mpich]
blas: [openblas]
lapack: [openblas]
pil: [py-pillow]

View File

@ -2,12 +2,12 @@
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXOPTS = -E
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build
export PYTHONPATH = ../../spack
export PYTHONPATH := ../../spack:$(PYTHONPATH)
APIDOC_FILES = spack*.rst
# Internal variables.

View File

@ -1,5 +1,4 @@
.. _command_index:
=================
Command index
=================

View File

@ -1,10 +1,7 @@
# flake8: noqa
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# Created by Todd Gamblin, tgamblin@llnl.gov.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
@ -65,14 +62,15 @@
# Set an environment variable so that colify will print output like it would to
# a terminal.
os.environ['COLIFY_SIZE'] = '25x80'
os.environ['COLIFY_SIZE'] = '25x120'
#
# Generate package list using spack command
#
with open('package_list.rst', 'w') as plist_file:
subprocess.Popen(
[spack_root + '/bin/spack', 'package-list'], stdout=plist_file)
if not os.path.exists('package_list.rst'):
with open('package_list.rst', 'w') as plist_file:
subprocess.Popen(
[spack_root + '/bin/spack', 'package-list'], stdout=plist_file)
#
# Find all the `spack-*` references and add them to a command index
@ -85,11 +83,12 @@
if match:
command_names.append(match.group(1).strip())
shutil.copy('command_index.in', 'command_index.rst')
with open('command_index.rst', 'a') as index:
index.write('\n')
for cmd in sorted(command_names):
index.write(' * :ref:`%s`\n' % cmd)
if not os.path.exists('command_index.rst'):
shutil.copy('command_index.in', 'command_index.rst')
with open('command_index.rst', 'a') as index:
index.write('\n')
for cmd in sorted(command_names):
index.write(' * :ref:`%s`\n' % cmd)
# Run sphinx-apidoc

View File

@ -207,7 +207,7 @@ supply ``-p`` to Spack on the command line, before any subcommands.
``spack --profile`` output looks like this:
.. command-output:: spack --profile graph dyninst
.. command-output:: spack --profile graph --deptype=nobuild dyninst
:ellipsis: 25
The bottom of the output shows the top most time consuming functions,

View File

@ -2090,12 +2090,11 @@ Blas and Lapack libraries
Different packages provide implementation of ``Blas`` and ``Lapack`` routines.
The names of the resulting static and/or shared libraries differ from package
to package. In order to make the ``install()`` method indifferent to the
to package. In order to make the ``install()`` method independent of the
choice of ``Blas`` implementation, each package which provides it
sets up ``self.spec.blas_shared_lib`` and ``self.spec.blas_static_lib`` to
point to the shared and static ``Blas`` libraries, respectively. The same
applies to packages which provide ``Lapack``. Package developers are advised to
use these variables, for example ``spec['blas'].blas_shared_lib`` instead of
sets up ``self.spec.blas_libs`` to point to the correct ``Blas`` libraries.
The same applies to packages which provide ``Lapack``. Package developers are advised to
use these variables, for example ``spec['blas'].blas_libs.joined()`` instead of
hard-coding ``join_path(spec['blas'].prefix.lib, 'libopenblas.so')``.
^^^^^^^^^^^^^^^^^^^^^
@ -2889,9 +2888,22 @@ dependency graph. For example:
.. command-output:: spack graph mpileaks
At the top is the root package in the DAG, with dependency edges
emerging from it. On a color terminal, the edges are colored by which
dependency they lead to.
At the top is the root package in the DAG, with dependency edges emerging
from it. On a color terminal, the edges are colored by which dependency
they lead to.
.. command-output:: spack graph --deptype=all mpileaks
The ``deptype`` argument tells Spack what types of dependencies to graph.
By default it includes link and run dependencies but not build
dependencies. Supplying ``--deptype=all`` will show the build
dependencies as well. This is equivalent to
``--deptype=build,link,run``. Options for ``deptype`` include:
* Any combination of ``build``, ``link``, and ``run`` separated by
commas.
* ``nobuild``, ``nolink``, ``norun`` to omit one type.
* ``all`` or ``alldeps`` for all types of dependencies.
You can also use ``spack graph`` to generate graphs in the widely used
`Dot <http://www.graphviz.org/doc/info/lang.html>`_ format. For

32
lib/spack/env/cc vendored
View File

@ -266,22 +266,38 @@ for dep in "${deps[@]}"; do
# Prepend lib and RPATH directories
if [[ -d $dep/lib ]]; then
if [[ $mode == ccld ]]; then
$add_rpaths && args=("$rpath$dep/lib" "${args[@]}")
args=("-L$dep/lib" "${args[@]}")
if [[ $SPACK_RPATH_DEPS == *$dep* ]]; then
$add_rpaths && args=("$rpath$dep/lib" "${args[@]}")
fi
if [[ $SPACK_LINK_DEPS == *$dep* ]]; then
args=("-L$dep/lib" "${args[@]}")
fi
elif [[ $mode == ld ]]; then
$add_rpaths && args=("-rpath" "$dep/lib" "${args[@]}")
args=("-L$dep/lib" "${args[@]}")
if [[ $SPACK_RPATH_DEPS == *$dep* ]]; then
$add_rpaths && args=("-rpath" "$dep/lib" "${args[@]}")
fi
if [[ $SPACK_LINK_DEPS == *$dep* ]]; then
args=("-L$dep/lib" "${args[@]}")
fi
fi
fi
# Prepend lib64 and RPATH directories
if [[ -d $dep/lib64 ]]; then
if [[ $mode == ccld ]]; then
$add_rpaths && args=("$rpath$dep/lib64" "${args[@]}")
args=("-L$dep/lib64" "${args[@]}")
if [[ $SPACK_RPATH_DEPS == *$dep* ]]; then
$add_rpaths && args=("$rpath$dep/lib64" "${args[@]}")
fi
if [[ $SPACK_LINK_DEPS == *$dep* ]]; then
args=("-L$dep/lib64" "${args[@]}")
fi
elif [[ $mode == ld ]]; then
$add_rpaths && args=("-rpath" "$dep/lib64" "${args[@]}")
args=("-L$dep/lib64" "${args[@]}")
if [[ $SPACK_RPATH_DEPS == *$dep* ]]; then
$add_rpaths && args=("-rpath" "$dep/lib64" "${args[@]}")
fi
if [[ $SPACK_LINK_DEPS == *$dep* ]]; then
args=("-L$dep/lib64" "${args[@]}")
fi
fi
fi
done

View File

@ -22,18 +22,22 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import collections
import errno
import fileinput
import getpass
import glob
import numbers
import os
import re
import shutil
import stat
import errno
import getpass
from contextlib import contextmanager
import subprocess
import fileinput
import sys
from contextlib import contextmanager
import llnl.util.tty as tty
from llnl.util.lang import dedupe
__all__ = ['set_install_permissions', 'install', 'install_tree',
'traverse_tree',
@ -42,8 +46,8 @@
'filter_file',
'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink',
'set_executable', 'copy_mode', 'unset_executable_mode',
'remove_dead_links', 'remove_linked_tree', 'find_library_path',
'fix_darwin_install_name', 'to_link_flags', 'to_lib_name']
'remove_dead_links', 'remove_linked_tree',
'fix_darwin_install_name', 'find_libraries', 'LibraryList']
def filter_file(regex, repl, *filenames, **kwargs):
@ -326,7 +330,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
follow_links = kwargs.get('follow_link', False)
# Yield in pre or post order?
order = kwargs.get('order', 'pre')
order = kwargs.get('order', 'pre')
if order not in ('pre', 'post'):
raise ValueError("Order must be 'pre' or 'post'.")
@ -338,7 +342,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
return
source_path = os.path.join(source_root, rel_path)
dest_path = os.path.join(dest_root, rel_path)
dest_path = os.path.join(dest_root, rel_path)
# preorder yields directories before children
if order == 'pre':
@ -346,8 +350,8 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
for f in os.listdir(source_path):
source_child = os.path.join(source_path, f)
dest_child = os.path.join(dest_path, f)
rel_child = os.path.join(rel_path, f)
dest_child = os.path.join(dest_path, f)
rel_child = os.path.join(rel_path, f)
# Treat as a directory
if os.path.isdir(source_child) and (
@ -440,35 +444,162 @@ def fix_darwin_install_name(path):
stdout=subprocess.PIPE).communicate()[0]
break
# Utilities for libraries
def to_lib_name(library):
"""Transforms a path to the library /path/to/lib<name>.xyz into <name>
class LibraryList(collections.Sequence):
"""Sequence of absolute paths to libraries
Provides a few convenience methods to manipulate library paths and get
commonly used compiler flags or names
"""
# Assume libXYZ.suffix
return os.path.basename(library)[3:].split(".")[0]
def __init__(self, libraries):
self.libraries = list(libraries)
@property
def directories(self):
"""Stable de-duplication of the directories where the libraries
reside
>>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/libc.a'])
>>> assert l.directories == ['/dir1', '/dir2']
"""
return list(dedupe(
os.path.dirname(x) for x in self.libraries if os.path.dirname(x)
))
@property
def basenames(self):
"""Stable de-duplication of the base-names in the list
>>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir3/liba.a'])
>>> assert l.basenames == ['liba.a', 'libb.a']
"""
return list(dedupe(os.path.basename(x) for x in self.libraries))
@property
def names(self):
"""Stable de-duplication of library names in the list
>>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir3/liba.so'])
>>> assert l.names == ['a', 'b']
"""
return list(dedupe(x.split('.')[0][3:] for x in self.basenames))
@property
def search_flags(self):
"""Search flags for the libraries
>>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/liba.so'])
>>> assert l.search_flags == '-L/dir1 -L/dir2'
"""
return ' '.join(['-L' + x for x in self.directories])
@property
def link_flags(self):
"""Link flags for the libraries
>>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/liba.so'])
>>> assert l.link_flags == '-la -lb'
"""
return ' '.join(['-l' + name for name in self.names])
@property
def ld_flags(self):
"""Search flags + link flags
>>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/liba.so'])
>>> assert l.ld_flags == '-L/dir1 -L/dir2 -la -lb'
"""
return self.search_flags + ' ' + self.link_flags
def __getitem__(self, item):
cls = type(self)
if isinstance(item, numbers.Integral):
return self.libraries[item]
return cls(self.libraries[item])
def __add__(self, other):
return LibraryList(dedupe(self.libraries + list(other)))
def __radd__(self, other):
return self.__add__(other)
def __eq__(self, other):
return self.libraries == other.libraries
def __len__(self):
return len(self.libraries)
def joined(self, separator=' '):
return separator.join(self.libraries)
def __repr__(self):
return self.__class__.__name__ + '(' + repr(self.libraries) + ')'
def __str__(self):
return self.joined()
def to_link_flags(library):
"""Transforms a path to a <library> into linking flags -L<dir> -l<name>.
def find_libraries(args, root, shared=True, recurse=False):
"""Returns an iterable object containing a list of full paths to
libraries if found.
Return:
A string of linking flags.
Args:
args: iterable object containing a list of library names to \
search for (e.g. 'libhdf5')
root: root folder where to start searching
shared: if True searches for shared libraries, otherwise for static
recurse: if False search only root folder, if True descends top-down \
from the root
Returns:
list of full paths to the libraries that have been found
"""
dir = os.path.dirname(library)
name = to_lib_name(library)
res = '-L%s -l%s' % (dir, name)
return res
if not isinstance(args, collections.Sequence) or isinstance(args, str):
message = '{0} expects a sequence of strings as first argument'
message += ' [got {1} instead]'
raise TypeError(message.format(find_libraries.__name__, type(args)))
# Construct the right suffix for the library
if shared is True:
suffix = 'dylib' if sys.platform == 'darwin' else 'so'
else:
suffix = 'a'
# List of libraries we are searching with suffixes
libraries = ['{0}.{1}'.format(lib, suffix) for lib in args]
# Search method
if recurse is False:
search_method = _find_libraries_non_recursive
else:
search_method = _find_libraries_recursive
return search_method(libraries, root)
def find_library_path(libname, *paths):
"""Searches for a file called <libname> in each path.
def _find_libraries_recursive(libraries, root):
library_dict = collections.defaultdict(list)
for path, _, files in os.walk(root):
for lib in libraries:
if lib in files:
library_dict[lib].append(
join_path(path, lib)
)
answer = []
for lib in libraries:
answer.extend(library_dict[lib])
return LibraryList(answer)
Return:
directory where the library was found, if found. None otherwise.
"""
for path in paths:
library = join_path(path, libname)
if os.path.exists(library):
return path
return None
def _find_libraries_non_recursive(libraries, root):
def lib_or_none(lib):
library = join_path(root, lib)
if not os.path.exists(library):
return None
return library
return LibraryList(
[lib_or_none(lib) for lib in libraries if lib_or_none(lib) is not None]
)

View File

@ -374,6 +374,22 @@ def __iter__(self):
return wrapper()
def dedupe(sequence):
    """Yield the items of *sequence* in their original order, emitting
    each distinct (hashable) item only the first time it appears.

    Args:
        sequence: iterable of hashable items to de-duplicate

    Returns:
        generator over the stable de-duplication of the sequence
    """
    emitted = set()
    for item in sequence:
        if item in emitted:
            continue
        yield item
        emitted.add(item)
class RequiredAttributeError(ValueError):
def __init__(self, message):

View File

@ -69,8 +69,17 @@ def _lock(self, op, timeout):
start_time = time.time()
while (time.time() - start_time) < timeout:
try:
# If this is already open read-only and we want to
# upgrade to an exclusive write lock, close first.
if self._fd is not None:
flags = fcntl.fcntl(self._fd, fcntl.F_GETFL)
if op == fcntl.LOCK_EX and flags | os.O_RDONLY:
os.close(self._fd)
self._fd = None
if self._fd is None:
self._fd = os.open(self._file_path, os.O_RDWR)
mode = os.O_RDWR if op == fcntl.LOCK_EX else os.O_RDONLY
self._fd = os.open(self._file_path, mode)
fcntl.lockf(self._fd, op | fcntl.LOCK_NB)
if op == fcntl.LOCK_EX:

View File

@ -54,10 +54,10 @@ def _gcc_get_libstdcxx_version(self, version):
output = None
if compiler.cxx:
rungcc = Executable(compiler.cxx)
libname = "libstdc++.so"
libname = "libstdc++." + dso_suffix
elif compiler.cc:
rungcc = Executable(compiler.cc)
libname = "libgcc_s.so"
libname = "libgcc_s." + dso_suffix
else:
return None
try:

View File

@ -75,6 +75,8 @@
#
SPACK_ENV_PATH = 'SPACK_ENV_PATH'
SPACK_DEPENDENCIES = 'SPACK_DEPENDENCIES'
SPACK_RPATH_DEPS = 'SPACK_RPATH_DEPS'
SPACK_LINK_DEPS = 'SPACK_LINK_DEPS'
SPACK_PREFIX = 'SPACK_PREFIX'
SPACK_INSTALL = 'SPACK_INSTALL'
SPACK_DEBUG = 'SPACK_DEBUG'
@ -252,9 +254,15 @@ def set_build_environment_variables(pkg, env, dirty=False):
env.set_path(SPACK_ENV_PATH, env_paths)
# Prefixes of all of the package's dependencies go in SPACK_DEPENDENCIES
dep_prefixes = [d.prefix
for d in pkg.spec.traverse(root=False, deptype='build')]
dep_prefixes = [d.prefix for d in
pkg.spec.traverse(root=False, deptype=('build', 'link'))]
env.set_path(SPACK_DEPENDENCIES, dep_prefixes)
# These variables control compiler wrapper behavior
env.set_path(SPACK_RPATH_DEPS, [d.prefix for d in get_rpath_deps(pkg)])
env.set_path(SPACK_LINK_DEPS, [
d.prefix for d in pkg.spec.traverse(root=False, deptype=('link'))])
# Add dependencies to CMAKE_PREFIX_PATH
env.set_path('CMAKE_PREFIX_PATH', dep_prefixes)
@ -286,8 +294,8 @@ def set_build_environment_variables(pkg, env, dirty=False):
env.remove_path('PATH', p)
# Add bin directories from dependencies to the PATH for the build.
bin_dirs = reversed(
filter(os.path.isdir, ['%s/bin' % prefix for prefix in dep_prefixes]))
bin_dirs = reversed(filter(os.path.isdir, [
'%s/bin' % d.prefix for d in pkg.spec.dependencies(deptype='build')]))
for item in bin_dirs:
env.prepend_path('PATH', item)
@ -372,10 +380,15 @@ def set_module_variables_for_package(pkg, module):
m.dso_suffix = dso_suffix
def get_rpath_deps(pkg):
"""We only need to RPATH immediate dependencies."""
return pkg.spec.dependencies(deptype='link')
def get_rpaths(pkg):
"""Get a list of all the rpaths for a package."""
rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
deps = pkg.spec.dependencies(deptype='link')
deps = get_rpath_deps(pkg)
rpaths.extend(d.prefix.lib for d in deps
if os.path.isdir(d.prefix.lib))
rpaths.extend(d.prefix.lib64 for d in deps

View File

@ -69,17 +69,17 @@ def get_cmd_function_name(name):
def get_module(name):
"""Imports the module for a particular command name and returns it."""
module_name = "%s.%s" % (__name__, name)
module = __import__(
module_name, fromlist=[name, SETUP_PARSER, DESCRIPTION],
level=0)
module = __import__(module_name,
fromlist=[name, SETUP_PARSER, DESCRIPTION],
level=0)
attr_setdefault(module, SETUP_PARSER, lambda *args: None) # null-op
attr_setdefault(module, DESCRIPTION, "")
fn_name = get_cmd_function_name(name)
if not hasattr(module, fn_name):
tty.die("Command module %s (%s) must define function '%s'."
% (module.__name__, module.__file__, fn_name))
tty.die("Command module %s (%s) must define function '%s'." %
(module.__name__, module.__file__, fn_name))
return module

View File

@ -113,6 +113,6 @@ def checksum(parser, args):
tty.die("Could not fetch any versions for %s" % pkg.name)
version_lines = [
" version('%s', '%s')" % (v, h) for v, h in version_hashes
" version('%s', '%s')" % (v, h) for v, h in version_hashes
]
tty.msg("Checksummed new versions of %s:" % pkg.name, *version_lines)

View File

@ -0,0 +1,84 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
from datetime import datetime
from glob import glob
import llnl.util.tty as tty
from llnl.util.filesystem import working_dir
import spack
from spack.util.executable import which
description = "Debugging commands for troubleshooting Spack."
def setup_parser(subparser):
    """Register the ``debug`` subcommands on the given argparse subparser."""
    commands = subparser.add_subparsers(
        metavar='SUBCOMMAND', dest='debug_command')
    commands.add_parser(
        'create-db-tarball',
        help="Create a tarball of Spack's installation metadata.")
def _debug_tarball_suffix():
    """Build a unique suffix ``<branch>.<commit>.<timestamp>`` describing
    the current Spack checkout, degrading gracefully when git or the
    repository is unavailable.
    """
    now = datetime.now()
    suffix = now.strftime('%Y-%m-%d-%H%M%S')

    git = which('git')
    if not git:
        # NOTE(review): dash separators here vs. dot separators below —
        # confirm whether the inconsistency is intentional.
        return 'nobranch-nogit-%s' % suffix

    with working_dir(spack.spack_root):
        if not os.path.isdir('.git'):
            return 'nobranch.nogit.%s' % suffix

        # Symbolic branch name; 'rev-parse --abbrev-ref' yields the short
        # hash instead when HEAD is detached.
        symbolic = git(
            'rev-parse', '--abbrev-ref', '--short', 'HEAD', output=str).strip()
        commit = git(
            'rev-parse', '--short', 'HEAD', output=str).strip()

        # Detached HEAD: both queries return the same hash, so there is
        # no branch name to include.
        if symbolic == commit:
            return "nobranch.%s.%s" % (commit, suffix)
        else:
            return "%s.%s.%s" % (symbolic, commit, suffix)
def create_db_tarball(args):
    """Tar up the install database index and every installed package's
    ``spec.yaml`` so a Spack installation's state can be shared for
    debugging.
    """
    tar = which('tar')
    tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
    tarball_path = os.path.abspath(tarball_name)

    with working_dir(spack.spack_root):
        files = [spack.installed_db._index_path]
        files += glob('%s/*/*/*/.spack/spec.yaml' % spack.install_path)
        # Store paths relative to spack_root so the tarball unpacks cleanly
        # on another machine.
        files = [os.path.relpath(f) for f in files]
        tar('-czf', tarball_path, *files)

    tty.msg('Created %s' % tarball_name)
def debug(parser, args):
    """Entry point for ``spack debug``: dispatch to the chosen subcommand."""
    handlers = {
        'create-db-tarball': create_db_tarball,
    }
    handler = handlers[args.debug_command]
    handler(args)

View File

@ -52,6 +52,9 @@ def setup_parser(subparser):
subparser.add_argument(
'spec', nargs=argparse.REMAINDER,
help="specs to use for install. Must contain package AND version.")
subparser.add_argument(
'--dirty', action='store_true', dest='dirty',
help="Install a package *without* cleaning the environment.")
def diy(self, args):
@ -100,4 +103,5 @@ def diy(self, args):
keep_prefix=args.keep_prefix,
ignore_deps=args.ignore_deps,
verbose=not args.quiet,
keep_stage=True) # don't remove source dir for DIY.
keep_stage=True, # don't remove source dir for DIY.
dirty=args.dirty)

View File

@ -24,8 +24,11 @@
##############################################################################
import argparse
import llnl.util.tty as tty
import spack
import spack.cmd
from spack.spec import *
from spack.graph import *
description = "Generate graphs of package dependency relationships."
@ -36,15 +39,28 @@ def setup_parser(subparser):
method = subparser.add_mutually_exclusive_group()
method.add_argument(
'--ascii', action='store_true',
'-a', '--ascii', action='store_true',
help="Draw graph as ascii to stdout (default).")
method.add_argument(
'--dot', action='store_true',
'-d', '--dot', action='store_true',
help="Generate graph in dot format and print to stdout.")
subparser.add_argument(
'--concretize', action='store_true',
help="Concretize specs before graphing.")
'-n', '--normalize', action='store_true',
help="Skip concretization; only print normalized spec.")
subparser.add_argument(
'-s', '--static', action='store_true',
help="Use static information from packages, not dynamic spec info.")
subparser.add_argument(
'-i', '--installed', action='store_true',
help="Graph all installed specs in dot format (implies --dot).")
subparser.add_argument(
'-t', '--deptype', action='store',
help="Comma-separated list of deptypes to traverse. default=%s."
% ','.join(alldeps))
subparser.add_argument(
'specs', nargs=argparse.REMAINDER,
@ -52,18 +68,32 @@ def setup_parser(subparser):
def graph(parser, args):
specs = spack.cmd.parse_specs(
args.specs, normalize=True, concretize=args.concretize)
concretize = not args.normalize
if args.installed:
if args.specs:
tty.die("Can't specify specs with --installed")
args.dot = True
specs = spack.installed_db.query()
else:
specs = spack.cmd.parse_specs(
args.specs, normalize=True, concretize=concretize)
if not specs:
setup_parser.parser.print_help()
return 1
deptype = nobuild
if args.deptype:
deptype = tuple(args.deptype.split(','))
validate_deptype(deptype)
deptype = canonical_deptype(deptype)
if args.dot: # Dot graph only if asked for.
graph_dot(*specs)
graph_dot(specs, static=args.static, deptype=deptype)
elif specs: # ascii is default: user doesn't need to provide it explicitly
graph_ascii(specs[0], debug=spack.debug)
graph_ascii(specs[0], debug=spack.debug, deptype=deptype)
for spec in specs[1:]:
print # extra line bt/w independent graphs
graph_ascii(spec, debug=spack.debug)

View File

@ -25,7 +25,6 @@
import argparse
import llnl.util.tty as tty
from llnl.util.filesystem import join_path
import spack
import spack.cmd
@ -77,7 +76,7 @@ def location(parser, args):
print spack.prefix
elif args.packages:
print spack.repo.root
print spack.repo.first_repo().root
elif args.stages:
print spack.stage_path
@ -99,7 +98,7 @@ def location(parser, args):
if args.package_dir:
# This one just needs the spec name.
print join_path(spack.repo.root, spec.name)
print spack.repo.dirname_for_package_name(spec.name)
else:
# These versions need concretized specs.

View File

@ -29,10 +29,10 @@
import shutil
import sys
import llnl.util.filesystem as filesystem
import llnl.util.tty as tty
import spack.cmd
import spack.cmd.common.arguments as arguments
import llnl.util.filesystem as filesystem
from spack.modules import module_types
description = "Manipulate module files"

View File

@ -48,6 +48,9 @@ def setup_parser(subparser):
subparser.add_argument(
'spec', nargs=argparse.REMAINDER,
help="specs to use for install. Must contain package AND version.")
subparser.add_argument(
'--dirty', action='store_true', dest='dirty',
help="Install a package *without* cleaning the environment.")
def spack_transitive_include_path():

View File

@ -56,7 +56,7 @@ class MockCache(object):
def store(self, copyCmd, relativeDst):
pass
def fetcher(self, targetPath, digest):
def fetcher(self, targetPath, digest, **kwargs):
return MockCacheFetcher()

View File

@ -49,8 +49,9 @@
from llnl.util.lock import *
import spack.spec
from spack.directory_layout import DirectoryLayoutError
from spack.version import Version
from spack.spec import Spec
from spack.spec import *
from spack.error import SpackError
from spack.repository import UnknownPackageError
import spack.util.spack_yaml as syaml
@ -64,6 +65,9 @@
# Default timeout for spack database locks is 5 min.
_db_lock_timeout = 60
# Types of dependencies tracked by the database
_tracked_deps = nobuild
def _autospec(function):
"""Decorator that automatically converts the argument of a single-arg
@ -232,8 +236,6 @@ def _assign_dependencies(self, hash_key, installs, data):
spec.format('$_$#'), dname, dhash[:7]))
continue
# defensive copy (not sure everything handles extra
# parent links yet)
child = data[dhash].spec
spec._add_dependency(child, dtypes)
@ -328,7 +330,7 @@ def invalid_record(hash_key, error):
self._data = data
def reindex(self, directory_layout):
"""Build database index from scratch based from a directory layout.
"""Build database index from scratch based on a directory layout.
Locks the DB if it isn't locked already.
@ -359,9 +361,6 @@ def _read_suppress_error():
# Ask the directory layout to traverse the filesystem.
for spec in directory_layout.all_specs():
# Create a spec for each known package and add it.
path = directory_layout.path_for_spec(spec)
# Try to recover explicit value from old DB, but
# default it to False if DB was corrupt.
explicit = False
@ -370,7 +369,7 @@ def _read_suppress_error():
if old_info is not None:
explicit = old_info.explicit
self._add(spec, path, directory_layout, explicit=explicit)
self._add(spec, directory_layout, explicit=explicit)
self._check_ref_counts()
@ -389,10 +388,7 @@ def _check_ref_counts(self):
counts = {}
for key, rec in self._data.items():
counts.setdefault(key, 0)
# XXX(deptype): This checks all dependencies, but build
# dependencies might be able to be dropped in the
# future.
for dep in rec.spec.dependencies():
for dep in rec.spec.dependencies(_tracked_deps):
dep_key = dep.dag_hash()
counts.setdefault(dep_key, 0)
counts[dep_key] += 1
@ -450,52 +446,62 @@ def _read(self):
# reindex() takes its own write lock, so no lock here.
self.reindex(spack.install_layout)
def _add(self, spec, path, directory_layout=None, explicit=False):
"""Add an install record for spec at path to the database.
def _add(self, spec, directory_layout=None, explicit=False):
"""Add an install record for this spec to the database.
This assumes that the spec is not already installed. It
updates the ref counts on dependencies of the spec in the DB.
Assumes spec is installed in ``layout.path_for_spec(spec)``.
This operation is in-memory, and does not lock the DB.
Also ensures dependencies are present and updated in the DB as
either installed or missing.
"""
key = spec.dag_hash()
if key in self._data:
rec = self._data[key]
rec.installed = True
if not spec.concrete:
raise NonConcreteSpecAddError(
"Specs added to DB must be concrete.")
# TODO: this overwrites a previous install path (when path !=
# self._data[key].path), and the old path still has a
# dependent in the DB. We could consider re-RPATH-ing the
# dependents. This case is probably infrequent and may not be
# worth fixing, but this is where we can discover it.
rec.path = path
for dep in spec.dependencies(_tracked_deps):
dkey = dep.dag_hash()
if dkey not in self._data:
self._add(dep, directory_layout, explicit=False)
else:
self._data[key] = InstallRecord(spec, path, True,
explicit=explicit)
for dep in spec.dependencies(('link', 'run')):
self._increment_ref_count(dep, directory_layout)
def _increment_ref_count(self, spec, directory_layout=None):
"""Recursively examine dependencies and update their DB entries."""
key = spec.dag_hash()
if key not in self._data:
installed = False
path = None
if directory_layout:
path = directory_layout.path_for_spec(spec)
installed = os.path.isdir(path)
try:
directory_layout.check_installed(spec)
installed = True
except DirectoryLayoutError as e:
tty.warn(
'Dependency missing due to corrupt install directory:',
path, str(e))
self._data[key] = InstallRecord(spec.copy(), path, installed)
# Create a new install record with no deps initially.
new_spec = spec.copy(deps=False)
self._data[key] = InstallRecord(
new_spec, path, installed, ref_count=0, explicit=explicit)
for dep in spec.dependencies(('link', 'run')):
self._increment_ref_count(dep)
# Connect dependencies from the DB to the new copy.
for name, dep in spec.dependencies_dict(_tracked_deps).iteritems():
dkey = dep.spec.dag_hash()
new_spec._add_dependency(self._data[dkey].spec, dep.deptypes)
self._data[dkey].ref_count += 1
self._data[key].ref_count += 1
# Mark concrete once everything is built, and preserve
# the original hash of concrete specs.
new_spec._mark_concrete()
new_spec._hash = key
else:
# If it is already there, mark it as installed.
self._data[key].installed = True
self._data[key].explicit = explicit
@_autospec
def add(self, spec, path, explicit=False):
def add(self, spec, directory_layout, explicit=False):
"""Add spec at path to database, locking and reading DB to sync.
``add()`` will lock and read from the DB on disk.
@ -504,7 +510,7 @@ def add(self, spec, path, explicit=False):
# TODO: ensure that spec is concrete?
# Entire add is transactional.
with self.write_transaction():
self._add(spec, path, explicit=explicit)
self._add(spec, directory_layout, explicit=explicit)
def _get_matching_spec_key(self, spec, **kwargs):
"""Get the exact spec OR get a single spec that matches."""
@ -534,7 +540,7 @@ def _decrement_ref_count(self, spec):
if rec.ref_count == 0 and not rec.installed:
del self._data[key]
for dep in spec.dependencies('link'):
for dep in spec.dependencies(_tracked_deps):
self._decrement_ref_count(dep)
def _remove(self, spec):
@ -548,7 +554,7 @@ def _remove(self, spec):
return rec.spec
del self._data[key]
for dep in rec.spec.dependencies('link'):
for dep in rec.spec.dependencies(_tracked_deps):
self._decrement_ref_count(dep)
# Returns the concrete spec so we know it in the case where a
@ -657,6 +663,10 @@ class CorruptDatabaseError(SpackError):
"""Raised when errors are found while reading the database."""
class NonConcreteSpecAddError(SpackError):
"""Raised when attemptint to add non-concrete spec to DB."""
class InvalidDatabaseVersionError(SpackError):
def __init__(self, expected, found):

View File

@ -423,7 +423,7 @@ class RemoveFailedError(DirectoryLayoutError):
def __init__(self, installed_spec, prefix, error):
super(RemoveFailedError, self).__init__(
'Could not remove prefix %s for %s : %s'
% prefix, installed_spec.short_spec, error)
% (prefix, installed_spec.short_spec, error))
self.cause = error

View File

@ -170,12 +170,11 @@ def fetch(self):
tty.msg("Already downloaded %s" % self.archive_file)
return
possible_files = self.stage.expected_archive_files
save_file = None
partial_file = None
if possible_files:
save_file = self.stage.expected_archive_files[0]
partial_file = self.stage.expected_archive_files[0] + '.part'
if self.stage.save_filename:
save_file = self.stage.save_filename
partial_file = self.stage.save_filename + '.part'
tty.msg("Trying to fetch from %s" % self.url)
@ -307,7 +306,7 @@ def archive(self, destination):
if not self.archive_file:
raise NoArchiveFileError("Cannot call archive() before fetching.")
shutil.copy(self.archive_file, destination)
shutil.copyfile(self.archive_file, destination)
@_needs_stage
def check(self):
@ -858,9 +857,9 @@ def store(self, fetcher, relativeDst):
mkdirp(os.path.dirname(dst))
fetcher.archive(dst)
def fetcher(self, targetPath, digest):
def fetcher(self, targetPath, digest, **kwargs):
url = "file://" + join_path(self.root, targetPath)
return CacheURLFetchStrategy(url, digest)
return CacheURLFetchStrategy(url, digest, **kwargs)
def destroy(self):
shutil.rmtree(self.root, ignore_errors=True)

View File

@ -67,22 +67,20 @@
from llnl.util.lang import *
from llnl.util.tty.color import *
import spack
from spack.spec import Spec
from spack.spec import *
__all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot']
def topological_sort(spec, **kwargs):
def topological_sort(spec, reverse=False, deptype=None):
"""Topological sort for specs.
Return a list of dependency specs sorted topologically. The spec
argument is not modified in the process.
"""
reverse = kwargs.get('reverse', False)
# XXX(deptype): iterate over a certain kind of dependency. Maybe color
# edges based on the type of dependency?
deptype = canonical_deptype(deptype)
if not reverse:
parents = lambda s: s.dependents()
children = lambda s: s.dependencies()
@ -91,7 +89,7 @@ def topological_sort(spec, **kwargs):
children = lambda s: s.dependents()
# Work on a copy so this is nondestructive.
spec = spec.copy()
spec = spec.copy(deps=deptype)
nodes = spec.index()
topo_order = []
@ -129,7 +127,7 @@ def find(seq, predicate):
return -1
# Names of different graph line states. We Record previous line
# Names of different graph line states. We record previous line
# states so that we can easily determine what to do when connecting.
states = ('node', 'collapse', 'merge-right', 'expand-right', 'back-edge')
NODE, COLLAPSE, MERGE_RIGHT, EXPAND_RIGHT, BACK_EDGE = states
@ -143,6 +141,7 @@ def __init__(self):
self.node_character = '*'
self.debug = False
self.indent = 0
self.deptype = alldeps
# These are colors in the order they'll be used for edges.
# See llnl.util.tty.color for details on color characters.
@ -162,6 +161,9 @@ def _indent(self):
def _write_edge(self, string, index, sub=0):
"""Write a colored edge to the output stream."""
# Ignore empty frontier entries (they're just collapsed)
if not self._frontier[index]:
return
name = self._frontier[index][sub]
edge = "@%s{%s}" % (self._name_to_color[name], string)
self._out.write(edge)
@ -386,7 +388,7 @@ def write(self, spec, **kwargs):
self._out = ColorStream(sys.stdout, color=color)
# We'll traverse the spec in topo order as we graph it.
topo_order = topological_sort(spec, reverse=True)
topo_order = topological_sort(spec, reverse=True, deptype=self.deptype)
# Work on a copy to be nondestructive
spec = spec.copy()
@ -420,20 +422,26 @@ def write(self, spec, **kwargs):
if back:
back.sort()
prev_ends = []
collapse_l1 = False
for j, (b, d) in enumerate(back):
self._frontier[i].remove(d)
if i - b > 1:
self._back_edge_line(prev_ends, b, i, False,
'left-1')
collapse_l1 = any(not e for e in self._frontier)
self._back_edge_line(
prev_ends, b, i, collapse_l1, 'left-1')
del prev_ends[:]
prev_ends.append(b)
# Check whether we did ALL the deps as back edges,
# in which case we're done.
collapse = not self._frontier[i]
if collapse:
pop = not self._frontier[i]
collapse_l2 = pop
if collapse_l1:
collapse_l2 = False
if pop:
self._frontier.pop(i)
self._back_edge_line(prev_ends, -1, -1, collapse, 'left-2')
self._back_edge_line(
prev_ends, -1, -1, collapse_l2, 'left-2')
elif len(self._frontier[i]) > 1:
# Expand forward after doing all back connections
@ -476,32 +484,28 @@ def write(self, spec, **kwargs):
# Replace node with its dependencies
self._frontier.pop(i)
if node.dependencies():
deps = sorted((d.name for d in node.dependencies()),
reverse=True)
deps = node.dependencies(self.deptype)
if deps:
deps = sorted((d.name for d in deps), reverse=True)
self._connect_deps(i, deps, "new-deps") # anywhere.
elif self._frontier:
self._collapse_line(i)
def graph_ascii(spec, **kwargs):
node_character = kwargs.get('node', 'o')
out = kwargs.pop('out', None)
debug = kwargs.pop('debug', False)
indent = kwargs.pop('indent', 0)
color = kwargs.pop('color', None)
check_kwargs(kwargs, graph_ascii)
def graph_ascii(spec, node='o', out=None, debug=False,
indent=0, color=None, deptype=None):
graph = AsciiGraph()
graph.debug = debug
graph.indent = indent
graph.node_character = node_character
graph.node_character = node
if deptype:
graph.deptype = canonical_deptype(deptype)
graph.write(spec, color=color, out=out)
def graph_dot(*specs, **kwargs):
def graph_dot(specs, deptype=None, static=False, out=None):
"""Generate a graph in dot format of all provided specs.
Print out a dot formatted graph of all the dependencies between
@ -510,42 +514,73 @@ def graph_dot(*specs, **kwargs):
spack graph --dot qt | dot -Tpdf > spack-graph.pdf
"""
out = kwargs.pop('out', sys.stdout)
check_kwargs(kwargs, graph_dot)
if out is None:
out = sys.stdout
if deptype is None:
deptype = alldeps
out.write('digraph G {\n')
out.write(' label = "Spack Dependencies"\n')
out.write(' labelloc = "b"\n')
out.write(' rankdir = "LR"\n')
out.write(' rankdir = "TB"\n')
out.write(' ranksep = "5"\n')
out.write('node[\n')
out.write(' fontname=Monaco,\n')
out.write(' penwidth=2,\n')
out.write(' fontsize=12,\n')
out.write(' margin=.1,\n')
out.write(' shape=box,\n')
out.write(' fillcolor=lightblue,\n')
out.write(' style="rounded,filled"]\n')
out.write('\n')
def quote(string):
def q(string):
return '"%s"' % string
if not specs:
specs = [p.name for p in spack.repo.all_packages()]
else:
roots = specs
specs = set()
for spec in roots:
specs.update(Spec(s.name) for s in spec.normalized().traverse())
raise ValueError("Must provide specs ot graph_dot")
deps = []
# Static graph includes anything a package COULD depend on.
if static:
names = set.union(*[s.package.possible_dependencies() for s in specs])
specs = [Spec(name) for name in names]
labeled = set()
def label(key, label):
if key not in labeled:
out.write(' "%s" [label="%s"]\n' % (key, label))
labeled.add(key)
deps = set()
for spec in specs:
out.write(' %-30s [label="%s"]\n' % (quote(spec.name), spec.name))
if static:
out.write(' "%s" [label="%s"]\n' % (spec.name, spec.name))
# Skip virtual specs (we'll find out about them from concrete ones.
if spec.virtual:
continue
# Skip virtual specs (we'll find out about them from concrete ones.
if spec.virtual:
continue
# Add edges for each depends_on in the package.
for dep_name, dep in spec.package.dependencies.iteritems():
deps.append((spec.name, dep_name))
# Add edges for each depends_on in the package.
for dep_name, dep in spec.package.dependencies.iteritems():
deps.add((spec.name, dep_name))
# If the package provides something, add an edge for that.
for provider in set(s.name for s in spec.package.provided):
deps.append((provider, spec.name))
# If the package provides something, add an edge for that.
for provider in set(s.name for s in spec.package.provided):
deps.add((provider, spec.name))
else:
def key_label(s):
return s.dag_hash(), "%s-%s" % (s.name, s.dag_hash(7))
for s in spec.traverse(deptype=deptype):
skey, slabel = key_label(s)
out.write(' "%s" [label="%s"]\n' % (skey, slabel))
for d in s.dependencies(deptype=deptype):
dkey, _ = key_label(d)
deps.add((skey, dkey))
out.write('\n')

View File

@ -24,7 +24,7 @@
##############################################################################
"""This package contains modules with hooks for various stages in the
Spack install process. You can add modules here and they'll be
executaed by package at various times during the package lifecycle.
executed by package at various times during the package lifecycle.
Each hook is just a function that takes a package as a parameter.
Hooks are not executed in any particular order.
@ -41,9 +41,10 @@
features.
"""
import imp
from llnl.util.lang import memoized, list_modules
from llnl.util.filesystem import join_path
import spack
from llnl.util.filesystem import join_path
from llnl.util.lang import memoized, list_modules
@memoized
@ -70,12 +71,11 @@ def __call__(self, pkg):
if hasattr(hook, '__call__'):
hook(pkg)
#
# Define some functions that can be called to fire off hooks.
#
pre_install = HookRunner('pre_install')
post_install = HookRunner('post_install')
pre_install = HookRunner('pre_install')
post_install = HookRunner('post_install')
pre_uninstall = HookRunner('pre_uninstall')
pre_uninstall = HookRunner('pre_uninstall')
post_uninstall = HookRunner('post_uninstall')

View File

@ -0,0 +1,35 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import spack.modules
def post_install(pkg):
dk = spack.modules.LmodModule(pkg.spec)
dk.write()
def post_uninstall(pkg):
dk = spack.modules.LmodModule(pkg.spec)
dk.remove()

View File

@ -81,8 +81,10 @@ def filter_shebang(path):
tty.warn("Patched overlong shebang in %s" % path)
def filter_shebangs_in_directory(directory):
for file in os.listdir(directory):
def filter_shebangs_in_directory(directory, filenames=None):
if filenames is None:
filenames = os.listdir(directory)
for file in filenames:
path = os.path.join(directory, file)
# only handle files
@ -104,6 +106,6 @@ def post_install(pkg):
"""This hook edits scripts so that they call /bin/bash
$spack_prefix/bin/sbang instead of something longer than the
shebang limit."""
if not os.path.isdir(pkg.prefix.bin):
return
filter_shebangs_in_directory(pkg.prefix.bin)
for directory, _, filenames in os.walk(pkg.prefix):
filter_shebangs_in_directory(directory, filenames)

View File

@ -40,6 +40,7 @@
"""
import copy
import datetime
import itertools
import os
import os.path
import re
@ -48,6 +49,7 @@
import llnl.util.tty as tty
import spack
import spack.compilers # Needed by LmodModules
import spack.config
from llnl.util.filesystem import join_path, mkdirp
from spack.build_environment import parent_class_modules
@ -56,7 +58,8 @@
__all__ = ['EnvModule', 'Dotkit', 'TclModule']
# Registry of all types of modules. Entries created by EnvModule's metaclass
"""Registry of all types of modules. Entries created by EnvModule's
metaclass."""
module_types = {}
CONFIGURATION = spack.config.get_config('modules')
@ -120,7 +123,7 @@ def dependencies(spec, request='all'):
return []
if request == 'direct':
return spec.dependencies()
return spec.dependencies(deptype=('link', 'run'))
# FIXME : during module file creation nodes seem to be visited multiple
# FIXME : times even if cover='nodes' is given. This work around permits
@ -133,6 +136,7 @@ def dependencies(spec, request='all'):
spec.traverse(order='post',
depth=True,
cover='nodes',
deptype=('link', 'run'),
root=False),
reverse=True)]
return [xx for ii, xx in l if not (xx in seen or seen_add(xx))]
@ -388,6 +392,7 @@ def write(self, overwrite=False):
for mod in modules:
set_module_variables_for_package(package, mod)
set_module_variables_for_package(package, package.module)
package.setup_environment(spack_env, env)
package.setup_dependent_package(self.pkg.module, self.spec)
package.setup_dependent_environment(spack_env, env, self.spec)
@ -632,3 +637,237 @@ def module_specific_content(self, configuration):
raise SystemExit('Module generation aborted.')
line = line.format(**naming_tokens)
yield line
# To construct an arbitrary hierarchy of module files:
# 1. Parse the configuration file and check that all the items in
# hierarchical_scheme are indeed virtual packages
# This needs to be done only once at start-up
# 2. Order the stack as `hierarchical_scheme + ['mpi, 'compiler']
# 3. Check which of the services are provided by the package
# -> may be more than one
# 4. Check which of the services are needed by the package
# -> this determines where to write the module file
# 5. For each combination of services in which we have at least one provider
# here add the appropriate conditional MODULEPATH modifications
class LmodModule(EnvModule):
name = 'lmod'
path = join_path(spack.share_path, "lmod")
environment_modifications_formats = {
PrependPath: 'prepend_path("{name}", "{value}")\n',
AppendPath: 'append_path("{name}", "{value}")\n',
RemovePath: 'remove_path("{name}", "{value}")\n',
SetEnv: 'setenv("{name}", "{value}")\n',
UnsetEnv: 'unsetenv("{name}")\n'
}
autoload_format = ('if not isloaded("{module_file}") then\n'
' LmodMessage("Autoloading {module_file}")\n'
' load("{module_file}")\n'
'end\n\n')
prerequisite_format = 'prereq("{module_file}")\n'
family_format = 'family("{family}")\n'
path_part_with_hash = join_path('{token.name}', '{token.version}-{token.hash}') # NOQA: ignore=E501
path_part_without_hash = join_path('{token.name}', '{token.version}')
# TODO : Check that extra tokens specified in configuration file
# TODO : are actually virtual dependencies
configuration = CONFIGURATION.get('lmod', {})
hierarchy_tokens = configuration.get('hierarchical_scheme', [])
hierarchy_tokens = hierarchy_tokens + ['mpi', 'compiler']
def __init__(self, spec=None):
super(LmodModule, self).__init__(spec)
# Sets the root directory for this architecture
self.modules_root = join_path(LmodModule.path, self.spec.architecture)
# Retrieve core compilers
self.core_compilers = self.configuration.get('core_compilers', [])
# Keep track of the requirements that this package has in terms
# of virtual packages
# that participate in the hierarchical structure
self.requires = {'compiler': self.spec.compiler}
# For each virtual dependency in the hierarchy
for x in self.hierarchy_tokens:
if x in self.spec and not self.spec.package.provides(
x): # if I depend on it
self.requires[x] = self.spec[x] # record the actual provider
# Check what are the services I need (this will determine where the
# module file will be written)
self.substitutions = {}
self.substitutions.update(self.requires)
# TODO : complete substitutions
# Check what service I provide to others
self.provides = {}
# If it is in the list of supported compilers family -> compiler
if self.spec.name in spack.compilers.supported_compilers():
self.provides['compiler'] = spack.spec.CompilerSpec(str(self.spec))
# Special case for llvm
if self.spec.name == 'llvm':
self.provides['compiler'] = spack.spec.CompilerSpec(str(self.spec))
self.provides['compiler'].name = 'clang'
for x in self.hierarchy_tokens:
if self.spec.package.provides(x):
self.provides[x] = self.spec[x]
def _hierarchy_token_combinations(self):
"""
Yields all the relevant combinations that could appear in the hierarchy
"""
for ii in range(len(self.hierarchy_tokens) + 1):
for item in itertools.combinations(self.hierarchy_tokens, ii):
if 'compiler' in item:
yield item
def _hierarchy_to_be_provided(self):
"""
Filters a list of hierarchy tokens and yields only the one that we
need to provide
"""
for item in self._hierarchy_token_combinations():
if any(x in self.provides for x in item):
yield item
def token_to_path(self, name, value):
# If we are dealing with a core compiler, return 'Core'
if name == 'compiler' and str(value) in self.core_compilers:
return 'Core'
# CompilerSpec does not have an hash
if name == 'compiler':
return self.path_part_without_hash.format(token=value)
# For virtual providers add a small part of the hash
# to distinguish among different variants in a directory hierarchy
value.hash = value.dag_hash(length=6)
return self.path_part_with_hash.format(token=value)
@property
def file_name(self):
parts = [self.token_to_path(x, self.requires[x])
for x in self.hierarchy_tokens if x in self.requires]
hierarchy_name = join_path(*parts)
fullname = join_path(self.modules_root, hierarchy_name,
self.use_name + '.lua')
return fullname
@property
def use_name(self):
return self.token_to_path('', self.spec)
def modulepath_modifications(self):
# What is available is what we require plus what we provide
entry = ''
available = {}
available.update(self.requires)
available.update(self.provides)
available_parts = [self.token_to_path(x, available[x])
for x in self.hierarchy_tokens if x in available]
# Missing parts
missing = [x for x in self.hierarchy_tokens if x not in available]
# Direct path we provide on top of compilers
modulepath = join_path(self.modules_root, *available_parts)
env = EnvironmentModifications()
env.prepend_path('MODULEPATH', modulepath)
for line in self.process_environment_command(env):
entry += line
def local_variable(x):
lower, upper = x.lower(), x.upper()
fmt = 'local {lower}_name = os.getenv("LMOD_{upper}_NAME")\n'
fmt += 'local {lower}_version = os.getenv("LMOD_{upper}_VERSION")\n' # NOQA: ignore=501
return fmt.format(lower=lower, upper=upper)
def set_variables_for_service(env, x):
upper = x.upper()
s = self.provides[x]
name, version = os.path.split(self.token_to_path(x, s))
env.set('LMOD_{upper}_NAME'.format(upper=upper), name)
env.set('LMOD_{upper}_VERSION'.format(upper=upper), version)
def conditional_modulepath_modifications(item):
entry = 'if '
needed = []
for x in self.hierarchy_tokens:
if x in missing:
needed.append('{x}_name '.format(x=x))
entry += 'and '.join(needed) + 'then\n'
entry += ' local t = pathJoin("{root}"'.format(
root=self.modules_root)
for x in item:
if x in missing:
entry += ', {lower}_name, {lower}_version'.format(
lower=x.lower())
else:
entry += ', "{x}"'.format(
x=self.token_to_path(x, available[x]))
entry += ')\n'
entry += ' prepend_path("MODULEPATH", t)\n'
entry += 'end\n\n'
return entry
if 'compiler' not in self.provides:
# Retrieve variables
entry += '\n'
for x in missing:
entry += local_variable(x)
entry += '\n'
# Conditional modifications
conditionals = [x
for x in self._hierarchy_to_be_provided()
if any(t in missing for t in x)]
for item in conditionals:
entry += conditional_modulepath_modifications(item)
# Set environment variables for the services we provide
env = EnvironmentModifications()
for x in self.provides:
set_variables_for_service(env, x)
for line in self.process_environment_command(env):
entry += line
return entry
@property
def header(self):
timestamp = datetime.datetime.now()
# Header as in
# https://www.tacc.utexas.edu/research-development/tacc-projects/lmod/advanced-user-guide/more-about-writing-module-files
header = "-- -*- lua -*-\n"
header += '-- Module file created by spack (https://github.com/LLNL/spack) on %s\n' % timestamp # NOQA: ignore=E501
header += '--\n'
header += '-- %s\n' % self.spec.short_spec
header += '--\n'
# Short description -> whatis()
if self.short_description:
header += "whatis([[Name : {name}]])\n".format(name=self.spec.name)
header += "whatis([[Version : {version}]])\n".format(
version=self.spec.version)
# Long description -> help()
if self.long_description:
doc = re.sub(r'"', '\"', self.long_description)
header += "help([[{documentation}]])\n".format(documentation=doc)
# Certain things need to be done only if we provide a service
if self.provides:
# Add family directives
header += '\n'
for x in self.provides:
header += self.family_format.format(family=x)
header += '\n'
header += '-- MODULEPATH modifications\n'
header += '\n'
# Modify MODULEPATH
header += self.modulepath_modifications()
# Set environment variables for services we provide
header += '\n'
header += '-- END MODULEPATH modifications\n'
header += '\n'
return header

View File

@ -22,7 +22,7 @@ def __init__(self):
"10.11": "elcapitan",
"10.12": "sierra"}
mac_ver = py_platform.mac_ver()[0][:-2]
mac_ver = '.'.join(py_platform.mac_ver()[0].split('.')[:2])
name = mac_releases.get(mac_ver, "macos")
super(MacOs, self).__init__(name, mac_ver)

View File

@ -575,6 +575,20 @@ def __init__(self, spec):
self.extra_args = {}
def possible_dependencies(self, visited=None):
"""Return set of possible transitive dependencies of this package."""
if visited is None:
visited = set()
visited.add(self.name)
for name in self.dependencies:
if name not in visited and not spack.spec.Spec(name).virtual:
pkg = spack.repo.get(name)
for name in pkg.possible_dependencies(visited):
visited.add(name)
return visited
@property
def package_dir(self):
"""Return the directory where the package.py file lives."""
@ -886,7 +900,7 @@ def do_fetch(self, mirror_only=False):
if not ignore_checksum:
raise FetchError("Will not fetch %s" %
self.spec.format('$_$@'), checksum_msg)
self.spec.format('$_$@'), ck_msg)
self.stage.fetch(mirror_only)
@ -1080,7 +1094,8 @@ def do_install(self,
skip_patch=skip_patch,
verbose=verbose,
make_jobs=make_jobs,
run_tests=run_tests)
run_tests=run_tests,
dirty=dirty)
# Set run_tests flag before starting build.
self.run_tests = run_tests
@ -1171,7 +1186,9 @@ def build_process():
keep_prefix = True if self.last_phase is None else keep_prefix
# note: PARENT of the build process adds the new package to
# the database, so that we don't need to re-read from file.
spack.installed_db.add(self.spec, self.prefix, explicit=explicit)
spack.installed_db.add(
self.spec, spack.install_layout, explicit=explicit
)
except directory_layout.InstallDirectoryAlreadyExistsError:
# Abort install if install directory exists.
# But do NOT remove it (you'd be overwriting someone else's stuff)

View File

@ -8,37 +8,21 @@
from spack.operating_systems.cnl import Cnl
from llnl.util.filesystem import join_path
# Craype- module prefixes that are not valid CPU targets.
NON_TARGETS = ('hugepages', 'network', 'target', 'accel', 'xtpe')
def _get_modules_in_modulecmd_output(output):
'''Return list of valid modules parsed from modulecmd output string.'''
return [i for i in output.splitlines()
if len(i.split()) == 1]
def _target_from_clean_env(name):
'''Return the default back_end target as loaded in a clean login session.
A bash subshell is launched with a wiped environment and the list of loaded
modules is parsed for the first acceptable CrayPE target.
'''
# Based on the incantation:
# echo "$(env - USER=$USER /bin/bash -l -c 'module list -lt')"
targets = []
if name != 'front_end':
env = which('env')
env.add_default_arg('-')
# CAUTION - $USER is generally needed to initialize the environment.
# There may be other variables needed for general success.
output = env('USER=%s' % os.environ['USER'],
'/bin/bash', '--noprofile', '--norc', '-c',
'. /etc/profile; module list -lt',
output=str, error=str)
default_modules = [i for i in output.splitlines()
if len(i.split()) == 1]
tty.debug("Found default modules:",
*[" " + mod for mod in default_modules])
pattern = 'craype-(?!{0})(\S*)'.format('|'.join(NON_TARGETS))
for mod in default_modules:
if 'craype-' in mod:
targets.extend(re.findall(pattern, mod))
return targets[0] if targets else None
def _fill_craype_targets_from_modules(targets, modules):
'''Extend CrayPE CPU targets list with those found in list of modules.'''
# Craype- module prefixes that are not valid CPU targets.
non_targets = ('hugepages', 'network', 'target', 'accel', 'xtpe')
pattern = r'craype-(?!{0})(\S*)'.format('|'.join(non_targets))
for mod in modules:
if 'craype-' in mod:
targets.extend(re.findall(pattern, mod))
class Cray(Platform):
@ -56,7 +40,12 @@ def __init__(self):
'''
super(Cray, self).__init__('cray')
# Get targets from config or make best guess from environment:
# Make all craype targets available.
for target in self._avail_targets():
name = target.replace('-', '_')
self.add_target(name, Target(name, 'craype-%s' % target))
# Get aliased targets from config or best guess from environment:
conf = spack.config.get_config('targets')
for name in ('front_end', 'back_end'):
_target = getattr(self, name, None)
@ -64,18 +53,16 @@ def __init__(self):
_target = os.environ.get('SPACK_' + name.upper())
if _target is None:
_target = conf.get(name)
if _target is None:
_target = _target_from_clean_env(name)
setattr(self, name, _target)
if _target is None and name == 'back_end':
_target = self._default_target_from_env()
if _target is not None:
self.add_target(name, Target(_target, 'craype-' + _target))
self.add_target(_target, Target(_target, 'craype-' + _target))
safe_name = _target.replace('-', '_')
setattr(self, name, safe_name)
self.add_target(name, self.targets[safe_name])
if self.back_end is not None:
self.default = self.back_end
self.add_target(
'default', Target(self.default, 'craype-' + self.default))
self.add_target('default', self.targets[self.back_end])
else:
raise NoPlatformError()
@ -90,7 +77,7 @@ def __init__(self):
self.add_operating_system(self.front_os, front_distro)
@classmethod
def setup_platform_environment(self, pkg, env):
def setup_platform_environment(cls, pkg, env):
""" Change the linker to default dynamic to be more
similar to linux/standard linker behavior
"""
@ -101,5 +88,43 @@ def setup_platform_environment(self, pkg, env):
env.prepend_path('SPACK_ENV_PATH', cray_wrapper_names)
@classmethod
def detect(self):
def detect(cls):
return os.environ.get('CRAYPE_VERSION') is not None
def _default_target_from_env(self):
'''Set and return the default CrayPE target loaded in a clean login
session.
A bash subshell is launched with a wiped environment and the list of
loaded modules is parsed for the first acceptable CrayPE target.
'''
# Based on the incantation:
# echo "$(env - USER=$USER /bin/bash -l -c 'module list -lt')"
if getattr(self, 'default', None) is None:
env = which('env')
env.add_default_arg('-')
# CAUTION - $USER is generally needed in the sub-environment.
# There may be other variables needed for general success.
output = env('USER=%s' % os.environ['USER'],
'HOME=%s' % os.environ['HOME'],
'/bin/bash', '--noprofile', '--norc', '-c',
'. /etc/profile; module list -lt',
output=str, error=str)
self._defmods = _get_modules_in_modulecmd_output(output)
targets = []
_fill_craype_targets_from_modules(targets, self._defmods)
self.default = targets[0] if targets else None
tty.debug("Found default modules:",
*[" %s" % mod for mod in self._defmods])
return self.default
def _avail_targets(self):
'''Return a list of available CrayPE CPU targets.'''
if getattr(self, '_craype_targets', None) is None:
module = which('modulecmd', required=True)
module.add_default_arg('python')
output = module('avail', '-t', 'craype-', output=str, error=str)
craype_modules = _get_modules_in_modulecmd_output(output)
self._craype_targets = targets = []
_fill_craype_targets_from_modules(targets, craype_modules)
return self._craype_targets

View File

@ -139,7 +139,20 @@
'default': [],
'items': {
'type': 'string',
'enum': ['tcl', 'dotkit']}},
'enum': ['tcl', 'dotkit', 'lmod']}},
'lmod': {
'allOf': [
# Base configuration
{'$ref': '#/definitions/module_type_configuration'},
{
'core_compilers': {
'$ref': '#/definitions/array_of_strings'
},
'hierarchical_scheme': {
'$ref': '#/definitions/array_of_strings'
}
} # Specific lmod extensions
]},
'tcl': {
'allOf': [
# Base configuration

View File

@ -123,6 +123,39 @@
from spack.version import *
from spack.provider_index import ProviderIndex
__all__ = [
'Spec',
'alldeps',
'nolink',
'nobuild',
'canonical_deptype',
'validate_deptype',
'parse',
'parse_anonymous_spec',
'SpecError',
'SpecParseError',
'DuplicateDependencyError',
'DuplicateVariantError',
'DuplicateCompilerSpecError',
'UnsupportedCompilerError',
'UnknownVariantError',
'DuplicateArchitectureError',
'InconsistentSpecError',
'InvalidDependencyError',
'InvalidDependencyTypeError',
'NoProviderError',
'MultipleProviderError',
'UnsatisfiableSpecError',
'UnsatisfiableSpecNameError',
'UnsatisfiableVersionSpecError',
'UnsatisfiableCompilerSpecError',
'UnsatisfiableVariantSpecError',
'UnsatisfiableCompilerFlagSpecError',
'UnsatisfiableArchitectureSpecError',
'UnsatisfiableProviderSpecError',
'UnsatisfiableDependencySpecError',
'SpackYAMLError',
'AmbiguousHashError']
# Valid pattern for an identifier in Spack
identifier_re = r'\w[\w-]*'
@ -156,12 +189,46 @@
# Special types of dependencies.
alldeps = ('build', 'link', 'run')
nolink = ('build', 'run')
nolink = ('build', 'run')
nobuild = ('link', 'run')
norun = ('link', 'build')
special_types = {
'alldeps': alldeps,
'all': alldeps, # allow "all" as string but not symbol.
'nolink': nolink,
'nobuild': nobuild,
'norun': norun,
}
legal_deps = tuple(special_types) + alldeps
def validate_deptype(deptype):
if isinstance(deptype, str):
if deptype not in legal_deps:
raise InvalidDependencyTypeError(
"Invalid dependency type: %s" % deptype)
elif isinstance(deptype, (list, tuple)):
for t in deptype:
validate_deptype(t)
elif deptype is None:
raise InvalidDependencyTypeError("deptype cannot be None!")
def canonical_deptype(deptype):
if deptype is None:
return alldeps
elif isinstance(deptype, str):
return special_types.get(deptype, (deptype,))
elif isinstance(deptype, (tuple, list)):
return (sum((canonical_deptype(d) for d in deptype), ()))
return deptype
def colorize_spec(spec):
"""Returns a spec colorized according to the colors specified in
@ -527,6 +594,14 @@ def __init__(self, spec_like, *dep_like, **kwargs):
# XXX(deptype): default deptypes
self._add_dependency(spec, ('build', 'link'))
def __getattr__(self, item):
"""Delegate to self.package if the attribute is not in the spec"""
# This line is to avoid infinite recursion in case package is
# not present among self attributes
if item.endswith('libs'):
return getattr(self.package, item)
raise AttributeError()
def get_dependency(self, name):
dep = self._dependencies.get(name)
if dep is not None:
@ -534,17 +609,8 @@ def get_dependency(self, name):
raise InvalidDependencyException(
self.name + " does not depend on " + comma_or(name))
def _deptype_norm(self, deptype):
if deptype is None:
return alldeps
# Force deptype to be a set object so that we can do set intersections.
if isinstance(deptype, str):
# Support special deptypes.
return special_types.get(deptype, (deptype,))
return deptype
def _find_deps(self, where, deptype):
deptype = self._deptype_norm(deptype)
deptype = canonical_deptype(deptype)
return [dep.spec
for dep in where.values()
@ -557,7 +623,7 @@ def dependents(self, deptype=None):
return self._find_deps(self._dependents, deptype)
def _find_deps_dict(self, where, deptype):
deptype = self._deptype_norm(deptype)
deptype = canonical_deptype(deptype)
return dict((dep.spec.name, dep)
for dep in where.values()
@ -1353,12 +1419,11 @@ def flat_dependencies_with_deptype(self, **kwargs):
# parser doesn't allow it. Spack must be broken!
raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message)
def index(self):
def index(self, deptype=None):
"""Return DependencyMap that points to all the dependencies in this
spec."""
dm = DependencyMap()
# XXX(deptype): use a deptype kwarg.
for spec in self.traverse():
for spec in self.traverse(deptype=deptype):
dm[spec.name] = spec
return dm
@ -1561,7 +1626,7 @@ def normalize(self, force=False):
# actually deps of this package. Raise an error.
extra = set(spec_deps.keys()).difference(visited)
if extra:
raise InvalidDependencyException(
raise InvalidDependencyError(
self.name + " does not depend on " + comma_or(extra))
# Mark the spec as normal once done.
@ -2659,17 +2724,11 @@ def parse_anonymous_spec(spec_like, pkg_name):
class SpecError(spack.error.SpackError):
"""Superclass for all errors that occur while constructing specs."""
def __init__(self, message):
super(SpecError, self).__init__(message)
class SpecParseError(SpecError):
"""Wrapper for ParseError for when we're parsing specs."""
def __init__(self, parse_error):
super(SpecParseError, self).__init__(parse_error.message)
self.string = parse_error.string
@ -2677,79 +2736,53 @@ def __init__(self, parse_error):
class DuplicateDependencyError(SpecError):
"""Raised when the same dependency occurs in a spec twice."""
def __init__(self, message):
super(DuplicateDependencyError, self).__init__(message)
class DuplicateVariantError(SpecError):
"""Raised when the same variant occurs in a spec twice."""
def __init__(self, message):
super(DuplicateVariantError, self).__init__(message)
class DuplicateCompilerSpecError(SpecError):
"""Raised when the same compiler occurs in a spec twice."""
def __init__(self, message):
super(DuplicateCompilerSpecError, self).__init__(message)
class UnsupportedCompilerError(SpecError):
"""Raised when the user asks for a compiler spack doesn't know about."""
def __init__(self, compiler_name):
super(UnsupportedCompilerError, self).__init__(
"The '%s' compiler is not yet supported." % compiler_name)
class UnknownVariantError(SpecError):
"""Raised when the same variant occurs in a spec twice."""
def __init__(self, pkg, variant):
super(UnknownVariantError, self).__init__(
"Package %s has no variant %s!" % (pkg, variant))
class DuplicateArchitectureError(SpecError):
"""Raised when the same architecture occurs in a spec twice."""
def __init__(self, message):
super(DuplicateArchitectureError, self).__init__(message)
class InconsistentSpecError(SpecError):
"""Raised when two nodes in the same spec DAG have inconsistent
constraints."""
def __init__(self, message):
super(InconsistentSpecError, self).__init__(message)
class InvalidDependencyException(SpecError):
class InvalidDependencyError(SpecError):
"""Raised when a dependency in a spec is not actually a dependency
of the package."""
def __init__(self, message):
super(InvalidDependencyException, self).__init__(message)
class InvalidDependencyTypeError(SpecError):
"""Raised when a dependency type is not a legal Spack dep type."""
class NoProviderError(SpecError):
"""Raised when there is no package that provides a particular
virtual dependency.
"""
def __init__(self, vpkg):
super(NoProviderError, self).__init__(
"No providers found for virtual package: '%s'" % vpkg)
@ -2757,11 +2790,9 @@ def __init__(self, vpkg):
class MultipleProviderError(SpecError):
"""Raised when there is no package that provides a particular
virtual dependency.
"""
def __init__(self, vpkg, providers):
"""Takes the name of the vpkg"""
super(MultipleProviderError, self).__init__(
@ -2772,10 +2803,8 @@ def __init__(self, vpkg, providers):
class UnsatisfiableSpecError(SpecError):
"""Raised when a spec conflicts with package constraints.
Provide the requirement that was violated when raising."""
def __init__(self, provided, required, constraint_type):
super(UnsatisfiableSpecError, self).__init__(
"%s does not satisfy %s" % (provided, required))
@ -2785,89 +2814,70 @@ def __init__(self, provided, required, constraint_type):
class UnsatisfiableSpecNameError(UnsatisfiableSpecError):
"""Raised when two specs aren't even for the same package."""
def __init__(self, provided, required):
super(UnsatisfiableSpecNameError, self).__init__(
provided, required, "name")
class UnsatisfiableVersionSpecError(UnsatisfiableSpecError):
"""Raised when a spec version conflicts with package constraints."""
def __init__(self, provided, required):
super(UnsatisfiableVersionSpecError, self).__init__(
provided, required, "version")
class UnsatisfiableCompilerSpecError(UnsatisfiableSpecError):
"""Raised when a spec comiler conflicts with package constraints."""
def __init__(self, provided, required):
super(UnsatisfiableCompilerSpecError, self).__init__(
provided, required, "compiler")
class UnsatisfiableVariantSpecError(UnsatisfiableSpecError):
"""Raised when a spec variant conflicts with package constraints."""
def __init__(self, provided, required):
super(UnsatisfiableVariantSpecError, self).__init__(
provided, required, "variant")
class UnsatisfiableCompilerFlagSpecError(UnsatisfiableSpecError):
"""Raised when a spec variant conflicts with package constraints."""
def __init__(self, provided, required):
super(UnsatisfiableCompilerFlagSpecError, self).__init__(
provided, required, "compiler_flags")
class UnsatisfiableArchitectureSpecError(UnsatisfiableSpecError):
"""Raised when a spec architecture conflicts with package constraints."""
def __init__(self, provided, required):
super(UnsatisfiableArchitectureSpecError, self).__init__(
provided, required, "architecture")
class UnsatisfiableProviderSpecError(UnsatisfiableSpecError):
"""Raised when a provider is supplied but constraints don't match
a vpkg requirement"""
def __init__(self, provided, required):
super(UnsatisfiableProviderSpecError, self).__init__(
provided, required, "provider")
# TODO: get rid of this and be more specific about particular incompatible
# dep constraints
class UnsatisfiableDependencySpecError(UnsatisfiableSpecError):
"""Raised when some dependency of constrained specs are incompatible"""
def __init__(self, provided, required):
super(UnsatisfiableDependencySpecError, self).__init__(
provided, required, "dependency")
class SpackYAMLError(spack.error.SpackError):
def __init__(self, msg, yaml_error):
super(SpackYAMLError, self).__init__(msg, str(yaml_error))
class AmbiguousHashError(SpecError):
def __init__(self, msg, *specs):
super(AmbiguousHashError, self).__init__(msg)
for spec in specs:

View File

@ -216,9 +216,9 @@ def _need_to_create_path(self):
def expected_archive_files(self):
"""Possible archive file paths."""
paths = []
if isinstance(self.fetcher, fs.URLFetchStrategy):
if isinstance(self.default_fetcher, fs.URLFetchStrategy):
paths.append(os.path.join(
self.path, os.path.basename(self.fetcher.url)))
self.path, os.path.basename(self.default_fetcher.url)))
if self.mirror_path:
paths.append(os.path.join(
@ -226,19 +226,19 @@ def expected_archive_files(self):
return paths
@property
def save_filename(self):
    """Preferred path under which to save a fetched archive.

    Returns the first entry of ``expected_archive_files``.  Implicitly
    returns None when there are no candidate filenames.
    """
    possible_filenames = self.expected_archive_files
    if possible_filenames:
        # This prefers using the URL associated with the default fetcher if
        # available, so that the fetched resource name matches the remote
        # name
        return possible_filenames[0]
@property
def archive_file(self):
"""Path to the source archive within this stage directory."""
paths = []
if isinstance(self.fetcher, fs.URLFetchStrategy):
paths.append(os.path.join(
self.path, os.path.basename(self.fetcher.url)))
if self.mirror_path:
paths.append(os.path.join(
self.path, os.path.basename(self.mirror_path)))
for path in paths:
for path in self.expected_archive_files:
if os.path.exists(path):
return path
else:
@ -301,8 +301,10 @@ def fetch(self, mirror_only=False):
# then use the same digest. `spack mirror` ensures that
# the checksum will be the same.
digest = None
expand = True
if isinstance(self.default_fetcher, fs.URLFetchStrategy):
digest = self.default_fetcher.digest
expand = self.default_fetcher.expand_archive
# Have to skip the checksum for things archived from
# repositories. How can this be made safer?
@ -310,9 +312,11 @@ def fetch(self, mirror_only=False):
# Add URL strategies for all the mirrors with the digest
for url in urls:
fetchers.insert(0, fs.URLFetchStrategy(url, digest))
fetchers.insert(0, spack.fetch_cache.fetcher(self.mirror_path,
digest))
fetchers.insert(
0, fs.URLFetchStrategy(url, digest, expand=expand))
fetchers.insert(
0, spack.fetch_cache.fetcher(
self.mirror_path, digest, expand=expand))
# Look for the archive in list_url
package_name = os.path.dirname(self.mirror_path)

View File

@ -53,6 +53,7 @@
'git_fetch',
'hg_fetch',
'install',
'library_list',
'link_tree',
'lock',
'make_executable',

View File

@ -223,6 +223,8 @@ def test_dep_rpath(self):
def test_dep_include(self):
"""Ensure a single dependency include directory is added."""
os.environ['SPACK_DEPENDENCIES'] = self.dep4
os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
self.check_cc('dump-args', test_command,
self.realcc + ' ' +
'-Wl,-rpath,' + self.prefix + '/lib ' +
@ -233,6 +235,8 @@ def test_dep_include(self):
def test_dep_lib(self):
"""Ensure a single dependency RPATH is added."""
os.environ['SPACK_DEPENDENCIES'] = self.dep2
os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
self.check_cc('dump-args', test_command,
self.realcc + ' ' +
'-Wl,-rpath,' + self.prefix + '/lib ' +
@ -241,10 +245,34 @@ def test_dep_lib(self):
'-Wl,-rpath,' + self.dep2 + '/lib64 ' +
' '.join(test_command))
def test_dep_lib_no_rpath(self):
    """Ensure a single dependency link flag is added with no dep RPATH."""
    os.environ['SPACK_DEPENDENCIES'] = self.dep2
    # Only SPACK_LINK_DEPS is set (SPACK_RPATH_DEPS deliberately unset),
    # so the dependency contributes a -L flag but no -Wl,-rpath entry.
    os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
    self.check_cc('dump-args', test_command,
                  self.realcc + ' ' +
                  '-Wl,-rpath,' + self.prefix + '/lib ' +
                  '-Wl,-rpath,' + self.prefix + '/lib64 ' +
                  '-L' + self.dep2 + '/lib64 ' +
                  ' '.join(test_command))
def test_dep_lib_no_lib(self):
    """Ensure a single dependency RPATH is added with no -L."""
    os.environ['SPACK_DEPENDENCIES'] = self.dep2
    # Only SPACK_RPATH_DEPS is set (SPACK_LINK_DEPS deliberately unset),
    # so the dependency contributes a -Wl,-rpath entry but no -L flag.
    os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
    self.check_cc('dump-args', test_command,
                  self.realcc + ' ' +
                  '-Wl,-rpath,' + self.prefix + '/lib ' +
                  '-Wl,-rpath,' + self.prefix + '/lib64 ' +
                  '-Wl,-rpath,' + self.dep2 + '/lib64 ' +
                  ' '.join(test_command))
def test_all_deps(self):
"""Ensure includes and RPATHs for all deps are added. """
os.environ['SPACK_DEPENDENCIES'] = ':'.join([
self.dep1, self.dep2, self.dep3, self.dep4])
os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
# This is probably more constrained than it needs to be; it
# checks order within prepended args and doesn't strictly have
@ -273,6 +301,8 @@ def test_ld_deps(self):
"""Ensure no (extra) -I args or -Wl, are passed in ld mode."""
os.environ['SPACK_DEPENDENCIES'] = ':'.join([
self.dep1, self.dep2, self.dep3, self.dep4])
os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
self.check_ld('dump-args', test_command,
'ld ' +
@ -290,10 +320,46 @@ def test_ld_deps(self):
' '.join(test_command))
def test_ld_deps_no_rpath(self):
    """Ensure SPACK_RPATH_DEPS controls RPATHs for ld."""
    os.environ['SPACK_DEPENDENCIES'] = ':'.join([
        self.dep1, self.dep2, self.dep3, self.dep4])
    # SPACK_RPATH_DEPS is left unset, so dependency dirs appear only as
    # -L flags; the prefix itself still gets -rpath entries.
    os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
    self.check_ld('dump-args', test_command,
                  'ld ' +
                  '-rpath ' + self.prefix + '/lib ' +
                  '-rpath ' + self.prefix + '/lib64 ' +
                  '-L' + self.dep3 + '/lib64 ' +
                  '-L' + self.dep2 + '/lib64 ' +
                  '-L' + self.dep1 + '/lib ' +
                  ' '.join(test_command))
def test_ld_deps_no_link(self):
    """Ensure SPACK_LINK_DEPS controls -L for ld."""
    os.environ['SPACK_DEPENDENCIES'] = ':'.join([
        self.dep1, self.dep2, self.dep3, self.dep4])
    # SPACK_LINK_DEPS is left unset, so dependency dirs appear only as
    # -rpath entries, with no -L flags.
    os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
    self.check_ld('dump-args', test_command,
                  'ld ' +
                  '-rpath ' + self.prefix + '/lib ' +
                  '-rpath ' + self.prefix + '/lib64 ' +
                  '-rpath ' + self.dep3 + '/lib64 ' +
                  '-rpath ' + self.dep2 + '/lib64 ' +
                  '-rpath ' + self.dep1 + '/lib ' +
                  ' '.join(test_command))
def test_ld_deps_reentrant(self):
"""Make sure ld -r is handled correctly on OS's where it doesn't
support rpaths."""
os.environ['SPACK_DEPENDENCIES'] = ':'.join([self.dep1])
os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
os.environ['SPACK_SHORT_SPEC'] = "foo@1.2=linux-x86_64"
reentrant_test_command = ['-r'] + test_command

View File

@ -26,8 +26,8 @@
These tests check the database is functioning properly,
both in memory and in its file
"""
import os.path
import multiprocessing
import os.path
import spack
from llnl.util.filesystem import join_path
@ -88,16 +88,16 @@ def test_010_all_install_sanity(self):
# query specs with multiple configurations
mpileaks_specs = [s for s in all_specs if s.satisfies('mpileaks')]
callpath_specs = [s for s in all_specs if s.satisfies('callpath')]
mpi_specs = [s for s in all_specs if s.satisfies('mpi')]
mpi_specs = [s for s in all_specs if s.satisfies('mpi')]
self.assertEqual(len(mpileaks_specs), 3)
self.assertEqual(len(callpath_specs), 3)
self.assertEqual(len(mpi_specs), 3)
# query specs with single configurations
dyninst_specs = [s for s in all_specs if s.satisfies('dyninst')]
dyninst_specs = [s for s in all_specs if s.satisfies('dyninst')]
libdwarf_specs = [s for s in all_specs if s.satisfies('libdwarf')]
libelf_specs = [s for s in all_specs if s.satisfies('libelf')]
libelf_specs = [s for s in all_specs if s.satisfies('libelf')]
self.assertEqual(len(dyninst_specs), 1)
self.assertEqual(len(libdwarf_specs), 1)
@ -124,6 +124,19 @@ def test_015_write_and_read(self):
self.assertEqual(new_rec.path, rec.path)
self.assertEqual(new_rec.installed, rec.installed)
def _check_merkleiness(self):
    """Ensure the spack database is a valid merkle graph.

    Every dependency spec with a given DAG hash must be the very same
    Python object (compared by ``id()``) wherever it appears — i.e.
    equal subtrees are shared, not duplicated.
    """
    all_specs = spack.installed_db.query(installed=any)
    seen = {}  # maps dag_hash -> id() of the first spec object seen
    for spec in all_specs:
        for dep in spec.dependencies():
            hash_key = dep.dag_hash()
            if hash_key not in seen:
                seen[hash_key] = id(dep)
            else:
                # A repeated hash must refer to the same object.
                self.assertEqual(seen[hash_key], id(dep))
def _check_db_sanity(self):
"""Utiilty function to check db against install layout."""
expected = sorted(spack.install_layout.all_specs())
@ -133,10 +146,17 @@ def _check_db_sanity(self):
for e, a in zip(expected, actual):
self.assertEqual(e, a)
self._check_merkleiness()
def test_020_db_sanity(self):
"""Make sure query() returns what's actually in the db."""
self._check_db_sanity()
def test_025_reindex(self):
    """Make sure reindex works and ref counts are valid."""
    # Rebuild the database index from the install layout, then verify
    # the DB still matches the layout and remains a valid merkle graph.
    spack.installed_db.reindex(spack.install_layout)
    self._check_db_sanity()
def test_030_db_sanity_from_another_process(self):
def read_and_modify():
self._check_db_sanity() # check that other process can read DB
@ -163,16 +183,16 @@ def test_050_basic_query(self):
# query specs with multiple configurations
mpileaks_specs = self.installed_db.query('mpileaks')
callpath_specs = self.installed_db.query('callpath')
mpi_specs = self.installed_db.query('mpi')
mpi_specs = self.installed_db.query('mpi')
self.assertEqual(len(mpileaks_specs), 3)
self.assertEqual(len(callpath_specs), 3)
self.assertEqual(len(mpi_specs), 3)
# query specs with single configurations
dyninst_specs = self.installed_db.query('dyninst')
dyninst_specs = self.installed_db.query('dyninst')
libdwarf_specs = self.installed_db.query('libdwarf')
libelf_specs = self.installed_db.query('libelf')
libelf_specs = self.installed_db.query('libelf')
self.assertEqual(len(dyninst_specs), 1)
self.assertEqual(len(libdwarf_specs), 1)
@ -203,9 +223,10 @@ def _check_remove_and_add_package(self, spec):
self.assertTrue(concrete_spec not in remaining)
# add it back and make sure everything is ok.
self.installed_db.add(concrete_spec, "")
self.installed_db.add(concrete_spec, spack.install_layout)
installed = self.installed_db.query()
self.assertEqual(len(installed), len(original))
self.assertTrue(concrete_spec in installed)
self.assertEqual(installed, original)
# sanity check against direcory layout and check ref counts.
self._check_db_sanity()
@ -233,7 +254,7 @@ def test_080_root_ref_counts(self):
self.assertEqual(self.installed_db.get_record('mpich').ref_count, 1)
# Put the spec back
self.installed_db.add(rec.spec, rec.path)
self.installed_db.add(rec.spec, spack.install_layout)
# record is present again
self.assertEqual(

View File

@ -0,0 +1,111 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import unittest
from llnl.util.filesystem import LibraryList
class LibraryListTest(unittest.TestCase):
    """Tests for ``LibraryList``: a list of library paths with accessors
    for link/search flags, library names and directories."""

    def setUp(self):
        # Mix of absolute paths with different extensions (.a, .dylib,
        # .so) plus one bare filename with no directory ('libbaz.so').
        l = [
            '/dir1/liblapack.a',
            '/dir2/libfoo.dylib',
            '/dir1/libblas.a',
            '/dir3/libbar.so',
            'libbaz.so'
        ]
        self.liblist = LibraryList(l)

    def test_repr(self):
        # repr() must round-trip through eval() to an equal object.
        x = eval(repr(self.liblist))
        self.assertEqual(self.liblist, x)

    def test_joined_and_str(self):
        # joined() defaults to space separation and matches str().
        s1 = self.liblist.joined()
        self.assertEqual(
            s1,
            '/dir1/liblapack.a /dir2/libfoo.dylib /dir1/libblas.a /dir3/libbar.so libbaz.so'  # NOQA: ignore=E501
        )
        s2 = str(self.liblist)
        self.assertEqual(s1, s2)
        # A custom separator is honored.
        s3 = self.liblist.joined(';')
        self.assertEqual(
            s3,
            '/dir1/liblapack.a;/dir2/libfoo.dylib;/dir1/libblas.a;/dir3/libbar.so;libbaz.so'  # NOQA: ignore=E501
        )

    def test_flags(self):
        # search_flags: a single string containing one -L per directory.
        search_flags = self.liblist.search_flags
        self.assertTrue('-L/dir1' in search_flags)
        self.assertTrue('-L/dir2' in search_flags)
        self.assertTrue('-L/dir3' in search_flags)
        self.assertTrue(isinstance(search_flags, str))
        # link_flags: one -l<name> per library, in list order.
        link_flags = self.liblist.link_flags
        self.assertEqual(
            link_flags,
            '-llapack -lfoo -lblas -lbar -lbaz'
        )
        # ld_flags is search flags followed by link flags.
        ld_flags = self.liblist.ld_flags
        self.assertEqual(ld_flags, search_flags + ' ' + link_flags)

    def test_paths_manipulation(self):
        # Library names have the 'lib' prefix and extension stripped.
        names = self.liblist.names
        self.assertEqual(names, ['lapack', 'foo', 'blas', 'bar', 'baz'])
        # Directories are reported de-duplicated ('/dir1' appears once).
        directories = self.liblist.directories
        self.assertEqual(directories, ['/dir1', '/dir2', '/dir3'])

    def test_get_item(self):
        # Integer indexing returns the raw path string.
        a = self.liblist[0]
        self.assertEqual(a, '/dir1/liblapack.a')
        # Slicing returns a new, equal LibraryList (not the same object).
        b = self.liblist[:]
        self.assertEqual(type(b), type(self.liblist))
        self.assertEqual(self.liblist, b)
        self.assertTrue(self.liblist is not b)

    def test_add(self):
        pylist = [
            '/dir1/liblapack.a',  # removed from the final list
            '/dir2/libbaz.so',
            '/dir4/libnew.a'
        ]
        another = LibraryList(pylist)
        # 5 + 3 entries with one duplicate -> 7 in the concatenation.
        l = self.liblist + another
        self.assertEqual(len(l), 7)
        # Invariant : l == l + l
        self.assertEqual(l, l + l)
        # Always produce an instance of LibraryList
        self.assertEqual(
            type(self.liblist),
            type(self.liblist + pylist)
        )
        self.assertEqual(
            type(pylist + self.liblist),
            type(self.liblist)
        )

View File

@ -157,6 +157,35 @@ def test_write_lock_timeout_with_multiple_readers_3_2(self):
self.acquire_read, self.acquire_read, self.acquire_read,
self.timeout_write, self.timeout_write)
#
# Test that read can be upgraded to write.
#
def test_upgrade_read_to_write(self):
    """A holder of a read lock can upgrade to a write lock and back."""
    # ensure lock file exists the first time, so we open it read-only
    # to begin with.
    touch(self.lock_path)
    lock = Lock(self.lock_path)
    # No locks held initially.
    self.assertTrue(lock._reads == 0)
    self.assertTrue(lock._writes == 0)
    # Take a read lock.
    lock.acquire_read()
    self.assertTrue(lock._reads == 1)
    self.assertTrue(lock._writes == 0)
    # Upgrade: write lock acquired while the read lock is still held.
    lock.acquire_write()
    self.assertTrue(lock._reads == 1)
    self.assertTrue(lock._writes == 1)
    # Release the write lock first; the read lock remains held.
    lock.release_write()
    self.assertTrue(lock._reads == 1)
    self.assertTrue(lock._writes == 0)
    # Releasing the last lock closes the underlying file descriptor.
    lock.release_read()
    self.assertTrue(lock._reads == 0)
    self.assertTrue(lock._writes == 0)
    self.assertTrue(lock._fd is None)
#
# Longer test case that ensures locks are reusable. Ordering is
# enforced by barriers throughout -- steps are shown with numbers.

View File

@ -49,105 +49,10 @@ def mock_open(filename, mode):
handle.close()
configuration_autoload_direct = {
'enable': ['tcl'],
'tcl': {
'all': {
'autoload': 'direct'
}
}
}
configuration_autoload_all = {
'enable': ['tcl'],
'tcl': {
'all': {
'autoload': 'all'
}
}
}
configuration_prerequisites_direct = {
'enable': ['tcl'],
'tcl': {
'all': {
'prerequisites': 'direct'
}
}
}
configuration_prerequisites_all = {
'enable': ['tcl'],
'tcl': {
'all': {
'prerequisites': 'all'
}
}
}
configuration_alter_environment = {
'enable': ['tcl'],
'tcl': {
'all': {
'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']},
'environment': {
'set': {'{name}_ROOT': '{prefix}'}
}
},
'platform=test target=x86_64': {
'environment': {
'set': {'FOO': 'foo'},
'unset': ['BAR']
}
},
'platform=test target=x86_32': {
'load': ['foo/bar']
}
}
}
configuration_blacklist = {
'enable': ['tcl'],
'tcl': {
'whitelist': ['zmpi'],
'blacklist': ['callpath', 'mpi'],
'all': {
'autoload': 'direct'
}
}
}
configuration_conflicts = {
'enable': ['tcl'],
'tcl': {
'naming_scheme': '{name}/{version}-{compiler.name}',
'all': {
'conflict': ['{name}', 'intel/14.0.1']
}
}
}
configuration_wrong_conflicts = {
'enable': ['tcl'],
'tcl': {
'naming_scheme': '{name}/{version}-{compiler.name}',
'all': {
'conflict': ['{name}/{compiler.name}']
}
}
}
configuration_suffix = {
'enable': ['tcl'],
'tcl': {
'mpileaks': {
'suffixes': {
'+debug': 'foo',
'~debug': 'bar'
}
}
}
}
# Spec strings that will be used throughout the tests
mpich_spec_string = 'mpich@3.0.4'
mpileaks_spec_string = 'mpileaks'
libdwarf_spec_string = 'libdwarf arch=x64-linux'
class HelperFunctionsTests(MockPackagesTest):
@ -187,61 +92,195 @@ def test_inspect_path(self):
self.assertTrue('CPATH' in names)
class TclTests(MockPackagesTest):
class ModuleFileGeneratorTests(MockPackagesTest):
"""
Base class to test module file generators. Relies on child having defined
a 'factory' attribute to create an instance of the generator to be tested.
"""
def setUp(self):
super(TclTests, self).setUp()
self.configuration_obj = spack.modules.CONFIGURATION
super(ModuleFileGeneratorTests, self).setUp()
self.configuration_instance = spack.modules.CONFIGURATION
self.module_types_instance = spack.modules.module_types
spack.modules.open = mock_open
# Make sure that a non-mocked configuration will trigger an error
spack.modules.CONFIGURATION = None
spack.modules.module_types = {self.factory.name: self.factory}
def tearDown(self):
del spack.modules.open
spack.modules.CONFIGURATION = self.configuration_obj
super(TclTests, self).tearDown()
spack.modules.module_types = self.module_types_instance
spack.modules.CONFIGURATION = self.configuration_instance
super(ModuleFileGeneratorTests, self).tearDown()
def get_modulefile_content(self, spec):
spec.concretize()
generator = spack.modules.TclModule(spec)
generator = self.factory(spec)
generator.write()
content = FILE_REGISTRY[generator.file_name].split('\n')
return content
class TclTests(ModuleFileGeneratorTests):
factory = spack.modules.TclModule
configuration_autoload_direct = {
'enable': ['tcl'],
'tcl': {
'all': {
'autoload': 'direct'
}
}
}
configuration_autoload_all = {
'enable': ['tcl'],
'tcl': {
'all': {
'autoload': 'all'
}
}
}
configuration_prerequisites_direct = {
'enable': ['tcl'],
'tcl': {
'all': {
'prerequisites': 'direct'
}
}
}
configuration_prerequisites_all = {
'enable': ['tcl'],
'tcl': {
'all': {
'prerequisites': 'all'
}
}
}
configuration_alter_environment = {
'enable': ['tcl'],
'tcl': {
'all': {
'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']},
'environment': {
'set': {'{name}_ROOT': '{prefix}'}
}
},
'platform=test target=x86_64': {
'environment': {
'set': {'FOO': 'foo'},
'unset': ['BAR']
}
},
'platform=test target=x86_32': {
'load': ['foo/bar']
}
}
}
configuration_blacklist = {
'enable': ['tcl'],
'tcl': {
'whitelist': ['zmpi'],
'blacklist': ['callpath', 'mpi'],
'all': {
'autoload': 'direct'
}
}
}
configuration_conflicts = {
'enable': ['tcl'],
'tcl': {
'naming_scheme': '{name}/{version}-{compiler.name}',
'all': {
'conflict': ['{name}', 'intel/14.0.1']
}
}
}
configuration_wrong_conflicts = {
'enable': ['tcl'],
'tcl': {
'naming_scheme': '{name}/{version}-{compiler.name}',
'all': {
'conflict': ['{name}/{compiler.name}']
}
}
}
configuration_suffix = {
'enable': ['tcl'],
'tcl': {
'mpileaks': {
'suffixes': {
'+debug': 'foo',
'~debug': 'bar'
}
}
}
}
def test_simple_case(self):
spack.modules.CONFIGURATION = configuration_autoload_direct
spec = spack.spec.Spec('mpich@3.0.4')
spack.modules.CONFIGURATION = self.configuration_autoload_direct
spec = spack.spec.Spec(mpich_spec_string)
content = self.get_modulefile_content(spec)
self.assertTrue('module-whatis "mpich @3.0.4"' in content)
self.assertRaises(TypeError, spack.modules.dependencies,
spec, 'non-existing-tag')
def test_autoload(self):
spack.modules.CONFIGURATION = configuration_autoload_direct
spec = spack.spec.Spec('mpileaks')
spack.modules.CONFIGURATION = self.configuration_autoload_direct
spec = spack.spec.Spec(mpileaks_spec_string)
content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'is-loaded' in x]), 2)
self.assertEqual(len([x for x in content if 'module load ' in x]), 2)
spack.modules.CONFIGURATION = configuration_autoload_all
spec = spack.spec.Spec('mpileaks')
spack.modules.CONFIGURATION = self.configuration_autoload_all
spec = spack.spec.Spec(mpileaks_spec_string)
content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'is-loaded' in x]), 5)
self.assertEqual(len([x for x in content if 'module load ' in x]), 5)
# dtbuild1 has
# - 1 ('run',) dependency
# - 1 ('build','link') dependency
# - 1 ('build',) dependency
# Just make sure the 'build' dependency is not there
spack.modules.CONFIGURATION = self.configuration_autoload_direct
spec = spack.spec.Spec('dtbuild1')
content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'is-loaded' in x]), 2)
self.assertEqual(len([x for x in content if 'module load ' in x]), 2)
# dtbuild1 has
# - 1 ('run',) dependency
# - 1 ('build','link') dependency
# - 1 ('build',) dependency
# Just make sure the 'build' dependency is not there
spack.modules.CONFIGURATION = self.configuration_autoload_all
spec = spack.spec.Spec('dtbuild1')
content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'is-loaded' in x]), 2)
self.assertEqual(len([x for x in content if 'module load ' in x]), 2)
def test_prerequisites(self):
spack.modules.CONFIGURATION = configuration_prerequisites_direct
spack.modules.CONFIGURATION = self.configuration_prerequisites_direct
spec = spack.spec.Spec('mpileaks arch=x86-linux')
content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'prereq' in x]), 2)
spack.modules.CONFIGURATION = configuration_prerequisites_all
spack.modules.CONFIGURATION = self.configuration_prerequisites_all
spec = spack.spec.Spec('mpileaks arch=x86-linux')
content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'prereq' in x]), 5)
def test_alter_environment(self):
spack.modules.CONFIGURATION = configuration_alter_environment
spack.modules.CONFIGURATION = self.configuration_alter_environment
spec = spack.spec.Spec('mpileaks platform=test target=x86_64')
content = self.get_modulefile_content(spec)
self.assertEqual(
@ -271,7 +310,7 @@ def test_alter_environment(self):
len([x for x in content if 'setenv LIBDWARF_ROOT' in x]), 1)
def test_blacklist(self):
spack.modules.CONFIGURATION = configuration_blacklist
spack.modules.CONFIGURATION = self.configuration_blacklist
spec = spack.spec.Spec('mpileaks ^zmpi')
content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'is-loaded' in x]), 1)
@ -285,7 +324,7 @@ def test_blacklist(self):
self.assertEqual(len([x for x in content if 'module load ' in x]), 1)
def test_conflicts(self):
spack.modules.CONFIGURATION = configuration_conflicts
spack.modules.CONFIGURATION = self.configuration_conflicts
spec = spack.spec.Spec('mpileaks')
content = self.get_modulefile_content(spec)
self.assertEqual(
@ -295,11 +334,11 @@ def test_conflicts(self):
self.assertEqual(
len([x for x in content if x == 'conflict intel/14.0.1']), 1)
spack.modules.CONFIGURATION = configuration_wrong_conflicts
spack.modules.CONFIGURATION = self.configuration_wrong_conflicts
self.assertRaises(SystemExit, self.get_modulefile_content, spec)
def test_suffixes(self):
spack.modules.CONFIGURATION = configuration_suffix
spack.modules.CONFIGURATION = self.configuration_suffix
spec = spack.spec.Spec('mpileaks+debug arch=x86-linux')
spec.concretize()
generator = spack.modules.TclModule(spec)
@ -311,18 +350,123 @@ def test_suffixes(self):
self.assertTrue('bar' in generator.use_name)
configuration_dotkit = {
'enable': ['dotkit'],
'dotkit': {
'all': {
'prerequisites': 'direct'
class LmodTests(ModuleFileGeneratorTests):
    """Checks the generation of Lmod (lua) module files from concrete specs
    under several module-generation configurations."""
    # Generator class exercised by the shared ModuleFileGeneratorTests harness.
    factory = spack.modules.LmodModule
    # Configuration: autoload only the *direct* dependencies of a spec.
    configuration_autoload_direct = {
        'enable': ['lmod'],
        'lmod': {
            'all': {
                'autoload': 'direct'
            }
        }
    }
    # Configuration: autoload the full (transitive) dependency closure.
    configuration_autoload_all = {
        'enable': ['lmod'],
        'lmod': {
            'all': {
                'autoload': 'all'
            }
        }
    }
    # Configuration: filter CMAKE_PREFIX_PATH out of every module file, and
    # apply per-target environment modifications / extra module loads.
    configuration_alter_environment = {
        'enable': ['lmod'],
        'lmod': {
            'all': {
                'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']}
            },
            'platform=test target=x86_64': {
                'environment': {
                    'set': {'FOO': 'foo'},
                    'unset': ['BAR']
                }
            },
            'platform=test target=x86_32': {
                'load': ['foo/bar']
            }
        }
    }
    # Configuration: skip module generation for blacklisted packages
    # ('callpath'), while still autoloading direct dependencies.
    configuration_blacklist = {
        'enable': ['lmod'],
        'lmod': {
            'blacklist': ['callpath'],
            'all': {
                'autoload': 'direct'
            }
        }
    }
    def test_simple_case(self):
        # A plain spec should produce a lua module file carrying the
        # package's name and version in its whatis() metadata.
        spack.modules.CONFIGURATION = self.configuration_autoload_direct
        spec = spack.spec.Spec(mpich_spec_string)
        content = self.get_modulefile_content(spec)
        self.assertTrue('-- -*- lua -*-' in content)
        self.assertTrue('whatis([[Name : mpich]])' in content)
        self.assertTrue('whatis([[Version : 3.0.4]])' in content)
    def test_autoload(self):
        # Direct autoload: expect one guarded load per direct dependency
        # (2 for the mpileaks spec used here).
        spack.modules.CONFIGURATION = self.configuration_autoload_direct
        spec = spack.spec.Spec(mpileaks_spec_string)
        content = self.get_modulefile_content(spec)
        self.assertEqual(
            len([x for x in content if 'if not isloaded(' in x]), 2)
        self.assertEqual(len([x for x in content if 'load(' in x]), 2)
        # Transitive autoload: the whole dependency closure is loaded
        # (5 guarded loads for the same spec).
        spack.modules.CONFIGURATION = self.configuration_autoload_all
        spec = spack.spec.Spec(mpileaks_spec_string)
        content = self.get_modulefile_content(spec)
        self.assertEqual(
            len([x for x in content if 'if not isloaded(' in x]), 5)
        self.assertEqual(len([x for x in content if 'load(' in x]), 5)
    def test_alter_environment(self):
        spack.modules.CONFIGURATION = self.configuration_alter_environment
        # Matching target (x86_64): CMAKE_PREFIX_PATH is filtered out and
        # the configured set/unset of FOO/BAR appear in the module file.
        spec = spack.spec.Spec('mpileaks platform=test target=x86_64')
        content = self.get_modulefile_content(spec)
        self.assertEqual(
            len([x
                 for x in content
                 if x.startswith('prepend_path("CMAKE_PREFIX_PATH"')]), 0)
        self.assertEqual(
            len([x for x in content if 'setenv("FOO", "foo")' in x]), 1)
        self.assertEqual(
            len([x for x in content if 'unsetenv("BAR")' in x]), 1)
        # Non-matching target (x86_32): the x86_64-only environment
        # modifications must not leak into this module file.
        spec = spack.spec.Spec('libdwarf %clang platform=test target=x86_32')
        content = self.get_modulefile_content(spec)
        print('\n'.join(content))
        self.assertEqual(
            len([x
                 for x in content
                 if x.startswith('prepend-path("CMAKE_PREFIX_PATH"')]), 0)
        self.assertEqual(
            len([x for x in content if 'setenv("FOO", "foo")' in x]), 0)
        self.assertEqual(
            len([x for x in content if 'unsetenv("BAR")' in x]), 0)
    def test_blacklist(self):
        # With 'callpath' blacklisted, only one of the two direct
        # dependencies gets a guarded autoload in the module file.
        spack.modules.CONFIGURATION = self.configuration_blacklist
        spec = spack.spec.Spec(mpileaks_spec_string)
        content = self.get_modulefile_content(spec)
        self.assertEqual(
            len([x for x in content if 'if not isloaded(' in x]), 1)
        self.assertEqual(len([x for x in content if 'load(' in x]), 1)
class DotkitTests(MockPackagesTest):
configuration_dotkit = {
'enable': ['dotkit'],
'dotkit': {
'all': {
'prerequisites': 'direct'
}
}
}
def setUp(self):
super(DotkitTests, self).setUp()
self.configuration_obj = spack.modules.CONFIGURATION
@ -343,7 +487,7 @@ def get_modulefile_content(self, spec):
return content
def test_dotkit(self):
spack.modules.CONFIGURATION = configuration_dotkit
spack.modules.CONFIGURATION = self.configuration_dotkit
spec = spack.spec.Spec('mpileaks arch=x86-linux')
content = self.get_modulefile_content(spec)
self.assertTrue('#c spack' in content)

View File

@ -241,15 +241,15 @@ def test_unsatisfiable_architecture(self):
def test_invalid_dep(self):
spec = Spec('libelf ^mpich')
self.assertRaises(spack.spec.InvalidDependencyException,
self.assertRaises(spack.spec.InvalidDependencyError,
spec.normalize)
spec = Spec('libelf ^libdwarf')
self.assertRaises(spack.spec.InvalidDependencyException,
self.assertRaises(spack.spec.InvalidDependencyError,
spec.normalize)
spec = Spec('mpich ^dyninst ^libelf')
self.assertRaises(spack.spec.InvalidDependencyException,
self.assertRaises(spack.spec.InvalidDependencyError,
spec.normalize)
def test_equal(self):

View File

@ -24,34 +24,34 @@
##############################################################################
import unittest
import spack.spec
import spack.spec as sp
from spack.parse import Token
from spack.spec import *
# Sample output for a complex lexing.
complex_lex = [Token(ID, 'mvapich_foo'),
Token(DEP),
Token(ID, '_openmpi'),
Token(AT),
Token(ID, '1.2'),
Token(COLON),
Token(ID, '1.4'),
Token(COMMA),
Token(ID, '1.6'),
Token(PCT),
Token(ID, 'intel'),
Token(AT),
Token(ID, '12.1'),
Token(COLON),
Token(ID, '12.6'),
Token(ON),
Token(ID, 'debug'),
Token(OFF),
Token(ID, 'qt_4'),
Token(DEP),
Token(ID, 'stackwalker'),
Token(AT),
Token(ID, '8.1_1e')]
complex_lex = [Token(sp.ID, 'mvapich_foo'),
Token(sp.DEP),
Token(sp.ID, '_openmpi'),
Token(sp.AT),
Token(sp.ID, '1.2'),
Token(sp.COLON),
Token(sp.ID, '1.4'),
Token(sp.COMMA),
Token(sp.ID, '1.6'),
Token(sp.PCT),
Token(sp.ID, 'intel'),
Token(sp.AT),
Token(sp.ID, '12.1'),
Token(sp.COLON),
Token(sp.ID, '12.6'),
Token(sp.ON),
Token(sp.ID, 'debug'),
Token(sp.OFF),
Token(sp.ID, 'qt_4'),
Token(sp.DEP),
Token(sp.ID, 'stackwalker'),
Token(sp.AT),
Token(sp.ID, '8.1_1e')]
class SpecSyntaxTest(unittest.TestCase):
@ -74,16 +74,16 @@ def check_parse(self, expected, spec=None, remove_arch=True):
"""
if spec is None:
spec = expected
output = spack.spec.parse(spec)
output = sp.parse(spec)
parsed = (" ".join(str(spec) for spec in output))
self.assertEqual(expected, parsed)
def check_lex(self, tokens, spec):
"""Check that the provided spec parses to the provided token list."""
lex_output = SpecLexer().lex(spec)
lex_output = sp.SpecLexer().lex(spec)
for tok, spec_tok in zip(tokens, lex_output):
if tok.type == ID:
if tok.type == sp.ID:
self.assertEqual(tok, spec_tok)
else:
# Only check the type for non-identifiers.

View File

@ -18,11 +18,11 @@ SPACK_ROOT="$(dirname "$0")/../../.."
cd "$SPACK_ROOT"
# Add changed files that have been committed since branching off of develop
changed=($(git diff --name-only --find-renames develop... -- "$@"))
changed=($(git diff --name-only --diff-filter=ACMR develop... -- "$@"))
# Add changed files that have been staged but not yet committed
changed+=($(git diff --name-only --find-renames --cached -- "$@"))
changed+=($(git diff --name-only --diff-filter=ACMR --cached -- "$@"))
# Add changed files that are unstaged
changed+=($(git diff --name-only --find-renames -- "$@"))
changed+=($(git diff --name-only --diff-filter=ACMR -- "$@"))
# Add new files that are untracked
changed+=($(git ls-files --exclude-standard --other -- "$@"))

View File

@ -27,6 +27,9 @@ for dep in "$@"; do
spack_package=py-flake8
pip_package=flake8
;;
dot)
spack_package=graphviz
;;
git)
spack_package=git
;;

View File

@ -9,7 +9,7 @@
# run-doc-tests
#
# Notes:
# Requires sphinx, git, mercurial, and subversion.
# Requires sphinx, graphviz, git, mercurial, and subversion.
#
QA_DIR="$(dirname "$0")"
@ -20,6 +20,7 @@ DOC_DIR="$SPACK_ROOT/lib/spack/docs"
deps=(
sphinx-apidoc
sphinx-build
dot
git
hg
svn
@ -35,9 +36,7 @@ export PATH="$SPACK_ROOT/bin:$PATH"
# Allows script to be run from anywhere
cd "$DOC_DIR"
# Cleanup temporary files upon exit or when script is killed
trap 'make clean --silent' EXIT SIGINT SIGTERM
# Treat warnings as fatal errors
make clean --silent
make SPHINXOPTS=-W

View File

@ -58,7 +58,7 @@ class R(Package):
# Concrete dependencies
depends_on('readline')
depends_on('ncurses')
depends_on('icu')
depends_on('icu4c')
depends_on('glib')
depends_on('zlib')
depends_on('bzip2')

View File

@ -0,0 +1,29 @@
From 3b21a8a4150962c6938baeceacd04f619cea2fbc Mon Sep 17 00:00:00 2001
From: Norbert Podhorszki <pnorbert@ornl.gov>
Date: Thu, 1 Sep 2016 16:26:23 -0400
Subject: [PATCH] ifdef around 'bool' type. hdf5 1.10 defines bool and breaks
compiling bp2h5.c
---
utils/bp2h5/bp2h5.c | 8 +++++---
1 file changed, 5 insertions(+), 3 deletions(-)
diff --git a/utils/bp2h5/bp2h5.c b/utils/bp2h5/bp2h5.c
index 9c500c7..fa746bd 100644
--- a/utils/bp2h5/bp2h5.c
+++ b/utils/bp2h5/bp2h5.c
@@ -43,9 +43,11 @@
#include "dmalloc.h"
#endif
-typedef int bool;
-#define false 0
-#define true 1
+#ifndef bool
+ typedef int bool;
+# define false 0
+# define true 1
+#endif
bool noindex = false; // do no print array indices with data
bool printByteAsChar = false; // print 8 bit integer arrays as string

View File

@ -36,6 +36,8 @@ class Adios(Package):
homepage = "http://www.olcf.ornl.gov/center-projects/adios/"
url = "https://github.com/ornladios/ADIOS/archive/v1.10.0.tar.gz"
version('develop', git='https://github.com/ornladios/ADIOS.git',
branch='master')
version('1.10.0', 'eff450a4c0130479417cfd63186957f3')
version('1.9.0', '310ff02388bbaa2b1c1710ee970b5678')
@ -48,14 +50,14 @@ class Adios(Package):
variant('mpi', default=True, description='Enable MPI support')
variant('infiniband', default=False, description='Enable infiniband support')
# transforms
variant('zlib', default=True, description='Enable szip transform support')
variant('szip', default=False, description='Enable szip transform support')
variant('hdf5', default=False, description='Enable HDF5 transport support')
variant('netcdf', default=False, description='Enable NetCDF transport support')
# transports and serial file converters
variant('hdf5', default=False, description='Enable parallel HDF5 transport and serial bp2h5 converter')
# Lots of setting up here for this package
# module swap PrgEnv-intel PrgEnv-$COMP
# module load cray-netcdf/4.3.3.1
# module load cray-hdf5/1.8.14
# module load python/2.7.10
@ -69,9 +71,13 @@ class Adios(Package):
# optional transformations
depends_on('zlib', when='+zlib')
depends_on('szip', when='+szip')
# optional transports
depends_on('hdf5', when='+hdf5')
depends_on('netcdf', when='+netcdf')
# optional transports & file converters
depends_on('hdf5@1.8:+mpi', when='+hdf5')
# Fix ADIOS <=1.10.0 compile error on HDF5 1.10+
# https://github.com/ornladios/ADIOS/commit/3b21a8a41509
# https://github.com/LLNL/spack/issues/1683
patch('adios_1100.patch', when='@:1.10.0^hdf5@1.10:')
def validate(self, spec):
"""
@ -114,9 +120,7 @@ def install(self, spec, prefix):
if '+szip' in spec:
extra_args.append('--with-szip=%s' % spec['szip'].prefix)
if '+hdf5' in spec:
extra_args.append('--with-hdf5=%s' % spec['hdf5'].prefix)
if '+netcdf' in spec:
extra_args.append('--with-netcdf=%s' % spec['netcdf'].prefix)
extra_args.append('--with-phdf5=%s' % spec['hdf5'].prefix)
sh = which('sh')
sh('./autogen.sh')

View File

@ -46,18 +46,20 @@ class Armadillo(Package):
depends_on('hdf5', when='+hdf5')
def install(self, spec, prefix):
arpack = find_libraries(['libarpack'], root=spec[
'arpack-ng'].prefix.lib, shared=True)
superlu = find_libraries(['libsuperlu'], root=spec[
'superlu'].prefix, shared=False, recurse=True)
cmake_args = [
# ARPACK support
'-DARPACK_LIBRARY={0}/libarpack.{1}'.format(
spec['arpack-ng'].prefix.lib, dso_suffix),
'-DARPACK_LIBRARY={0}'.format(arpack.joined()),
# BLAS support
'-DBLAS_LIBRARY={0}'.format(spec['blas'].blas_shared_lib),
'-DBLAS_LIBRARY={0}'.format(spec['blas'].blas_libs.joined()),
# LAPACK support
'-DLAPACK_LIBRARY={0}'.format(spec['lapack'].lapack_shared_lib),
'-DLAPACK_LIBRARY={0}'.format(spec['lapack'].lapack_libs.joined()),
# SuperLU support
'-DSuperLU_INCLUDE_DIR={0}'.format(spec['superlu'].prefix.include),
'-DSuperLU_LIBRARY={0}/libsuperlu.a'.format(
spec['superlu'].prefix.lib64),
'-DSuperLU_LIBRARY={0}'.format(superlu.joined()),
# HDF5 support
'-DDETECT_HDF5={0}'.format('ON' if '+hdf5' in spec else 'OFF')
]

View File

@ -88,17 +88,16 @@ def install(self, spec, prefix):
options.append('-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib' % prefix)
# Make sure we use Spack's blas/lapack:
lapack_libs = spec['lapack'].lapack_libs.joined(';')
blas_libs = spec['blas'].blas_libs.joined(';')
options.extend([
'-DLAPACK_FOUND=true',
'-DLAPACK_INCLUDE_DIRS=%s' % spec['lapack'].prefix.include,
'-DLAPACK_LIBRARIES=%s' % (
spec['lapack'].lapack_shared_lib if '+shared' in spec else
spec['lapack'].lapack_static_lib),
'-DLAPACK_INCLUDE_DIRS={0}'.format(spec['lapack'].prefix.include),
'-DLAPACK_LIBRARIES={0}'.format(lapack_libs),
'-DBLAS_FOUND=true',
'-DBLAS_INCLUDE_DIRS=%s' % spec['blas'].prefix.include,
'-DBLAS_LIBRARIES=%s' % (
spec['blas'].blas_shared_lib if '+shared' in spec else
spec['blas'].blas_static_lib)
'-DBLAS_INCLUDE_DIRS={0}'.format(spec['blas'].prefix.include),
'-DBLAS_LIBRARIES={0}'.format(blas_libs)
])
if '+mpi' in spec:
@ -129,19 +128,12 @@ def install(self, spec, prefix):
'F77=%s' % spec['mpi'].mpif77
])
if '+shared' in spec:
options.extend([
'--with-blas=%s' % to_link_flags(
spec['blas'].blas_shared_lib),
'--with-lapack=%s' % to_link_flags(
spec['lapack'].lapack_shared_lib)
])
else:
options.extend([
'--with-blas=%s' % spec['blas'].blas_static_lib,
'--with-lapack=%s' % spec['lapack'].lapack_static_lib,
'--enable-shared=no'
])
options.extend([
'--with-blas={0}'.format(spec['blas'].blas_libs.ld_flags),
'--with-lapack={0}'.format(spec['lapack'].lapack_libs.ld_flags)
])
if '+shared' not in spec:
options.append('--enable-shared=no')
bootstrap()
configure(*options)

View File

@ -51,6 +51,7 @@ class Atlas(Package):
url='http://sourceforge.net/projects/math-atlas/files/Developer%20%28unstable%29/3.11.34/atlas3.11.34.tar.bz2')
variant('shared', default=True, description='Builds shared library')
variant('pthread', default=False, description='Use multithreaded libraries')
provides('blas')
provides('lapack')
@ -107,18 +108,32 @@ def install(self, spec, prefix):
make("install")
self.install_test()
def setup_dependent_package(self, module, dspec):
@property
def blas_libs(self):
# libsatlas.[so,dylib,dll ] contains all serial APIs (serial lapack,
# serial BLAS), and all ATLAS symbols needed to support them. Whereas
# libtatlas.[so,dylib,dll ] is parallel (multithreaded) version.
name = 'libsatlas.%s' % dso_suffix
libdir = find_library_path(name,
self.prefix.lib64,
self.prefix.lib)
is_threaded = '+pthread' in self.spec
if '+shared' in self.spec:
self.spec.blas_shared_lib = join_path(libdir, name)
self.spec.lapack_shared_lib = self.spec.blas_shared_lib
to_find = ['libtatlas'] if is_threaded else ['libsatlas']
shared = True
else:
interfaces = [
'libptcblas',
'libptf77blas'
] if is_threaded else [
'libcblas',
'libf77blas'
]
to_find = ['liblapack'] + interfaces + ['libatlas']
shared = False
return find_libraries(
to_find, root=self.prefix, shared=shared, recurse=True
)
@property
def lapack_libs(self):
return self.blas_libs
def install_test(self):
source_file = join_path(os.path.dirname(self.module.__file__),
@ -126,9 +141,8 @@ def install_test(self):
blessed_file = join_path(os.path.dirname(self.module.__file__),
'test_cblas_dgemm.output')
include_flags = ["-I%s" % join_path(self.spec.prefix, "include")]
link_flags = ["-L%s" % join_path(self.spec.prefix, "lib"),
"-lsatlas"]
include_flags = ["-I%s" % self.spec.prefix.include]
link_flags = self.lapack_libs.ld_flags.split()
output = compile_c_and_execute(source_file, include_flags, link_flags)
compare_output_file(output, blessed_file)

View File

@ -0,0 +1,45 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Bamtools(Package):
    """C++ API & command-line toolkit for working with BAM data."""

    homepage = "https://github.com/pezmaster31/bamtools"
    url = "https://github.com/pezmaster31/bamtools/archive/v2.4.0.tar.gz"

    version('2.4.0', '6139d00c1b1fe88fe15d094d8a74d8b9')
    version('2.3.0', 'd327df4ba037d6eb8beef65d7da75ebc')
    version('2.2.3', '6eccd3e45e4ba12a68daa3298998e76d')

    depends_on('cmake', type='build')

    def install(self, spec, prefix):
        """Configure, build and install via an out-of-source CMake build."""
        build_directory = 'spack-build'
        with working_dir(build_directory, create=True):
            # Point CMake at the unpacked sources one level up and pass
            # Spack's standard CMake arguments (install prefix, etc.).
            cmake('..', *std_cmake_args)
            make()
            make('install')

View File

@ -0,0 +1,64 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class BashCompletion(Package):
    """Programmable completion functions for bash."""
    homepage = "https://github.com/scop/bash-completion"
    url = "https://github.com/scop/bash-completion/archive/2.3.tar.gz"
    version('2.3', '67e50f5f3c804350b43f2b664c33dde811d24292')
    version('develop', git='https://github.com/scop/bash-completion.git')
    # Build dependencies (autotools chain needed to regenerate ./configure)
    depends_on('automake', type='build')
    depends_on('autoconf', type='build')
    depends_on('libtool', type='build')
    # Other dependencies
    depends_on('bash@4.1:', type='run')
    def install(self, spec, prefix):
        """Build with autotools, then print per-user setup instructions."""
        make_args = ['--prefix=%s' % prefix]
        # Release tarballs ship no configure script, so regenerate it first.
        autoreconf('-i')
        configure(*make_args)
        make()
        # make("check") # optional, requires dejagnu and tcllib
        # Parallel install is disabled — presumably the Makefile's install
        # target is not parallel-safe; TODO confirm upstream.
        make("install",
             parallel=False)
        # Guidelines for individual user as provided by the author at
        # https://github.com/scop/bash-completion
        print('=====================================================')
        print('Bash completion has been installed. To use it, please')
        print('include the following lines in your ~/.bash_profile :')
        print('')
        print('# Use bash-completion, if available')
        print('[[ $PS1 && -f %s/share/bash-completion/bash_completion ]] && \ ' % prefix)  # NOQA: ignore=E501
        print(' . %s/share/bash-completion/bash_completion' % prefix)
        print('')
        print('=====================================================')

View File

@ -0,0 +1,43 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Bedtools2(Package):
    """Collectively, the bedtools utilities are a swiss-army knife of
       tools for a wide-range of genomics analysis tasks. The most
       widely-used tools enable genome arithmetic: that is, set theory
       on the genome."""

    homepage = "https://github.com/arq5x/bedtools2"
    url = "https://github.com/arq5x/bedtools2/archive/v2.26.0.tar.gz"

    version('2.26.0', '52227e7efa6627f0f95d7d734973233d')
    version('2.25.0', '534fb4a7bf0d0c3f05be52a0160d8e3d')

    depends_on('zlib')

    def install(self, spec, prefix):
        """Build and install in one step.

        The upstream Makefile accepts the install prefix directly, so no
        separate configure stage is needed.
        """
        install_target = "install"
        prefix_arg = "prefix=%s" % prefix
        make(prefix_arg, install_target)

View File

@ -31,9 +31,9 @@ class Binutils(Package):
homepage = "http://www.gnu.org/software/binutils/"
url = "https://ftp.gnu.org/gnu/binutils/binutils-2.25.tar.bz2"
# 2.26 is incompatible with py-pillow build for some reason.
version('2.27', '2869c9bf3e60ee97c74ac2a6bf4e9d68')
version('2.26', '64146a0faa3b411ba774f47d41de239f')
version('2.25', 'd9f3303f802a5b6b0bb73a335ab89d66', preferred=True)
version('2.25', 'd9f3303f802a5b6b0bb73a335ab89d66')
version('2.24', 'e0f71a7b2ddab0f8612336ac81d9636b')
version('2.23.2', '4f8fa651e35ef262edc01d60fb45702e')
version('2.20.1', '2b9dc8f2b7dbd5ec5992c6e29de0b764')

View File

@ -41,6 +41,7 @@ class Boost(Package):
list_url = "http://sourceforge.net/projects/boost/files/boost/"
list_depth = 2
version('1.62.0', '5fb94629535c19e48703bdb2b2e9490f')
version('1.61.0', '6095876341956f65f9d35939ccea1a9f')
version('1.60.0', '65a840e1a0b13a558ff19eeb2c4f0cbe')
version('1.59.0', '6aa9a5c6a4ca1016edd0ed1178e3cb87')
@ -111,12 +112,12 @@ class Boost(Package):
description="Build multi-threaded versions of libraries")
variant('singlethreaded', default=True,
description="Build single-threaded versions of libraries")
variant('icu_support', default=False,
description="Include ICU support (for regex/locale libraries)")
variant('icu', default=False,
description="Build with Unicode and ICU suport")
variant('graph', default=False,
description="Build the Boost Graph library")
depends_on('icu', when='+icu_support')
depends_on('icu4c', when='+icu')
depends_on('python', when='+python')
depends_on('mpi', when='+mpi')
depends_on('bzip2', when='+iostreams')
@ -138,15 +139,13 @@ def url_for_version(self, version):
def determine_toolset(self, spec):
if spec.satisfies("platform=darwin"):
return 'darwin'
else:
platform = 'linux'
toolsets = {'g++': 'gcc',
'icpc': 'intel',
'clang++': 'clang'}
if spec.satisfies('@1.47:'):
toolsets['icpc'] += '-' + platform
toolsets['icpc'] += '-linux'
for cc, toolset in toolsets.iteritems():
if cc in self.compiler.cxx_names:
return toolset
@ -164,6 +163,16 @@ def determine_bootstrap_options(self, spec, withLibs, options):
join_path(spec['python'].prefix.bin, 'python'))
with open('user-config.jam', 'w') as f:
# Boost may end up using gcc even though clang+gfortran is set in
# compilers.yaml. Make sure this does not happen:
if not spec.satisfies('%intel'):
# using intel-linux : : spack_cxx in user-config.jam leads to
# error: at project-config.jam:12
# error: duplicate initialization of intel-linux with the following parameters: # noqa
# error: version = <unspecified>
# error: previous initialization at ./user-config.jam:1
f.write("using {0} : : {1} ;\n".format(boostToolsetId,
spack_cxx))
if '+mpi' in spec:
f.write('using mpi : %s ;\n' %
@ -204,7 +213,13 @@ def determine_b2_options(self, spec, options):
options.extend([
'link=%s' % ','.join(linkTypes),
'--layout=tagged'])
'--layout=tagged'
])
if not spec.satisfies('%intel'):
options.extend([
'toolset=%s' % self.determine_toolset(spec)
])
return threadingOpts

View File

@ -34,7 +34,6 @@ class BppSuite(Package):
version('2.2.0', 'd8b29ad7ccf5bd3a7beb701350c9e2a4')
# FIXME: Add dependencies if required.
depends_on('cmake', type='build')
depends_on('texinfo', type='build')
depends_on('bpp-core')

View File

@ -85,8 +85,9 @@ def install(self, spec, prefix):
# BLAS/LAPACK support
if '+lapack' in spec:
lapack_blas = spec['lapack'].lapack_libs + spec['blas'].blas_libs
options.extend([
'blas_lapack_libs=lapack,blas',
'blas_lapack_libs={0}'.format(','.join(lapack_blas.names)),
'blas_lapack_dir={0}'.format(spec['lapack'].prefix.lib)
])

View File

@ -30,17 +30,88 @@ class Cdo(Package):
Climate and NWP model Data. """
homepage = "https://code.zmaw.de/projects/cdo"
url = "https://code.zmaw.de/attachments/download/10198/cdo-1.6.9.tar.gz"
version('1.6.9', 'bf0997bf20e812f35e10188a930e24e2')
version('1.7.2', 'f08e4ce8739a4f2b63fc81a24db3ee31', url='https://code.zmaw.de/attachments/download/12760/cdo-1.7.2.tar.gz')
version('1.6.9', 'bf0997bf20e812f35e10188a930e24e2', url='https://code.zmaw.de/attachments/download/10198/cdo-1.6.9.tar.gz')
variant('mpi', default=True)
variant('szip', default=True, description='Enable szip compression for GRIB1')
variant('hdf5', default=False, description='Enable HDF5 support')
variant('netcdf', default=True, description='Enable NetCDF support')
variant('udunits2', default=True, description='Enable UDUNITS2 support')
variant('grib', default=True, description='Enable GRIB_API support')
variant('libxml2', default=True, description='Enable libxml2 support')
variant('proj', default=True, description='Enable PROJ library for cartographic projections')
variant('curl', default=True, description='Enable curl support')
variant('fftw', default=True, description='Enable support for fftw3')
variant('magics', default=True, description='Enable Magics library support')
depends_on('netcdf')
depends_on('netcdf+mpi', when='+mpi')
depends_on('netcdf~mpi', when='~mpi')
depends_on('szip', when='+szip')
depends_on('netcdf', when='+netcdf')
depends_on('hdf5+threadsafe', when='+hdf5')
depends_on('udunits2', when='+udunits2')
depends_on('grib-api', when='+grib')
depends_on('libxml2', when='+libxml2')
depends_on('proj', when='+proj')
depends_on('curl', when='+curl')
depends_on('fftw', when='+fftw')
depends_on('magics', when='+magics')
def install(self, spec, prefix):
configure('--prefix={0}'.format(prefix))
config_args = ["--prefix=" + prefix,
"--enable-shared",
"--enable-static"]
if '+szip' in spec:
config_args.append('--with-szlib=' + spec['szip'].prefix)
else:
config_args.append('--without-szlib')
if '+hdf5' in spec:
config_args.append('--with-hdf5=' + spec['hdf5'].prefix)
else:
config_args.append('--without-hdf5')
if '+netcdf' in spec:
config_args.append('--with-netcdf=' + spec['netcdf'].prefix)
else:
config_args.append('--without-netcdf')
if '+udunits2' in spec:
config_args.append('--with-udunits2=' + spec['udunits2'].prefix)
else:
config_args.append('--without-udunits2')
if '+grib' in spec:
config_args.append('--with-grib_api=' + spec['grib-api'].prefix)
else:
config_args.append('--without-grib_api')
if '+libxml2' in spec:
config_args.append('--with-libxml2=' + spec['libxml2'].prefix)
else:
config_args.append('--without-libxml2')
if '+proj' in spec:
config_args.append('--with-proj=' + spec['proj'].prefix)
else:
config_args.append('--without-proj')
if '+curl' in spec:
config_args.append('--with-curl=' + spec['curl'].prefix)
else:
config_args.append('--without-curl')
if '+fftw' in spec:
config_args.append('--with-fftw3')
else:
config_args.append('--without-fftw3')
if '+magics' in spec:
config_args.append('--with-magics=' + spec['magics'].prefix)
else:
config_args.append('--without-magics')
configure(*config_args)
make()
make('install')

View File

@ -39,6 +39,7 @@ class Cereal(Package):
homepage = "http://uscilab.github.io/cereal/"
url = "https://github.com/USCiLab/cereal/archive/v1.1.2.tar.gz"
version('1.2.1', '64476ed74c19068ee543b53ad3992261')
version('1.2.0', 'e372c9814696481dbdb7d500e1410d2b')
version('1.1.2', '34d4ad174acbff005c36d4d10e48cbb9')
version('1.1.1', '0ceff308c38f37d5b5f6df3927451c27')

View File

@ -22,44 +22,57 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Cgal(Package):
"""CGAL is a software project that provides easy access to efficient and
reliable geometric algorithms in the form of a C++ library. CGAL
is used in various areas needing geometric computation, such as
geographic information systems, computer aided design, molecular
biology, medical imaging, computer graphics, and robotics.
"""The Computational Geometry Algorithms Library (CGAL) is a C++ library
that aims to provide easy access to efficient and reliable algorithms in
computational geometry. CGAL is used in various areas needing geometric
computation, such as geographic information systems, computer aided design,
molecular biology, medical imaging, computer graphics, and robotics.
"""
homepage = 'http://www.cgal.org/'
url = 'https://github.com/CGAL/cgal/archive/releases/CGAL-4.7.tar.gz'
version('4.9', '7b628db3e5614347f776c046b7666089')
version('4.7', '4826714810f3b4c65cac96b90fb03b67')
version('4.6.3', 'e8ee2ecc8d2b09b94a121c09257b576d')
# Installation instructions :
# http://doc.cgal.org/latest/Manual/installation.html
variant('shared', default=True,
description='Enables the build of shared libraries')
variant('debug', default=False,
description='Builds a debug version of the libraries')
# Essential Third Party Libraries
depends_on('boost')
depends_on('mpfr')
depends_on('gmp')
depends_on('mpfr')
depends_on('zlib')
# depends_on('opengl')
depends_on('qt@5:')
# Optional Third Party Libraries
# depends_on('leda')
# depends_on('mpfi')
# depends_on('rs')
# depends_on('rs3')
# depends_on('ntl')
# depends_on('eigen')
# depends_on('libqglviewer')
# depends_on('esbtl')
# depends_on('intel-tbb')
# Build dependencies
depends_on('cmake', type='build')
# FIXME : Qt5 dependency missing (needs Qt5 and OpenGL)
# FIXME : Optional third party libraries missing
def install(self, spec, prefix):
# Installation instructions:
# http://doc.cgal.org/latest/Manual/installation.html
options = []
options.extend(std_cmake_args)
# CGAL supports only Release and Debug build type. Any other build type
# will raise an error at configure time
if '+debug' in spec:
@ -72,9 +85,7 @@ def install(self, spec, prefix):
else:
options.append('-DBUILD_SHARED_LIBS:BOOL=OFF')
build_directory = join_path(self.stage.path, 'spack-build')
source_directory = self.stage.source_path
with working_dir(build_directory, create=True):
cmake(source_directory, *options)
make()
make("install")
cmake('.', *options)
make()
make('install')

View File

@ -0,0 +1,19 @@
--- old/src/scripts/configure
+++ new/src/scripts/configure
@@ -3293,10 +3293,16 @@
test_link "whether -lmpi" "ok" "no" "-lmpi"
if test $pass -eq 1
then
add_flag CMK_SYSLIBS='"$CMK_SYSLIBS -lmpi"' "mpi lib"
else
+ test_link "whether -lmpi -lmpi_cxx" "ok" "no" "-lmpi -lmpi_cxx"
+ if test $pass -eq 1
+ then
+ add_flag CMK_SYSLIBS='"$CMK_SYSLIBS -lmpi -lmpi_cxx"' "mpi lib"
+ else
echo "Error: can not find mpi library"
test_finish 1
+ fi
fi
fi
else

View File

@ -0,0 +1,172 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import platform
import shutil
import sys
from spack import *
class Charm(Package):
    """Charm++ is a parallel programming framework in C++ supported by
    an adaptive runtime system, which enhances user productivity and
    allows programs to run portably from small multicore computers
    (your laptop) to the largest supercomputers."""

    homepage = "http://charmplusplus.org"
    url = "http://charm.cs.illinois.edu/distrib/charm-6.7.1.tar.gz"

    version("6.7.1", "a8e20cf85e9c8721158f5bbd0ade48d9")
    version("6.7.0", "35a39a7975f1954a7db2d76736158231")
    version("6.6.1", "9554230f741e2599deaaac4d9d93d7ab")
    version("6.6.0", "31e95901b3f7324d52107e6ad000fcc8")
    version("6.5.1", "034d99458474a3ab96d8bede8a691a5d")

    # Support OpenMPI; see
    # <https://charm.cs.illinois.edu/redmine/issues/1206>
    patch("mpi.patch")

    # Communication mechanisms (choose exactly one)
    # TODO: Support Blue Gene/Q PAMI, Cray GNI, Cray shmem, CUDA
    variant("mpi", default=False,
            description="Use MPI as communication mechanism")
    variant("multicore", default=False,
            description="Disable inter-node communication")
    variant("net", default=False,
            description="Use net communication mechanism")
    variant("netlrts", default=True,
            description="Use netlrts communication mechanism")
    variant("verbs", default=False,
            description="Use Infiniband as communication mechanism")

    # Other options
    # Something is off with PAPI -- there are build errors. Maybe
    # Charm++ expects a particular version?
    variant("papi", default=False, description="Enable PAPI integration")
    variant("smp", default=True,
            description=(
                "Enable SMP parallelism (does not work with +multicore)"))
    variant("tcp", default=False,
            description="Use TCP as transport mechanism (requires +net)")

    # Note: We could add variants for AMPI, LIBS, bigemulator, msa, Tau
    # Note: We could support shared libraries

    depends_on("mpi", when="+mpi")
    depends_on("papi", when="+papi")

    def install(self, spec, prefix):
        """Drive Charm++'s own ./build script with options derived from
        the chosen variants, platform, and machine architecture."""
        target = "charm++"

        # Exactly one communication mechanism must be selected; the
        # variants below act as a poor man's multi-valued variant.
        # Note: Turn this into a multi-valued variant, once these
        # exist in Spack
        comm_names = ("mpi", "multicore", "net", "netlrts", "verbs")
        selected = [name for name in comm_names if "+" + name in spec]
        if len(selected) != 1:
            raise InstallError(
                "Exactly one communication mechanism "
                "(+mpi, +multicore, +net, +netlrts, or +verbs) "
                "must be enabled")
        comm = selected[0]

        plat = sys.platform
        if plat.startswith("linux"):
            plat = "linux"
        mach = platform.machine()

        # Define Charm++ version names for various (plat, mach, comm)
        # combinations. Note that not all combinations are supported.
        versions = {
            ("darwin", "i386", "multicore"): "multicore-darwin-x86",
            ("darwin", "i386", "net"): "net-darwin-x86",
            ("darwin", "x86_64", "mpi"): "mpi-darwin-x86_64",
            ("darwin", "x86_64", "multicore"): "multicore-darwin-x86_64",
            ("darwin", "x86_64", "net"): "net-darwin-x86_64",
            ("darwin", "x86_64", "netlrts"): "netlrts-darwin-x86_64",
            ("linux", "i386", "mpi"): "mpi-linux",
            ("linux", "i386", "multicore"): "multicore-linux32",
            ("linux", "i386", "net"): "net-linux",
            ("linux", "i386", "netlrts"): "netlrts-linux",
            ("linux", "x86_64", "mpi"): "mpi-linux-x86_64",
            ("linux", "x86_64", "multicore"): "multicore-linux64",
            ("linux", "x86_64", "net"): "net-linux-x86_64",
            ("linux", "x86_64", "netlrts"): "netlrts-linux-x86_64",
            ("linux", "x86_64", "verbs"): "verbs-linux-x86_64",
        }
        if (plat, mach, comm) not in versions:
            raise InstallError(
                "The communication mechanism %s is not supported "
                "on a %s platform with a %s CPU" %
                (comm, plat, mach))
        version = versions[(plat, mach, comm)]

        # We assume that Spack's compiler wrappers make this work. If
        # not, then we need to query the compiler vendor from Spack
        # here.
        compiler = "gcc"

        options = [compiler,
                   "--with-production",  # Note: turn this into a variant
                   "-j%d" % make_jobs,
                   "--destination=%s" % prefix]
        if "+mpi" in spec:
            options.append("--basedir=%s" % spec["mpi"].prefix)
        if "+papi" in spec:
            options.extend(["papi", "--basedir=%s" % spec["papi"].prefix])
        if "+smp" in spec:
            if "+multicore" in spec:
                # This is a Charm++ limitation; it would lead to a
                # build error
                raise InstallError("Cannot combine +smp with +multicore")
            options.append("smp")
        if "+tcp" in spec:
            if "+net" not in spec:
                # This is a Charm++ limitation; it would lead to a
                # build error
                raise InstallError(
                    "The +tcp variant requires "
                    "the +net communication mechanism")
            options.append("tcp")

        # Call "make" via the build script
        # Note: This builds Charm++ in the "tmp" subdirectory of the
        # install directory. Maybe we could set up a symbolic link
        # back to the build tree to prevent this? Alternatively, we
        # could dissect the build script; the build instructions say
        # this wouldn't be difficult.
        build = Executable(join_path(".", "build"))
        build(target, version, *options)

        shutil.rmtree(join_path(prefix, "tmp"))

View File

@ -0,0 +1,11 @@
--- CLHEP/CMakeLists.txt 2016-06-20 14:41:12.000000000 -0500
+++ CLHEP/CMakeLists.txt 2016-06-20 14:40:57.000000000 -0500
@@ -37,7 +37,7 @@
# If Policy CMP0042 exists, use OLD to prefer the use of install names
# instead of the new @rpath default.
if(POLICY CMP0042)
- cmake_policy(SET CMP0042 NEW)
+ cmake_policy(SET CMP0042 OLD)
endif()
set(CMAKE_MODULE_PATH

View File

@ -0,0 +1,81 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Clhep(Package):
    """CLHEP is a C++ Class Library for High Energy Physics. """

    homepage = "http://proj-clhep.web.cern.ch/proj-clhep/"
    url = "http://proj-clhep.web.cern.ch/proj-clhep/DISTRIBUTION/tarFiles/clhep-2.2.0.5.tgz"
    list_url = "https://proj-clhep.web.cern.ch/proj-clhep/DISTRIBUTION/"

    version('2.3.2.2', '567b304b0fa017e1e9fbf199f456ebe9')
    version('2.3.1.1', '16efca7641bc118c9d217cc96fe90bf5')
    version('2.3.1.0', 'b084934fc26a4182a08c09c292e19161')
    version('2.3.0.0', 'a00399a2ca867f2be902c22fc71d7e2e')
    version('2.2.0.8', '5a23ed3af785ac100a25f6cb791846af')
    version('2.2.0.5', '1584e8ce6ebf395821aed377df315c7c')
    version('2.2.0.4', '71d2c7c2e39d86a0262e555148de01c1')

    variant('debug', default=False, description="Switch to the debug version of CLHEP.")
    variant('cxx11', default=True, description="Compile using c++11 dialect.")
    variant('cxx14', default=False, description="Compile using c++14 dialect.")

    depends_on('cmake@2.8.12.2:', when='@2.2.0.4:2.3.0.0', type='build')
    depends_on('cmake@3.2:', when='@2.3.0.1:', type='build')

    def patch(self):
        # Flip CMP0042 back to NEW in the shipped CMakeLists so @rpath
        # install names are preferred on macOS.
        filter_file('SET CMP0042 OLD',
                    'SET CMP0042 NEW',
                    '%s/%s/CLHEP/CMakeLists.txt'
                    % (self.stage.path, self.spec.version))

    def install(self, spec, prefix):
        """Configure, build, and install CLHEP with CMake."""
        # Drop the default BUILD_TYPE from the standard CMake args so
        # the +debug variant can choose it (Release is CMake's default).
        cmake_args = [flag for flag in std_cmake_args
                      if 'BUILD_TYPE' not in flag]
        cmake_args.append(
            '-DCMAKE_BUILD_TYPE=' +
            ('Debug' if '+debug' in spec else 'MinSizeRel'))

        # Select the C++ dialect; the same flag is exported through
        # CXXFLAGS for the compiler probe.
        if '+cxx11' in spec:
            env['CXXFLAGS'] = self.compiler.cxx11_flag
            cmake_args.append('-DCLHEP_BUILD_CXXSTD=' +
                              self.compiler.cxx11_flag)
        if '+cxx14' in spec:
            env['CXXFLAGS'] = self.compiler.cxx14_flag
            cmake_args.append('-DCLHEP_BUILD_CXXSTD=' +
                              self.compiler.cxx14_flag)

        # The tarball is unusual in that the sources sit under an
        # extra CLHEP directory (additional layer).
        cmake_args.append("../CLHEP")

        # Run cmake in a dedicated build directory
        with working_dir('build', create=True):
            cmake(*cmake_args)
            make()
            make("install")

View File

@ -88,11 +88,13 @@ def install(self, spec, prefix):
options = [
'--prefix={0}'.format(prefix),
'--parallel={0}'.format(make_jobs),
# jsoncpp requires CMake to build
# use CMake-provided library to avoid circular dependency
'--no-system-jsoncpp'
]
'--parallel={0}'.format(make_jobs)]
if spec.satisfies("@3:"):
options.append(
# jsoncpp requires CMake to build
# use CMake-provided library to avoid circular dependency
'--no-system-jsoncpp'
)
if '+ownlibs' in spec:
# Build and link to the CMake-provided third-party libraries

View File

@ -47,19 +47,18 @@ class Cp2k(Package):
depends_on('lapack')
depends_on('blas')
depends_on('fftw')
depends_on('libint@:1.2', when='@3.0')
depends_on('mpi', when='+mpi')
depends_on('scalapack', when='+mpi')
depends_on('plumed+shared+mpi', when='+plumed+mpi')
depends_on('plumed+shared~mpi', when='+plumed~mpi')
depends_on('pexsi', when='+mpi')
depends_on('wannier90', when='+mpi')
depends_on('elpa', when='+mpi')
# TODO : add dependency on libint
# TODO : add dependency on libsmm, libxsmm
# TODO : add dependency on elpa
# TODO : add dependency on CUDA
# TODO : add dependency on QUIP
# TODO : add dependency on libwannier90
parallel = False
@ -88,15 +87,20 @@ def install(self, spec, prefix):
}
cppflags = [
'-D__FFTW3',
'-D__LIBPEXSI',
'-D__LIBINT',
'-I' + spec['fftw'].prefix.include
]
fcflags = copy.deepcopy(optflags[self.spec.compiler.name])
fcflags.extend([
'-I' + spec['fftw'].prefix.include
])
ldflags = ['-L' + spec['fftw'].prefix.lib]
libs = []
fftw = find_libraries(['libfftw3'], root=spec['fftw'].prefix.lib)
ldflags = [fftw.search_flags]
libs = [
join_path(spec['libint'].prefix.lib, 'libint.so'),
join_path(spec['libint'].prefix.lib, 'libderiv.so'),
join_path(spec['libint'].prefix.lib, 'libr12.so')
]
if '+plumed' in self.spec:
# Include Plumed.inc in the Makefile
mkf.write('include {0}\n'.format(
@ -109,7 +113,8 @@ def install(self, spec, prefix):
# Add required macro
cppflags.extend(['-D__PLUMED2'])
libs.extend([
join_path(self.spec['plumed'].prefix.lib, 'libplumed.so')
join_path(self.spec['plumed'].prefix.lib,
'libplumed.{0}'.format(dso_suffix))
])
mkf.write('CC = {0.compiler.cc}\n'.format(self))
@ -143,15 +148,26 @@ def install(self, spec, prefix):
if '+mpi' in self.spec:
cppflags.extend([
'-D__parallel',
'-D__LIBPEXSI',
'-D__WANNIER90',
'-D__ELPA3',
'-D__SCALAPACK'
])
fcflags.extend([
'-I' + join_path(
spec['elpa'].prefix,
'include',
'elpa-{0}'.format(str(spec['elpa'].version)),
'modules'
),
'-I' + join_path(spec['pexsi'].prefix, 'fortran')
])
ldflags.extend([
'-L' + spec['scalapack'].prefix.lib
])
scalapack = spec['scalapack'].scalapack_libs
ldflags.append(scalapack.search_flags)
libs.extend([
join_path(spec['elpa'].prefix.lib,
'libelpa.{0}'.format(dso_suffix)),
join_path(spec['wannier90'].prefix.lib, 'libwannier.a'),
join_path(spec['pexsi'].prefix.lib, 'libpexsi.a'),
join_path(spec['superlu-dist'].prefix.lib,
'libsuperlu_dist.a'),
@ -164,19 +180,15 @@ def install(self, spec, prefix):
'libmetis.{0}'.format(dso_suffix)
),
])
libs.extend(spec['scalapack'].scalapack_shared_libs)
libs.extend(scalapack)
libs.extend(self.spec['mpi'].mpicxx_shared_libs)
libs.extend(self.compiler.stdcxx_libs)
# LAPACK / BLAS
ldflags.extend([
'-L' + spec['lapack'].prefix.lib,
'-L' + spec['blas'].prefix.lib
])
libs.extend([
join_path(spec['fftw'].prefix.lib, 'libfftw3.so'),
spec['lapack'].lapack_shared_lib,
spec['blas'].blas_shared_lib
])
lapack = spec['lapack'].lapack_libs
blas = spec['blas'].blas_libs
ldflags.append((lapack + blas).search_flags)
libs.extend([str(x) for x in (fftw, lapack, blas)])
# Write compiler flags to file
mkf.write('CPPFLAGS = {0}\n'.format(' '.join(cppflags)))

View File

@ -43,14 +43,19 @@ class Cube(Package):
version('4.2.3', '8f95b9531f5a8f8134f279c2767c9b20',
url="http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz")
# TODO : add variant that builds GUI on top of Qt
variant('gui', default=False, description='Build CUBE GUI')
depends_on('zlib')
depends_on('qt@4.6:', when='+gui')
def install(self, spec, prefix):
configure_args = ["--prefix=%s" % prefix,
"--without-paraver",
"--without-gui"]
"--without-paraver"]
# TODO : need to handle cross compiling build
if '+gui' not in spec:
configure_args.append('--without-gui')
configure(*configure_args)
make(parallel=False)
make()
make("install", parallel=False)

View File

@ -39,7 +39,7 @@ class Cuda(Package):
Type, select runfile and click Download. Spack will search your
current directory for this file. Alternatively, add this file to a
mirror so that Spack can find it. For instructions on how to set up a
mirror, see http://software.llnl.gov/spack/mirrors.html
mirror, see http://spack.readthedocs.io/en/latest/mirrors.html.
Note: This package does not currently install the drivers necessary
to run CUDA. These will need to be installed manually. See:

View File

@ -32,6 +32,8 @@ class Curl(Package):
homepage = "http://curl.haxx.se"
url = "http://curl.haxx.se/download/curl-7.46.0.tar.bz2"
version('7.50.3', 'bd177fd6deecce00cfa7b5916d831c5e')
version('7.50.2', '6e161179f7af4b9f8b6ea21420132719')
version('7.50.1', '015f6a0217ca6f2c5442ca406476920b')
version('7.49.1', '6bb1f7af5b58b30e4e6414b8c1abccab')
version('7.47.1', '9ea3123449439bbd960cd25cf98796fb')

View File

@ -9,7 +9,7 @@ class Daal(IntelInstaller):
Note: You will have to add the download file to a
mirror so that Spack can find it. For instructions on how to set up a
mirror, see http://software.llnl.gov/spack/mirrors.html"""
mirror, see http://spack.readthedocs.io/en/latest/mirrors.html"""
homepage = "https://software.intel.com/en-us/daal"

View File

@ -0,0 +1,65 @@
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import os
class DarshanRuntime(Package):
    """Darshan (runtime) is a scalable HPC I/O characterization tool
    designed to capture an accurate picture of application I/O behavior,
    including properties such as patterns of access within files, with
    minimum overhead. DarshanRuntime package should be installed on
    systems where you intend to instrument MPI applications."""

    homepage = "http://www.mcs.anl.gov/research/projects/darshan/"
    url = "ftp://ftp.mcs.anl.gov/pub/darshan/releases/darshan-3.1.0.tar.gz"

    version('3.1.0', '439d717323e6265b2612ed127886ae52')
    version('3.0.0', '732577fe94238936268d74d7d74ebd08')

    depends_on('mpi')
    depends_on('zlib')

    # Scheduler integration: which batch system's job id Darshan records.
    variant('slurm', default=False, description='Use Slurm Job ID')
    # Fixed typo: was "Use Coblat Job Id"
    variant('cobalt', default=False, description='Use Cobalt Job Id')
    variant('pbs', default=False, description='Use PBS Job Id')

    def install(self, spec, prefix):
        """Configure and build darshan-runtime out of tree.

        Selects the job-id environment variable based on the scheduler
        variants; if several are enabled, the last one checked (pbs)
        wins, matching the original behavior.
        """
        job_id = 'NONE'
        if '+slurm' in spec:
            job_id = 'SLURM_JOBID'
        if '+cobalt' in spec:
            job_id = 'COBALT_JOBID'
        if '+pbs' in spec:
            job_id = 'PBS_JOBID'

        # TODO: BG-Q and other platform configure options
        options = ['CC=%s' % spec['mpi'].mpicc,
                   '--with-mem-align=8',
                   '--with-log-path-by-env=DARSHAN_LOG_DIR_PATH',
                   '--with-jobid-env=%s' % job_id,
                   '--with-zlib=%s' % spec['zlib'].prefix]

        with working_dir('spack-build', create=True):
            configure = Executable('../darshan-runtime/configure')
            configure('--prefix=%s' % prefix, *options)
            make()
            make('install')

    def setup_environment(self, spack_env, run_env):
        # default path for log file, could be user or site specific setting
        darshan_log_dir = '%s' % os.environ['HOME']
        run_env.set('DARSHAN_LOG_DIR_PATH', darshan_log_dir)

View File

@ -0,0 +1,41 @@
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class DarshanUtil(Package):
    """Darshan (util) is collection of tools for parsing and summarizing log
    files produced by Darshan (runtime) instrumentation. This package is
    typically installed on systems (front-end) where you intend to analyze
    log files produced by Darshan (runtime)."""

    homepage = "http://www.mcs.anl.gov/research/projects/darshan/"
    url = "ftp://ftp.mcs.anl.gov/pub/darshan/releases/darshan-3.1.0.tar.gz"

    version('3.1.0', '439d717323e6265b2612ed127886ae52')
    version('3.0.0', '732577fe94238936268d74d7d74ebd08')

    depends_on('zlib')

    def install(self, spec, prefix):
        """Build darshan-util out of tree with the plain (non-MPI)
        compiler and zlib support."""
        config_opts = ['CC=%s' % self.compiler.cc,
                       '--with-zlib=%s' % spec['zlib'].prefix]

        with working_dir('spack-build', create=True):
            configure = Executable('../darshan-util/configure')
            configure('--prefix=%s' % prefix, *config_opts)
            make()
            make('install')

View File

@ -32,6 +32,7 @@ class Dealii(Package):
homepage = "https://www.dealii.org"
url = "https://github.com/dealii/dealii/releases/download/v8.4.1/dealii-8.4.1.tar.gz"
version('8.4.2', '84c6bd3f250d3e0681b645d24cb987a7')
version('8.4.1', 'efbaf16f9ad59cfccad62302f36c3c1d')
version('8.4.0', 'ac5dbf676096ff61e092ce98c80c2b00')
version('8.3.0', 'fc6cdcb16309ef4bea338a4f014de6fa')
@ -67,11 +68,11 @@ class Dealii(Package):
# Boost 1.58 is blacklisted, see
# https://github.com/dealii/dealii/issues/1591
# Require at least 1.59
# +python won't affect @:8.4.1
# +python won't affect @:8.4.2
depends_on("boost@1.59.0:+thread+system+serialization+iostreams",
when='@:8.4.1~mpi')
when='@:8.4.2~mpi')
depends_on("boost@1.59.0:+thread+system+serialization+iostreams+mpi",
when='@:8.4.1+mpi')
when='@:8.4.2+mpi')
# since @8.5.0: (and @develop) python bindings are introduced:
depends_on("boost@1.59.0:+thread+system+serialization+iostreams",
when='@8.5.0:~mpi~python')
@ -102,8 +103,9 @@ class Dealii(Package):
depends_on("netcdf-cxx", when='+netcdf+mpi')
depends_on("oce", when='+oce')
depends_on("p4est", when='+p4est+mpi')
depends_on("petsc+mpi", when='@8.5.0:+petsc+mpi')
depends_on("slepc", when='@8.5.0:+slepc+petsc+mpi')
depends_on("petsc+mpi", when='@8.4.2:+petsc+mpi')
depends_on('python', when='@8.5.0:+python')
depends_on("slepc", when='@8.4.2:+slepc+petsc+mpi')
depends_on("petsc@:3.6.4+mpi", when='@:8.4.1+petsc+mpi')
depends_on("slepc@:3.6.3", when='@:8.4.1+slepc+petsc+mpi')
depends_on("trilinos", when='+trilinos+mpi')
@ -122,6 +124,7 @@ def install(self, spec, prefix):
options.remove(word)
dsuf = 'dylib' if sys.platform == 'darwin' else 'so'
lapack_blas = spec['lapack'].lapack_libs + spec['blas'].blas_libs
options.extend([
'-DCMAKE_BUILD_TYPE=DebugRelease',
'-DDEAL_II_COMPONENT_EXAMPLES=ON',
@ -134,9 +137,7 @@ def install(self, spec, prefix):
'-DLAPACK_FOUND=true',
'-DLAPACK_INCLUDE_DIRS=%s;%s' % (
spec['lapack'].prefix.include, spec['blas'].prefix.include),
'-DLAPACK_LIBRARIES=%s;%s' % (
spec['lapack'].lapack_shared_lib,
spec['blas'].blas_shared_lib),
'-DLAPACK_LIBRARIES=%s' % lapack_blas.joined(';'),
'-DMUPARSER_DIR=%s' % spec['muparser'].prefix,
'-DUMFPACK_DIR=%s' % spec['suite-sparse'].prefix,
'-DTBB_DIR=%s' % spec['tbb'].prefix,

View File

@ -87,12 +87,12 @@ def configure(self, spec):
# BLAS/LAPACK support
# Note: BLAS/LAPACK must be compiled with OpenMP support
# if the +openmp variant is chosen
blas = 'blas.a'
blas = 'blas.a'
lapack = 'lapack.a'
if '+blas' in spec:
blas = spec['blas'].blas_shared_lib
blas = spec['blas'].blas_libs.joined()
if '+lapack' in spec:
lapack = spec['lapack'].lapack_shared_lib
lapack = spec['lapack'].lapack_libs.joined()
# lapack must come before blas
config['LIB_LPK'] = ' '.join([lapack, blas])

View File

@ -34,8 +34,16 @@ class Elpa(Package):
homepage = 'http://elpa.mpcdf.mpg.de/'
url = 'http://elpa.mpcdf.mpg.de/elpa-2015.11.001.tar.gz'
version('2015.11.001', 'de0f35b7ee7c971fd0dca35c900b87e6',
url='http://elpa.mpcdf.mpg.de/elpa-2015.11.001.tar.gz')
version(
'2016.05.003',
'88a9f3f3bfb63e16509dd1be089dcf2c',
url='http://elpa.mpcdf.mpg.de/html/Releases/2016.05.003/elpa-2016.05.003.tar.gz'
)
version(
'2015.11.001',
'de0f35b7ee7c971fd0dca35c900b87e6',
url='http://elpa.mpcdf.mpg.de/elpa-2015.11.001.tar.gz'
)
variant('openmp', default=False, description='Activates OpenMP support')
@ -46,7 +54,24 @@ class Elpa(Package):
def install(self, spec, prefix):
options = ["--prefix=%s" % prefix]
options = [
'CC={0}'.format(self.spec['mpi'].mpicc),
'FC={0}'.format(self.spec['mpi'].mpifc),
'CXX={0}'.format(self.spec['mpi'].mpicxx),
'FCFLAGS={0}'.format(
spec['lapack'].lapack_libs.joined()
),
'LDFLAGS={0}'.format(
spec['lapack'].lapack_libs.joined()
),
'SCALAPACK_FCFLAGS={0}'.format(
spec['scalapack'].scalapack_libs.joined()
),
'SCALAPACK_LDFLAGS={0}'.format(
spec['scalapack'].scalapack_libs.joined()
),
'--prefix={0}'.format(self.prefix)
]
if '+openmp' in spec:
options.append("--enable-openmp")

View File

@ -88,7 +88,7 @@ def install(self, spec, prefix):
# Add a list of directories to search
search_list = []
for name, dependency_spec in spec.dependencies.iteritems():
for dependency_spec in spec.dependencies():
search_list.extend([dependency_spec.prefix.lib,
dependency_spec.prefix.lib64])

View File

@ -53,8 +53,8 @@ class Extrae(Package):
programming models either alone or in conjunction with MPI :
OpenMP, CUDA, OpenCL, pthread, OmpSs"""
homepage = "http://www.bsc.es/computer-sciences/extrae"
url = "http://www.bsc.es/ssl/apps/performanceTools/files/extrae-3.0.1.tar.bz2"
version('3.0.1', 'a6a8ca96cd877723cd8cc5df6bdb922b')
url = "http://www.bsc.es/ssl/apps/performanceTools/files/extrae-3.3.0.tar.bz2"
version('3.3.0', 'f46e3f1a6086b5b3ac41c9585b42952d')
depends_on("mpi")
depends_on("dyninst")
@ -62,6 +62,9 @@ class Extrae(Package):
depends_on("boost")
depends_on("libdwarf")
depends_on("papi")
depends_on("libelf")
depends_on("libxml2")
depends_on("binutils+libiberty")
def install(self, spec, prefix):
if 'openmpi' in spec:
@ -80,6 +83,9 @@ def install(self, spec, prefix):
"--with-papi=%s" % spec['papi'].prefix,
"--with-dyninst-headers=%s" % spec[
'dyninst'].prefix.include,
"--with-elf=%s" % spec['libelf'].prefix,
"--with-xml-prefix=%s" % spec['libxml2'].prefix,
"--with-binutils=%s" % spec['binutils'].prefix,
"--with-dyninst-libs=%s" % spec['dyninst'].prefix.lib)
make()

View File

@ -0,0 +1,11 @@
--- a/CMakeLists.txt 2016-08-16 02:30:13.466078087 +0200
+++ b/CMakeLists.txt 2016-08-16 02:30:36.879586772 +0200
@@ -553,7 +553,7 @@
set(ENV{HDF5_ROOT} "$ENV{HDF5_DIR}")
endif()
set(HDF5_PREFER_PARALLEL TRUE)
- find_package(HDF5)
+ find_package(HDF5 COMPONENTS C)
set_package_properties(HDF5 PROPERTIES TYPE OPTIONAL
DESCRIPTION "Hierarchical Data Format 5 (HDF5)"
URL "https://www.hdfgroup.org/HDF5")

View File

@ -62,17 +62,15 @@ class Fenics(Package):
# variant('slepc4py', default=True, description='Uses SLEPc4py')
# variant('pastix', default=True, description='Compile with Pastix')
patch('petsc-3.7.patch', when='^petsc@3.7:')
patch('petsc-3.7.patch', when='@1.6.1^petsc@3.7:')
patch('petsc-version-detection.patch', when='@:1.6.1')
patch('hdf5~cxx-detection.patch')
extends('python')
depends_on('py-numpy')
depends_on('py-ply')
depends_on('py-six')
depends_on('py-sphinx@1.0.1:', when='+doc')
depends_on('eigen@3.2.0:')
depends_on('boost')
depends_on('eigen@3.2.0:', type='build')
depends_on('boost+filesystem+program_options+system+iostreams+timer+regex+chrono')
depends_on('mpi', when='+mpi')
depends_on('hdf5', when='+hdf5')
depends_on('parmetis@4.0.2:^metis+real64', when='+parmetis')
@ -85,12 +83,27 @@ class Fenics(Package):
depends_on('suite-sparse', when='+suite-sparse')
depends_on('qt', when='+qt')
# This are the build dependencies
depends_on('py-setuptools')
depends_on('cmake@2.8.12:')
depends_on('swig@3.0.3:')
depends_on('py-ply', type=nolink)
depends_on('py-six', type=nolink)
depends_on('py-numpy', type=nolink)
depends_on('py-sympy', type=nolink)
depends_on('swig@3.0.3:', type=nolink)
depends_on('cmake@2.8.12:', type=nolink)
depends_on('py-setuptools', type='build')
depends_on('py-sphinx@1.0.1:', when='+doc', type='build')
releases = [
{
'version': '2016.1.0',
'md5': '92e8d00f6487a575987201f0b0d19173',
'resources': {
'ffc': '35457ae164e481ba5c9189ebae060a47',
'fiat': 'ac0c49942831ee434301228842bcc280',
'instant': '0e3dbb464c4d90d691f31f0fdd63d4f6',
'ufl': '37433336e5c9b58d1d5ab4acca9104a7',
}
},
{
'version': '1.6.0',
'md5': '35cb4baf7ab4152a40fb7310b34d5800',

View File

@ -34,9 +34,12 @@ class Fontconfig(Package):
depends_on('freetype')
depends_on('libxml2')
depends_on('pkg-config', type='build')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix, "--enable-libxml2")
configure("--prefix=%s" % prefix,
"--enable-libxml2",
"--disable-docs")
make()
make("install")

View File

@ -3,6 +3,7 @@
from contextlib import closing
from glob import glob
import sys
from os.path import isfile
class Gcc(Package):
@ -68,6 +69,19 @@ def install(self, spec, prefix):
if spec.satisfies("@4.7.1:") and sys.platform != 'darwin':
enabled_languages.add('go')
# Fix a standard header file for OS X Yosemite that
# is GCC incompatible by replacing non-GCC compliant macros
if 'yosemite' in spec.architecture:
if isfile(r'/usr/include/dispatch/object.h'):
new_dispatch_dir = join_path(prefix, 'include', 'dispatch')
mkdirp(new_dispatch_dir)
cp = which('cp')
new_header = join_path(new_dispatch_dir, 'object.h')
cp(r'/usr/include/dispatch/object.h', new_header)
filter_file(r'typedef void \(\^dispatch_block_t\)\(void\)',
'typedef void* dispatch_block_t',
new_header)
# Generic options to compile GCC
options = ["--prefix=%s" % prefix, "--libdir=%s/lib64" % prefix,
"--disable-multilib",

View File

@ -41,9 +41,18 @@ class Gdb(Package):
version('7.9', '8f8ced422fe462a00e0135a643544f17')
version('7.8.2', '8b0ea8b3559d3d90b3ff4952f0aeafbc')
variant('python', default=True, description='Compile with Python support')
# Required dependency
depends_on('texinfo', type='build')
# Optional dependency
depends_on('python', when='+python')
def install(self, spec, prefix):
configure('--prefix=%s' % prefix)
options = ['--prefix=%s' % prefix]
if '+python' in spec:
options.extend(['--with-python'])
configure(*options)
make()
make("install")

View File

@ -0,0 +1,85 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Geant4(Package):
    """Geant4 is a toolkit for the simulation of the passage of particles
    through matter. Its areas of application include high energy, nuclear
    and accelerator physics, as well as studies in medical and space
    science."""

    homepage = "http://geant4.cern.ch/"
    url = "http://geant4.cern.ch/support/source/geant4.10.01.p03.tar.gz"

    version('10.02.p01', 'b81f7082a15f6a34b720b6f15c6289cfe4ddbbbdcef0dc52719f71fac95f7f1c')
    version('10.01.p03', '4fb4175cc0dabcd517443fbdccd97439')

    variant('qt', default=False, description='Enable Qt support')

    depends_on('cmake@3.5:', type='build')

    # Each Geant4 release is pinned to a specific CLHEP version.
    depends_on("clhep@2.3.1.1~cxx11+cxx14", when="@10.02.p01")
    depends_on("clhep@2.2.0.4~cxx11+cxx14", when="@10.01.p03")
    depends_on("expat")
    depends_on("zlib")
    depends_on("xerces-c")
    depends_on("qt@4.8:", when="+qt")

    def install(self, spec, prefix):
        """Configure and build Geant4 with CMake in a separate
        build directory."""
        options = list(std_cmake_args)
        options.append('-DXERCESC_ROOT_DIR:STRING=%s' %
                       spec['xerces-c'].prefix)
        options.append('-DGEANT4_BUILD_CXXSTD=c++14')
        options.extend(['-DGEANT4_USE_GDML=ON',
                        '-DGEANT4_USE_SYSTEM_EXPAT=ON',
                        '-DGEANT4_USE_SYSTEM_ZLIB=ON',
                        '-DGEANT4_USE_SYSTEM_CLHEP=ON'])

        # fixme: turn off data for now and maybe each data set should
        # go into a separate package to cut down on disk usage between
        # different code versions using the same data versions.
        options.append('-DGEANT4_INSTALL_DATA=OFF')

        # http://geant4.web.cern.ch/geant4/UserDocumentation/UsersGuides/InstallationGuide/html/ch02s03.html
        # fixme: likely things that need addressing:
        # -DGEANT4_USE_OPENGL_X11=ON
        if '+qt' in spec:
            options.append('-DGEANT4_USE_QT=ON')

        build_dir = join_path(self.stage.path, 'spack-build')
        src_dir = self.stage.source_path
        with working_dir(build_dir, create=True):
            cmake(src_dir, *options)
            make()
            make("install")

    def url_for_version(self, version):
        """Handle Geant4's unusual version string."""
        return "http://geant4.cern.ch/support/source/geant4.%s.tar.gz" % version

View File

@ -26,17 +26,28 @@
class GitLfs(Package):
"""Tool for managing large files with Git."""
"""Git LFS is a system for managing and versioning large files in
association with a Git repository. Instead of storing the large files
within the Git repository as blobs, Git LFS stores special "pointer
files" in the repository, while storing the actual file contents on a
Git LFS server."""
homepage = "https://git-lfs.github.com"
url = "https://github.com/github/git-lfs/archive/v1.4.1.tar.gz"
git_url = "https://github.com/github/git-lfs.git"
version('1.4.1', 'c62a314d96d3a30af4d98fa3305ad317')
version('1.4.1', git=git_url, tag='v1.4.1')
version('1.3.1', git=git_url, tag='v1.3.1')
# TODO: Implement this by following the instructions at this location:
# https://github.com/github/git-lfs/blob/master/CONTRIBUTING.md#building
# variant('test', default=True, description='Build and run tests as part of the build.') # NOQA: E501
depends_on('go@1.5:', type='build')
depends_on('git@1.8.2:', type='run')
def install(self, spec, prefix):
bootstrap = Executable('./scripts/bootstrap')
bootstrap()
install('bin/git-lfs', prefix.bin)
bootstrap_script = Executable(join_path('script', 'bootstrap'))
bootstrap_script()
mkdirp(prefix.bin)
install(join_path('bin', 'git-lfs'), prefix.bin)

View File

@ -22,6 +22,7 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
from spack import *
@ -54,28 +55,33 @@ class Git(Package):
# version('2.5.4', '3eca2390cf1fa698b48e2a233563a76b')
# version('2.2.1', 'ff41fdb094eed1ec430aed8ee9b9849c')
depends_on("openssl")
depends_on("autoconf", type='build')
depends_on("curl")
depends_on("expat")
depends_on("gettext")
depends_on("zlib")
depends_on("libiconv")
depends_on("openssl")
depends_on("pcre")
depends_on("perl")
depends_on("zlib")
def install(self, spec, prefix):
env['LDFLAGS'] = "-L%s" % spec['gettext'].prefix.lib + " -lintl"
configure_args = [
"--prefix=%s" % prefix,
"--with-libpcre=%s" % spec['pcre'].prefix,
"--with-openssl=%s" % spec['openssl'].prefix,
"--with-zlib=%s" % spec['zlib'].prefix,
"--with-curl=%s" % spec['curl'].prefix,
"--with-expat=%s" % spec['expat'].prefix,
"--with-iconv=%s" % spec['libiconv'].prefix,
"--with-libpcre=%s" % spec['pcre'].prefix,
"--with-openssl=%s" % spec['openssl'].prefix,
"--with-perl=%s" % join_path(spec['perl'].prefix.bin, 'perl'),
"--with-zlib=%s" % spec['zlib'].prefix,
]
which('autoreconf')('-i')
configure(*configure_args)
if sys.platform == "darwin":
# Don't link with -lrt; the system has no (and needs no) librt
filter_file(r' -lrt$', '', 'Makefile')
make()
make("install")

View File

@ -0,0 +1,16 @@
--- a/configure.ac 2016-08-16 11:57:34.000000000 -0400
+++ b/configure.ac 2016-08-16 11:57:36.000000000 -0400
@@ -3357,11 +3357,11 @@
enable_compile_warnings=yes)
AS_IF([test "x$enable_compile_warnings" = xyes], [
CC_CHECK_FLAGS_APPEND([GLIB_WARN_CFLAGS], [CFLAGS], [\
-Wall -Wstrict-prototypes -Werror=declaration-after-statement \
-Werror=missing-prototypes -Werror=implicit-function-declaration \
- -Werror=pointer-arith -Werror=init-self -Werror=format-security \
- -Werror=format=2 -Werror=missing-include-dirs])
+ -Werror=pointer-arith -Werror=init-self \
+ -Werror=missing-include-dirs])
])
AC_SUBST(GLIB_WARN_CFLAGS)
#

View File

@ -23,6 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import os
class Glib(Package):
@ -38,14 +39,20 @@ class Glib(Package):
version('2.48.1', '67bd3b75c9f6d5587b457dc01cdcd5bb')
version('2.42.1', '89c4119e50e767d3532158605ee9121a')
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
depends_on('pkg-config', type='build')
depends_on('libffi')
depends_on('zlib')
depends_on('pkg-config', type='build')
depends_on('gettext')
depends_on('pcre+utf', when='@2.48:')
# The following patch is needed for gcc-6.1
patch('g_date_strftime.patch', when='@2.42.1')
# Clang doesn't seem to acknowledge the pragma lines to disable the -Werror
# around a legitimate usage.
patch('no-Werror=format-security.patch')
def url_for_version(self, version):
"""Handle glib's version-based custom URLs."""
@ -53,6 +60,16 @@ def url_for_version(self, version):
return url + '/%s/glib-%s.tar.xz' % (version.up_to(2), version)
def install(self, spec, prefix):
    """Regenerate glib's build system with autoreconf, then run the
    standard configure/make/install sequence.

    The aclocal search path must include the macro directories of
    pkg-config, automake, and libtool (in that order, after glib's own
    ``config`` directory) so their m4 macros are found.
    """
    autoreconf_args = ["--install", "--verbose", "--force", "-I", "config"]
    for dep in ("pkg-config", "automake", "libtool"):
        macro_dir = os.path.join(spec[dep].prefix, "share", "aclocal")
        autoreconf_args += ["-I", macro_dir]
    which("autoreconf")(*autoreconf_args)

    configure("--prefix=%s" % prefix)
    make()
    # glib's install targets are not parallel-safe.
    make("install", parallel=False)

View File

@ -0,0 +1,42 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Gmake(Package):
    """GNU Make, the standard build automation tool."""

    homepage = "http://gnu.org/gnu/make"
    url = "ftp://ftp.gnu.org/gnu/make/make-4.0.tar.gz"

    version('4.0', 'b5e558f981326d9ca1bfdb841640721a')

    def install(self, spec, prefix):
        """Run the stock autotools sequence, then expose the
        conventional ``gmake`` alias."""
        configure('--prefix=%s' % prefix)
        make()
        make('install')
        # Many build systems invoke GNU make as ``gmake``.
        with working_dir(prefix.bin):
            symlink('make', 'gmake')

View File

@ -37,4 +37,7 @@ class Gmp(AutotoolsPackage):
version('6.0.0a', 'b7ff2d88cae7f8085bd5006096eed470')
version('6.0.0', '6ef5869ae735db9995619135bd856b84')
depends_on("m4", type='build')
depends_on('m4', type='build')
def configure_args(self):
    """Extra arguments passed to ``configure``.

    Always enable the C++ interface; many dependents (e.g. mpfr-based
    C++ codes) expect libgmpxx to be present.
    """
    options = ['--enable-cxx']
    return options

View File

@ -87,9 +87,9 @@ def install(self, spec, prefix):
options.append('-DENABLE_OS_SPECIFIC_INSTALL=OFF')
# Make sure GMSH picks up correct BlasLapack by providing linker flags
options.append('-DBLAS_LAPACK_LIBRARIES=%s %s' %
(to_link_flags(spec['lapack'].lapack_shared_lib),
to_link_flags(spec['blas'].blas_shared_lib)))
blas_lapack = spec['lapack'].lapack_libs + spec['blas'].blas_libs
options.append(
'-DBLAS_LAPACK_LIBRARIES={0}'.format(blas_lapack.ld_flags))
# Gmsh does not have an option to compile against external metis.
# Its own Metis, however, fails to build

View File

@ -1,3 +1,27 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import shutil
import glob
@ -17,25 +41,37 @@ class GoBootstrap(Package):
extendable = True
# temporary fix until tags are pulled correctly
# NOTE: Go@1.4.2 is the only supported bootstrapping compiler because all
# later versions require a Go compiler to build.
# See: https://golang.org/doc/install/source
version('1.4.2', git='https://go.googlesource.com/go', tag='go1.4.2')
variant('test',
default=True,
description="Run tests as part of build, a good idea but quite"
" time consuming")
variant('test', default=True, description='Build and run tests as part of the build.')
provides('golang@:1.4.2')
depends_on('git')
depends_on('git', type='alldeps')
# NOTE: Older versions of Go attempt to download external files that have
# since been moved while running the test suite. This patch modifies the
# test files so that these tests don't cause false failures.
# See: https://github.com/golang/go/issues/15694
@when('@:1.4.3')
def patch(self):
    """Disable the 'api' check in old Go test drivers.

    Pre-1.5 releases try to download external files that have since
    moved, which makes the test suite fail spuriously
    (see https://github.com/golang/go/issues/15694).
    """
    test_suite_file = FileFilter(join_path('src', 'run.bash'))
    # Comment out the line that invokes $GOROOT/src/cmd/api/run.go.
    test_suite_file.filter(
        r'^(.*)(\$GOROOT/src/cmd/api/run.go)(.*)$',
        r'# \1\2\3',
    )

@when('@1.5.0:')
def patch(self):
    """No patching is needed for Go 1.5 and later."""
    pass
def install(self, spec, prefix):
bash = which('bash')
with working_dir('src'):
if '+test' in spec:
bash('all.bash')
else:
bash('make.bash')
bash('{0}.bash'.format('all' if '+test' in spec else 'make'))
try:
os.makedirs(prefix)

View File

@ -1,3 +1,27 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import shutil
import glob
@ -12,28 +36,39 @@ class Go(Package):
extendable = True
version('1.5.4', git='https://go.googlesource.com/go', tag='go1.5.4')
version('1.6.2', git='https://go.googlesource.com/go', tag='go1.6.2')
version('1.5.4', git='https://go.googlesource.com/go', tag='go1.5.4')
version('1.4.2', git='https://go.googlesource.com/go', tag='go1.4.2')
variant('test',
default=True,
description="Run tests as part of build, a good idea but quite"
" time consuming")
variant('test', default=True, description='Build and run tests as part of the build.')
provides('golang')
# to-do, make non-c self-hosting compilers feasible without backflips
depends_on('git', type='alldeps')
# TODO: Make non-c self-hosting compilers feasible without backflips
# should be a dep on external go compiler
depends_on('go-bootstrap', type='build')
depends_on('git', type='alldeps')
# NOTE: Older versions of Go attempt to download external files that have
# since been moved while running the test suite. This patch modifies the
# test files so that these tests don't cause false failures.
# See: https://github.com/golang/go/issues/15694
@when('@:1.4.3')
def patch(self):
    """Disable the 'api' check in old Go test drivers.

    Pre-1.5 releases try to download external files that have since
    moved, which makes the test suite fail spuriously
    (see https://github.com/golang/go/issues/15694).
    """
    test_suite_file = FileFilter(join_path('src', 'run.bash'))
    # Comment out the line that invokes $GOROOT/src/cmd/api/run.go.
    test_suite_file.filter(
        r'^(.*)(\$GOROOT/src/cmd/api/run.go)(.*)$',
        r'# \1\2\3',
    )

@when('@1.5.0:')
def patch(self):
    """No patching is needed for Go 1.5 and later."""
    pass
def install(self, spec, prefix):
bash = which('bash')
with working_dir('src'):
if '+test' in spec:
bash('all.bash')
else:
bash('make.bash')
bash('{0}.bash'.format('all' if '+test' in spec else 'make'))
try:
os.makedirs(prefix)

View File

@ -0,0 +1,71 @@
# Machine makefile template for Grackle, consumed by the Spack package.
# Tokens of the form @NAME (@CC, @FC, @HDF5_ROOT, @PREFIX, ...) are
# substituted via filter_file() at install time; this file is never
# used verbatim by make.
MACH_TEXT = Generic Linux
MACH_VALID = 1
MACH_FILE = Make.mach.@ARCHITECTURE
#-----------------------------------------------------------------------
# Install paths (local variables)
#-----------------------------------------------------------------------
LOCAL_HDF5_INSTALL = @HDF5_ROOT
#-----------------------------------------------------------------------
# Compiler settings
#-----------------------------------------------------------------------
MACH_CC_NOMPI = @CC # C compiler
MACH_CXX_NOMPI = @CXX # C++ compiler
MACH_FC_NOMPI = @F77 # Fortran 77
MACH_F90_NOMPI = @FC # Fortran 90
MACH_LD_NOMPI = @FC # Linker
# Replaced with 'MACH_AR = ar' or 'MACH_LIBTOOL = libtool' depending
# on the Grackle version being built.
@LINK_VARIABLES_DEFINITION
#-----------------------------------------------------------------------
# Machine-dependent defines
#-----------------------------------------------------------------------
MACH_DEFINES = -DLINUX -DH5_USE_16_API -fPIC
#-----------------------------------------------------------------------
# Compiler flag settings
#-----------------------------------------------------------------------
MACH_CPPFLAGS = -P -traditional
MACH_CFLAGS =
MACH_CXXFLAGS =
MACH_FFLAGS = -fno-second-underscore -ffixed-line-length-132
MACH_F90FLAGS = -fno-second-underscore
MACH_LDFLAGS = @STDCXX_LIB
#-----------------------------------------------------------------------
# Optimization flags
#-----------------------------------------------------------------------
MACH_OPT_WARN = -Wall -g
MACH_OPT_DEBUG = -g
MACH_OPT_HIGH = -O2
MACH_OPT_AGGRESSIVE = -O3 -g
#-----------------------------------------------------------------------
# Includes
#-----------------------------------------------------------------------
LOCAL_INCLUDES_HDF5 = -I@HDF5_ROOT/include # HDF5 includes
MACH_INCLUDES = $(LOCAL_INCLUDES_HDF5)
#-----------------------------------------------------------------------
# Libraries
#-----------------------------------------------------------------------
LOCAL_LIBS_HDF5 = -L@HDF5_ROOT/lib -lhdf5 # HDF5 libraries
LOCAL_LIBS_MACH = # Machine-dependent libraries
MACH_LIBS = $(LOCAL_LIBS_HDF5) $(LOCAL_LIBS_MACH)
#-----------------------------------------------------------------------
# Installation
#-----------------------------------------------------------------------
MACH_INSTALL_PREFIX = @PREFIX
MACH_INSTALL_LIB_DIR =
MACH_INSTALL_INCLUDE_DIR =

View File

@ -0,0 +1,89 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os.path
import shutil
import inspect
from spack import *
class Grackle(Package):
    """Grackle is a chemistry and radiative cooling library for astrophysical
    simulations with interfaces for C, C++, and Fortran codes. It is a
    generalized and trimmed down version of the chemistry network of the Enzo
    simulation code
    """
    homepage = 'http://grackle.readthedocs.io/en/grackle-2.2/'
    url = 'https://bitbucket.org/grackle/grackle/get/grackle-2.0.1.tar.bz2'

    version('2.2', 'ec49ed1db5a42db21f478285150c2ba3')
    version('2.0.1', 'a9624ad13a60c592c1a0a4ea8e1ae86d')

    # Since 2.2 the build links through libtool instead of plain 'ar'.
    depends_on('libtool', when='@2.2')

    depends_on('mpi')
    depends_on('hdf5+mpi')

    # Grackle's hand-written makefiles are not parallel-safe.
    parallel = False

    def install(self, spec, prefix):
        """Generate a machine makefile from the bundled
        ``Make.mach.template``, then drive Grackle's make-based build.

        The template's @TOKEN placeholders are replaced with the MPI
        compiler wrappers, the HDF5 location, and the install prefix.
        """
        template_name = '{0.architecture}-{0.compiler.name}'
        grackle_architecture = template_name.format(spec)
        # Fix: compare against Version('2.2'), not the float literal 2.2.
        # Version coerces its argument through str(), and float
        # stringification can mangle version numbers (e.g. 2.10 -> '2.1').
        if spec.version < Version('2.2'):
            link_variables = 'MACH_AR = ar'
        else:
            link_variables = 'MACH_LIBTOOL = libtool'
        substitutions = {
            '@ARCHITECTURE': grackle_architecture,
            '@CC': spec['mpi'].mpicc,
            '@CXX': spec['mpi'].mpicxx,
            '@FC': spec['mpi'].mpifc,
            '@F77': spec['mpi'].mpif77,
            '@STDCXX_LIB': ' '.join(self.compiler.stdcxx_libs),
            '@HDF5_ROOT': spec['hdf5'].prefix,
            '@PREFIX': prefix,
            '@LINK_VARIABLES_DEFINITION': link_variables
        }

        # The template ships alongside this package.py file.
        template = join_path(
            os.path.dirname(inspect.getmodule(self).__file__),
            'Make.mach.template'
        )
        makefile = join_path(
            self.stage.source_path,
            'src',
            'clib',
            'Make.mach.{0}'.format(grackle_architecture)
        )
        shutil.copy(template, makefile)
        for key, value in substitutions.items():
            filter_file(key, value, makefile)

        configure()
        with working_dir('src/clib'):
            make('clean')
            make('machine-{0}'.format(grackle_architecture))
            make('opt-high')
            make('show-config')
            make()
            mkdirp(prefix.lib)
            make('install')

Some files were not shown because too many files have changed in this diff Show More