Merge branch 'develop' of https://github.com/LLNL/spack into features/custom_modulefile_from_config
commit 9eee71edf7
34 .coveragerc Normal file
@@ -0,0 +1,34 @@
# -*- conf -*-
# .coveragerc to control coverage.py
[run]
branch = True
source = lib
omit =
    lib/spack/spack/test/*
    lib/spack/env/*
    lib/spack/docs/*
    lib/spack/external/*

[report]
# Regexes for lines to exclude from consideration
exclude_lines =
    # Have to re-enable the standard pragma
    pragma: no cover

    # Don't complain about missing debug-only code:
    def __repr__
    if self\.debug

    # Don't complain if tests don't hit defensive assertion code:
    raise AssertionError
    raise NotImplementedError

    # Don't complain if non-runnable code isn't run:
    if 0:
    if False:
    if __name__ == .__main__.:

ignore_errors = True

[html]
directory = htmlcov
2 .gitignore vendored
@@ -9,3 +9,5 @@
/share/spack/dotkit
/share/spack/modules
/TAGS
/htmlcov
.coverage
3 .style.yapf Normal file
@@ -0,0 +1,3 @@
[style]
based_on_style = pep8
column_limit = 80
18 .travis.yml
@@ -6,20 +6,32 @@ python:
# Use new Travis infrastructure (Docker can't sudo yet)
sudo: false

# No need to install any deps.
install: true
# Install coveralls to obtain code coverage
install:
  - "pip install coveralls"
  - "pip install flake8"

before_install:
  # Need this for the git tests to succeed.
  - git config --global user.email "spack@example.com"
  - git config --global user.name "Test User"
  # Need this to be able to compute the list of changed files
  - git fetch origin develop:develop

script:
  - . share/spack/setup-env.sh
  - spack compilers
  - spack config get compilers
  - spack test
  - spack install -v libdwarf
  # Run unit tests with code coverage
  - coverage run bin/spack test
  # Check that the files that have been changed are flake8 conformant
  - CHANGED_PYTHON_FILES=`git diff develop... --name-only | perl -ne 'print if /\.py$/'`
  - if [[ ${CHANGED_PYTHON_FILES} ]] ; then flake8 --format pylint --config flake8.ini ${CHANGED_PYTHON_FILES} ; fi

after_success:
  - coveralls

notifications:
  email:
@@ -1,7 +1,8 @@
[Spack logo image]
============

[](https://travis-ci.org/LLNL/spack)
[](https://coveralls.io/github/LLNL/spack?branch=develop)

Spack is a package management tool designed to support multiple
versions and configurations of software on a wide variety of platforms
@@ -152,7 +152,7 @@ def main():
    command = spack.cmd.get_command(args.command)
    try:
        return_val = command(parser, args)
    except SpackError, e:
    except SpackError as e:
        e.die()
    except KeyboardInterrupt:
        sys.stderr.write('\n')
3 flake8.ini Normal file
@@ -0,0 +1,3 @@
[flake8]
ignore = W391,F403
max-line-length = 120
@@ -372,25 +372,32 @@ how this is done is in :ref:`sec-specs`.
``spack compiler add``
~~~~~~~~~~~~~~~~~~~~~~~

An alias for ``spack compiler find``.

.. _spack-compiler-find:

``spack compiler find``
~~~~~~~~~~~~~~~~~~~~~~~

If you do not see a compiler in this list, but you want to use it with
Spack, you can simply run ``spack compiler add`` with the path to
Spack, you can simply run ``spack compiler find`` with the path to
where the compiler is installed. For example::

   $ spack compiler add /usr/local/tools/ic-13.0.079
   $ spack compiler find /usr/local/tools/ic-13.0.079
   ==> Added 1 new compiler to /Users/gamblin2/.spack/compilers.yaml
       intel@13.0.079

Or you can run ``spack compiler add`` with no arguments to force
Or you can run ``spack compiler find`` with no arguments to force
auto-detection. This is useful if you do not know where compilers are
installed, but you know that new compilers have been added to your
``PATH``. For example, using dotkit, you might do this::

   $ module load gcc-4.9.0
   $ spack compiler add
   $ spack compiler find
   ==> Added 1 new compiler to /Users/gamblin2/.spack/compilers.yaml
       gcc@4.9.0

This loads the environment module for gcc-4.9.0 to get it into the
This loads the environment module for gcc-4.9.0 to add it to
``PATH``, and then it adds the compiler to Spack.

.. _spack-compiler-info:
@@ -807,17 +814,22 @@ Environment Modules, you can get it with Spack:

1. Install with::

   .. code-block:: sh

      spack install environment-modules

2. Activate with::

      MODULES_HOME=`spack location -i environment-modules`
      MODULES_VERSION=`ls -1 $MODULES_HOME/Modules | head -1`
      ${MODULES_HOME}/Modules/${MODULES_VERSION}/bin/add.modules

   Add the following two lines to your ``.bashrc`` profile (or similar):

   .. code-block:: sh

      MODULES_HOME=`spack location -i environment-modules`
      source ${MODULES_HOME}/Modules/init/bash

   If you use a Unix shell other than bash, substitute ``bash`` with
   the appropriate file in ``${MODULES_HOME}/Modules/init/``.

This adds to your ``.bashrc`` (or similar) files, enabling Environment
Modules when you log in. It will ask your permission before changing
any files.

Spack and Environment Modules
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1803,15 +1803,15 @@ Compile-time library search paths
* ``-L$dep_prefix/lib``
* ``-L$dep_prefix/lib64``
Runtime library search paths (RPATHs)
* ``-Wl,-rpath,$dep_prefix/lib``
* ``-Wl,-rpath,$dep_prefix/lib64``
* ``$rpath_flag$dep_prefix/lib``
* ``$rpath_flag$dep_prefix/lib64``
Include search paths
* ``-I$dep_prefix/include``

An example of this would be the ``libdwarf`` build, which has one
dependency: ``libelf``. Every call to ``cc`` in the ``libdwarf``
build will have ``-I$LIBELF_PREFIX/include``,
``-L$LIBELF_PREFIX/lib``, and ``-Wl,-rpath,$LIBELF_PREFIX/lib``
``-L$LIBELF_PREFIX/lib``, and ``$rpath_flag$LIBELF_PREFIX/lib``
inserted on the command line. This is done transparently to the
project's build system, which will just think it's using a system
where ``libelf`` is readily available. Because of this, you **do
@@ -1831,6 +1831,48 @@ successfully find ``libdwarf.h`` and ``libdwarf.so``, without the
packager having to provide ``--with-libdwarf=/path/to/libdwarf`` on
the command line.

.. note::

   For most compilers, ``$rpath_flag`` is ``-Wl,-rpath,``. However, NAG
   passes its flags to GCC instead of passing them directly to the linker.
   Therefore, its ``$rpath_flag`` is doubly wrapped: ``-Wl,-Wl,,-rpath,``.
   ``$rpath_flag`` can be overridden on a compiler-specific basis in
   ``lib/spack/spack/compilers/$compiler.py``.

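As a sketch of what such a compiler-specific override can look like (modeled on the
NAG subclass touched by this commit; the class name here is only illustrative), a
subclass simply redefines the relevant property:

.. code-block:: python

   # Illustrative subclass only: real overrides live in
   # lib/spack/spack/compilers/<compiler>.py.
   class MyFortranCompiler(Compiler):

       @property
       def fc_rpath_arg(self):
           # Wrapped twice because the front end forwards flags to GCC,
           # which in turn forwards them to the linker.
           return '-Wl,-Wl,,-rpath,'
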
Compiler flags
~~~~~~~~~~~~~~
In rare circumstances, such as compiling and running small unit tests, a package
developer may need to know the appropriate compiler flags to enable features
like ``OpenMP``, ``C++11`` or ``C++14``. To that end, the compiler classes in
Spack implement the following *properties*: ``openmp_flag``, ``cxx11_flag`` and
``cxx14_flag``, which can be accessed in a package as ``self.compiler.cxx11_flag``
and so on. Note that if a given compiler version does not support the requested
feature, an error is produced, so package developers can also use these
properties to assert that a compiler supports a feature. This is handy when a
package supports additional variants like

.. code-block:: python

   variant('openmp', default=True, description="Enable OpenMP support.")

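For illustration, an ``install()`` method could consume these properties as
follows; the package and configure arguments are hypothetical, but the
``self.compiler.*_flag`` properties are the ones listed above:

.. code-block:: python

   class Example(Package):
       """Hypothetical package used only to illustrate the flag properties."""

       variant('openmp', default=True, description="Enable OpenMP support.")

       def install(self, spec, prefix):
           cflags = []
           if '+openmp' in spec:
               # Dies with an explanatory error if the chosen compiler
               # (version) does not support OpenMP.
               cflags.append(self.compiler.openmp_flag)
           configure('--prefix=%s' % prefix, 'CFLAGS=%s' % ' '.join(cflags))
           make()
           make('install')
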
Message Passing Interface (MPI)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is common for high performance computing software/packages to use MPI.
As a result of concretization, a given package can be built using different
implementations of MPI, such as ``OpenMPI``, ``MPICH`` or ``IntelMPI``.
In some scenarios, configuring a package requires providing it with the
appropriate MPI compiler wrappers, such as ``mpicc`` and ``mpic++``.
However, different implementations of MPI may use different names for those
wrappers. To make a package's ``install()`` method independent of the choice
of MPI implementation, each package that provides MPI sets up
``self.spec.mpicc``, ``self.spec.mpicxx``, ``self.spec.mpifc`` and ``self.spec.mpif77``
to point to the ``C``, ``C++``, ``Fortran 90`` and ``Fortran 77`` MPI wrappers.
Package developers are advised to use these variables, for example
``self.spec['mpi'].mpicc``, instead of hard-coding
``join_path(self.spec['mpi'].prefix.bin, 'mpicc')``, for the reasons outlined above.

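A minimal sketch of how a package might use these variables (the configure
options are hypothetical; the ``spec['mpi']`` attributes are the ones described
above):

.. code-block:: python

   def install(self, spec, prefix):
       configure('--prefix=%s' % prefix,
                 'CC=%s'  % spec['mpi'].mpicc,
                 'CXX=%s' % spec['mpi'].mpicxx,
                 'FC=%s'  % spec['mpi'].mpifc,
                 'F77=%s' % spec['mpi'].mpif77)
       make()
       make('install')
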

Forking ``install()``
~~~~~~~~~~~~~~~~~~~~~

41 lib/spack/env/cc vendored
@@ -38,15 +38,20 @@
# -Wl,-rpath arguments for dependency /lib directories.
#

# This is the list of environment variables that need to be set before
# This is an array of environment variables that need to be set before
# the script runs. They are set by routines in spack.build_environment
# as part of spack.package.Package.do_install().
parameters="
SPACK_PREFIX
SPACK_ENV_PATH
SPACK_DEBUG_LOG_DIR
SPACK_COMPILER_SPEC
SPACK_SHORT_SPEC"
parameters=(
    SPACK_PREFIX
    SPACK_ENV_PATH
    SPACK_DEBUG_LOG_DIR
    SPACK_COMPILER_SPEC
    SPACK_CC_RPATH_ARG
    SPACK_CXX_RPATH_ARG
    SPACK_F77_RPATH_ARG
    SPACK_FC_RPATH_ARG
    SPACK_SHORT_SPEC
)

# The compiler input variables are checked for sanity later:
#   SPACK_CC, SPACK_CXX, SPACK_F77, SPACK_FC
@@ -64,7 +69,7 @@ function die {
    exit 1
}

for param in $parameters; do
for param in ${parameters[@]}; do
    if [[ -z ${!param} ]]; then
        die "Spack compiler must be run from Spack! Input '$param' is missing."
    fi
@@ -85,6 +90,7 @@ done
#   ccld    compile & link

command=$(basename "$0")
comp="CC"
case "$command" in
    cpp)
        mode=cpp
@@ -92,18 +98,22 @@ case "$command" in
    cc|c89|c99|gcc|clang|icc|pgcc|xlc)
        command="$SPACK_CC"
        language="C"
        comp="CC"
        ;;
    c++|CC|g++|clang++|icpc|pgc++|xlc++)
        command="$SPACK_CXX"
        language="C++"
        comp="CXX"
        ;;
    f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor)
        command="$SPACK_FC"
        language="Fortran 90"
        comp="FC"
        ;;
    f77|gfortran|ifort|pgfortran|xlf|nagfor)
        command="$SPACK_F77"
        language="Fortran 77"
        comp="F77"
        ;;
    ld)
        mode=ld
@@ -142,6 +152,9 @@ if [[ -z $mode ]]; then
    done
fi

# Set up rpath variable according to language.
eval rpath=\$SPACK_${comp}_RPATH_ARG

# Dump the version and exit if we're in testing mode.
if [[ $SPACK_TEST_COMMAND == dump-mode ]]; then
    echo "$mode"
@@ -162,7 +175,7 @@ fi
# It doesn't work with -rpath.
# This variable controls whether they are added.
add_rpaths=true
if [[ mode == ld && $OSTYPE == darwin* ]]; then
if [[ $mode == ld && "$SPACK_SHORT_SPEC" =~ "darwin" ]]; then
    for arg in "$@"; do
        if [[ $arg == -r ]]; then
            add_rpaths=false
@@ -188,7 +201,7 @@ for dep in "${deps[@]}"; do
    # Prepend lib and RPATH directories
    if [[ -d $dep/lib ]]; then
        if [[ $mode == ccld ]]; then
            $add_rpaths && args=("-Wl,-rpath,$dep/lib" "${args[@]}")
            $add_rpaths && args=("$rpath$dep/lib" "${args[@]}")
            args=("-L$dep/lib" "${args[@]}")
        elif [[ $mode == ld ]]; then
            $add_rpaths && args=("-rpath" "$dep/lib" "${args[@]}")
@@ -199,7 +212,7 @@ for dep in "${deps[@]}"; do
    # Prepend lib64 and RPATH directories
    if [[ -d $dep/lib64 ]]; then
        if [[ $mode == ccld ]]; then
            $add_rpaths && args=("-Wl,-rpath,$dep/lib64" "${args[@]}")
            $add_rpaths && args=("$rpath$dep/lib64" "${args[@]}")
            args=("-L$dep/lib64" "${args[@]}")
        elif [[ $mode == ld ]]; then
            $add_rpaths && args=("-rpath" "$dep/lib64" "${args[@]}")
@@ -210,9 +223,11 @@ done

# Include all -L's and prefix/whatever dirs in rpath
if [[ $mode == ccld ]]; then
    $add_rpaths && args=("-Wl,-rpath,$SPACK_PREFIX/lib" "-Wl,-rpath,$SPACK_PREFIX/lib64" "${args[@]}")
    $add_rpaths && args=("$rpath$SPACK_PREFIX/lib64" "${args[@]}")
    $add_rpaths && args=("$rpath$SPACK_PREFIX/lib" "${args[@]}")
elif [[ $mode == ld ]]; then
    $add_rpaths && args=("-rpath" "$SPACK_PREFIX/lib" "-rpath" "$SPACK_PREFIX/lib64" "${args[@]}")
    $add_rpaths && args=("-rpath" "$SPACK_PREFIX/lib64" "${args[@]}")
    $add_rpaths && args=("-rpath" "$SPACK_PREFIX/lib" "${args[@]}")
fi

#
@@ -98,21 +98,27 @@ def set_compiler_environment_variables(pkg, env):
    # and return it
    # TODO : add additional kwargs for better diagnostics, like requestor, ttyout, ttyerr, etc.
    link_dir = spack.build_env_path
    env.set('CC', join_path(link_dir, pkg.compiler.link_paths['cc']))
    env.set('CXX', join_path(link_dir, pkg.compiler.link_paths['cxx']))
    env.set('F77', join_path(link_dir, pkg.compiler.link_paths['f77']))
    env.set('FC', join_path(link_dir, pkg.compiler.link_paths['fc']))

    # Set SPACK compiler variables so that our wrapper knows what to call
    compiler = pkg.compiler
    if compiler.cc:
        env.set('SPACK_CC', compiler.cc)
    if compiler.cxx:
        env.set('SPACK_CXX', compiler.cxx)
    if compiler.f77:
        env.set('SPACK_F77', compiler.f77)
    if compiler.fc:
        env.set('SPACK_FC', compiler.fc)

    # Set SPACK compiler rpath flags so that our wrapper knows what to use
    env.set('SPACK_CC_RPATH_ARG', compiler.cc_rpath_arg)
    env.set('SPACK_CXX_RPATH_ARG', compiler.cxx_rpath_arg)
    env.set('SPACK_F77_RPATH_ARG', compiler.f77_rpath_arg)
    env.set('SPACK_FC_RPATH_ARG', compiler.fc_rpath_arg)

    env.set('SPACK_COMPILER_SPEC', str(pkg.spec.compiler))
    return env
@@ -175,8 +181,8 @@ def set_build_environment_variables(pkg, env):
    # Add any pkgconfig directories to PKG_CONFIG_PATH
    pkg_config_dirs = []
    for p in dep_prefixes:
        for libdir in ('lib', 'lib64'):
            pcdir = join_path(p, libdir, 'pkgconfig')
        for maybe in ('lib', 'lib64', 'share'):
            pcdir = join_path(p, maybe, 'pkgconfig')
            if os.path.isdir(pcdir):
                pkg_config_dirs.append(pcdir)
    env.set_path('PKG_CONFIG_PATH', pkg_config_dirs)
@@ -22,19 +22,18 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
import argparse
import sys

import llnl.util.tty as tty
from llnl.util.tty.color import colorize
from llnl.util.tty.colify import colify
from llnl.util.lang import index_by

import spack.compilers
import spack.spec
import spack.config
from spack.util.environment import get_path
import spack.spec
from llnl.util.lang import index_by
from llnl.util.tty.colify import colify
from llnl.util.tty.color import colorize
from spack.spec import CompilerSpec
from spack.util.environment import get_path

description = "Manage compilers"

@@ -44,10 +43,10 @@ def setup_parser(subparser):

    scopes = spack.config.config_scopes

    # Add
    add_parser = sp.add_parser('add', help='Add compilers to the Spack configuration.')
    add_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
    add_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope,
    # Find
    find_parser = sp.add_parser('find', aliases=['add'], help='Search the system for compilers to add to the Spack configuration.')
    find_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
    find_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope,
                             help="Configuration scope to modify.")

    # Remove
@@ -70,7 +69,7 @@ def setup_parser(subparser):
                             help="Configuration scope to read from.")


def compiler_add(args):
def compiler_find(args):
    """Search either $PATH or a list of paths for compilers and add them
    to Spack's configuration."""
    paths = args.add_paths
@@ -136,7 +135,8 @@ def compiler_list(args):


def compiler(parser, args):
    action = { 'add' : compiler_add,
    action = { 'add' : compiler_find,
               'find' : compiler_find,
               'remove' : compiler_remove,
               'rm' : compiler_remove,
               'info' : compiler_info,
@@ -124,10 +124,12 @@ def __call__(self, stage):
        autotools = "configure('--prefix=%s' % prefix)"
        cmake = "cmake('.', *std_cmake_args)"
        python = "python('setup.py', 'install', '--prefix=%s' % prefix)"
        r = "R('CMD', 'INSTALL', '--library=%s' % self.module.r_lib_dir, '%s' % self.stage.archive_file)"

        config_lines = ((r'/configure$', 'autotools', autotools),
                        (r'/CMakeLists.txt$', 'cmake', cmake),
                        (r'/setup.py$', 'python', python))
                        (r'/setup.py$', 'python', python),
                        (r'/NAMESPACE$', 'r', r))

        # Peek inside the tarball.
        tar = which('tar')
@@ -272,6 +274,10 @@ def create(parser, args):
    if guesser.build_system == 'python':
        name = 'py-%s' % name

    # Prepend 'r-' to R package names, by convention.
    if guesser.build_system == 'r':
        name = 'r-%s' % name

    # Create a directory for the new package.
    pkg_path = repo.filename_for_package_name(name)
    if os.path.exists(pkg_path) and not args.force:
@ -23,87 +23,106 @@
|
||||
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import argparse
|
||||
import xml.etree.ElementTree as ET
|
||||
import itertools
|
||||
import re
|
||||
import os
|
||||
import codecs
|
||||
import os
|
||||
import time
|
||||
import xml.dom.minidom
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import *
|
||||
|
||||
import spack
|
||||
import spack.cmd
|
||||
from llnl.util.filesystem import *
|
||||
from spack.build_environment import InstallError
|
||||
from spack.fetch_strategy import FetchError
|
||||
import spack.cmd
|
||||
|
||||
description = "Run package installation as a unit test, output formatted results."
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-j', '--jobs', action='store', type=int,
|
||||
help="Explicitly set number of make jobs. Default is #cpus.")
|
||||
subparser.add_argument('-j',
|
||||
'--jobs',
|
||||
action='store',
|
||||
type=int,
|
||||
help="Explicitly set number of make jobs. Default is #cpus.")
|
||||
|
||||
subparser.add_argument(
|
||||
'-n', '--no-checksum', action='store_true', dest='no_checksum',
|
||||
help="Do not check packages against checksum")
|
||||
subparser.add_argument('-n',
|
||||
'--no-checksum',
|
||||
action='store_true',
|
||||
dest='no_checksum',
|
||||
help="Do not check packages against checksum")
|
||||
|
||||
subparser.add_argument(
|
||||
'-o', '--output', action='store', help="test output goes in this file")
|
||||
subparser.add_argument('-o', '--output', action='store', help="test output goes in this file")
|
||||
|
||||
subparser.add_argument(
|
||||
'package', nargs=argparse.REMAINDER, help="spec of package to install")
|
||||
|
||||
|
||||
class JunitResultFormat(object):
|
||||
def __init__(self):
|
||||
self.root = ET.Element('testsuite')
|
||||
self.tests = []
|
||||
|
||||
def add_test(self, buildId, testResult, buildInfo=None):
|
||||
self.tests.append((buildId, testResult, buildInfo))
|
||||
|
||||
def write_to(self, stream):
|
||||
self.root.set('tests', '{0}'.format(len(self.tests)))
|
||||
for buildId, testResult, buildInfo in self.tests:
|
||||
testcase = ET.SubElement(self.root, 'testcase')
|
||||
testcase.set('classname', buildId.name)
|
||||
testcase.set('name', buildId.stringId())
|
||||
if testResult == TestResult.FAILED:
|
||||
failure = ET.SubElement(testcase, 'failure')
|
||||
failure.set('type', "Build Error")
|
||||
failure.text = buildInfo
|
||||
elif testResult == TestResult.SKIPPED:
|
||||
skipped = ET.SubElement(testcase, 'skipped')
|
||||
skipped.set('type', "Skipped Build")
|
||||
skipped.text = buildInfo
|
||||
ET.ElementTree(self.root).write(stream)
|
||||
subparser.add_argument('package', nargs=argparse.REMAINDER, help="spec of package to install")
|
||||
|
||||
|
||||
class TestResult(object):
|
||||
PASSED = 0
|
||||
FAILED = 1
|
||||
SKIPPED = 2
|
||||
ERRORED = 3
|
||||
|
||||
|
||||
class BuildId(object):
|
||||
def __init__(self, spec):
|
||||
self.name = spec.name
|
||||
self.version = spec.version
|
||||
self.hashId = spec.dag_hash()
|
||||
class TestSuite(object):
|
||||
def __init__(self, filename):
|
||||
self.filename = filename
|
||||
self.root = ET.Element('testsuite')
|
||||
self.tests = []
|
||||
|
||||
def stringId(self):
|
||||
return "-".join(str(x) for x in (self.name, self.version, self.hashId))
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.name, self.version, self.hashId))
|
||||
def append(self, item):
|
||||
if not isinstance(item, TestCase):
|
||||
raise TypeError('only TestCase instances may be appended to a TestSuite instance')
|
||||
self.tests.append(item) # Append the item to the list of tests
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, BuildId):
|
||||
return False
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
# Prepare the header for the entire test suite
|
||||
number_of_errors = sum(x.result_type == TestResult.ERRORED for x in self.tests)
|
||||
self.root.set('errors', str(number_of_errors))
|
||||
number_of_failures = sum(x.result_type == TestResult.FAILED for x in self.tests)
|
||||
self.root.set('failures', str(number_of_failures))
|
||||
self.root.set('tests', str(len(self.tests)))
|
||||
|
||||
return ((self.name, self.version, self.hashId) ==
|
||||
(other.name, other.version, other.hashId))
|
||||
for item in self.tests:
|
||||
self.root.append(item.element)
|
||||
|
||||
with open(self.filename, 'wb') as file:
|
||||
xml_string = ET.tostring(self.root)
|
||||
xml_string = xml.dom.minidom.parseString(xml_string).toprettyxml()
|
||||
file.write(xml_string)
|
||||
|
||||
|
||||
class TestCase(object):
|
||||
|
||||
results = {
|
||||
TestResult.PASSED: None,
|
||||
TestResult.SKIPPED: 'skipped',
|
||||
TestResult.FAILED: 'failure',
|
||||
TestResult.ERRORED: 'error',
|
||||
}
|
||||
|
||||
def __init__(self, classname, name, time=None):
|
||||
self.element = ET.Element('testcase')
|
||||
self.element.set('classname', str(classname))
|
||||
self.element.set('name', str(name))
|
||||
if time is not None:
|
||||
self.element.set('time', str(time))
|
||||
self.result_type = None
|
||||
|
||||
def set_result(self, result_type, message=None, error_type=None, text=None):
|
||||
self.result_type = result_type
|
||||
result = TestCase.results[self.result_type]
|
||||
if result is not None and result is not TestResult.PASSED:
|
||||
subelement = ET.SubElement(self.element, result)
|
||||
if error_type is not None:
|
||||
subelement.set('type', error_type)
|
||||
if message is not None:
|
||||
subelement.set('message', str(message))
|
||||
if text is not None:
|
||||
subelement.text = text
|
||||
|
||||
|
||||
def fetch_log(path):
|
||||
@ -114,46 +133,76 @@ def fetch_log(path):
|
||||
|
||||
|
||||
def failed_dependencies(spec):
|
||||
return set(childSpec for childSpec in spec.dependencies.itervalues() if not
|
||||
spack.repo.get(childSpec).installed)
|
||||
return set(item for item in spec.dependencies.itervalues() if not spack.repo.get(item).installed)
|
||||
|
||||
|
||||
def create_test_output(topSpec, newInstalls, output, getLogFunc=fetch_log):
|
||||
# Post-order traversal is not strictly required but it makes sense to output
|
||||
# tests for dependencies first.
|
||||
for spec in topSpec.traverse(order='post'):
|
||||
if spec not in newInstalls:
|
||||
continue
|
||||
def get_top_spec_or_die(args):
|
||||
specs = spack.cmd.parse_specs(args.package, concretize=True)
|
||||
if len(specs) > 1:
|
||||
tty.die("Only 1 top-level package can be specified")
|
||||
top_spec = iter(specs).next()
|
||||
return top_spec
|
||||
|
||||
failedDeps = failed_dependencies(spec)
|
||||
package = spack.repo.get(spec)
|
||||
if failedDeps:
|
||||
result = TestResult.SKIPPED
|
||||
dep = iter(failedDeps).next()
|
||||
depBID = BuildId(dep)
|
||||
errOutput = "Skipped due to failed dependency: {0}".format(
|
||||
depBID.stringId())
|
||||
elif (not package.installed) and (not package.stage.source_path):
|
||||
result = TestResult.FAILED
|
||||
errOutput = "Failure to fetch package resources."
|
||||
elif not package.installed:
|
||||
result = TestResult.FAILED
|
||||
lines = getLogFunc(package.build_log_path)
|
||||
errMessages = list(line for line in lines if
|
||||
re.search('error:', line, re.IGNORECASE))
|
||||
errOutput = errMessages if errMessages else lines[-10:]
|
||||
errOutput = '\n'.join(itertools.chain(
|
||||
[spec.to_yaml(), "Errors:"], errOutput,
|
||||
["Build Log:", package.build_log_path]))
|
||||
else:
|
||||
result = TestResult.PASSED
|
||||
errOutput = None
|
||||
|
||||
bId = BuildId(spec)
|
||||
output.add_test(bId, result, errOutput)
|
||||
def install_single_spec(spec, number_of_jobs):
|
||||
package = spack.repo.get(spec)
|
||||
|
||||
# If it is already installed, skip the test
|
||||
if spack.repo.get(spec).installed:
|
||||
testcase = TestCase(package.name, package.spec.short_spec, time=0.0)
|
||||
testcase.set_result(TestResult.SKIPPED, message='Skipped [already installed]', error_type='already_installed')
|
||||
return testcase
|
||||
|
||||
# If it relies on dependencies that did not install, skip
|
||||
if failed_dependencies(spec):
|
||||
testcase = TestCase(package.name, package.spec.short_spec, time=0.0)
|
||||
testcase.set_result(TestResult.SKIPPED, message='Skipped [failed dependencies]', error_type='dep_failed')
|
||||
return testcase
|
||||
|
||||
# Otherwise try to install the spec
|
||||
try:
|
||||
start_time = time.time()
|
||||
package.do_install(keep_prefix=False,
|
||||
keep_stage=True,
|
||||
ignore_deps=False,
|
||||
make_jobs=number_of_jobs,
|
||||
verbose=True,
|
||||
fake=False)
|
||||
duration = time.time() - start_time
|
||||
testcase = TestCase(package.name, package.spec.short_spec, duration)
|
||||
testcase.set_result(TestResult.PASSED)
|
||||
except InstallError:
|
||||
# An InstallError is considered a failure (the recipe didn't work correctly)
|
||||
duration = time.time() - start_time
|
||||
# Try to get the log
|
||||
lines = fetch_log(package.build_log_path)
|
||||
text = '\n'.join(lines)
|
||||
testcase = TestCase(package.name, package.spec.short_spec, duration)
|
||||
testcase.set_result(TestResult.FAILED, message='Installation failure', text=text)
|
||||
|
||||
except FetchError:
|
||||
# A FetchError is considered an error (we didn't even start building)
|
||||
duration = time.time() - start_time
|
||||
testcase = TestCase(package.name, package.spec.short_spec, duration)
|
||||
testcase.set_result(TestResult.ERRORED, message='Unable to fetch package')
|
||||
|
||||
return testcase
|
||||
|
||||
|
||||
def get_filename(args, top_spec):
|
||||
if not args.output:
|
||||
fname = 'test-{x.name}-{x.version}-{hash}.xml'.format(x=top_spec, hash=top_spec.dag_hash())
|
||||
output_directory = join_path(os.getcwd(), 'test-output')
|
||||
if not os.path.exists(output_directory):
|
||||
os.mkdir(output_directory)
|
||||
output_filename = join_path(output_directory, fname)
|
||||
else:
|
||||
output_filename = args.output
|
||||
return output_filename
|
||||
|
||||
|
||||
def test_install(parser, args):
|
||||
# Check the input
|
||||
if not args.package:
|
||||
tty.die("install requires a package argument")
|
||||
|
||||
@ -162,50 +211,15 @@ def test_install(parser, args):
|
||||
tty.die("The -j option must be a positive integer!")
|
||||
|
||||
if args.no_checksum:
|
||||
spack.do_checksum = False # TODO: remove this global.
|
||||
spack.do_checksum = False # TODO: remove this global.
|
||||
|
||||
specs = spack.cmd.parse_specs(args.package, concretize=True)
|
||||
if len(specs) > 1:
|
||||
tty.die("Only 1 top-level package can be specified")
|
||||
topSpec = iter(specs).next()
|
||||
|
||||
newInstalls = set()
|
||||
for spec in topSpec.traverse():
|
||||
package = spack.repo.get(spec)
|
||||
if not package.installed:
|
||||
newInstalls.add(spec)
|
||||
|
||||
if not args.output:
|
||||
bId = BuildId(topSpec)
|
||||
outputDir = join_path(os.getcwd(), "test-output")
|
||||
if not os.path.exists(outputDir):
|
||||
os.mkdir(outputDir)
|
||||
outputFpath = join_path(outputDir, "test-{0}.xml".format(bId.stringId()))
|
||||
else:
|
||||
outputFpath = args.output
|
||||
|
||||
for spec in topSpec.traverse(order='post'):
|
||||
# Calling do_install for the top-level package would be sufficient but
|
||||
# this attempts to keep going if any package fails (other packages which
|
||||
# are not dependents may succeed)
|
||||
package = spack.repo.get(spec)
|
||||
if (not failed_dependencies(spec)) and (not package.installed):
|
||||
try:
|
||||
package.do_install(
|
||||
keep_prefix=False,
|
||||
keep_stage=True,
|
||||
ignore_deps=False,
|
||||
make_jobs=args.jobs,
|
||||
verbose=True,
|
||||
fake=False)
|
||||
except InstallError:
|
||||
pass
|
||||
except FetchError:
|
||||
pass
|
||||
|
||||
jrf = JunitResultFormat()
|
||||
handled = {}
|
||||
create_test_output(topSpec, newInstalls, jrf)
|
||||
|
||||
with open(outputFpath, 'wb') as F:
|
||||
jrf.write_to(F)
|
||||
# Get the one and only top spec
|
||||
top_spec = get_top_spec_or_die(args)
|
||||
# Get the filename of the test
|
||||
output_filename = get_filename(args, top_spec)
|
||||
# TEST SUITE
|
||||
with TestSuite(output_filename) as test_suite:
|
||||
# Traverse in post order : each spec is a test case
|
||||
for spec in top_spec.traverse(order='post'):
|
||||
test_case = install_single_spec(spec, args.jobs)
|
||||
test_suite.append(test_case)
|
||||
|
@@ -91,11 +91,22 @@ class Compiler(object):
    # version suffix for gcc.
    suffixes = [r'-.*']

    # Names of generic arguments used by this compiler
    arg_rpath = '-Wl,-rpath,%s'
    # Default flags used by a compiler to set an rpath
    @property
    def cc_rpath_arg(self):
        return '-Wl,-rpath,'

    # argument used to get C++11 options
    cxx11_flag = "-std=c++11"
    @property
    def cxx_rpath_arg(self):
        return '-Wl,-rpath,'

    @property
    def f77_rpath_arg(self):
        return '-Wl,-rpath,'

    @property
    def fc_rpath_arg(self):
        return '-Wl,-rpath,'


    def __init__(self, cspec, cc, cxx, f77, fc):
@@ -117,6 +128,37 @@ def check(exe):
    def version(self):
        return self.spec.version

    # This property should be overridden in the compiler subclass if
    # OpenMP is supported by that compiler
    @property
    def openmp_flag(self):
        # If it is not overridden, assume it is not supported and warn the user
        tty.die("The compiler you have chosen does not currently support OpenMP.",
                "If you think it should, please edit the compiler subclass and",
                "submit a pull request or issue.")

    # This property should be overridden in the compiler subclass if
    # C++11 is supported by that compiler
    @property
    def cxx11_flag(self):
        # If it is not overridden, assume it is not supported and warn the user
        tty.die("The compiler you have chosen does not currently support C++11.",
                "If you think it should, please edit the compiler subclass and",
                "submit a pull request or issue.")

    # This property should be overridden in the compiler subclass if
    # C++14 is supported by that compiler
    @property
    def cxx14_flag(self):
        # If it is not overridden, assume it is not supported and warn the user
        tty.die("The compiler you have chosen does not currently support C++14.",
                "If you think it should, please edit the compiler subclass and",
                "submit a pull request or issue.")


    #
    # Compiler classes have methods for querying the version of
    # specific compiler executables. This is used when discovering compilers.
@@ -202,6 +244,10 @@ def check(key):
            return None

    successful = [key for key in parmap(check, checks) if key is not None]
    # The 'successful' list is ordered like the input paths.
    # Reverse it here so that the dict creation (last insert wins)
    # does not spoil the intended precedence.
    successful.reverse()
    return dict(((v, p, s), path) for v, p, s, path in successful)

    @classmethod
@@ -26,6 +26,8 @@
import spack.compiler as cpr
from spack.compiler import *
from spack.util.executable import *
import llnl.util.tty as tty
from spack.version import ver

class Clang(Compiler):
    # Subclasses use possible names of C compiler
@@ -47,6 +49,29 @@ class Clang(Compiler):
                  'f77' : 'f77',
                  'fc'  : 'f90' }

    @property
    def is_apple(self):
        ver_string = str(self.version)
        return ver_string.endswith('-apple')

    @property
    def openmp_flag(self):
        if self.is_apple:
            tty.die("Clang from Apple does not support Openmp yet.")
        else:
            return "-fopenmp"

    @property
    def cxx11_flag(self):
        if self.is_apple:
            # FIXME: figure out from which version Apple's clang supports c++11
            return "-std=c++11"
        else:
            if self.version < ver('3.3'):
                tty.die("Only Clang 3.3 and above support c++11.")
            else:
                return "-std=c++11"

    @classmethod
    def default_version(self, comp):
        """The '--version' option works for clang compilers.
@@ -49,14 +49,25 @@ class Gcc(Compiler):
                  'f77' : 'gcc/gfortran',
                  'fc'  : 'gcc/gfortran' }

    @property
    def openmp_flag(self):
        return "-fopenmp"

    @property
    def cxx11_flag(self):
        if self.version < ver('4.3'):
            tty.die("Only gcc 4.3 and above support c++11.")
        elif self.version < ver('4.7'):
            return "-std=gnu++0x"
            return "-std=c++0x"
        else:
            return "-std=gnu++11"
            return "-std=c++11"

    @property
    def cxx14_flag(self):
        if self.version < ver('4.8'):
            tty.die("Only gcc 4.8 and above support c++14.")
        else:
            return "-std=c++14"

    @classmethod
    def fc_version(cls, fc):
@@ -23,6 +23,8 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack.compiler import *
import llnl.util.tty as tty
from spack.version import ver

class Intel(Compiler):
    # Subclasses use possible names of C compiler
@@ -43,6 +45,13 @@ class Intel(Compiler):
                  'f77' : 'intel/ifort',
                  'fc'  : 'intel/ifort' }

    @property
    def openmp_flag(self):
        if self.version < ver('16.0'):
            return "-openmp"
        else:
            return "-qopenmp"

    @property
    def cxx11_flag(self):
        if self.version < ver('11.1'):
@@ -68,5 +77,3 @@ def default_version(cls, comp):
    """
    return get_compiler_version(
        comp, '--version', r'\((?:IFORT|ICC)\) ([^ ]+)')
@@ -1,4 +1,5 @@
from spack.compiler import *
import llnl.util.tty as tty

class Nag(Compiler):
    # Subclasses use possible names of C compiler
@@ -20,6 +21,27 @@ class Nag(Compiler):
                  'f77' : 'nag/nagfor',
                  'fc'  : 'nag/nagfor' }

    @property
    def openmp_flag(self):
        return "-openmp"

    @property
    def cxx11_flag(self):
        # NAG does not have a C++ compiler
        # However, it can be mixed with a compiler that does support it
        return "-std=c++11"

    # Unlike other compilers, the NAG compiler passes options to GCC, which
    # then passes them to the linker. Therefore, we need to doubly wrap the
    # options with '-Wl,-Wl,,'
    @property
    def f77_rpath_arg(self):
        return '-Wl,-Wl,,-rpath,'

    @property
    def fc_rpath_arg(self):
        return '-Wl,-Wl,,-rpath,'

    @classmethod
    def default_version(self, comp):
        """The '-V' option works for nag compilers.
@@ -23,6 +23,7 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack.compiler import *
import llnl.util.tty as tty

class Pgi(Compiler):
    # Subclasses use possible names of C compiler
@@ -43,6 +44,15 @@ class Pgi(Compiler):
                  'f77' : 'pgi/pgfortran',
                  'fc'  : 'pgi/pgfortran' }

    @property
    def openmp_flag(self):
        return "-mp"

    @property
    def cxx11_flag(self):
        return "-std=c++11"


    @classmethod
    def default_version(cls, comp):
        """The '-V' option works for all the PGI compilers.
@@ -54,4 +64,3 @@ def default_version(cls, comp):
    """
    return get_compiler_version(
        comp, '-V', r'pg[^ ]* ([^ ]+) \d\d\d?-bit target')
@@ -24,6 +24,8 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack.compiler import *
import llnl.util.tty as tty
from spack.version import ver

class Xl(Compiler):
    # Subclasses use possible names of C compiler
@@ -44,6 +46,10 @@ class Xl(Compiler):
                  'f77' : 'xl/xlf',
                  'fc'  : 'xl/xlf90' }

    @property
    def openmp_flag(self):
        return "-qsmp=omp"

    @property
    def cxx11_flag(self):
        if self.version < ver('13.1'):
@@ -618,14 +618,16 @@ def update_config(section, update_data, scope=None):
       other yaml-ish structure.

    """
    # read in the config to ensure we've got current data
    get_config(section)
    validate_section_name(section)  # validate section name
    scope = validate_scope(scope)   # get ConfigScope object from string.

    validate_section_name(section)  # validate section name
    scope = validate_scope(scope)   # get ConfigScope object from string.
    # read in the config to ensure we've got current data
    configuration = get_config(section)

    configuration.update(update_data)

    # read only the requested section's data.
    scope.sections[section] = { section : update_data }
    scope.sections[section] = {section: configuration}
    scope.write_section(section)

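A brief usage sketch of the updated function; the nested-dict shape mirrors the
compiler config tests elsewhere in this commit, and the entry values and scope
name are hypothetical:

    import spack.config

    # Hypothetical compiler entry in the same nested-dict shape the tests use.
    new_entry = {'x86_64_E5v2_IntelIB': {'gcc@4.7.3': {'cc': '/gcc473',
                                                       'cxx': '/g++473',
                                                       'f77': None,
                                                       'fc': None}}}

    # Merge the entry into the existing 'compilers' section of the chosen
    # configuration scope, then write that scope back to disk.
    spack.config.update_config('compilers', new_entry, scope='user')

    # get_config() returns the merged view across all configuration scopes.
    merged = spack.config.get_config('compilers')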
@@ -157,12 +157,26 @@ def fetch(self):
            tty.msg("Already downloaded %s" % self.archive_file)
            return

        possible_files = self.stage.expected_archive_files
        save_file = None
        partial_file = None
        if possible_files:
            save_file = self.stage.expected_archive_files[0]
            partial_file = self.stage.expected_archive_files[0] + '.part'

        tty.msg("Trying to fetch from %s" % self.url)

        curl_args = ['-O',  # save file to disk
        if partial_file:
            save_args = ['-C', '-',            # continue partial downloads
                         '-o', partial_file]   # use a .part file
        else:
            save_args = ['-O']

        curl_args = save_args + [
            '-f',       # fail on >400 errors
            '-D', '-',  # print out HTML headers
            '-L', self.url, ]
            '-L',       # resolve 3xx redirects
            self.url, ]

        if sys.stdout.isatty():
            curl_args.append('-#')  # status bar when using a tty
@@ -178,6 +192,9 @@ def fetch(self):
            if self.archive_file:
                os.remove(self.archive_file)

            if partial_file and os.path.exists(partial_file):
                os.remove(partial_file)

            if spack.curl.returncode == 22:
                # This is a 404.  Curl will print the error.
                raise FailedDownloadError(
@@ -209,6 +226,9 @@ def fetch(self):
                    "'spack clean <package>' to remove the bad archive, then fix",
                    "your internet gateway issue and install again.")

        if save_file:
            os.rename(partial_file, save_file)

        if not self.archive_file:
            raise FailedDownloadError(self.url)

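For illustration, with a hypothetical archive URL and an empty stage, the logic
above now assembles roughly this argument list (a sketch of the code path, not
output captured from Spack):

    url = 'http://example.com/foo-1.0.tar.gz'            # hypothetical
    partial_file = 'foo-1.0.tar.gz.part'
    save_args = ['-C', '-',            # resume an interrupted download
                 '-o', partial_file]   # write to the .part file first
    curl_args = save_args + ['-f', '-D', '-', '-L', url]
    # curl -C - -o foo-1.0.tar.gz.part -f -D - -L http://example.com/foo-1.0.tar.gz
    # On success, the .part file is renamed to the expected archive name.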
@@ -210,6 +210,18 @@ def _need_to_create_path(self):

        return False

    @property
    def expected_archive_files(self):
        """Possible archive file paths."""
        paths = []
        if isinstance(self.fetcher, fs.URLFetchStrategy):
            paths.append(os.path.join(self.path, os.path.basename(self.fetcher.url)))

        if self.mirror_path:
            paths.append(os.path.join(self.path, os.path.basename(self.mirror_path)))

        return paths

    @property
    def archive_file(self):
        """Path to the source archive within this stage directory."""
@@ -62,14 +62,14 @@
         'optional_deps',
         'make_executable',
         'configure_guess',
         'unit_install',
         'lock',
         'database',
         'namespace_trie',
         'yaml',
         'sbang',
         'environment',
         'cmd.uninstall']
         'cmd.uninstall',
         'cmd.test_install']


def list_tests():
@@ -67,6 +67,11 @@ def setUp(self):
        os.environ['SPACK_COMPILER_SPEC'] = "gcc@4.4.7"
        os.environ['SPACK_SHORT_SPEC'] = "foo@1.2"

        os.environ['SPACK_CC_RPATH_ARG']  = "-Wl,-rpath,"
        os.environ['SPACK_CXX_RPATH_ARG'] = "-Wl,-rpath,"
        os.environ['SPACK_F77_RPATH_ARG'] = "-Wl,-rpath,"
        os.environ['SPACK_FC_RPATH_ARG']  = "-Wl,-rpath,"

        # Make some fake dependencies
        self.tmp_deps = tempfile.mkdtemp()
        self.dep1 = join_path(self.tmp_deps, 'dep1')
@@ -219,3 +224,27 @@ def test_ld_deps(self):

                      ' '.join(test_command))

    def test_ld_deps_reentrant(self):
        """Make sure ld -r is handled correctly on OS's where it doesn't
           support rpaths."""
        os.environ['SPACK_DEPENDENCIES'] = ':'.join([self.dep1])

        os.environ['SPACK_SHORT_SPEC'] = "foo@1.2=linux-x86_64"
        reentrant_test_command = ['-r'] + test_command
        self.check_ld('dump-args', reentrant_test_command,
                      'ld ' +
                      '-rpath ' + self.prefix + '/lib ' +
                      '-rpath ' + self.prefix + '/lib64 ' +

                      '-L' + self.dep1 + '/lib ' +
                      '-rpath ' + self.dep1 + '/lib ' +

                      '-r ' +
                      ' '.join(test_command))

        os.environ['SPACK_SHORT_SPEC'] = "foo@1.2=darwin-x86_64"
        self.check_ld('dump-args', reentrant_test_command,
                      'ld ' +
                      '-L' + self.dep1 + '/lib ' +
                      '-r ' +
                      ' '.join(test_command))
190
lib/spack/spack/test/cmd/test_install.py
Normal file
190
lib/spack/spack/test/cmd/test_install.py
Normal file
@ -0,0 +1,190 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License (as published by
|
||||
# the Free Software Foundation) version 2.1 dated February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import collections
|
||||
from contextlib import contextmanager
|
||||
|
||||
import StringIO
|
||||
|
||||
FILE_REGISTRY = collections.defaultdict(StringIO.StringIO)
|
||||
|
||||
# Monkey-patch open to write module files to a StringIO instance
|
||||
@contextmanager
|
||||
def mock_open(filename, mode):
|
||||
if not mode == 'wb':
|
||||
raise RuntimeError('test.test_install : unexpected opening mode for monkey-patched open')
|
||||
|
||||
FILE_REGISTRY[filename] = StringIO.StringIO()
|
||||
|
||||
try:
|
||||
yield FILE_REGISTRY[filename]
|
||||
finally:
|
||||
handle = FILE_REGISTRY[filename]
|
||||
FILE_REGISTRY[filename] = handle.getvalue()
|
||||
handle.close()
|
||||
|
||||
import os
|
||||
import itertools
|
||||
import unittest
|
||||
|
||||
import spack
|
||||
import spack.cmd
|
||||
|
||||
|
||||
# The use of __import__ is necessary to maintain a name with hyphen (which cannot be an identifier in python)
|
||||
test_install = __import__("spack.cmd.test-install", fromlist=['test_install'])
|
||||
|
||||
|
||||
class MockSpec(object):
|
||||
def __init__(self, name, version, hashStr=None):
|
||||
self.dependencies = {}
|
||||
self.name = name
|
||||
self.version = version
|
||||
self.hash = hashStr if hashStr else hash((name, version))
|
||||
|
||||
def traverse(self, order=None):
|
||||
for _, spec in self.dependencies.items():
|
||||
yield spec
|
||||
yield self
|
||||
#allDeps = itertools.chain.from_iterable(i.traverse() for i in self.dependencies.itervalues())
|
||||
#return set(itertools.chain([self], allDeps))
|
||||
|
||||
def dag_hash(self):
|
||||
return self.hash
|
||||
|
||||
@property
|
||||
def short_spec(self):
|
||||
return '-'.join([self.name, str(self.version), str(self.hash)])
|
||||
|
||||
|
||||
class MockPackage(object):
|
||||
def __init__(self, spec, buildLogPath):
|
||||
self.name = spec.name
|
||||
self.spec = spec
|
||||
self.installed = False
|
||||
self.build_log_path = buildLogPath
|
||||
|
||||
def do_install(self, *args, **kwargs):
|
||||
self.installed = True
|
||||
|
||||
|
||||
class MockPackageDb(object):
|
||||
def __init__(self, init=None):
|
||||
self.specToPkg = {}
|
||||
if init:
|
||||
self.specToPkg.update(init)
|
||||
|
||||
def get(self, spec):
|
||||
return self.specToPkg[spec]
|
||||
|
||||
|
||||
def mock_fetch_log(path):
|
||||
return []
|
||||
|
||||
specX = MockSpec('X', "1.2.0")
|
||||
specY = MockSpec('Y', "2.3.8")
|
||||
specX.dependencies['Y'] = specY
|
||||
pkgX = MockPackage(specX, 'logX')
|
||||
pkgY = MockPackage(specY, 'logY')
|
||||
|
||||
|
||||
class MockArgs(object):
|
||||
def __init__(self, package):
|
||||
self.package = package
|
||||
self.jobs = None
|
||||
self.no_checksum = False
|
||||
self.output = None
|
||||
|
||||
|
||||
# TODO: add test(s) where Y fails to install
|
||||
class TestInstallTest(unittest.TestCase):
|
||||
"""
|
||||
Tests test-install where X->Y
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
super(TestInstallTest, self).setUp()
|
||||
|
||||
# Monkey patch parse specs
|
||||
def monkey_parse_specs(x, concretize):
|
||||
if x == 'X':
|
||||
return [specX]
|
||||
elif x == 'Y':
|
||||
return [specY]
|
||||
return []
|
||||
|
||||
self.parse_specs = spack.cmd.parse_specs
|
||||
spack.cmd.parse_specs = monkey_parse_specs
|
||||
|
||||
# Monkey patch os.mkdirp
|
||||
self.os_mkdir = os.mkdir
|
||||
os.mkdir = lambda x: True
|
||||
|
||||
# Monkey patch open
|
||||
test_install.open = mock_open
|
||||
|
||||
# Clean FILE_REGISTRY
|
||||
FILE_REGISTRY = collections.defaultdict(StringIO.StringIO)
|
||||
|
||||
pkgX.installed = False
|
||||
pkgY.installed = False
|
||||
|
||||
# Monkey patch pkgDb
|
||||
self.saved_db = spack.repo
|
||||
pkgDb = MockPackageDb({specX: pkgX, specY: pkgY})
|
||||
spack.repo = pkgDb
|
||||
|
||||
def tearDown(self):
|
||||
# Remove the monkey patched test_install.open
|
||||
test_install.open = open
|
||||
|
||||
# Remove the monkey patched os.mkdir
|
||||
os.mkdir = self.os_mkdir
|
||||
del self.os_mkdir
|
||||
|
||||
# Remove the monkey patched parse_specs
|
||||
spack.cmd.parse_specs = self.parse_specs
|
||||
del self.parse_specs
|
||||
super(TestInstallTest, self).tearDown()
|
||||
|
||||
spack.repo = self.saved_db
|
||||
|
||||
def test_installing_both(self):
|
||||
test_install.test_install(None, MockArgs('X') )
|
||||
self.assertEqual(len(FILE_REGISTRY), 1)
|
||||
for _, content in FILE_REGISTRY.items():
|
||||
self.assertTrue('tests="2"' in content)
|
||||
self.assertTrue('failures="0"' in content)
|
||||
self.assertTrue('errors="0"' in content)
|
||||
|
||||
def test_dependency_already_installed(self):
|
||||
pkgX.installed = True
|
||||
pkgY.installed = True
|
||||
test_install.test_install(None, MockArgs('X'))
|
||||
self.assertEqual(len(FILE_REGISTRY), 1)
|
||||
for _, content in FILE_REGISTRY.items():
|
||||
self.assertTrue('tests="2"' in content)
|
||||
self.assertTrue('failures="0"' in content)
|
||||
self.assertTrue('errors="0"' in content)
|
||||
self.assertEqual(sum('skipped' in line for line in content.split('\n')), 2)
|
@@ -33,7 +33,7 @@

# Some sample compiler config data
a_comps = {
    "all": {
    "x86_64_E5v2_IntelIB": {
        "gcc@4.7.3" : {
            "cc" : "/gcc473",
            "cxx": "/g++473",
@@ -53,7 +53,7 @@
}

b_comps = {
    "all": {
    "x86_64_E5v3": {
        "icc@10.0" : {
            "cc" : "/icc100",
            "cxx": "/icc100",
@@ -85,27 +85,24 @@ def tearDown(self):
        super(ConfigTest, self).tearDown()
        shutil.rmtree(self.tmp_dir, True)


    def check_config(self, comps, *compiler_names):
    def check_config(self, comps, arch, *compiler_names):
        """Check that named compilers in comps match Spack's config."""
        config = spack.config.get_config('compilers')
        compiler_list = ['cc', 'cxx', 'f77', 'fc']
        for key in compiler_names:
            for c in compiler_list:
                expected = comps['all'][key][c]
                actual = config['all'][key][c]
                expected = comps[arch][key][c]
                actual = config[arch][key][c]
                self.assertEqual(expected, actual)


    def test_write_key_in_memory(self):
        # Write b_comps "on top of" a_comps.
        spack.config.update_config('compilers', a_comps, 'test_low_priority')
        spack.config.update_config('compilers', b_comps, 'test_high_priority')

        # Make sure the config looks how we expect.
        self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
        self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')

        self.check_config(a_comps, 'x86_64_E5v2_IntelIB', 'gcc@4.7.3', 'gcc@4.5.0')
        self.check_config(b_comps, 'x86_64_E5v3', 'icc@10.0', 'icc@11.1', 'clang@3.3')

    def test_write_key_to_disk(self):
        # Write b_comps "on top of" a_comps.
@@ -116,5 +113,17 @@ def test_write_key_to_disk(self):
        spack.config.clear_config_caches()

        # Same check again, to ensure consistency.
        self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
        self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')
        self.check_config(a_comps, 'x86_64_E5v2_IntelIB', 'gcc@4.7.3', 'gcc@4.5.0')
        self.check_config(b_comps, 'x86_64_E5v3', 'icc@10.0', 'icc@11.1', 'clang@3.3')

    def test_write_to_same_priority_file(self):
        # Write b_comps in the same file as a_comps.
        spack.config.update_config('compilers', a_comps, 'test_low_priority')
        spack.config.update_config('compilers', b_comps, 'test_low_priority')

        # Clear caches so we're forced to read from disk.
        spack.config.clear_config_caches()

        # Same check again, to ensure consistency.
        self.check_config(a_comps, 'x86_64_E5v2_IntelIB', 'gcc@4.7.3', 'gcc@4.5.0')
        self.check_config(b_comps, 'x86_64_E5v3', 'icc@10.0', 'icc@11.1', 'clang@3.3')
@ -1,126 +0,0 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import itertools
import unittest

import spack

test_install = __import__("spack.cmd.test-install",
fromlist=["BuildId", "create_test_output", "TestResult"])

class MockOutput(object):
def __init__(self):
self.results = {}

def add_test(self, buildId, passed=True, buildInfo=None):
self.results[buildId] = passed

def write_to(self, stream):
pass

class MockSpec(object):
def __init__(self, name, version, hashStr=None):
self.dependencies = {}
self.name = name
self.version = version
self.hash = hashStr if hashStr else hash((name, version))

def traverse(self, order=None):
allDeps = itertools.chain.from_iterable(i.traverse() for i in
self.dependencies.itervalues())
return set(itertools.chain([self], allDeps))

def dag_hash(self):
return self.hash

def to_yaml(self):
return "<<<MOCK YAML {0}>>>".format(test_install.BuildId(self).stringId())

class MockPackage(object):
def __init__(self, buildLogPath):
self.installed = False
self.build_log_path = buildLogPath

specX = MockSpec("X", "1.2.0")
specY = MockSpec("Y", "2.3.8")
specX.dependencies['Y'] = specY
pkgX = MockPackage('logX')
pkgY = MockPackage('logY')
bIdX = test_install.BuildId(specX)
bIdY = test_install.BuildId(specY)

class UnitInstallTest(unittest.TestCase):
"""Tests test-install where X->Y"""

def setUp(self):
super(UnitInstallTest, self).setUp()

pkgX.installed = False
pkgY.installed = False

self.saved_db = spack.repo
pkgDb = MockPackageDb({specX:pkgX, specY:pkgY})
spack.repo = pkgDb

def tearDown(self):
super(UnitInstallTest, self).tearDown()

spack.repo = self.saved_db

def test_installing_both(self):
mo = MockOutput()

pkgX.installed = True
pkgY.installed = True
test_install.create_test_output(specX, [specX, specY], mo, getLogFunc=mock_fetch_log)

self.assertEqual(mo.results,
{bIdX:test_install.TestResult.PASSED,
bIdY:test_install.TestResult.PASSED})

def test_dependency_already_installed(self):
mo = MockOutput()

pkgX.installed = True
pkgY.installed = True
test_install.create_test_output(specX, [specX], mo, getLogFunc=mock_fetch_log)
self.assertEqual(mo.results, {bIdX:test_install.TestResult.PASSED})

#TODO: add test(s) where Y fails to install

class MockPackageDb(object):
def __init__(self, init=None):
self.specToPkg = {}
if init:
self.specToPkg.update(init)

def get(self, spec):
return self.specToPkg[spec]

def mock_fetch_log(path):
return []
@ -206,6 +206,9 @@ def parse_version_offset(path):
# e.g. lame-398-1
(r'-((\d)+-\d)', stem),

# e.g. foobar_1.2-3
(r'_((\d+\.)+\d+(-\d+)?[a-z]?)', stem),

# e.g. foobar-4.5.1
(r'-((\d+\.)*\d+)$', stem),
@ -144,7 +144,7 @@ def streamify(arg, mode):
cmd = self.exe + list(args)

cmd_line = ' '.join(cmd)
cmd_line = "'%s'" % "' '".join(map(lambda arg: arg.replace("'", "'\"'\"'"), cmd))
tty.debug(cmd_line)

try:
@ -1,4 +1,4 @@
##############################################################################
#####################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
@ -84,7 +84,10 @@ function spack {
if [ "$_sp_arg" = "-h" ]; then
command spack cd -h
else
cd $(spack location $_sp_arg "$@")
LOC="$(spack location $_sp_arg "$@")"
if [[ -d "$LOC" ]] ; then
cd "$LOC"
fi
fi
return
;;
15
var/spack/repos/builtin/packages/LuaJIT/package.py
Normal file
@ -0,0 +1,15 @@
import os
from spack import *

class Luajit(Package):
"""Flast flexible JITed lua"""
homepage = "http://www.luajit.org"
url = "http://luajit.org/download/LuaJIT-2.0.4.tar.gz"

version('2.0.4', 'dd9c38307f2223a504cbfb96e477eca0')

def install(self, spec, prefix):
# Linking with the C++ compiler is a dirty hack to deal with the fact
# that unwinding symbols are not included by libc, this is necessary
# on some platforms for the final link stage to work
make("install", "PREFIX=" + prefix, "TARGET_LD=" + os.environ['CXX'])
@ -1,4 +1,14 @@
import functools
import glob
import inspect
import os
import re
from contextlib import closing

import spack
from llnl.util.lang import match_predicate
from spack import *
from spack.util.environment import *

class R(Package):
@ -9,6 +19,8 @@ class R(Package):
"""
homepage = "https://www.r-project.org"
url = "http://cran.cnr.berkeley.edu/src/base/R-3/R-3.1.2.tar.gz"

extendable = True

version('3.2.3', '1ba3dac113efab69e706902810cc2970')
version('3.2.2', '57cef5c2e210a5454da1979562a10e5b')
@ -38,12 +50,57 @@ class R(Package):
depends_on('tk')

def install(self, spec, prefix):
rlibdir = join_path(prefix, 'rlib')
options = ['--prefix=%s' % prefix,
'--libdir=%s' % rlibdir,
'--enable-R-shlib',
'--enable-BLAS-shlib']
'--enable-BLAS-shlib',
'--enable-R-framework=no']
if '+external-lapack' in spec:
options.extend(['--with-blas', '--with-lapack'])

configure(*options)
make()
make('install')

# ========================================================================
# Set up environment to make install easy for R extensions.
# ========================================================================

@property
def r_lib_dir(self):
return os.path.join('rlib', 'R', 'library')

def setup_dependent_environment(self, spack_env, run_env, extension_spec):
# Set R_LIBS to include the library dir for the
# extension and any other R extensions it depends on.
r_libs_path = []
for d in extension_spec.traverse():
if d.package.extends(self.spec):
r_libs_path.append(os.path.join(d.prefix, self.r_lib_dir))

r_libs_path = ':'.join(r_libs_path)
spack_env.set('R_LIBS', r_libs_path)

# For run time environment set only the path for extension_spec and prepend it to R_LIBS
if extension_spec.package.extends(self.spec):
run_env.prepend_path('R_LIBS', os.path.join(extension_spec.prefix, self.r_lib_dir))

def setup_dependent_package(self, module, ext_spec):
"""
Called before R modules' install() methods.

In most cases, extensions will only need to have one line::

R('CMD', 'INSTALL', '--library=%s' % self.module.r_lib_dir, '%s' % self.stage.archive_file)
"""
# R extension builds can have a global R executable function
module.R = Executable(join_path(self.spec.prefix.bin, 'R'))

# Add variable for library directry
module.r_lib_dir = os.path.join(ext_spec.prefix, self.r_lib_dir)

# Make the site packages directory for extensions, if it does not exist already.
if ext_spec.package.is_extension:
mkdirp(module.r_lib_dir)
13
var/spack/repos/builtin/packages/adol-c/openmp_exam.patch
Normal file
@ -0,0 +1,13 @@
diff --git a/ADOL-C/examples/additional_examples/openmp_exam/liborpar.cpp b/ADOL-C/examples/additional_examples/openmp_exam/liborpar.cpp
index fc6fc28..14103d2 100644
--- a/ADOL-C/examples/additional_examples/openmp_exam/liborpar.cpp
+++ b/ADOL-C/examples/additional_examples/openmp_exam/liborpar.cpp
@@ -27,7 +27,7 @@ using namespace std;
#include <ctime>
#include <cmath>

-#include "adolc.h"
+#include <adolc/adolc.h>

#ifdef _OPENMP
#include <omp.h>
80
var/spack/repos/builtin/packages/adol-c/package.py
Normal file
@ -0,0 +1,80 @@
from spack import *
import sys

class AdolC(Package):
"""A package for the automatic differentiation of first and higher derivatives of vector functions in C and C++ programs by operator overloading."""
homepage = "https://projects.coin-or.org/ADOL-C"
url = "http://www.coin-or.org/download/source/ADOL-C/ADOL-C-2.6.1.tgz"

version('head', svn='https://projects.coin-or.org/svn/ADOL-C/trunk/')
version('2.6.1', '1032b28427d6e399af4610e78c0f087b')

variant('doc', default=True, description='Install documentation')
variant('openmp', default=False, description='Enable OpenMP support')
variant('sparse', default=False, description='Enable sparse drivers')
variant('tests', default=True, description='Build all included examples as a test case')

patch('openmp_exam.patch')

def install(self, spec, prefix):
make_args = ['--prefix=%s' % prefix]

# --with-cflags=FLAGS use CFLAGS=FLAGS (default: -O3 -Wall -ansi)
# --with-cxxflags=FLAGS use CXXFLAGS=FLAGS (default: -O3 -Wall)

if '+openmp' in spec:
if spec.satisfies('%gcc'):
make_args.extend([
'--with-openmp-flag=-fopenmp' # FIXME: Is this required? -I <path to omp.h> -L <LLVM OpenMP library path>
])
else:
raise InstallError("OpenMP flags for compilers other than GCC are not implemented.")

if '+sparse' in spec:
make_args.extend([
'--enable-sparse'
])

# We can simply use the bundled examples to check
# whether Adol-C works as expected
if '+tests' in spec:
make_args.extend([
'--enable-docexa', # Documeted examples
'--enable-addexa' # Additional examples
])
if '+openmp' in spec:
make_args.extend([
'--enable-parexa' # Parallel examples
])

configure(*make_args)
make()
make("install")

# Copy the config.h file, as some packages might require it
source_directory = self.stage.source_path
config_h = join_path(source_directory,'ADOL-C','src','config.h')
install(config_h, join_path(prefix.include,'adolc'))

# Install documentation to {prefix}/share
if '+doc' in spec:
install_tree(join_path('ADOL-C','doc'),
join_path(prefix.share,'doc'))

# Install examples to {prefix}/share
if '+tests' in spec:
install_tree(join_path('ADOL-C','examples'),
join_path(prefix.share,'examples'))

# Run some examples that don't require user input
# TODO: Check that bundled examples produce the correct results
with working_dir(join_path(source_directory,'ADOL-C','examples')):
Executable('./tapeless_scalar')()
Executable('./tapeless_vector')()

with working_dir(join_path(source_directory,'ADOL-C','examples','additional_examples')):
Executable('./checkpointing/checkpointing')()

if '+openmp' in spec:
with working_dir(join_path(source_directory,'ADOL-C','examples','additional_examples')):
Executable('./checkpointing/checkpointing')()
47
var/spack/repos/builtin/packages/antlr/package.py
Normal file
@ -0,0 +1,47 @@
from spack import *

class Antlr(Package):

homepage = "http://www.antlr.org"
url = "https://github.com/antlr/antlr/tarball/v2.7.7"

# NOTE: This requires that a system Java be available.
# Spack does not yet know how to install Java compilers

# Notes from http://nco.sourceforge.net/#bld
# The first steps to build (i.e., compile, for the most part) NCO from
# source code are to install the pre-requisites: ANTLR version 2.7.7
# (like this one not version 3.x or 4.x!) (required for ncap2)... ANTLR
# binaries from major distributions are pre-built with the source patch
# necessary to allow NCO to link to ANTLR... The ANTLR source file
# CharScanner.hpp must include this line: #include <cstring> or else
# ncap2 will not compile (this tarball is already patched).
version('2.7.7', '914865e853fe8e1e61a9f23d045cb4ab',
# Patched version as described above
url='http://dust.ess.uci.edu/tmp/antlr-2.7.7.tar.gz')
# Unpatched version
# url='http://dust.ess.uci.edu/nco/antlr-2.7.7.tar.gz')

variant('cxx', default=False, description='Enable ANTLR for C++')
variant('java', default=False, description='Enable ANTLR for Java')
variant('python', default=False, description='Enable ANTLR for Python')
variant('csharp', default=False, description='Enable ANTLR for Csharp')

def install(self, spec, prefix):
# Check for future enabling of variants
for v in ('+java', '+python', '+csharp'):
if v in spec:
raise Error('Illegal variant %s; for now, Spack only knows how to build antlr or antlr+cxx')

config_args = [
'--prefix=%s' % prefix,
'--%s-cxx' % ('enable' if '+cxx' in spec else 'disable'),
'--%s-java' % ('enable' if '+java' in spec else 'disable'),
'--%s-python' % ('enable' if '+python' in spec else 'disable'),
'--%s-csharp' % ('enable' if '+csharp' in spec else 'disable')]

# which('autoreconf')('-iv')
configure(*config_args)
make()
make("install")
@ -14,4 +14,5 @@ def install(self, spec, prefix):
make('-f',
join_path(self.stage.source_path,'build','clang','Makefile'),
parallel=False)
mkdirp(self.prefix.bin)
install(join_path(self.stage.source_path, 'src','bin','astyle'), self.prefix.bin)
@ -8,6 +8,8 @@ class Autoconf(Package):
version('2.69', '82d05e03b93e45f5a39b828dc9c6c29b')
version('2.62', '6c1f3b3734999035d77da5024aab4fbd')

depends_on("m4")

def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
17
var/spack/repos/builtin/packages/bbcp/package.py
Normal file
@ -0,0 +1,17 @@
from spack import *

class Bbcp(Package):
"""Securely and quickly copy data from source to target"""
homepage = "http://www.slac.stanford.edu/~abh/bbcp/"

version('git', git='http://www.slac.stanford.edu/~abh/bbcp/bbcp.git', branch="master")

def install(self, spec, prefix):
cd("src")
make()
# BBCP wants to build the executable in a directory whose name depends on the system type
makesname = Executable("../MakeSname")
bbcp_executable_path = "../bin/%s/bbcp" % makesname(output=str).rstrip("\n")
destination_path = "%s/bin/" % prefix
mkdirp(destination_path)
install(bbcp_executable_path, destination_path)
@ -12,6 +12,10 @@ class Binutils(Package):
version('2.23.2', '4f8fa651e35ef262edc01d60fb45702e')
version('2.20.1', '2b9dc8f2b7dbd5ec5992c6e29de0b764')

depends_on('m4')
depends_on('flex')
depends_on('bison')

# Add a patch that creates binutils libiberty_pic.a which is preferred by OpenSpeedShop and cbtf-krell
variant('krellpatch', default=False, description="build with openspeedshop based patch.")
variant('gold', default=True, description="build the gold linker")
@ -25,6 +29,7 @@ def install(self, spec, prefix):
configure_args = [
'--prefix=%s' % prefix,
'--disable-dependency-tracking',
'--disable-werror',
'--enable-interwork',
'--enable-multilib',
'--enable-shared',
@ -10,6 +10,8 @@ class Bison(Package):

version('3.0.4', 'a586e11cd4aff49c3ff6d3b6a4c9ccf8')

depends_on("m4")

def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
@ -1,5 +1,5 @@
################################################################################
# Copyright (c) 2015 Krell Institute. All Rights Reserved.
# Copyright (c) 2015-2016 Krell Institute. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
@ -24,43 +24,83 @@ class CbtfArgonavis(Package):
homepage = "http://sourceforge.net/p/cbtf/wiki/Home/"

# Mirror access template example
#url = "file:/g/g24/jeg/cbtf-argonavis-1.5.tar.gz"
#version('1.5', '1f7f6512f55409ed2135cfceabe26b82')
#url = "file:/home/jeg/OpenSpeedShop_ROOT/SOURCES/cbtf-argonavis-1.6.tar.gz"
#version('1.6', '0fafa0008478405c2c2319450f174ed4')

version('1.6', branch='master', git='http://git.code.sf.net/p/cbtf-argonavis/cbtf-argonavis')
version('1.6', branch='master', git='https://github.com/OpenSpeedShop/cbtf-argonavis.git')

depends_on("cmake@3.0.2:")
depends_on("cmake@3.0.2")
depends_on("boost@1.50.0:")
depends_on("papi")
depends_on("mrnet@5.0.1:+lwthreads+krellpatch")
depends_on("cbtf")
depends_on("cbtf-krell")
depends_on("cuda")
depends_on("cuda@6.0.37")
#depends_on("cuda")

parallel = False

def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions):
# Sets build type parameters into cmakeOptions the options that will enable the cbtf-krell built type settings

compile_flags="-O2 -g"
BuildTypeOptions = []

# Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs
for word in cmakeOptions[:]:
if word.startswith('-DCMAKE_BUILD_TYPE'):
cmakeOptions.remove(word)
if word.startswith('-DCMAKE_CXX_FLAGS'):
cmakeOptions.remove(word)
if word.startswith('-DCMAKE_C_FLAGS'):
cmakeOptions.remove(word)
if word.startswith('-DCMAKE_VERBOSE_MAKEFILE'):
cmakeOptions.remove(word)
BuildTypeOptions.extend([
'-DCMAKE_VERBOSE_MAKEFILE=ON',
'-DCMAKE_BUILD_TYPE=None',
'-DCMAKE_CXX_FLAGS=%s' % compile_flags,
'-DCMAKE_C_FLAGS=%s' % compile_flags
])

cmakeOptions.extend(BuildTypeOptions)

def install(self, spec, prefix):

# Look for package installation information in the cbtf and cbtf-krell prefixes
cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix)

# FIXME, hard coded for testing purposes, we will alter when the external package feature is available
cuda_prefix_path = "/usr/local/cudatoolkit-6.0"
cupti_prefix_path = "/usr/local/cudatoolkit-6.0/extras/CUPTI"

with working_dir('CUDA'):
with working_dir('build', create=True):
cmake('..',
'-DCMAKE_INSTALL_PREFIX=%s' % prefix,
'-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64,
'-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
'-DCUDA_INSTALL_PATH=%s' % cuda_prefix_path,
'-DCUDA_ROOT=%s' % cuda_prefix_path,
'-DCUPTI_ROOT=%s' % cupti_prefix_path,
'-DCUDA_DIR=%s' % cuda_prefix_path,
'-DPAPI_ROOT=%s' % spec['papi'].prefix,
'-DCBTF_PREFIX=%s' % spec['cbtf'].prefix,
*std_cmake_args)
make("clean")
make()
make("install")

cmakeOptions = []
cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix,
'-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
'-DCUDA_DIR=%s' % spec['cuda'].prefix,
'-DCUDA_INSTALL_PATH=%s' % spec['cuda'].prefix,
'-DCUDA_TOOLKIT_ROOT_DIR=%s' % spec['cuda'].prefix,
'-DCUPTI_DIR=%s' % join_path(spec['cuda'].prefix + '/extras/CUPTI'),
'-DCUPTI_ROOT=%s' % join_path(spec['cuda'].prefix + '/extras/CUPTI'),
'-DPAPI_ROOT=%s' % spec['papi'].prefix,
'-DCBTF_DIR=%s' % spec['cbtf'].prefix,
'-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix,
'-DBOOST_ROOT=%s' % spec['boost'].prefix,
'-DBoost_DIR=%s' % spec['boost'].prefix,
'-DBOOST_LIBRARYDIR=%s' % spec['boost'].prefix.lib,
'-DMRNET_DIR=%s' % spec['mrnet'].prefix,
'-DBoost_NO_SYSTEM_PATHS=ON'
])

# Add in the standard cmake arguments
cmakeOptions.extend(std_cmake_args)

# Adjust the standard cmake arguments to what we want the build type, etc to be
self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)

# Invoke cmake
cmake('..', *cmakeOptions)

make("clean")
make()
make("install")
@ -1,5 +1,5 @@
################################################################################
# Copyright (c) 2015 Krell Institute. All Rights Reserved.
# Copyright (c) 2015-2016 Krell Institute. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
@ -26,21 +26,30 @@ class CbtfKrell(Package):
homepage = "http://sourceforge.net/p/cbtf/wiki/Home/"

# optional mirror access template
#url = "file:/g/g24/jeg/cbtf-krell-1.5.tar.gz"
#version('1.5', 'b13f6df6a93c44149d977773dd776d2f')
#url = "file:/home/jeg/cbtf-krell-1.6.tar.gz"
#version('1.6', 'edeb61cd488f16e7b124f77db9ce762d')

version('1.6', branch='master', git='http://git.code.sf.net/p/cbtf-krell/cbtf-krell')
version('1.6', branch='master', git='https://github.com/OpenSpeedShop/cbtf-krell.git')

# MPI variants
variant('openmpi', default=False, description="Build mpi experiment collector for openmpi MPI when this variant is enabled.")
variant('mpt', default=False, description="Build mpi experiment collector for SGI MPT MPI when this variant is enabled.")
variant('mvapich2', default=False, description="Build mpi experiment collector for mvapich2 MPI when this variant is enabled.")
variant('mvapich', default=False, description="Build mpi experiment collector for mvapich MPI when this variant is enabled.")
variant('mpich2', default=False, description="Build mpi experiment collector for mpich2 MPI when this variant is enabled.")
variant('mpich', default=False, description="Build mpi experiment collector for mpich MPI when this variant is enabled.")

# Dependencies for cbtf-krell
depends_on("cmake@3.0.2")

# For binutils service
depends_on("binutils@2.24+krellpatch")

# collectionTool
depends_on("boost@1.50.0")
depends_on("dyninst@8.2.1")
depends_on("mrnet@4.1.0:+lwthreads")
depends_on("boost@1.50.0:")
depends_on("dyninst@8.2.1:")
depends_on("mrnet@5.0.1:+lwthreads+krellpatch")

depends_on("xerces-c@3.1.1:")
depends_on("cbtf")

@ -51,66 +60,207 @@ class CbtfKrell(Package):

# MPI Installations
# These have not worked either for build or execution, commenting out for now
#depends_on("openmpi")
#depends_on("mvapich2@2.0")
#depends_on("mpich")
depends_on("openmpi", when='+openmpi')
depends_on("mpich", when='+mpich')
depends_on("mpich2", when='+mpich2')
depends_on("mvapich2", when='+mvapich2')
depends_on("mvapich", when='+mvapich')
depends_on("mpt", when='+mpt')

parallel = False

def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions):
# Sets build type parameters into cmakeOptions the options that will enable the cbtf-krell built type settings

compile_flags="-O2 -g"
BuildTypeOptions = []
# Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs
for word in cmakeOptions[:]:
if word.startswith('-DCMAKE_BUILD_TYPE'):
cmakeOptions.remove(word)
if word.startswith('-DCMAKE_CXX_FLAGS'):
cmakeOptions.remove(word)
if word.startswith('-DCMAKE_C_FLAGS'):
cmakeOptions.remove(word)
if word.startswith('-DCMAKE_VERBOSE_MAKEFILE'):
cmakeOptions.remove(word)
BuildTypeOptions.extend([
'-DCMAKE_VERBOSE_MAKEFILE=ON',
'-DCMAKE_BUILD_TYPE=None',
'-DCMAKE_CXX_FLAGS=%s' % compile_flags,
'-DCMAKE_C_FLAGS=%s' % compile_flags
])

cmakeOptions.extend(BuildTypeOptions)

def set_mpi_cmakeOptions(self, spec, cmakeOptions):
# Appends to cmakeOptions the options that will enable the appropriate MPI implementations

MPIOptions = []

# openmpi
if '+openmpi' in spec:
MPIOptions.extend([
'-DOPENMPI_DIR=%s' % spec['openmpi'].prefix
])
# mpich
if '+mpich' in spec:
MPIOptions.extend([
'-DMPICH_DIR=%s' % spec['mpich'].prefix
])
# mpich2
if '+mpich2' in spec:
MPIOptions.extend([
'-DMPICH2_DIR=%s' % spec['mpich2'].prefix
])
# mvapich
if '+mvapich' in spec:
MPIOptions.extend([
'-DMVAPICH_DIR=%s' % spec['mvapich'].prefix
])
# mvapich2
if '+mvapich2' in spec:
MPIOptions.extend([
'-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix
])
# mpt
if '+mpt' in spec:
MPIOptions.extend([
'-DMPT_DIR=%s' % spec['mpt'].prefix
])

cmakeOptions.extend(MPIOptions)

def install(self, spec, prefix):

# Add in paths for finding package config files that tell us where to find these packages
cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['dyninst'].prefix)

# FIXME - hard code path until external package support is available
# Need to change this path and/or add additional paths for MPI experiment support on different platforms
#openmpi_prefix_path = "/opt/openmpi-1.8.2"
#mvapich_prefix_path = "/usr/local/tools/mvapich-gnu"

# Other possibilities, they will need a -DMVAPICH_DIR=, etc clause in the cmake command to be recognized
# mvapich_prefix_path = "<mvapich install path>"
# mvapich2_prefix_path = "<mvapich2 install path>"
# mpich2_prefix_path = "<mpich2 install path>"
# mpich_prefix_path = "<mpich install path>"
# mpt_prefix_path = "<mpt install path>"

# Add in paths for cuda if requested via the cuda variant
# FIXME - hard code path until external package support is available
#if '+cuda' in spec:
#    cuda_prefix_path = "/usr/local/cuda-6.0"
#    cupti_prefix_path = "/usr/local/cuda-6.0/extras/CUPTI"
#else:
#    cuda_prefix_path = ""
#    cupti_prefix_path = ""

#'-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix,
#'-DOPENMPI_DIR=%s' % spec['openmpi'].prefix,
#'-DMPICH_DIR=%s' % spec['mpich'].prefix,
#'-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64,
#'-DOPENMPI_DIR=%s' % openmpi_prefix_path,
#'-DMVAPICH_DIR=%s' % mvapich_prefix_path,
#'-DLIB_SUFFIX=64',
#'-DCUDA_DIR=%s' % cuda_prefix_path,
#'-DCUPTI_DIR=%s' % cupti_prefix_path,
#cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['dyninst'].prefix)
#'-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path

# Build cbtf-krell with cmake
with working_dir('build_cbtf_krell', create=True):
cmake('..',
'-DCMAKE_BUILD_TYPE=Debug',
'-DCMAKE_INSTALL_PREFIX=%s' % prefix,
'-DCBTF_DIR=%s' % spec['cbtf'].prefix,
'-DBINUTILS_DIR=%s' % spec['binutils'].prefix,
'-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix,
'-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix,
'-DPAPI_DIR=%s' % spec['papi'].prefix,
'-DBOOST_DIR=%s' % spec['boost'].prefix,
'-DMRNET_DIR=%s' % spec['mrnet'].prefix,
'-DDYNINST_DIR=%s' % spec['dyninst'].prefix,
'-DXERCESC_DIR=%s' % spec['xerces-c'].prefix,
'-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
*std_cmake_args)
cmakeOptions = []
cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix,
'-DCBTF_DIR=%s' % spec['cbtf'].prefix,
'-DBINUTILS_DIR=%s' % spec['binutils'].prefix,
'-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix,
'-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix,
'-DPAPI_DIR=%s' % spec['papi'].prefix,
'-DBOOST_DIR=%s' % spec['boost'].prefix,
'-DMRNET_DIR=%s' % spec['mrnet'].prefix,
'-DDYNINST_DIR=%s' % spec['dyninst'].prefix,
'-DXERCESC_DIR=%s' % spec['xerces-c'].prefix
])

# Add any MPI implementations coming from variant settings
self.set_mpi_cmakeOptions(spec, cmakeOptions)

# Add in the standard cmake arguments
cmakeOptions.extend(std_cmake_args)

# Adjust the standard cmake arguments to what we want the build type, etc to be
self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)

# Invoke cmake
cmake('..', *cmakeOptions)

make("clean")
make()
make("install")

#if '+cray' in spec:
#if 'cray' in self.spec.architecture:
#    if '+runtime' in spec:
#        with working_dir('build_cbtf_cray_runtime', create=True):
#            python_vers='%d.%d' % spec['python'].version[:2]
#            cmake .. \
#                -DCMAKE_BUILD_TYPE=Debug \
#                -DTARGET_OS="cray" \
#                -DRUNTIME_ONLY="true" \
#                -DCMAKE_INSTALL_PREFIX=${CBTF_KRELL_PREFIX} \
#                -DCMAKE_PREFIX_PATH=${CBTF_ROOT} \
#                -DCBTF_DIR=${CBTF_ROOT} \
#                -DBOOST_ROOT=${BOOST_INSTALL_PREFIX} \
#                -DXERCESC_DIR=${XERCESC_INSTALL_PREFIX} \
#                -DBINUTILS_DIR=${KRELL_ROOT} \
#                -DLIBMONITOR_DIR=${KRELL_ROOT_COMPUTE} \
#                -DLIBUNWIND_DIR=${KRELL_ROOT_COMPUTE} \
#                -DPAPI_DIR=${PAPI_ROOT} \
#                -DDYNINST_DIR=${DYNINST_CN_ROOT} \
#                -DMRNET_DIR=${MRNET_INSTALL_PREFIX} \
#                -DMPICH2_DIR=/opt/cray/mpt/7.0.1/gni/mpich2-gnu/48
#    else:
#        with working_dir('build_cbtf_cray_frontend', create=True):
#            python_vers='%d.%d' % spec['python'].version[:2]
#            cmake .. \
#                -DCMAKE_BUILD_TYPE=Debug \
#                -DCMAKE_INSTALL_PREFIX=${CBTF_KRELL_PREFIX} \
#                -DCMAKE_PREFIX_PATH=${CBTF_ROOT} \
#                -DCBTF_DIR=${CBTF_ROOT} \
#                -DRUNTIME_TARGET_OS="cray" \
#                -DCBTF_KRELL_CN_RUNTIME_DIR=${CBTF_KRELL_CN_RUNTIME_ROOT} \
#                -DCBTF_CN_RUNTIME_DIR=${CBTF_CN_RUNTIME_ROOT} \
#                -DLIBMONITOR_CN_RUNTIME_DIR=${LIBMONITOR_CN_ROOT} \
#                -DLIBUNWIND_CN_RUNTIME_DIR=${LIBUNWIND_CN_ROOT} \
#                -DPAPI_CN_RUNTIME_DIR=${PAPI_CN_ROOT} \
#                -DXERCESC_CN_RUNTIME_DIR=/${XERCESC_CN_ROOT} \
#                -DMRNET_CN_RUNTIME_DIR=${MRNET_CN_ROOT} \
#                -DBOOST_CN_RUNTIME_DIR=${BOOST_CN_ROOT} \
#                -DDYNINST_CN_RUNTIME_DIR=${DYNINST_CN_ROOT} \
#                -DBOOST_ROOT=/${KRELL_ROOT} \
#                -DXERCESC_DIR=/${KRELL_ROOT} \
#                -DBINUTILS_DIR=/${KRELL_ROOT} \
#                -DLIBMONITOR_DIR=${KRELL_ROOT} \
#                -DLIBUNWIND_DIR=${KRELL_ROOT} \
#                -DPAPI_DIR=${PAPI_ROOT} \
#                -DDYNINST_DIR=${KRELL_ROOT} \
#                -DMRNET_DIR=${KRELL_ROOT} \
#                -DMPICH2_DIR=/opt/cray/mpt/7.0.1/gni/mpich2-gnu/48
#    fi
#
#    make("clean")
#    make()
#    make("install")
#
# elif '+mic' in spec:
#    if '+runtime' in spec:
#        with working_dir('build_cbtf_mic_runtime', create=True):
#            python_vers='%d.%d' % spec['python'].version[:2]
#            cmake .. \
#
#    else:
#        with working_dir('build_cbtf_cray_frontend', create=True):
#            python_vers='%d.%d' % spec['python'].version[:2]
#            cmake .. \
#    fi
#
# else:
#    # Build cbtf-krell with cmake
#    with working_dir('build_cbtf_krell', create=True):
#        cmake('..',
#              '-DCMAKE_BUILD_TYPE=Debug',
#              '-DCMAKE_INSTALL_PREFIX=%s' % prefix,
#              '-DCBTF_DIR=%s' % spec['cbtf'].prefix,
#              '-DBINUTILS_DIR=%s' % spec['binutils'].prefix,
#              '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix,
#              '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix,
#              '-DPAPI_DIR=%s' % spec['papi'].prefix,
#              '-DBOOST_DIR=%s' % spec['boost'].prefix,
#              '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
#              '-DDYNINST_DIR=%s' % spec['dyninst'].prefix,
#              '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix,
#              '-DOPENMPI_DIR=%s' % openmpi_prefix_path,
#              '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
#              *std_cmake_args)
#
#        make("clean")
#        make()
#        make("install")
#
# fi
#
@ -1,5 +1,5 @@
################################################################################
# Copyright (c) 2015 Krell Institute. All Rights Reserved.
# Copyright (c) 2015-2016 Krell Institute. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
@ -29,32 +29,65 @@ class CbtfLanl(Package):

version('1.6', branch='master', git='http://git.code.sf.net/p/cbtf-lanl/cbtf-lanl')

depends_on("cmake@3.0.2")
# Dependencies for cbtf-krell
depends_on("boost@1.50")
depends_on("mrnet@4.1.0:+lwthreads")
depends_on("mrnet@5.0.1:+lwthreads+krellpatch")
depends_on("xerces-c@3.1.1:")
depends_on("cbtf")
depends_on("cbtf-krell")

parallel = False

def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions):
# Sets build type parameters into cmakeOptions the options that will enable the cbtf-krell built type settings

compile_flags="-O2 -g"
BuildTypeOptions = []
# Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs
for word in cmakeOptions[:]:
if word.startswith('-DCMAKE_BUILD_TYPE'):
cmakeOptions.remove(word)
if word.startswith('-DCMAKE_CXX_FLAGS'):
cmakeOptions.remove(word)
if word.startswith('-DCMAKE_C_FLAGS'):
cmakeOptions.remove(word)
if word.startswith('-DCMAKE_VERBOSE_MAKEFILE'):
cmakeOptions.remove(word)
BuildTypeOptions.extend([
'-DCMAKE_VERBOSE_MAKEFILE=ON',
'-DCMAKE_BUILD_TYPE=None',
'-DCMAKE_CXX_FLAGS=%s' % compile_flags,
'-DCMAKE_C_FLAGS=%s' % compile_flags
])

cmakeOptions.extend(BuildTypeOptions)

def install(self, spec, prefix):

# Add in paths for finding package config files that tell us where to find these packages
cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix)

with working_dir('build', create=True):
cmake('..',
'-DCBTF_DIR=%s' % spec['cbtf'].prefix,
'-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix,
'-DMRNET_DIR=%s' % spec['mrnet'].prefix,
'-DXERCESC_DIR=%s' % spec['xerces-c'].prefix,
'-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
'-DCMAKE_MODULE_PATH=%s' % join_path(prefix.share,'KrellInstitute','cmake'),
*std_cmake_args)
cmakeOptions = []
cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix,
'-DCBTF_DIR=%s' % spec['cbtf'].prefix,
'-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix,
'-DMRNET_DIR=%s' % spec['mrnet'].prefix,
'-DXERCESC_DIR=%s' % spec['xerces-c'].prefix,
'-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
'-DCMAKE_MODULE_PATH=%s' % join_path(prefix.share,'KrellInstitute','cmake')
])

make("clean")
make()
make("install")
# Add in the standard cmake arguments
cmakeOptions.extend(std_cmake_args)

# Adjust the standard cmake arguments to what we want the build type, etc to be
self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)

# Invoke cmake
cmake('..', *cmakeOptions)

make("clean")
make()
make("install")
@ -1,5 +1,5 @@
################################################################################
# Copyright (c) 2015 Krell Institute. All Rights Reserved.
# Copyright (c) 2015-2016 Krell Institute. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
@ -25,21 +25,44 @@ class Cbtf(Package):
homepage = "http://sourceforge.net/p/cbtf/wiki/Home"

# Mirror access template example
#url = "file:/g/g24/jeg/cbtf-1.5.tar.gz"
#version('1.6', '1ca88a8834759c4c74452cb97fe7b70a')
#url = "file:/home/jeg/cbtf-1.6.tar.gz"
#version('1.6', 'c1ef4e5aa4e470dffb042abdba0b9987')

# Use when the git repository is available
version('1.6', branch='master', git='http://git.code.sf.net/p/cbtf/cbtf')
version('1.6', branch='master', git='https://github.com/OpenSpeedShop/cbtf.git')

depends_on("cmake")
#depends_on("boost@1.42.0:")
depends_on("boost@1.50.0")
depends_on("mrnet@4.1.0+lwthreads")
variant('runtime', default=False, description="build only the runtime libraries and collectors.")

depends_on("cmake@3.0.2")
depends_on("boost@1.50.0:")
depends_on("mrnet@5.0.1:+lwthreads+krellpatch")
depends_on("xerces-c@3.1.1:")
depends_on("libxml2")
# Work around for spack libxml2 package bug, take off python when fixed
depends_on("libxml2+python")

parallel = False

def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions):
# Sets build type parameters into cmakeOptions the options that will enable the cbtf-krell built type settings

compile_flags="-O2 -g"
BuildTypeOptions = []
# Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs
for word in cmakeOptions[:]:
if word.startswith('-DCMAKE_BUILD_TYPE'):
cmakeOptions.remove(word)
if word.startswith('-DCMAKE_CXX_FLAGS'):
cmakeOptions.remove(word)
if word.startswith('-DCMAKE_C_FLAGS'):
cmakeOptions.remove(word)
BuildTypeOptions.extend([
'-DCMAKE_BUILD_TYPE=None',
'-DCMAKE_CXX_FLAGS=%s' % compile_flags,
'-DCMAKE_C_FLAGS=%s' % compile_flags
])

cmakeOptions.extend(BuildTypeOptions)

def install(self, spec, prefix):
with working_dir('build', create=True):

@ -48,14 +71,45 @@ def install(self, spec, prefix):
# or BOOST_INCLUDEDIR). Useful when specifying BOOST_ROOT.
# Defaults to OFF.

cmake('..',
'--debug-output',
'-DBoost_NO_SYSTEM_PATHS=TRUE',
'-DXERCESC_DIR=%s' % spec['xerces-c'].prefix,
'-DBOOST_ROOT=%s' % spec['boost'].prefix,
'-DMRNET_DIR=%s' % spec['mrnet'].prefix,
'-DCMAKE_MODULE_PATH=%s' % join_path(prefix.share,'KrellInstitute','cmake'),
*std_cmake_args)
if '+runtime' in spec:
# Install message tag include file for use in Intel MIC cbtf-krell build
# FIXME
cmakeOptions = []
cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix,
'-DBoost_NO_SYSTEM_PATHS=TRUE',
'-DXERCESC_DIR=%s' % spec['xerces-c'].prefix,
'-DBOOST_ROOT=%s' % spec['boost'].prefix,
'-DMRNET_DIR=%s' % spec['mrnet'].prefix,
'-DCMAKE_MODULE_PATH=%s' % join_path(prefix.share,'KrellInstitute','cmake')
])

# Add in the standard cmake arguments
cmakeOptions.extend(std_cmake_args)

# Adjust the standard cmake arguments to what we want the build type, etc to be
self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)

# Invoke cmake
cmake('..', *cmakeOptions)

else:
cmakeOptions = []
cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix,
'-DBoost_NO_SYSTEM_PATHS=TRUE',
'-DXERCESC_DIR=%s' % spec['xerces-c'].prefix,
'-DBOOST_ROOT=%s' % spec['boost'].prefix,
'-DMRNET_DIR=%s' % spec['mrnet'].prefix,
'-DCMAKE_MODULE_PATH=%s' % join_path(prefix.share,'KrellInstitute','cmake')
])

# Add in the standard cmake arguments
cmakeOptions.extend(std_cmake_args)

# Adjust the standard cmake arguments to what we want the build type, etc to be
self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)

# Invoke cmake
cmake('..', *cmakeOptions)

make("clean")
make()
@ -1,4 +1,5 @@
from spack import *
import os
import shutil

class Cereal(Package):
@ -30,5 +31,8 @@ def install(self, spec, prefix):
# Install
shutil.rmtree(join_path(prefix, 'doc'), ignore_errors=True)
shutil.rmtree(join_path(prefix, 'include'), ignore_errors=True)
shutil.rmtree(join_path(prefix, 'lib'), ignore_errors=True)
shutil.copytree('doc', join_path(prefix, 'doc'), symlinks=True)
shutil.copytree('include', join_path(prefix, 'include'), symlinks=True)
# Create empty directory to avoid linker warnings later
os.mkdir(join_path(prefix, 'lib'))
@ -30,6 +30,7 @@ class Cmake(Package):
homepage = 'https://www.cmake.org'
url = 'https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz'

version('3.5.2', '701386a1b5ec95f8d1075ecf96383e02')
version('3.5.1', 'ca051f4a66375c89d1a524e726da0296')
version('3.5.0', '33c5d09d4c33d4ffcc63578a6ba8777e')
version('3.4.3', '4cb3ff35b2472aae70f542116d616e63')
12
var/spack/repos/builtin/packages/cnmem/package.py
Normal file
@ -0,0 +1,12 @@
from spack import *

class Cnmem(Package):
"""CNMem mempool for CUDA devices"""
homepage = "https://github.com/NVIDIA/cnmem"

version('git', git='https://github.com/NVIDIA/cnmem.git', branch="master")

def install(self, spec, prefix):
cmake('.',*std_cmake_args)
make()
make('install')
@ -13,6 +13,7 @@ class Cryptopp(Package):

version('5.6.3', '3c5b70e2ec98b7a24988734446242d07')
version('5.6.2', '7ed022585698df48e65ce9218f6c6a67')
version('5.6.1', '96cbeba0907562b077e26bcffb483828')

def install(self, spec, prefix):
make()
@ -12,6 +12,7 @@ class Dealii(Package):
variant('mpi', default=True, description='Compile with MPI')
variant('arpack', default=True, description='Compile with Arpack and PArpack (only with MPI)')
variant('doc', default=False, description='Compile with documentation')
variant('gsl' , default=True, description='Compile with GSL')
variant('hdf5', default=True, description='Compile with HDF5 (only with MPI)')
variant('metis', default=True, description='Compile with Metis')
variant('netcdf', default=True, description='Compile with Netcdf (only with MPI)')
@ -39,6 +40,8 @@ class Dealii(Package):
depends_on ("mpi", when="+mpi")
depends_on ("arpack-ng+mpi", when='+arpack+mpi')
depends_on ("doxygen", when='+doc')
depends_on ("gsl", when='@8.5.0:+gsl')
depends_on ("gsl", when='@dev+gsl')
depends_on ("hdf5+mpi~cxx", when='+hdf5+mpi') #FIXME NetCDF declares dependency with ~cxx, why?
depends_on ("metis@5:", when='+metis')
depends_on ("netcdf+mpi", when="+netcdf+mpi")
@ -50,8 +53,8 @@ class Dealii(Package):
depends_on ("trilinos", when='+trilinos+mpi')

# developer dependnecies
#depends_on ("numdiff") #FIXME
#depends_on ("astyle") #FIXME
depends_on ("numdiff", when='@dev')
depends_on ("astyle@2.04", when='@dev')

def install(self, spec, prefix):
options = []
@ -80,7 +83,6 @@ def install(self, spec, prefix):
(join_path(spec['lapack'].prefix.lib,'liblapack.%s' % dsuf), # FIXME don't hardcode names
join_path(spec['blas'].prefix.lib,'libblas.%s' % dsuf)), # FIXME don't hardcode names
'-DMUPARSER_DIR=%s ' % spec['muparser'].prefix,
'-DP4EST_DIR=%s' % spec['p4est'].prefix,
'-DUMFPACK_DIR=%s' % spec['suite-sparse'].prefix,
'-DTBB_DIR=%s' % spec['tbb'].prefix,
'-DZLIB_DIR=%s' % spec['zlib'].prefix
@ -100,7 +102,7 @@ def install(self, spec, prefix):
])

# Optional dependencies for which librariy names are the same as CMake variables
for library in ('hdf5', 'p4est','petsc', 'slepc','trilinos','metis'):
for library in ('gsl','hdf5','p4est','petsc','slepc','trilinos','metis'):
if library in spec:
options.extend([
'-D{library}_DIR={value}'.format(library=library.upper(), value=spec[library].prefix),
@ -251,3 +253,6 @@ def install(self, spec, prefix):
cmake('.')
make('release')
make('run',parallel=False)

def setup_environment(self, spack_env, env):
env.set('DEAL_II_DIR', self.prefix)
@ -7,6 +7,7 @@ class Dia(Package):

version('0.97.3', '0e744a0f6a6c4cb6a089e4d955392c3c')

depends_on('intltool')
depends_on('gtkplus@2.6.0:')
depends_on('cairo')
#depends_on('libart') # optional dependency, not yet supported by spack.
@ -45,6 +45,7 @@ class Eigen(Package):

# TODO : dependency on googlehash, superlu, adolc missing

depends_on('cmake')
depends_on('metis@5:', when='+metis')
depends_on('scotch', when='+scotch')
depends_on('fftw', when='+fftw')
122
var/spack/repos/builtin/packages/elk/package.py
Normal file
@ -0,0 +1,122 @@
from spack import *
import spack

class Elk(Package):
'''An all-electron full-potential linearised augmented-plane wave
(FP-LAPW) code with many advanced features.'''

homepage = 'http://elk.sourceforge.net/'
url = 'https://sourceforge.net/projects/elk/files/elk-3.3.17.tgz'

version('3.3.17', 'f57f6230d14f3b3b558e5c71f62f0592')

# Elk provides these libraries, but allows you to specify your own
variant('blas', default=True, description='Build with custom BLAS library')
variant('lapack', default=True, description='Build with custom LAPACK library')
variant('fft', default=True, description='Build with custom FFT library')

# Elk does not provide these libraries, but allows you to use them
variant('mpi', default=True, description='Enable MPI parallelism')
variant('openmp', default=True, description='Enable OpenMP support')
variant('libxc', default=True, description='Link to Libxc functional library')

depends_on('blas', when='+blas')
depends_on('lapack', when='+lapack')
depends_on('fftw', when='+fft')
depends_on('mpi', when='+mpi')
depends_on('libxc', when='+libxc')

# Cannot be built in parallel
parallel = False

def configure(self, spec):
# Dictionary of configuration options
config = {
'MAKE': 'make',
'F90': join_path(spack.build_env_path, 'f90'),
'F77': join_path(spack.build_env_path, 'f77'),
'AR': 'ar',
'LIB_FFT': 'fftlib.a',
'SRC_MPI': 'mpi_stub.f90',
'SRC_OMP': 'omp_stub.f90',
'SRC_libxc': 'libxcifc_stub.f90',
'SRC_FFT': 'zfftifc.f90'
}

# Compiler-specific flags
flags = ''
if self.compiler.name == 'intel':
flags = '-O3 -ip -unroll -no-prec-div -openmp'
elif self.compiler.name == 'gcc':
flags = '-O3 -ffast-math -funroll-loops -fopenmp'
elif self.compiler.name == 'pgi':
flags = '-O3 -mp -lpthread'
elif self.compiler.name == 'g95':
flags = '-O3 -fno-second-underscore'
elif self.compiler.name == 'nag':
flags = '-O4 -kind=byte -dusty -dcfuns'
elif self.compiler.name == 'xl':
flags = '-O3 -qsmp=omp'
config['F90_OPTS'] = flags
config['F77_OPTS'] = flags

# BLAS/LAPACK support
blas = 'blas.a'
lapack = 'lapack.a'
if '+blas' in spec:
blas = join_path(spec['blas'].prefix.lib, 'libblas.so')
if '+lapack' in spec:
lapack = join_path(spec['lapack'].prefix.lib, 'liblapack.so')
config['LIB_LPK'] = ' '.join([lapack, blas]) # lapack must come before blas

# FFT support
if '+fft' in spec:
config['LIB_FFT'] = join_path(spec['fftw'].prefix.lib, 'libfftw3.so')
config['SRC_FFT'] = 'zfftifc_fftw.f90'

# MPI support
if '+mpi' in spec:
config.pop('SRC_MPI')
config['F90'] = join_path(spec['mpi'].prefix.bin, 'mpif90')
config['F77'] = join_path(spec['mpi'].prefix.bin, 'mpif77')

# OpenMP support
if '+openmp' in spec:
config.pop('SRC_OMP')

# Libxc support
if '+libxc' in spec:
config['LIB_libxc'] = ' '.join([
join_path(spec['libxc'].prefix.lib, 'libxcf90.so'),
join_path(spec['libxc'].prefix.lib, 'libxc.so')
])
config['SRC_libxc'] = ' '.join([
'libxc_funcs.f90',
'libxc.f90',
'libxcifc.f90'
])

# Write configuration options to include file
with open('make.inc', 'w') as inc:
for key in config:
inc.write('{0} = {1}\n'.format(key, config[key]))

def install(self, spec, prefix):
# Elk only provides an interactive setup script
self.configure(spec)

make()
make('test')

# The Elk Makefile does not provide an install target
mkdirp(prefix.bin)

install('src/elk', prefix.bin)
install('src/eos/eos', prefix.bin)
install('src/spacegroup/spacegroup', prefix.bin)

install_tree('examples', join_path(prefix, 'examples'))
install_tree('species', join_path(prefix, 'species'))
@ -42,7 +42,7 @@ class Fftw(Package):
variant('float', default=True, description='Produces a single precision version of the library')
variant('long_double', default=True, description='Produces a long double precision version of the library')
variant('quad', default=False, description='Produces a quad precision version of the library (works only with GCC and libquadmath)')

variant('openmp', default=False, description="Enable OpenMP support.")
variant('mpi', default=False, description='Activate MPI support')

depends_on('mpi', when='+mpi')
@ -52,8 +52,15 @@ class Fftw(Package):
def install(self, spec, prefix):
options = ['--prefix=%s' % prefix,
'--enable-shared',
'--enable-threads',
'--enable-openmp']
'--enable-threads']
# Add support for OpenMP
if '+openmp' in spec:
# Note: Apple's Clang does not support OpenMP.
if spec.satisfies('%clang'):
ver = str(self.compiler.version)
if ver.endswith('-apple'):
raise InstallError("Apple's clang does not support OpenMP")
options.append('--enable-openmp')
if not self.compiler.f77 or not self.compiler.fc:
options.append("--disable-fortran")
if '+mpi' in spec:
@ -6,6 +6,7 @@ class Flex(Package):
homepage = "http://flex.sourceforge.net/"
url = "http://download.sourceforge.net/flex/flex-2.5.39.tar.gz"

version('2.6.0', '5724bcffed4ebe39e9b55a9be80859ec')
version('2.5.39', 'e133e9ead8ec0a58d81166b461244fde')

def install(self, spec, prefix):
@ -38,6 +38,7 @@ class Gcc(Package):
list_url = 'http://open-source-box.org/gcc/'
list_depth = 2

version('6.1.0', '8fb6cb98b8459f5863328380fbf06bd1')
version('5.3.0', 'c9616fd448f980259c31de613e575719')
version('5.2.0', 'a51bcfeb3da7dd4c623e27207ed43467')
version('4.9.3', '6f831b4d251872736e8e9cc09746f327')
@ -34,6 +34,7 @@ class Gdb(Package):
homepage = "https://www.gnu.org/software/gdb"
url = "http://ftp.gnu.org/gnu/gdb/gdb-7.10.tar.gz"

version('7.11', 'f585059252836a981ea5db9a5f8ce97f')
version('7.10.1', 'b93a2721393e5fa226375b42d567d90b')
version('7.10', 'fa6827ad0fd2be1daa418abb11a54d86')
version('7.9.1', 'f3b97de919a9dba84490b2e076ec4cb0')
@ -7,7 +7,8 @@ class Git(Package):
homepage = "http://git-scm.com"
url = "https://github.com/git/git/tarball/v2.7.1"

version('2.8.0-rc2', 'c2cf9f2cc70e35f2fafbaf9258f82e4c')
version('2.8.1', '1308448d95afa41a4135903f22262fc8')
version('2.8.0', 'eca687e46e9750121638f258cff8317b')
version('2.7.3', 'fa1c008b56618c355a32ba4a678305f6')
version('2.7.1', 'bf0706b433a8dedd27a63a72f9a66060')

@ -23,18 +24,10 @@ class Git(Package):
#version('2.2.1', 'ff41fdb094eed1ec430aed8ee9b9849c')

# Git compiles with curl support by default on but if your system
# does not have it you will not be able to clone https repos
variant("curl", default=False, description="Add the internal support of curl for https clone")

# Git compiles with expat support by default on but if your system
# does not have it you will not be able to push https repos
variant("expat", default=False, description="Add the internal support of expat for https push")

depends_on("openssl")
depends_on("autoconf")
depends_on("curl", when="+curl")
depends_on("expat", when="+expat")
depends_on("curl")
depends_on("expat")

# Also depends_on gettext: apt-get install gettext (Ubuntu)

@ -49,23 +42,12 @@ def install(self, spec, prefix):
"--prefix=%s" % prefix,
"--without-pcre",
"--with-openssl=%s" % spec['openssl'].prefix,
"--with-zlib=%s" % spec['zlib'].prefix
"--with-zlib=%s" % spec['zlib'].prefix,
"--with-curl=%s" % spec['curl'].prefix,
"--with-expat=%s" % spec['expat'].prefix,
]

if '+curl' in spec:
configure_args.append("--with-curl=%s" % spec['curl'].prefix)

if '+expat' in spec:
configure_args.append("--with-expat=%s" % spec['expat'].prefix)

which('autoreconf')('-i')
configure(*configure_args)
make()
make("install")

@ -1,4 +1,5 @@
from spack import *
import sys

class Glib(Package):
"""The GLib package contains a low-level libraries useful for
@ -12,6 +13,8 @@ class Glib(Package):

depends_on("libffi")
depends_on("zlib")
depends_on("pkg-config")
depends_on('gettext', sys.platform=='darwin')

def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
@ -11,6 +11,8 @@ class Glm(Package):
url = "https://github.com/g-truc/glm/archive/0.9.7.1.tar.gz"

version('0.9.7.1', '61af6639cdf652d1cdd7117190afced8')

depends_on ("cmake")

def install(self, spec, prefix):
with working_dir('spack-build', create=True):
@ -35,6 +35,8 @@ class Gmp(Package):
version('6.0.0a', 'b7ff2d88cae7f8085bd5006096eed470')
version('6.0.0' , '6ef5869ae735db9995619135bd856b84')

depends_on("m4")

def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
@ -63,6 +63,11 @@ def install(self, spec, prefix):
build_directory = join_path(self.stage.path, 'spack-build')
source_directory = self.stage.source_path

options.append('-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib' % prefix)

# Prevent GMsh from using its own strange directory structure on OSX
options.append('-DENABLE_OS_SPECIFIC_INSTALL=OFF')

if '+shared' in spec:
options.extend(['-DENABLE_BUILD_SHARED:BOOL=ON',
'-DENABLE_BUILD_DYNAMIC:BOOL=ON']) # Builds dynamic executable and installs shared library
@ -24,6 +24,7 @@
|
||||
##############################################################################
|
||||
|
||||
from spack import *
|
||||
import shutil
|
||||
|
||||
|
||||
class Hdf5(Package):
|
||||
@ -38,7 +39,7 @@ class Hdf5(Package):
|
||||
list_depth = 3
|
||||
|
||||
version('1.10.0', 'bdc935337ee8282579cd6bc4270ad199')
|
||||
version('1.8.16', 'b8ed9a36ae142317f88b0c7ef4b9c618')
|
||||
version('1.8.16', 'b8ed9a36ae142317f88b0c7ef4b9c618', preferred=True)
|
||||
version('1.8.15', '03cccb5b33dbe975fdcd8ae9dc021f24')
|
||||
version('1.8.13', 'c03426e9e77d7766944654280b467289')
|
||||
|
||||
@ -101,10 +102,10 @@ def install(self, spec, prefix):
|
||||
extra_args.append('--enable-cxx')
|
||||
|
||||
if '+fortran' in spec:
|
||||
extra_args.extend([
|
||||
'--enable-fortran',
|
||||
'--enable-fortran2003'
|
||||
])
|
||||
extra_args.append('--enable-fortran')
|
||||
# '--enable-fortran2003' no longer exists as of version 1.10.0
|
||||
if spec.satisfies('@:1.8.16'):
|
||||
extra_args.append('--enable-fortran2003')
|
||||
|
||||
if '+mpi' in spec:
|
||||
# The HDF5 configure script warns if cxx and mpi are enabled
|
||||
@ -114,14 +115,16 @@ def install(self, spec, prefix):
|
||||
# this is not actually a problem.
|
||||
extra_args.extend([
|
||||
"--enable-parallel",
|
||||
"CC=%s" % spec['mpi'].prefix.bin + "/mpicc",
|
||||
"CC=%s" % join_path(spec['mpi'].prefix.bin, "mpicc"),
|
||||
])
|
||||
|
||||
if '+cxx' in spec:
|
||||
extra_args.append("CXX=%s" % spec['mpi'].prefix.bin + "/mpic++")
|
||||
extra_args.append("CXX=%s" % join_path(spec['mpi'].prefix.bin,
|
||||
"mpic++"))
|
||||
|
||||
if '+fortran' in spec:
|
||||
extra_args.append("FC=%s" % spec['mpi'].prefix.bin + "/mpifort")
|
||||
extra_args.append("FC=%s" % join_path(spec['mpi'].prefix.bin,
|
||||
"mpifort"))
|
||||
|
||||
if '+szip' in spec:
|
||||
extra_args.append("--with-szlib=%s" % spec['szip'].prefix)
|
||||
@ -138,6 +141,58 @@ def install(self, spec, prefix):
|
||||
*extra_args)
|
||||
make()
|
||||
make("install")
|
||||
self.check_install(spec)
|
||||
|
||||
def check_install(self, spec):
|
||||
"Build and run a small program to test the installed HDF5 library"
|
||||
print "Checking HDF5 installation..."
|
||||
checkdir = "spack-check"
|
||||
with working_dir(checkdir, create=True):
|
||||
source = r"""
|
||||
#include <hdf5.h>
|
||||
#include <assert.h>
|
||||
#include <stdio.h>
|
||||
int main(int argc, char **argv) {
|
||||
unsigned majnum, minnum, relnum;
|
||||
herr_t herr = H5get_libversion(&majnum, &minnum, &relnum);
|
||||
assert(!herr);
|
||||
printf("HDF5 version %d.%d.%d %u.%u.%u\n", H5_VERS_MAJOR, H5_VERS_MINOR,
|
||||
H5_VERS_RELEASE, majnum, minnum, relnum);
|
||||
return 0;
|
||||
}
|
||||
"""
|
||||
expected = """\
|
||||
HDF5 version {version} {version}
|
||||
""".format(version=str(spec.version))
|
||||
with open("check.c", 'w') as f:
|
||||
f.write(source)
|
||||
if '+mpi' in spec:
|
||||
cc = which(join_path(spec['mpi'].prefix.bin, "mpicc"))
|
||||
else:
|
||||
cc = which('cc')
|
||||
# TODO: Automate these path and library settings
|
||||
cc('-c', "-I%s" % join_path(spec.prefix, "include"), "check.c")
|
||||
cc('-o', "check", "check.o",
|
||||
"-L%s" % join_path(spec.prefix, "lib"), "-lhdf5",
|
||||
"-lz")
|
||||
try:
|
||||
check = Executable('./check')
|
||||
output = check(return_output=True)
|
||||
except:
|
||||
output = ""
|
||||
success = output == expected
|
||||
if not success:
|
||||
print "Produced output does not match expected output."
|
||||
print "Expected output:"
|
||||
print '-'*80
|
||||
print expected
|
||||
print '-'*80
|
||||
print "Produced output:"
|
||||
print '-'*80
|
||||
print output
|
||||
print '-'*80
|
||||
raise RuntimeError("HDF5 install check failed")
|
||||
shutil.rmtree(checkdir)
|
||||
|
||||
def url_for_version(self, version):
|
||||
v = str(version)
|
||||
|
@@ -17,6 +17,7 @@ class Hwloc(Package):
list_url = "http://www.open-mpi.org/software/hwloc/"
list_depth = 3

version('1.11.3', 'c1d36a9de6028eac1d18ea4782ef958f')
version('1.11.2', 'e4ca55c2a5c5656da4a4e37c8fc51b23')
version('1.11.1', 'feb4e416a1b25963ed565d8b42252fdc')
version('1.9', '1f9f9155682fe8946a97c08896109508')

21
var/spack/repos/builtin/packages/hydra/package.py
Normal file
@@ -0,0 +1,21 @@
from spack import *

class Hydra(Package):
"""Hydra is a process management system for starting parallel jobs.
Hydra is designed to natively work with existing launcher daemons
(such as ssh, rsh, fork), as well as natively integrate with resource
management systems (such as slurm, pbs, sge)."""

homepage = "http://www.mpich.org"
url = "http://www.mpich.org/static/downloads/3.2/hydra-3.2.tar.gz"
list_url = "http://www.mpich.org/static/downloads/"
list_depth = 2

version('3.2', '4d670916695bf7e3a869cc336a881b39')


def install(self, spec, prefix):
configure('--prefix=%s' % prefix)

make()
make("install")
19
var/spack/repos/builtin/packages/intltool/package.py
Normal file
@@ -0,0 +1,19 @@
from spack import *

class Intltool(Package):
"""intltool is a set of tools to centralize translation of many different file formats using GNU gettext-compatible PO files."""
homepage = 'https://freedesktop.org/wiki/Software/intltool/'

version('0.51.0', '12e517cac2b57a0121cda351570f1e63')

def url_for_version(self, version):
"""Handle version-based custom URLs."""
return 'https://launchpad.net/intltool/trunk/%s/+download/intltool-%s.tar.gz' % (version, version)

def install(self, spec, prefix):

# configure, build, install:
options = ['--prefix=%s' % prefix ]
configure(*options)
make()
make('install')
42
var/spack/repos/builtin/packages/ior/package.py
Normal file
@@ -0,0 +1,42 @@
from spack import *
import os

class Ior(Package):
"""The IOR software is used for benchmarking parallel file systems
using POSIX, MPI-IO, or HDF5 interfaces."""

homepage = "https://github.com/LLNL/ior"
url = "https://github.com/LLNL/ior/archive/3.0.1.tar.gz"

version('3.0.1', '71150025e0bb6ea1761150f48b553065')

variant('hdf5', default=False, description='support IO with HDF5 backend')
variant('ncmpi', default=False, description='support IO with NCMPI backend')

depends_on('mpi')
depends_on('hdf5+mpi', when='+hdf5')
depends_on('netcdf+mpi', when='+ncmpi')


def install(self, spec, prefix):
os.system('./bootstrap')

config_args = [
'MPICC=%s' % spec['mpi'].prefix.bin + '/mpicc',
'--prefix=%s' % prefix,
]

if '+hdf5' in spec:
config_args.append('--with-hdf5')
else:
config_args.append('--without-hdf5')

if '+ncmpi' in spec:
config_args.append('--with-ncmpi')
else:
config_args.append('--without-ncmpi')

configure(*config_args)

make()
make('install')
@@ -5,6 +5,7 @@ class Jemalloc(Package):
homepage = "http://www.canonware.com/jemalloc/"
url = "https://github.com/jemalloc/jemalloc/releases/download/4.0.4/jemalloc-4.0.4.tar.bz2"

version('4.1.0', 'c4e53c947905a533d5899e5cc3da1f94')
version('4.0.4', '687c5cc53b9a7ab711ccd680351ff988')

variant('stats', default=False, description='Enable heap statistics')
@@ -20,5 +21,8 @@ def install(self, spec, prefix):

configure(*configure_args)

# Don't use -Werror
filter_file(r'-Werror=\S*', '', 'Makefile')

make()
make("install")

@@ -1,14 +1,19 @@
from spack import *

class Jpeg(Package):
"""jpeg library"""
homepage = "http://www.ijg.org"
url = "http://www.ijg.org/files/jpegsrc.v9a.tar.gz"
"""libjpeg is a widely used free library with functions for handling the
JPEG image data format. It implements a JPEG codec (encoding and decoding)
alongside various utilities for handling JPEG data."""

homepage = "http://www.ijg.org"
url = "http://www.ijg.org/files/jpegsrc.v9b.tar.gz"

version('9b', '6a9996ce116ec5c52b4870dbcd6d3ddb')
version('9a', '3353992aecaee1805ef4109aadd433e7')

def install(self, spec, prefix):
configure("--prefix=%s" % prefix)

make()
make("test")
make("install")

68
var/spack/repos/builtin/packages/julia/openblas.patch
Normal file
68
var/spack/repos/builtin/packages/julia/openblas.patch
Normal file
@ -0,0 +1,68 @@
|
||||
diff --git a/deps/Makefile b/deps/Makefile
|
||||
index 6cb73be..bcd8520 100644
|
||||
--- a/deps/Makefile
|
||||
+++ b/deps/Makefile
|
||||
@@ -1049,7 +1049,7 @@ OPENBLAS_BUILD_OPTS += NO_AFFINITY=1
|
||||
|
||||
# Build for all architectures - required for distribution
|
||||
ifeq ($(OPENBLAS_DYNAMIC_ARCH), 1)
|
||||
-OPENBLAS_BUILD_OPTS += DYNAMIC_ARCH=1
|
||||
+OPENBLAS_BUILD_OPTS += DYNAMIC_ARCH=1 MAKE_NO_J=1
|
||||
endif
|
||||
|
||||
# 64-bit BLAS interface
|
||||
@@ -1085,6 +1085,7 @@ OPENBLAS_BUILD_OPTS += NO_AVX2=1
|
||||
endif
|
||||
|
||||
$(OPENBLAS_SRC_DIR)/config.status: $(OPENBLAS_SRC_DIR)/Makefile
|
||||
+ cd $(dir $@) && patch -p1 < ../openblas-make.patch
|
||||
ifeq ($(OS),WINNT)
|
||||
cd $(dir $@) && patch -p1 < ../openblas-win64.patch
|
||||
endif
|
||||
diff --git a/deps/openblas.version b/deps/openblas.version
|
||||
index 7c97e1b..58b9467 100644
|
||||
--- a/deps/openblas.version
|
||||
+++ b/deps/openblas.version
|
||||
@@ -1,2 +1,2 @@
|
||||
-OPENBLAS_BRANCH=v0.2.15
|
||||
-OPENBLAS_SHA1=53e849f4fcae4363a64576de00e982722c7304f9
|
||||
+OPENBLAS_BRANCH=v0.2.17
|
||||
+OPENBLAS_SHA1=a71e8c82f6a9f73093b631e5deab1e8da716b61f
|
||||
--- a/deps/openblas-make.patch
|
||||
+++ b/deps/openblas-make.patch
|
||||
@@ -0,0 +1,35 @@
|
||||
+diff --git a/Makefile.system b/Makefile.system
|
||||
+index b89f60e..2dbdad0 100644
|
||||
+--- a/Makefile.system
|
||||
++++ b/Makefile.system
|
||||
+@@ -139,6 +139,10 @@ NO_PARALLEL_MAKE=0
|
||||
+ endif
|
||||
+ GETARCH_FLAGS += -DNO_PARALLEL_MAKE=$(NO_PARALLEL_MAKE)
|
||||
+
|
||||
++ifdef MAKE_NO_J
|
||||
++GETARCH_FLAGS += -DMAKE_NO_J=$(MAKE_NO_J)
|
||||
++endif
|
||||
++
|
||||
+ ifdef MAKE_NB_JOBS
|
||||
+ GETARCH_FLAGS += -DMAKE_NB_JOBS=$(MAKE_NB_JOBS)
|
||||
+ endif
|
||||
+diff --git a/getarch.c b/getarch.c
|
||||
+index f9c49e6..dffad70 100644
|
||||
+--- a/getarch.c
|
||||
++++ b/getarch.c
|
||||
+@@ -1012,6 +1012,7 @@ int main(int argc, char *argv[]){
|
||||
+ #endif
|
||||
+ #endif
|
||||
+
|
||||
++#ifndef MAKE_NO_J
|
||||
+ #ifdef MAKE_NB_JOBS
|
||||
+ printf("MAKE += -j %d\n", MAKE_NB_JOBS);
|
||||
+ #elif NO_PARALLEL_MAKE==1
|
||||
+@@ -1021,6 +1022,7 @@ int main(int argc, char *argv[]){
|
||||
+ printf("MAKE += -j %d\n", get_num_cores());
|
||||
+ #endif
|
||||
+ #endif
|
||||
++#endif
|
||||
+
|
||||
+ break;
|
||||
+
|
@ -4,43 +4,56 @@
|
||||
class Julia(Package):
|
||||
"""The Julia Language: A fresh approach to technical computing"""
|
||||
homepage = "http://julialang.org"
|
||||
url = "http://github.com/JuliaLang/julia/releases/download/v0.4.2/julia-0.4.2.tar.gz"
|
||||
url = "https://github.com/JuliaLang/julia/releases/download/v0.4.3/julia-0.4.3-full.tar.gz"
|
||||
|
||||
version('0.4.3', '7b9f096798fca4bef262a64674bc2b52')
|
||||
version('0.4.2', 'ccfeb4f4090c8b31083f5e1ccb03eb06')
|
||||
version('master',
|
||||
git='https://github.com/JuliaLang/julia.git', branch='master')
|
||||
version('0.4.5', '69141ff5aa6cee7c0ec8c85a34aa49a6')
|
||||
version('0.4.3', '8a4a59fd335b05090dd1ebefbbe5aaac')
|
||||
|
||||
patch('gc.patch')
|
||||
patch('openblas.patch', when='@0.4:0.4.5')
|
||||
|
||||
# Build-time dependencies
|
||||
depends_on("cmake @2.8:")
|
||||
# Build-time dependencies:
|
||||
# depends_on("awk")
|
||||
# depends_on("m4")
|
||||
# depends_on("pkg-config")
|
||||
depends_on("python @2.6:2.9")
|
||||
|
||||
# I think that Julia requires the dependencies above, but it builds find (on
|
||||
# my system) without these. We should enable them as necessary.
|
||||
# Combined build-time and run-time dependencies:
|
||||
depends_on("binutils")
|
||||
depends_on("cmake @2.8:")
|
||||
depends_on("git")
|
||||
depends_on("openssl")
|
||||
depends_on("python @2.7:2.999")
|
||||
|
||||
# Run-time dependencies
|
||||
# I think that Julia requires the dependencies above, but it
|
||||
# builds fine (on my system) without these. We should enable them
|
||||
# as necessary.
|
||||
|
||||
# Run-time dependencies:
|
||||
# depends_on("arpack")
|
||||
# depends_on("fftw +float")
|
||||
# depends_on("gmp")
|
||||
# depends_on("libgit")
|
||||
# depends_on("mpfr")
|
||||
# depends_on("openblas")
|
||||
# depends_on("pcre2")
|
||||
|
||||
# ARPACK: Requires BLAS and LAPACK; needs to use the same version as Julia.
|
||||
# ARPACK: Requires BLAS and LAPACK; needs to use the same version
|
||||
# as Julia.
|
||||
|
||||
# BLAS and LAPACK: Julia prefers 64-bit versions on 64-bit systems. OpenBLAS
|
||||
# has an option for this; make it available as variant.
|
||||
# BLAS and LAPACK: Julia prefers 64-bit versions on 64-bit
|
||||
# systems. OpenBLAS has an option for this; make it available as
|
||||
# variant.
|
||||
|
||||
# FFTW: Something doesn't work when using a pre-installed FFTW library; need
|
||||
# to investigate.
|
||||
# FFTW: Something doesn't work when using a pre-installed FFTW
|
||||
# library; need to investigate.
|
||||
|
||||
# GMP, MPFR: Something doesn't work when using a pre-installed FFTW library;
|
||||
# need to investigate.
|
||||
# GMP, MPFR: Something doesn't work when using a pre-installed
|
||||
# FFTW library; need to investigate.
|
||||
|
||||
# LLVM: Julia works only with specific versions, and might require patches.
|
||||
# Thus we let Julia install its own LLVM.
|
||||
# LLVM: Julia works only with specific versions, and might require
|
||||
# patches. Thus we let Julia install its own LLVM.
|
||||
|
||||
# Other possible dependencies:
|
||||
# USE_SYSTEM_OPENLIBM=0
|
||||
@ -50,11 +63,21 @@ class Julia(Package):
|
||||
# USE_SYSTEM_UTF8PROC=0
|
||||
# USE_SYSTEM_LIBGIT2=0
|
||||
|
||||
# Run-time dependencies for Julia packages:
|
||||
depends_on("hdf5")
|
||||
depends_on("mpi")
|
||||
|
||||
def install(self, spec, prefix):
|
||||
# Explicitly setting CC, CXX, or FC breaks building libuv, one of
|
||||
# Julia's dependencies. This might be a Darwin-specific problem. Given
|
||||
# how Spack sets up compilers, Julia should still use Spack's compilers,
|
||||
# even if we don't specify them explicitly.
|
||||
if '@master' in spec:
|
||||
# Julia needs to know the offset from a specific commit
|
||||
git = which('git')
|
||||
git('fetch', '--unshallow')
|
||||
|
||||
# Explicitly setting CC, CXX, or FC breaks building libuv, one
|
||||
# of Julia's dependencies. This might be a Darwin-specific
|
||||
# problem. Given how Spack sets up compilers, Julia should
|
||||
# still use Spack's compilers, even if we don't specify them
|
||||
# explicitly.
|
||||
options = [#"CC=cc",
|
||||
#"CXX=c++",
|
||||
#"FC=fc",
|
||||
|
32
var/spack/repos/builtin/packages/kripke/package.py
Normal file
32
var/spack/repos/builtin/packages/kripke/package.py
Normal file
@ -0,0 +1,32 @@
|
||||
from spack import *
|
||||
|
||||
class Kripke(Package):
|
||||
"""Kripke is a simple, scalable, 3D Sn deterministic particle
|
||||
transport proxy/mini app.
|
||||
"""
|
||||
homepage = "https://codesign.llnl.gov/kripke.php"
|
||||
url = "https://codesign.llnl.gov/downloads/kripke-openmp-1.1.tar.gz"
|
||||
|
||||
version('1.1', '7fe6f2b26ed983a6ce5495ab701f85bf')
|
||||
|
||||
variant('mpi', default=True, description='Build with MPI.')
|
||||
variant('openmp', default=True, description='Build with OpenMP enabled.')
|
||||
|
||||
depends_on('mpi', when="+mpi")
|
||||
|
||||
def install(self, spec, prefix):
|
||||
with working_dir('build', create=True):
|
||||
def enabled(variant):
|
||||
return (1 if variant in spec else 0)
|
||||
|
||||
cmake('-DCMAKE_INSTALL_PREFIX:PATH=.',
|
||||
'-DENABLE_OPENMP=%d' % enabled('+openmp'),
|
||||
'-DENABLE_MPI=%d' % enabled('+mpi'),
|
||||
'..',
|
||||
*std_cmake_args)
|
||||
make()
|
||||
|
||||
# Kripke does not provide install target, so we have to copy
|
||||
# things into place.
|
||||
mkdirp(prefix.bin)
|
||||
install('kripke', prefix.bin)
|
@@ -8,6 +8,9 @@ class Libpng(Package):
version('1.6.16', '1a4ad377919ab15b54f6cb6a3ae2622d')
version('1.6.15', '829a256f3de9307731d4f52dc071916d')
version('1.6.14', '2101b3de1d5f348925990f9aa8405660')
version('1.5.26', '3ca98347a5541a2dad55cd6d07ee60a9')
version('1.4.19', '89bcbc4fc8b31f4a403906cf4f662330')
version('1.2.56', '9508fc59d10a1ffadd9aae35116c19ee')

depends_on('zlib')

17
var/spack/repos/builtin/packages/libtermkey/package.py
Normal file
@@ -0,0 +1,17 @@
from spack import *

class Libtermkey(Package):
"""Easy keyboard entry processing for terminal programs"""
homepage = "http://www.leonerd.org.uk/code/libtermkey/"
url = "http://www.leonerd.org.uk/code/libtermkey/libtermkey-0.18.tar.gz"

version('0.18' , '3be2e3e5a851a49cc5e8567ac108b520')
version('0.17' , '20edb99e0d95ec1690fe90e6a555ae6d')
version('0.16' , '7a24b675aaeb142d30db28e7554987d4')
version('0.15b', '27689756e6c86c56ae454f2ac259bc3d')
version('0.14' , 'e08ce30f440f9715c459060e0e048978')


def install(self, spec, prefix):
make()
make("install", "PREFIX=" + prefix)
@@ -8,6 +8,8 @@ class Libtool(Package):
version('2.4.6' , 'addf44b646ddb4e3919805aa88fa7c5e')
version('2.4.2' , 'd2f3b7d4627e69e13514a40e72a24d50')

depends_on('m4')

def install(self, spec, prefix):
configure("--prefix=%s" % prefix)

21
var/spack/repos/builtin/packages/libuv/package.py
Normal file
@@ -0,0 +1,21 @@
from spack import *

class Libuv(Package):
"""Multi-platform library with a focus on asynchronous IO"""
homepage = "http://libuv.org"
url = "https://github.com/libuv/libuv/archive/v1.9.0.tar.gz"

version('1.9.0', '14737f9c76123a19a290dabb7d1cd04c')

depends_on('automake')
depends_on('autoconf')
depends_on('libtool')

def install(self, spec, prefix):
bash = which("bash")
bash('autogen.sh')
configure('--prefix=%s' % prefix)

make()
make("check")
make("install")
12
var/spack/repos/builtin/packages/libvterm/package.py
Normal file
@@ -0,0 +1,12 @@
from spack import *

class Libvterm(Package):
"""An abstract library implementation of a terminal emulator"""
homepage = "http://www.leonerd.org.uk/code/libvterm/"
url = "http://www.leonerd.org.uk/code/libvterm/libvterm-0+bzr681.tar.gz"

version('681', '7a4325a7350b7092245c04e8ee185ac3')

def install(self, spec, prefix):
make()
make("install", "PREFIX=" + prefix)
18
var/spack/repos/builtin/packages/libxc/package.py
Normal file
@@ -0,0 +1,18 @@
from spack import *

class Libxc(Package):
"""Libxc is a library of exchange-correlation functionals for
density-functional theory."""

homepage = "http://www.tddft.org/programs/octopus/wiki/index.php/Libxc"
url = "http://www.tddft.org/programs/octopus/down.php?file=libxc/libxc-2.2.2.tar.gz"

version('2.2.2', 'd9f90a0d6e36df6c1312b6422280f2ec')


def install(self, spec, prefix):
configure('--prefix=%s' % prefix,
'--enable-shared')

make()
make("install")
@@ -13,6 +13,7 @@ class Libxcb(Package):
version('1.11.1', '118623c15a96b08622603a71d8789bf3')
depends_on("python")
depends_on("xcb-proto")
depends_on("pkg-config")

# depends_on('pthread') # Ubuntu: apt-get install libpthread-stubs0-dev
# depends_on('xau') # Ubuntu: apt-get install libxau-dev

@@ -136,6 +136,7 @@ def install(self, spec, prefix):
source_directory = self.stage.source_path

options.append('-DGKLIB_PATH:PATH={metis_source}/GKlib'.format(metis_source=source_directory))
options.append('-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib' % prefix)

if '+shared' in spec:
options.append('-DSHARED:BOOL=ON')
@@ -184,7 +185,3 @@ def install(self, spec, prefix):
fs = glob.glob(join_path(source_directory,'GKlib',"*.h"))
for f in fs:
install(f, GKlib_dist)

# The shared library is not installed correctly on Darwin; correct this
if (sys.platform == 'darwin') and ('+shared' in spec):
fix_darwin_install_name(prefix.lib)

125
var/spack/repos/builtin/packages/mfem/package.py
Normal file
125
var/spack/repos/builtin/packages/mfem/package.py
Normal file
@ -0,0 +1,125 @@
|
||||
from spack import *
|
||||
import glob, string
|
||||
|
||||
class Mfem(Package):
|
||||
"""Free, lightweight, scalable C++ library for finite element methods."""
|
||||
|
||||
homepage = 'http://www.mfem.org'
|
||||
url = 'https://github.com/mfem/mfem'
|
||||
|
||||
# version('3.1', git='https://github.com/mfem/mfem.git',
|
||||
# commit='dbae60fe32e071989b52efaaf59d7d0eb2a3b574')
|
||||
|
||||
version('3.1', '841ea5cf58de6fae4de0f553b0e01ebaab9cd9c67fa821e8a715666ecf18fc57',
|
||||
url='http://goo.gl/xrScXn', expand=False)
|
||||
|
||||
variant('metis', default=False, description='Activate support for metis')
|
||||
variant('hypre', default=False, description='Activate support for hypre')
|
||||
variant('suite-sparse', default=False,
|
||||
description='Activate support for SuiteSparse')
|
||||
variant('mpi', default=False, description='Activate support for MPI')
|
||||
variant('lapack', default=False, description='Activate support for LAPACK')
|
||||
variant('debug', default=False, description='Build debug version')
|
||||
|
||||
depends_on('blas', when='+lapack')
|
||||
depends_on('lapack', when='+lapack')
|
||||
|
||||
depends_on('mpi', when='+mpi')
|
||||
depends_on('metis', when='+mpi')
|
||||
depends_on('hypre', when='+mpi')
|
||||
|
||||
depends_on('hypre', when='+hypre')
|
||||
|
||||
depends_on('metis@4:', when='+metis')
|
||||
|
||||
depends_on('suite-sparse', when='+suite-sparse')
|
||||
depends_on('blas', when='+suite-sparse')
|
||||
depends_on('lapack', when='+suite-sparse')
|
||||
depends_on('metis@5:', when='+suite-sparse ^suite-sparse@4.5:')
|
||||
depends_on('cmake', when='^metis@5:')
|
||||
|
||||
def check_variants(self, spec):
|
||||
if '+mpi' in spec and ('+hypre' not in spec or '+metis' not in spec):
|
||||
raise InstallError('mfem+mpi must be built with +hypre ' +
|
||||
'and +metis!')
|
||||
if '+suite-sparse' in spec and ('+metis' not in spec or
|
||||
'+lapack' not in spec):
|
||||
raise InstallError('mfem+suite-sparse must be built with ' +
|
||||
'+metis and +lapack!')
|
||||
if 'metis@5:' in spec and '%clang' in spec and ('^cmake %gcc' not in spec):
|
||||
raise InstallError('To work around CMake bug with clang, must ' +
|
||||
'build mfem with mfem[+variants] %clang ' +
|
||||
'^cmake %gcc to force CMake to build with gcc')
|
||||
return
|
||||
|
||||
def install(self, spec, prefix):
|
||||
self.check_variants(spec)
|
||||
|
||||
options = ['PREFIX=%s' % prefix]
|
||||
|
||||
if '+lapack' in spec:
|
||||
lapack_lib = '-L{0} -llapack -L{1} -lblas'.format(
|
||||
spec['lapack'].prefix.lib, spec['blas'].prefix.lib)
|
||||
options.extend(['MFEM_USE_LAPACK=YES',
|
||||
'LAPACK_OPT=-I%s' % spec['lapack'].prefix.include,
|
||||
'LAPACK_LIB=%s' % lapack_lib])
|
||||
|
||||
if '+hypre' in spec:
|
||||
options.extend(['HYPRE_DIR=%s' % spec['hypre'].prefix,
|
||||
'HYPRE_OPT=-I%s' % spec['hypre'].prefix.include,
|
||||
'HYPRE_LIB=-L%s' % spec['hypre'].prefix.lib +
|
||||
' -lHYPRE'])
|
||||
|
||||
if '+metis' in spec:
|
||||
metis_lib = '-L%s -lmetis' % spec['metis'].prefix.lib
|
||||
if spec['metis'].satisfies('@5:'):
|
||||
metis_str = 'MFEM_USE_METIS_5=YES'
|
||||
else:
|
||||
metis_str = 'MFEM_USE_METIS_5=NO'
|
||||
options.extend([metis_str,
|
||||
'METIS_DIR=%s' % spec['metis'].prefix,
|
||||
'METIS_OPT=-I%s' % spec['metis'].prefix.include,
|
||||
'METIS_LIB=%s' % metis_lib])
|
||||
|
||||
if '+mpi' in spec: options.extend(['MFEM_USE_MPI=YES'])
|
||||
|
||||
if '+suite-sparse' in spec:
|
||||
ssp = spec['suite-sparse'].prefix
|
||||
ss_lib = '-L%s' % ssp.lib
|
||||
ss_lib += (' -lumfpack -lcholmod -lcolamd -lamd -lcamd' +
|
||||
' -lccolamd -lsuitesparseconfig')
|
||||
|
||||
no_librt_archs = ['darwin-i686', 'darwin-x86_64']
|
||||
no_rt = any(map(lambda a: spec.satisfies('='+a), no_librt_archs))
|
||||
if not no_rt: ss_lib += ' -lrt'
|
||||
ss_lib += (' ' + metis_lib + ' ' + lapack_lib)
|
||||
|
||||
options.extend(['MFEM_USE_SUITESPARSE=YES',
|
||||
'SUITESPARSE_DIR=%s' % ssp,
|
||||
'SUITESPARSE_OPT=-I%s' % ssp.include,
|
||||
'SUITESPARSE_LIB=%s' % ss_lib])
|
||||
|
||||
if '+debug' in spec: options.extend(['MFEM_DEBUG=YES'])
|
||||
|
||||
# Dirty hack to cope with URL redirect
|
||||
tgz_file = string.split(self.url,'/')[-1]
|
||||
tar = which('tar')
|
||||
tar('xzvf', tgz_file)
|
||||
cd(glob.glob('mfem*')[0])
|
||||
# End dirty hack to cope with URL redirect
|
||||
|
||||
make('config', *options)
|
||||
make('all')
|
||||
|
||||
# Run a small test before installation
|
||||
args = ['-m', join_path('data','star.mesh'), '--no-visualization']
|
||||
if '+mpi' in spec:
|
||||
Executable(join_path(spec['mpi'].prefix.bin,
|
||||
'mpirun'))('-np',
|
||||
'4',
|
||||
join_path('examples','ex1p'),
|
||||
*args)
|
||||
else:
|
||||
Executable(join_path('examples', 'ex1'))(*args)
|
||||
|
||||
make('install')
|
@ -43,6 +43,8 @@ class Mpich(Package):
|
||||
version('3.0.4', '9c5d5d4fe1e17dd12153f40bc5b6dbc0')
|
||||
|
||||
variant('verbs', default=False, description='Build support for OpenFabrics verbs.')
|
||||
variant('pmi', default=True, description='Build with PMI support')
|
||||
variant('hydra', default=True, description='Build the hydra process manager')
|
||||
|
||||
provides('mpi@:3.0', when='@3:')
|
||||
provides('mpi@:1.3', when='@1:')
|
||||
@ -55,12 +57,15 @@ def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
|
||||
spack_env.set('MPICH_FC', spack_fc)
|
||||
|
||||
def setup_dependent_package(self, module, dep_spec):
|
||||
"""For dependencies, make mpicc's use spack wrapper."""
|
||||
# FIXME : is this necessary ? Shouldn't this be part of a contract with MPI providers?
|
||||
module.mpicc = join_path(self.prefix.bin, 'mpicc')
|
||||
self.spec.mpicc = join_path(self.prefix.bin, 'mpicc')
|
||||
self.spec.mpicxx = join_path(self.prefix.bin, 'mpic++')
|
||||
self.spec.mpifc = join_path(self.prefix.bin, 'mpif90')
|
||||
self.spec.mpif77 = join_path(self.prefix.bin, 'mpif77')
|
||||
|
||||
def install(self, spec, prefix):
|
||||
config_args = ["--prefix=" + prefix,
|
||||
"--with-pmi=" + ("yes" if '+pmi' in spec else 'no'),
|
||||
"--with-pm=" + ('hydra' if '+hydra' in spec else 'no'),
|
||||
"--enable-shared"]
|
||||
|
||||
# Variants
|
||||
|
154
var/spack/repos/builtin/packages/mrnet/krell-5.0.1.patch
Normal file
154
var/spack/repos/builtin/packages/mrnet/krell-5.0.1.patch
Normal file
@ -0,0 +1,154 @@
|
||||
--- mrnet-3093918/include/mrnet/Types.h 2015-12-10 09:32:24.000000000 -0800
|
||||
+++ mrnet_top_of_tree/include/mrnet/Types.h 2016-03-16 12:29:33.986132302 -0700
|
||||
@@ -23,7 +23,7 @@
|
||||
#ifndef MRNET_VERSION_MAJOR
|
||||
# define MRNET_VERSION_MAJOR 5
|
||||
# define MRNET_VERSION_MINOR 0
|
||||
-# define MRNET_VERSION_REV 0
|
||||
+# define MRNET_VERSION_REV 1
|
||||
#endif
|
||||
|
||||
namespace MRN
|
||||
--- mrnet-3093918/include/mrnet_lightweight/Types.h 2015-12-10 09:32:24.000000000 -0800
|
||||
+++ mrnet_top_of_tree/include/mrnet_lightweight/Types.h 2016-03-16 12:29:33.987132302 -0700
|
||||
@@ -30,7 +30,7 @@
|
||||
#ifndef MRNET_VERSION_MAJOR
|
||||
#define MRNET_VERSION_MAJOR 5
|
||||
#define MRNET_VERSION_MINOR 0
|
||||
-#define MRNET_VERSION_REV 0
|
||||
+#define MRNET_VERSION_REV 1
|
||||
#endif
|
||||
void get_Version(int* major,
|
||||
int* minor,
|
||||
--- mrnet-3093918/src/lightweight/SerialGraph.c 2015-12-10 09:32:24.000000000 -0800
|
||||
+++ mrnet_top_of_tree/src/lightweight/SerialGraph.c 2016-03-16 12:29:33.995132302 -0700
|
||||
@@ -59,7 +59,7 @@
|
||||
|
||||
mrn_dbg_func_begin();
|
||||
|
||||
- sprintf(hoststr, "[%s:%hu:%u:", ihostname, iport, irank);
|
||||
+ sprintf(hoststr, "[%s:%05hu:%u:", ihostname, iport, irank);
|
||||
mrn_dbg(5, mrn_printf(FLF, stderr, "looking for SubTreeRoot: '%s'\n", hoststr));
|
||||
|
||||
byte_array = sg->byte_array;
|
||||
@@ -110,7 +110,7 @@
|
||||
|
||||
mrn_dbg_func_begin();
|
||||
|
||||
- len = (size_t) sprintf(hoststr, "[%s:%hu:%u:0]", ihostname, iport, irank);
|
||||
+ len = (size_t) sprintf(hoststr, "[%s:%05hu:%u:0]", ihostname, iport, irank);
|
||||
mrn_dbg(5, mrn_printf(FLF, stderr, "adding sub tree leaf: %s\n", hoststr));
|
||||
|
||||
len += strlen(sg->byte_array) + 1;
|
||||
@@ -139,7 +139,7 @@
|
||||
|
||||
mrn_dbg_func_begin();
|
||||
|
||||
- len = (size_t) sprintf(hoststr, "[%s:%hu:%u:1", ihostname, iport, irank);
|
||||
+ len = (size_t) sprintf(hoststr, "[%s:%05hu:%u:1", ihostname, iport, irank);
|
||||
mrn_dbg(5, mrn_printf(FLF, stderr, "adding sub tree root: %s\n", hoststr));
|
||||
|
||||
len += strlen(sg->byte_array) + 1;
|
||||
@@ -360,8 +360,8 @@
|
||||
char old_hoststr[256];
|
||||
char new_hoststr[256];
|
||||
|
||||
- sprintf(old_hoststr, "[%s:%hu:%u:", hostname, UnknownPort, irank);
|
||||
- sprintf(new_hoststr, "[%s:%hu:%u:", hostname, port, irank);
|
||||
+ sprintf(old_hoststr, "[%s:%05hu:%u:", hostname, UnknownPort, irank);
|
||||
+ sprintf(new_hoststr, "[%s:%05hu:%u:", hostname, port, irank);
|
||||
|
||||
old_byte_array = sg->byte_array;
|
||||
new_byte_array = (char*) malloc( strlen(old_byte_array) + 10 );
|
||||
--- mrnet-3093918/xplat/src/lightweight/SocketUtils.c 2015-12-10 09:32:24.000000000 -0800
|
||||
+++ mrnet_top_of_tree/xplat/src/lightweight/SocketUtils.c 2016-03-16 12:29:34.006132303 -0700
|
||||
@@ -15,7 +15,7 @@
|
||||
#else
|
||||
const XPlat_Socket InvalidSocket = INVALID_SOCKET;
|
||||
#endif
|
||||
-const XPlat_Port InvalidPort = (XPlat_Port)-1;
|
||||
+const XPlat_Port InvalidPort = (XPlat_Port)0;
|
||||
|
||||
static bool_t SetTcpNoDelay( XPlat_Socket sock )
|
||||
{
|
||||
--- mrnet-3093918/conf/configure.in 2015-12-10 09:32:24.000000000 -0800
|
||||
+++ mrnet_top_of_tree/conf/configure.in 2016-03-16 12:45:54.573196781 -0700
|
||||
@@ -107,6 +107,18 @@
|
||||
AC_SUBST(PURIFY)
|
||||
|
||||
|
||||
+AC_ARG_WITH(expat,
|
||||
+ [AS_HELP_STRING([--with-expat=PATH],
|
||||
+ [Absolute path to installation of EXPAT libraries (note: specify the path to the directory containing "include" and "lib" sub-directories)])],
|
||||
+ [EXPAT_DIR="${withval}"],
|
||||
+ [EXPAT_DIR=""])
|
||||
+
|
||||
+if test "x$EXPAT_DIR" = "x" ; then
|
||||
+ EXPAT_LIB=""
|
||||
+else
|
||||
+ EXPAT_LIB="-L$EXPAT_DIR/lib"
|
||||
+fi
|
||||
+
|
||||
dnl === Checks for header files.
|
||||
AC_CHECK_HEADERS([assert.h errno.h fcntl.h limits.h netdb.h signal.h stddef.h stdlib.h stdio.h string.h unistd.h arpa/inet.h netinet/in.h sys/ioctl.h sys/socket.h sys/sockio.h sys/time.h])
|
||||
AC_HEADER_STDBOOL
|
||||
@@ -432,7 +444,7 @@
|
||||
CRAYXT_ATH_LIBS_SO="$CRAYXT_ATH_LIBS -lalps"
|
||||
CRAYXT_ATH_LIBS="$CRAYXT_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc -Wl,-Bdynamic"
|
||||
CRAYXE_ATH_LIBS_SO="$CRAYXE_ATH_LIBS -lalps"
|
||||
- CRAYXE_ATH_LIBS="$CRAYXE_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc-epi -lexpat -Wl,-Bdynamic"
|
||||
+ CRAYXE_ATH_LIBS="$CRAYXE_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc-epi $EXPAT_LIB -lexpat -Wl,-Bdynamic"
|
||||
|
||||
AC_CHECK_LIB( [alps], [alps_launch_tool_helper],
|
||||
[HAVE_ATH_LIBS="yes"; EXTRA_LIBS="$CRAYXT_ATH_LIBS $EXTRA_LIBS"; EXTRA_LIBS_SO="$CRAYXT_ATH_LIBS_SO $EXTRA_LIBS_SO"],
|
||||
--- mrnet-3093918/configure 2015-12-10 09:32:24.000000000 -0800
|
||||
+++ mrnet_top_of_tree/configure 2016-03-16 13:47:20.386439143 -0700
|
||||
@@ -742,6 +742,7 @@
|
||||
enable_debug
|
||||
enable_ltwt_threadsafe
|
||||
with_purify
|
||||
+with_expat
|
||||
'
|
||||
ac_precious_vars='build_alias
|
||||
host_alias
|
||||
@@ -1399,6 +1400,9 @@
|
||||
containing "include" and "lib" sub-directories)
|
||||
--with-launchmon=PATH Absolute path to installation of LaunchMON
|
||||
--with-purify Use purify for memory debugging
|
||||
+ --with-expat=PATH Absolute path to installation of EXPAT libraries
|
||||
+ (note: specify the path to the directory containing
|
||||
+ "include" and "lib" sub-directories)
|
||||
|
||||
Some influential environment variables:
|
||||
CC C compiler command
|
||||
@@ -3541,6 +3545,21 @@
|
||||
|
||||
|
||||
|
||||
+# Check whether --with-expat was given.
|
||||
+if test "${with_expat+set}" = set; then :
|
||||
+ withval=$with_expat; EXPAT_DIR="${withval}"
|
||||
+else
|
||||
+ EXPAT_DIR=""
|
||||
+fi
|
||||
+
|
||||
+
|
||||
+if test "x$EXPAT_DIR" = "x" ; then
|
||||
+ EXPAT_LIB=""
|
||||
+else
|
||||
+ EXPAT_LIB="-L$EXPAT_DIR/lib"
|
||||
+fi
|
||||
+
|
||||
+
|
||||
ac_ext=cpp
|
||||
ac_cpp='$CXXCPP $CPPFLAGS'
|
||||
ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
|
||||
@@ -5473,7 +5492,7 @@
|
||||
CRAYXT_ATH_LIBS_SO="$CRAYXT_ATH_LIBS -lalps"
|
||||
CRAYXT_ATH_LIBS="$CRAYXT_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc -Wl,-Bdynamic"
|
||||
CRAYXE_ATH_LIBS_SO="$CRAYXE_ATH_LIBS -lalps"
|
||||
- CRAYXE_ATH_LIBS="$CRAYXE_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc-epi -lexpat -Wl,-Bdynamic"
|
||||
+ CRAYXE_ATH_LIBS="$CRAYXE_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc-epi $EXPAT_LIB -lexpat -Wl,-Bdynamic"
|
||||
|
||||
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for alps_launch_tool_helper in -lalps" >&5
|
||||
$as_echo_n "checking for alps_launch_tool_helper in -lalps... " >&6; }
|
@ -3,11 +3,18 @@
|
||||
class Mrnet(Package):
|
||||
"""The MRNet Multi-Cast Reduction Network."""
|
||||
homepage = "http://paradyn.org/mrnet"
|
||||
url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_4.0.0.tar.gz"
|
||||
url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_5.0.1.tar.gz"
|
||||
list_url = "http://ftp.cs.wisc.edu/paradyn/mrnet"
|
||||
|
||||
version('4.0.0', 'd00301c078cba57ef68613be32ceea2f')
|
||||
version('4.1.0', '5a248298b395b329e2371bf25366115c')
|
||||
version('5.0.1-2', git='https://github.com/dyninst/mrnet.git', commit='20b1eacfc6d680d9f6472146d2dfaa0f900cc2e9')
|
||||
version('5.0.1', '17f65738cf1b9f9b95647ff85f69ecdd')
|
||||
version('4.1.0', '5a248298b395b329e2371bf25366115c')
|
||||
version('4.0.0', 'd00301c078cba57ef68613be32ceea2f')
|
||||
|
||||
# Add a patch that brings mrnet-5.0.1 up to date with the current development tree
|
||||
# The development tree contains fixes needed for the krell based tools
|
||||
variant('krellpatch', default=False, description="Build MRNet with krell openspeedshop based patch.")
|
||||
patch('krell-5.0.1.patch', when='@5.0.1+krellpatch')
|
||||
|
||||
variant('lwthreads', default=False, description="Also build the MRNet LW threadsafe libraries")
|
||||
parallel = False
|
||||
|
14
var/spack/repos/builtin/packages/msgpack-c/package.py
Normal file
@@ -0,0 +1,14 @@
from spack import *

class MsgpackC(Package):
"""A small, fast binary interchange format convertible to/from JSON"""
homepage = "http://www.msgpack.org"
url = "https://github.com/msgpack/msgpack-c/archive/cpp-1.4.1.tar.gz"

version('1.4.1', 'e2fd3a7419b9bc49e5017fdbefab87e0')

def install(self, spec, prefix):
cmake('.', *std_cmake_args)

make()
make("install")
@ -147,6 +147,12 @@ def setup_dependent_environment(self, spack_env, run_env, extension_spec):
|
||||
spack_env.set('MPICH_F90', spack_fc)
|
||||
spack_env.set('MPICH_FC', spack_fc)
|
||||
|
||||
def setup_dependent_package(self, module, dep_spec):
|
||||
self.spec.mpicc = join_path(self.prefix.bin, 'mpicc')
|
||||
self.spec.mpicxx = join_path(self.prefix.bin, 'mpicxx')
|
||||
self.spec.mpifc = join_path(self.prefix.bin, 'mpif90')
|
||||
self.spec.mpif77 = join_path(self.prefix.bin, 'mpif77')
|
||||
|
||||
def install(self, spec, prefix):
|
||||
# we'll set different configure flags depending on our environment
|
||||
configure_args = [
|
||||
|
23
var/spack/repos/builtin/packages/nccmp/package.py
Normal file
@@ -0,0 +1,23 @@
from spack import *

class Nccmp(Package):
"""Compare NetCDF Files"""
homepage = "http://nccmp.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz"

version('1.8.2.0', '81e6286d4413825aec4327e61a28a580')

depends_on('netcdf')

def install(self, spec, prefix):
# Configure says: F90 and F90FLAGS are replaced by FC and
# FCFLAGS respectively in this configure, please unset
# F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure
# again.
env.pop('F90', None)
env.pop('F90FLAGS', None)

configure('--prefix=%s' % prefix)
make()
make("check")
make("install")
30
var/spack/repos/builtin/packages/nco/package.py
Normal file
30
var/spack/repos/builtin/packages/nco/package.py
Normal file
@ -0,0 +1,30 @@
|
||||
from spack import *
|
||||
import os
|
||||
|
||||
class Nco(Package):
|
||||
"""The NCO toolkit manipulates and analyzes data stored in
|
||||
netCDF-accessible formats"""
|
||||
|
||||
homepage = "https://sourceforge.net/projects/nco"
|
||||
url = "https://github.com/nco/nco/archive/4.5.5.tar.gz"
|
||||
|
||||
version('4.5.5', '9f1f1cb149ad6407c5a03c20122223ce')
|
||||
|
||||
# See "Compilation Requirements" at:
|
||||
# http://nco.sourceforge.net/#bld
|
||||
|
||||
depends_on('netcdf')
|
||||
depends_on('antlr@2.7.7+cxx') # (required for ncap2)
|
||||
depends_on('gsl') # (desirable for ncap2)
|
||||
depends_on('udunits2') # (allows dimensional unit transformations)
|
||||
# depends_on('opendap') # (enables network transparency),
|
||||
|
||||
def install(self, spec, prefix):
|
||||
opts = [
|
||||
'--prefix=%s' % prefix,
|
||||
'--disable-openmp', # TODO: Make this a variant
|
||||
'--disable-dap', # TODO: Make this a variant
|
||||
'--disable-esmf']
|
||||
configure(*opts)
|
||||
make()
|
||||
make("install")
|
20
var/spack/repos/builtin/packages/ncview/package.py
Normal file
20
var/spack/repos/builtin/packages/ncview/package.py
Normal file
@ -0,0 +1,20 @@
|
||||
from spack import *
|
||||
|
||||
class Ncview(Package):
|
||||
"""Simple viewer for NetCDF files."""
|
||||
homepage = "http://meteora.ucsd.edu/~pierce/ncview_home_page.html"
|
||||
url = "ftp://cirrus.ucsd.edu/pub/ncview/ncview-2.1.7.tar.gz"
|
||||
|
||||
version('2.1.7', 'debd6ca61410aac3514e53122ab2ba07')
|
||||
|
||||
depends_on("netcdf")
|
||||
depends_on("udunits2")
|
||||
|
||||
# OS Dependencies
|
||||
# Ubuntu: apt-get install libxaw7-dev
|
||||
# CentOS 7: yum install libXaw-devel
|
||||
|
||||
def install(self, spec, prefix):
|
||||
configure('--prefix=%s' % prefix)
|
||||
make()
|
||||
make("install")
|
@ -12,15 +12,19 @@ class Netcdf(Package):
|
||||
version('4.4.0', 'cffda0cbd97fdb3a06e9274f7aef438e')
|
||||
version('4.3.3', '5fbd0e108a54bd82cb5702a73f56d2ae')
|
||||
|
||||
variant('mpi', default=True, description='Enables MPI parallelism')
|
||||
variant('hdf4', default=False, description="Enable HDF4 support")
|
||||
variant('mpi', default=True, description='Enables MPI parallelism')
|
||||
variant('hdf4', default=False, description='Enable HDF4 support')
|
||||
|
||||
# Dependencies:
|
||||
depends_on("curl") # required for DAP support
|
||||
depends_on("m4")
|
||||
depends_on("hdf", when='+hdf4')
|
||||
depends_on("hdf5+mpi~cxx", when='+mpi') # required for NetCDF-4 support
|
||||
depends_on("hdf5~mpi", when='~mpi') # required for NetCDF-4 support
|
||||
depends_on("zlib") # required for NetCDF-4 support
|
||||
|
||||
# Required for DAP support
|
||||
depends_on("curl")
|
||||
|
||||
# Required for NetCDF-4 support
|
||||
depends_on("zlib")
|
||||
depends_on("hdf5+mpi", when='+mpi')
|
||||
depends_on("hdf5~mpi", when='~mpi')
|
||||
|
||||
def install(self, spec, prefix):
|
||||
# Environment variables
|
||||
@ -48,7 +52,7 @@ def install(self, spec, prefix):
|
||||
# /usr/lib/x86_64-linux-gnu/libcurl.so: undefined reference to `SSL_CTX_use_certificate_chain_file@OPENSSL_1.0.0'
|
||||
LIBS.append("-lcurl")
|
||||
CPPFLAGS.append("-I%s" % spec['curl'].prefix.include)
|
||||
LDFLAGS.append ("-L%s" % spec['curl'].prefix.lib)
|
||||
LDFLAGS.append( "-L%s" % spec['curl'].prefix.lib)
|
||||
|
||||
if '+mpi' in spec:
|
||||
config_args.append('--enable-parallel4')
|
||||
|
35
var/spack/repos/builtin/packages/openblas/make.patch
Normal file
35
var/spack/repos/builtin/packages/openblas/make.patch
Normal file
@ -0,0 +1,35 @@
|
||||
diff --git a/Makefile.system b/Makefile.system
|
||||
index b89f60e..2dbdad0 100644
|
||||
--- a/Makefile.system
|
||||
+++ b/Makefile.system
|
||||
@@ -139,6 +139,10 @@ NO_PARALLEL_MAKE=0
|
||||
endif
|
||||
GETARCH_FLAGS += -DNO_PARALLEL_MAKE=$(NO_PARALLEL_MAKE)
|
||||
|
||||
+ifdef MAKE_NO_J
|
||||
+GETARCH_FLAGS += -DMAKE_NO_J=$(MAKE_NO_J)
|
||||
+endif
|
||||
+
|
||||
ifdef MAKE_NB_JOBS
|
||||
GETARCH_FLAGS += -DMAKE_NB_JOBS=$(MAKE_NB_JOBS)
|
||||
endif
|
||||
diff --git a/getarch.c b/getarch.c
|
||||
index f9c49e6..dffad70 100644
|
||||
--- a/getarch.c
|
||||
+++ b/getarch.c
|
||||
@@ -1012,6 +1012,7 @@ int main(int argc, char *argv[]){
|
||||
#endif
|
||||
#endif
|
||||
|
||||
+#ifndef MAKE_NO_J
|
||||
#ifdef MAKE_NB_JOBS
|
||||
printf("MAKE += -j %d\n", MAKE_NB_JOBS);
|
||||
#elif NO_PARALLEL_MAKE==1
|
||||
@@ -1021,6 +1022,7 @@ int main(int argc, char *argv[]){
|
||||
printf("MAKE += -j %d\n", get_num_cores());
|
||||
#endif
|
||||
#endif
|
||||
+#endif
|
||||
|
||||
break;
|
||||
|
@ -1,29 +1,35 @@
|
||||
from spack import *
|
||||
import sys
|
||||
import os
|
||||
import shutil
|
||||
|
||||
class Openblas(Package):
|
||||
"""OpenBLAS: An optimized BLAS library"""
|
||||
homepage = "http://www.openblas.net"
|
||||
url = "http://github.com/xianyi/OpenBLAS/archive/v0.2.15.tar.gz"
|
||||
|
||||
version('0.2.18', '805e7f660877d588ea7e3792cda2ee65')
|
||||
version('0.2.17', '664a12807f2a2a7cda4781e3ab2ae0e1')
|
||||
version('0.2.16', 'fef46ab92463bdbb1479dcec594ef6dc')
|
||||
version('0.2.15', 'b1190f3d3471685f17cfd1ec1d252ac9')
|
||||
|
||||
variant('shared', default=True, description="Build shared libraries as well as static libs.")
|
||||
variant('shared', default=True, description="Build shared libraries as well as static libs.")
|
||||
variant('openmp', default=False, description="Enable OpenMP support.")
|
||||
variant('fpic', default=True, description="Build position independent code")
|
||||
|
||||
# virtual dependency
|
||||
provides('blas')
|
||||
provides('lapack')
|
||||
|
||||
patch('make.patch')
|
||||
|
||||
def install(self, spec, prefix):
|
||||
# Openblas is picky about compilers. Configure fails with
|
||||
# FC=/abs/path/to/f77, whereas FC=f77 works fine.
|
||||
# To circumvent this, provide basename only:
|
||||
make_defs = ['CC=%s' % os.path.basename(spack_cc),
|
||||
'FC=%s' % os.path.basename(spack_f77)]
|
||||
'FC=%s' % os.path.basename(spack_f77),
|
||||
'MAKE_NO_J=1']
|
||||
|
||||
make_targets = ['libs', 'netlib']
|
||||
|
||||
@ -31,12 +37,24 @@ def install(self, spec, prefix):
|
||||
if '+shared' in spec:
|
||||
make_targets += ['shared']
|
||||
else:
|
||||
if '+fpic' in spec:
|
||||
make_defs.extend(['CFLAGS=-fPIC', 'FFLAGS=-fPIC'])
|
||||
make_defs += ['NO_SHARED=1']
|
||||
|
||||
# fix missing _dggsvd_ and _sggsvd_
|
||||
if spec.satisfies('@0.2.16'):
|
||||
make_defs += ['BUILD_LAPACK_DEPRECATED=1']
|
||||
|
||||
# Add support for OpenMP
|
||||
if '+openmp' in spec:
|
||||
# Note: Apple's most recent Clang 7.3.0 still does not support OpenMP.
|
||||
# What is worse, Openblas (as of 0.2.18) hardcoded that OpenMP cannot
|
||||
# be used with any (!) compiler named clang, bummer.
|
||||
if spec.satisfies('%clang'):
|
||||
raise InstallError('OpenBLAS does not support OpenMP with clang!')
|
||||
|
||||
make_defs += ['USE_OPENMP=1']
|
||||
|
||||
make_args = make_defs + make_targets
|
||||
make(*make_args)
|
||||
|
||||
@ -58,6 +76,10 @@ def install(self, spec, prefix):
|
||||
if '+shared' in spec:
|
||||
symlink('libopenblas.%s' % dso_suffix, 'liblapack.%s' % dso_suffix)
|
||||
|
||||
# Openblas may pass its own test but still fail to compile Lapack
|
||||
# symbols. To make sure we get working Blas and Lapack, do a small test.
|
||||
self.check_install(spec)
|
||||
|
||||
|
||||
def setup_dependent_package(self, module, dspec):
|
||||
# This is WIP for a prototype interface for virtual packages.
|
||||
@ -70,3 +92,44 @@ def setup_dependent_package(self, module, dspec):
|
||||
if '+shared' in self.spec:
|
||||
self.spec.blas_shared_lib = join_path(libdir, 'libopenblas.%s' % dso_suffix)
|
||||
self.spec.lapack_shared_lib = self.spec.blas_shared_lib
|
||||
|
||||
def check_install(self, spec):
|
||||
# TODO: Pull this out to the framework function which recieves a pair of xyz.c and xyz.output
|
||||
print "Checking Openblas installation..."
|
||||
source_file = join_path(os.path.dirname(self.module.__file__),
|
||||
'test_cblas_dgemm.c')
|
||||
output_file = join_path(os.path.dirname(self.module.__file__),
|
||||
'test_cblas_dgemm.output')
|
||||
|
||||
with open(output_file, 'r') as f:
|
||||
expected = f.read()
|
||||
|
||||
cc = which('cc')
|
||||
cc('-c', "-I%s" % join_path(spec.prefix, "include"), source_file)
|
||||
link_flags = ["-L%s" % join_path(spec.prefix, "lib"),
|
||||
"-llapack",
|
||||
"-lblas",
|
||||
"-lpthread"
|
||||
]
|
||||
if '+openmp' in spec:
|
||||
link_flags.extend([self.compiler.openmp_flag])
|
||||
cc('-o', "check", "test_cblas_dgemm.o",
|
||||
*link_flags)
|
||||
|
||||
try:
|
||||
check = Executable('./check')
|
||||
output = check(return_output=True)
|
||||
except:
|
||||
output = ""
|
||||
success = output == expected
|
||||
if not success:
|
||||
print "Produced output does not match expected output."
|
||||
print "Expected output:"
|
||||
print '-'*80
|
||||
print expected
|
||||
print '-'*80
|
||||
print "Produced output:"
|
||||
print '-'*80
|
||||
print output
|
||||
print '-'*80
|
||||
raise RuntimeError("Openblas install check failed")
|
||||
|
49
var/spack/repos/builtin/packages/openblas/test_cblas_dgemm.c
Normal file
49
var/spack/repos/builtin/packages/openblas/test_cblas_dgemm.c
Normal file
@ -0,0 +1,49 @@
|
||||
#include <cblas.h>
|
||||
#include <stdio.h>
|
||||
|
||||
double m[] = {
|
||||
3, 1, 3,
|
||||
1, 5, 9,
|
||||
2, 6, 5
|
||||
};
|
||||
|
||||
double x[] = {
|
||||
-1, 3, -3
|
||||
};
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
void dgesv_(int *n, int *nrhs, double *a, int *lda,
|
||||
int *ipivot, double *b, int *ldb, int *info);
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
||||
int main(void) {
|
||||
int i;
|
||||
// blas:
|
||||
double A[6] = {1.0, 2.0, 1.0, -3.0, 4.0, -1.0};
|
||||
double B[6] = {1.0, 2.0, 1.0, -3.0, 4.0, -1.0};
|
||||
double C[9] = {.5, .5, .5, .5, .5, .5, .5, .5, .5};
|
||||
cblas_dgemm(CblasColMajor, CblasNoTrans, CblasTrans,
|
||||
3, 3, 2, 1, A, 3, B, 3, 2, C, 3);
|
||||
for (i = 0; i < 9; i++)
|
||||
printf("%f\n", C[i]);
|
||||
|
||||
// lapack:
|
||||
int ipiv[3];
|
||||
int j;
|
||||
int info;
|
||||
int n = 1;
|
||||
int nrhs = 1;
|
||||
int lda = 3;
|
||||
int ldb = 3;
|
||||
dgesv_(&n,&nrhs, &m[0], &lda, ipiv, &x[0], &ldb, &info);
|
||||
for (i=0; i<3; ++i)
|
||||
printf("%5.1f %3d\n", x[i], ipiv[i]);
|
||||
|
||||
return 0;
|
||||
}
|
@ -0,0 +1,12 @@
|
||||
11.000000
|
||||
-9.000000
|
||||
5.000000
|
||||
-9.000000
|
||||
21.000000
|
||||
-1.000000
|
||||
5.000000
|
||||
-1.000000
|
||||
3.000000
|
||||
-0.3 1
|
||||
3.0 1499101120
|
||||
-3.0 32767
|
@ -1,7 +1,5 @@
|
||||
import os
|
||||
|
||||
from spack import *
|
||||
|
||||
import os
|
||||
|
||||
class Openmpi(Package):
|
||||
"""Open MPI is a project combining technologies and resources from
|
||||
@ -28,6 +26,7 @@ class Openmpi(Package):
|
||||
patch('configure.patch', when="@1.10.0:1.10.1")
|
||||
|
||||
variant('psm', default=False, description='Build support for the PSM library.')
|
||||
variant('pmi', default=True, description='Build support for PMI-based launchers')
|
||||
variant('verbs', default=False, description='Build support for OpenFabrics verbs.')
|
||||
|
||||
# TODO : variant support for other schedulers is missing
|
||||
@ -38,6 +37,7 @@ class Openmpi(Package):
|
||||
|
||||
depends_on('hwloc')
|
||||
|
||||
|
||||
def url_for_version(self, version):
|
||||
return "http://www.open-mpi.org/software/ompi/v%s/downloads/openmpi-%s.tar.bz2" % (version.up_to(2), version)
|
||||
|
||||
@ -48,6 +48,12 @@ def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
|
||||
spack_env.set('OMPI_FC', spack_fc)
|
||||
spack_env.set('OMPI_F77', spack_f77)
|
||||
|
||||
def setup_dependent_package(self, module, dep_spec):
|
||||
self.spec.mpicc = join_path(self.prefix.bin, 'mpicc')
|
||||
self.spec.mpicxx = join_path(self.prefix.bin, 'mpic++')
|
||||
self.spec.mpifc = join_path(self.prefix.bin, 'mpif90')
|
||||
self.spec.mpif77 = join_path(self.prefix.bin, 'mpif77')
|
||||
|
||||
|
||||
def install(self, spec, prefix):
|
||||
config_args = ["--prefix=%s" % prefix,
|
||||
@ -62,6 +68,9 @@ def install(self, spec, prefix):
|
||||
if '+psm' in spec:
|
||||
config_args.append("--with-psm")
|
||||
|
||||
if '+pmi' in spec:
|
||||
config_args.append("--with-pmi") #TODO: let user specify directory when possible
|
||||
|
||||
if '+verbs' in spec:
|
||||
# Up through version 1.6, this option was previously named --with-openib
|
||||
if spec.satisfies('@:1.6'):
|
||||
|
@ -1,5 +1,5 @@
|
||||
################################################################################
|
||||
# Copyright (c) 2015 Krell Institute. All Rights Reserved.
|
||||
# Copyright (c) 2015-2016 Krell Institute. All Rights Reserved.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it under
|
||||
# the terms of the GNU General Public License as published by the Free Software
|
||||
@ -28,20 +28,15 @@ class Openspeedshop(Package):
|
||||
as open source code primarily under LGPL.
|
||||
"""
|
||||
|
||||
|
||||
homepage = "http://www.openspeedshop.org"
|
||||
url = "http://sourceforge.net/projects/openss/files/openss/openspeedshop-2.2/openspeedshop-2.2.tar.gz/download"
|
||||
url = "https://github.com/OpenSpeedShop"
|
||||
version('2.2', '16cb051179c2038de4e8a845edf1d573')
|
||||
# Use when the git repository is available
|
||||
version('2.2', branch='master', git='https://github.com/OpenSpeedShop/openspeedshop.git')
|
||||
|
||||
#homepage = "http://www.openspeedshop.org"
|
||||
#url = "http://sourceforge.net/projects/openss/files/openss/openspeedshop-2.1/openspeedshop-2.1.tar.gz/download"
|
||||
#version('2.1', 'bdaa57c1a0db9d0c3e0303fd8496c507')
|
||||
|
||||
# optional mirror template
|
||||
#url = "file:/g/g24/jeg/openspeedshop-2.1.tar.gz"
|
||||
#version('2.1', '64ee17166519838c7b94a1adc138e94f')
|
||||
|
||||
|
||||
# Optional mirror template
|
||||
#url = "file:/home/jeg/OpenSpeedShop_ROOT/SOURCES/openspeedshop-2.2.tar.gz"
|
||||
#version('2.2', '643337740dc6c2faca60f42d3620b0e1')
|
||||
|
||||
parallel = False
|
||||
|
||||
@ -51,11 +46,17 @@ class Openspeedshop(Package):
|
||||
variant('frontend', default=False, description="build only the front-end tool using the runtime_dir to point to the target build.")
|
||||
variant('cuda', default=False, description="build with cuda packages included.")
|
||||
variant('ptgf', default=False, description="build with the PTGF based gui package enabled.")
|
||||
variant('intelmic', default=False, description="build for the Intel MIC platform.")
|
||||
variant('cray', default=False, description="build for Cray platforms.")
|
||||
variant('bluegene', default=False, description="build for Cray platforms.")
|
||||
variant('rtfe', default=False, description="build for generic cluster platforms that have different processors on the fe and be nodes.")
|
||||
|
||||
# MPI variants
|
||||
variant('openmpi', default=False, description="Build mpi experiment collector for openmpi MPI when this variant is enabled.")
|
||||
variant('mpt', default=False, description="Build mpi experiment collector for SGI MPT MPI when this variant is enabled.")
|
||||
variant('mvapich2', default=False, description="Build mpi experiment collector for mvapich2 MPI when this variant is enabled.")
|
||||
variant('mvapich', default=False, description="Build mpi experiment collector for mvapich MPI when this variant is enabled.")
|
||||
variant('mpich2', default=False, description="Build mpi experiment collector for mpich2 MPI when this variant is enabled.")
|
||||
variant('mpich', default=False, description="Build mpi experiment collector for mpich MPI when this variant is enabled.")
|
||||
|
||||
depends_on("cmake@3.0.2")
|
||||
# Dependencies for openspeedshop that are common to all the variants of the OpenSpeedShop build
|
||||
depends_on("bison")
|
||||
depends_on("flex")
|
||||
@ -63,8 +64,8 @@ class Openspeedshop(Package):
|
||||
depends_on("libelf")
|
||||
depends_on("libdwarf")
|
||||
depends_on("sqlite")
|
||||
depends_on("boost@1.50.0")
|
||||
depends_on("dyninst@8.2.1")
|
||||
depends_on("boost@1.50.0:")
|
||||
depends_on("dyninst@9.1.0")
|
||||
depends_on("python")
|
||||
depends_on("qt@3.3.8b+krellpatch")
|
||||
|
||||
@ -72,15 +73,78 @@ class Openspeedshop(Package):
depends_on("libunwind", when='+offline')
depends_on("papi", when='+offline')
depends_on("libmonitor+krellpatch", when='+offline')
#depends_on("openmpi+krelloptions", when='+offline')
#depends_on("openmpi", when='+offline')
#depends_on("mpich", when='+offline')
depends_on("openmpi", when='+offline+openmpi')
depends_on("mpich", when='+offline+mpich')
depends_on("mpich2", when='+offline+mpich2')
depends_on("mvapich2", when='+offline+mvapich2')
depends_on("mvapich", when='+offline+mvapich')
depends_on("mpt", when='+offline+mpt')

# Dependencies only for the openspeedshop cbtf package.
depends_on("cbtf", when='+cbtf')
depends_on("cbtf-krell", when='+cbtf')
depends_on("cbtf-argonavis", when='+cbtf')
depends_on("mrnet@4.1.0:+lwthreads", when='+cbtf')
depends_on("cbtf-argonavis", when='+cbtf+cuda')
depends_on("mrnet@5.0.1:+lwthreads+krellpatch", when='+cbtf')

def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions):
# Set in cmakeOptions the build-type parameters that select the build settings favored by cbtf-krell

compile_flags = "-O2 -g"
BuildTypeOptions = []
# Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs
for word in cmakeOptions[:]:
if word.startswith('-DCMAKE_BUILD_TYPE'):
cmakeOptions.remove(word)
if word.startswith('-DCMAKE_CXX_FLAGS'):
cmakeOptions.remove(word)
if word.startswith('-DCMAKE_C_FLAGS'):
cmakeOptions.remove(word)
BuildTypeOptions.extend([
'-DCMAKE_BUILD_TYPE=None',
'-DCMAKE_CXX_FLAGS=%s' % compile_flags,
'-DCMAKE_C_FLAGS=%s' % compile_flags
])

cmakeOptions.extend(BuildTypeOptions)

def set_mpi_cmakeOptions(self, spec, cmakeOptions):
# Append to cmakeOptions the options that enable the appropriate MPI implementations

MPIOptions = []

# openmpi
if '+openmpi' in spec:
MPIOptions.extend([
'-DOPENMPI_DIR=%s' % spec['openmpi'].prefix
])
# mpich
if '+mpich' in spec:
MPIOptions.extend([
'-DMPICH_DIR=%s' % spec['mpich'].prefix
])
# mpich2
if '+mpich2' in spec:
MPIOptions.extend([
'-DMPICH2_DIR=%s' % spec['mpich2'].prefix
])
# mvapich
if '+mvapich' in spec:
MPIOptions.extend([
'-DMVAPICH_DIR=%s' % spec['mvapich'].prefix
])
# mvapich2
if '+mvapich2' in spec:
MPIOptions.extend([
'-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix
])
# mpt
if '+mpt' in spec:
MPIOptions.extend([
'-DMPT_DIR=%s' % spec['mpt'].prefix
])

cmakeOptions.extend(MPIOptions)

def install(self, spec, prefix):

@ -100,51 +164,118 @@ def install(self, spec, prefix):
instrumentor_setting = "offline"
if '+runtime' in spec:
with working_dir('build_runtime', create=True):
cmake('..',
'-DCMAKE_INSTALL_PREFIX=%s' % prefix,
'-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64,
'-DINSTRUMENTOR=%s' % instrumentor_setting,
'-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix,
'-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix,
'-DPAPI_DIR=%s' % spec['papi'].prefix,
*std_cmake_args)

cmakeOptions = []
cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix,
'-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64,
'-DINSTRUMENTOR=%s' % instrumentor_setting,
'-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix,
'-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix,
'-DPAPI_DIR=%s' % spec['papi'].prefix
])

# Add any MPI implementations coming from variant settings
self.set_mpi_cmakeOptions(spec, cmakeOptions)
cmakeOptions.extend(std_cmake_args)

# Adjust the build options to the favored ones for this build
self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)

cmake('..', *cmakeOptions)

make("clean")
make()
make("install")
else:
cmake_prefix_path = join_path(spec['dyninst'].prefix)
with working_dir('build', create=True):

#python_vers=join_path(spec['python'].version[:2])
#'-DOPENMPI_DIR=%s' % openmpi_prefix_path,
#'-DMVAPICH_DIR=%s' % mvapich_prefix_path,
#'-DMPICH_DIR=%s' % spec['mpich'].prefix,
#'-DMPICH2_DIR=%s' % spec['mpich2'].prefix,
#'-DBoost_NO_SYSTEM_PATHS=TRUE',
#'-DBOOST_ROOT=%s' % spec['boost'].prefix,
#'-DOPENMPI_DIR=%s' % spec['openmpi'].prefix,

python_vers='%d.%d' % spec['python'].version[:2]
cmake('..',
'-DCMAKE_INSTALL_PREFIX=%s' % prefix,
'-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64,
'-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
'-DINSTRUMENTOR=%s' % instrumentor_setting,
'-DBINUTILS_DIR=%s' % spec['binutils'].prefix,
'-DLIBELF_DIR=%s' % spec['libelf'].prefix,
'-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix,
'-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix,
'-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix,
'-DPAPI_DIR=%s' % spec['papi'].prefix,
'-DSQLITE3_DIR=%s' % spec['sqlite'].prefix,
'-DQTLIB_DIR=%s' % spec['qt'].prefix,
'-DPYTHON_EXECUTABLE=%s' % join_path(spec['python'].prefix + '/bin/python'),
'-DPYTHON_INCLUDE_DIR=%s' % join_path(spec['python'].prefix.include) + '/python' + python_vers,
'-DPYTHON_LIBRARY=%s' % join_path(spec['python'].prefix.lib) + '/libpython' + python_vers + '.so',
'-DBoost_NO_SYSTEM_PATHS=TRUE',
'-DBOOST_ROOT=%s' % spec['boost'].prefix,
'-DDYNINST_DIR=%s' % spec['dyninst'].prefix,
*std_cmake_args)

cmakeOptions = []
cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix,
'-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64,
'-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
'-DINSTRUMENTOR=%s' % instrumentor_setting,
'-DBINUTILS_DIR=%s' % spec['binutils'].prefix,
'-DLIBELF_DIR=%s' % spec['libelf'].prefix,
'-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix,
'-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix,
'-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix,
'-DPAPI_DIR=%s' % spec['papi'].prefix,
'-DSQLITE3_DIR=%s' % spec['sqlite'].prefix,
'-DQTLIB_DIR=%s' % spec['qt'].prefix,
'-DPYTHON_EXECUTABLE=%s' % join_path(spec['python'].prefix + '/bin/python'),
'-DPYTHON_INCLUDE_DIR=%s' % join_path(spec['python'].prefix.include) + '/python' + python_vers,
'-DPYTHON_LIBRARY=%s' % join_path(spec['python'].prefix.lib) + '/libpython' + python_vers + '.so',
'-DBoost_NO_SYSTEM_PATHS=TRUE',
'-DBOOST_ROOT=%s' % spec['boost'].prefix,
'-DDYNINST_DIR=%s' % spec['dyninst'].prefix
])

# Add any MPI implementations coming from variant settings
self.set_mpi_cmakeOptions(spec, cmakeOptions)
cmakeOptions.extend(std_cmake_args)

# Adjust the build options to the favored ones for this build
self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)

cmake('..', *cmakeOptions)

make("clean")
make()
make("install")

elif '+cbtf' in spec:
instrumentor_setting = "cbtf"
resolve_symbols = "symtabapi"
cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix) + ':' + join_path(spec['dyninst'].prefix)
#runtime_platform_cray = "cray"
#if '+cray' in spec:
# if '+runtime' in spec:
# #-DCBTF_KRELL_CN_RUNTIME_DIR=${CBTF_KRELL_CN_INSTALL_DIR} \
# with working_dir('build_cbtf_cray_runtime', create=True):
# python_vers='%d.%d' % spec['python'].version[:2]
# cmake('..',
# '-DCMAKE_INSTALL_PREFIX=%s' % prefix,
# '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64,
# '-DRUNTIME_PLATFORM=%s' % runtime_platform_cray,
# '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
# '-DRESOLVE_SYMBOLS=%s' % resolve_symbols,
# '-DINSTRUMENTOR=%s' % instrumentor_setting,
# '-DCBTF_DIR=%s' % spec['cbtf'].prefix,
# '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix,
# '-DCBTF_KRELL_CN_RUNTIME_DIR=%s' % spec['cbtf-krell'].prefix,
# '-DBINUTILS_DIR=%s' % spec['binutils'].prefix,
# '-DLIBELF_DIR=%s' % spec['libelf'].prefix,
# '-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix,
# '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix,
# '-DPAPI_DIR=%s' % spec['papi'].prefix,
# '-DDYNINST_DIR=%s' % spec['dyninst'].prefix,
# '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix,
# '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
# '-DBoost_NO_SYSTEM_PATHS=TRUE',
# '-DBOOST_ROOT=%s' % spec['boost'].prefix,
# *std_cmake_args)

# make("clean")
# make()
# make("install")

#elif '+mic' in spec:
# comment out else and shift over the default case below until arch detection is in
#else:

if '+runtime' in spec:
with working_dir('build_cbtf_runtime', create=True):
python_vers='%d.%d' % spec['python'].version[:2]
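Both branches above assemble the final CMake invocation in the same order: start from the per-branch base options, fold in the MPI options derived from the enabled variants, append Spack's std_cmake_args, let adjustBuildTypeParams_cmakeOptions override the build type and flags, then configure and build. The sketch below is not part of this commit; it only factors that shared tail into a hypothetical helper (_configure_and_build is an invented name, while everything it calls appears in the hunk above).

def _configure_and_build(self, spec, base_options):
    # Sketch only: the tail that both install() branches above share.
    cmakeOptions = list(base_options)              # per-branch -D options
    self.set_mpi_cmakeOptions(spec, cmakeOptions)  # MPI dirs from variants
    cmakeOptions.extend(std_cmake_args)            # Spack's standard args
    # The build-type override goes last so it wins over std_cmake_args
    self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)
    cmake('..', *cmakeOptions)
    make("clean")
    make()
    make("install")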
@ -203,14 +334,63 @@ def install(self, spec, prefix):
# tbd

#if '+intelmic' in spec:
#    with working_dir('build_intelmic_compute', create=True):
#        tbd
#    with working_dir('build_intelmic_frontend', create=True):
#        tbd

#if '+cray' in spec:
#    with working_dir('build_cray_compute', create=True):
#        tbd
#    with working_dir('build_cray_frontend', create=True):
#        tbd
#if '+cbtf' in spec:
#    if cray build type detected:
#        if '+runtime' in spec:
#            with working_dir('build_cray_cbtf_compute', create=True):
#                tbd
#        else:
#            with working_dir('build_cray_cbtf_frontend', create=True):
#                tbd
#            with working_dir('build_cray_osscbtf_frontend', create=True):
#                tbd
#    fi
#    elif '+intelmic' in spec:
#        if '+runtime' in spec:
#            with working_dir('build_intelmic_cbtf_compute', create=True):
#                tbd
#        else:
#            with working_dir('build_intelmic_cbtf_frontend', create=True):
#                tbd
#            with working_dir('build_intelmic_osscbtf_frontend', create=True):
#    fi
#    else
#        with working_dir('build_cluster_cbtf', create=True):
#            tbd
#        with working_dir('build_cluster osscbtf', create=True):
#            tbd
#    fi
#elif '+offline' in spec:
#    if cray build type detected:
#        if '+runtime' in spec:
#            with working_dir('build_cray_ossoff_compute', create=True):
#                tbd
#        else:
#            with working_dir('build_cray_ossoff_frontend', create=True):
#                tbd
#    fi
#    elif '+intelmic' in spec:
#        if '+runtime' in spec:
#            with working_dir('build_intelmic_ossoff_compute', create=True):
#                tbd
#        else:
#            with working_dir('build_intelmic_ossoff_frontend', create=True):
#                tbd
#    fi
#    elif bgq build type detected:
#        if '+runtime' in spec:
#            with working_dir('build_bgq_ossoff_compute', create=True):
#                tbd
#        else:
#            with working_dir('build_bgq_ossoff_frontend', create=True):
#                tbd
#    fi
#    else
#        with working_dir('build_cluster ossoff', create=True):
#            tbd
#    fi
#fi

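One design note on the openspeedshop hunks above: set_mpi_cmakeOptions repeats an almost identical if-block for each MPI implementation. A table-driven version, sketched below purely for illustration (it is not part of this commit; _MPI_CMAKE_VARS is an invented name, and spec / cmakeOptions are assumed to be the same objects used in install()), would keep the variant names and their -D*_DIR cache variables in one place:

_MPI_CMAKE_VARS = {
    'openmpi':  '-DOPENMPI_DIR=%s',
    'mpich':    '-DMPICH_DIR=%s',
    'mpich2':   '-DMPICH2_DIR=%s',
    'mvapich':  '-DMVAPICH_DIR=%s',
    'mvapich2': '-DMVAPICH2_DIR=%s',
    'mpt':      '-DMPT_DIR=%s',
}

def set_mpi_cmakeOptions(self, spec, cmakeOptions):
    # Append a -D<IMPL>_DIR option for every MPI variant enabled in the spec
    for name, template in sorted(_MPI_CMAKE_VARS.items()):
        if '+' + name in spec:
            cmakeOptions.append(template % spec[name].prefix)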
@ -3,6 +3,7 @@

from spack import *


class Openssl(Package):
"""The OpenSSL Project is a collaborative effort to develop a
robust, commercial-grade, full-featured, and Open Source
@ -14,10 +15,12 @@ class Openssl(Package):

version('1.0.1h', '8d6d684a9430d5cc98a62a5d8fbda8cf')
version('1.0.1r', '1abd905e079542ccae948af37e393d28')
version('1.0.1t', '9837746fcf8a6727d46d22ca35953da1')
version('1.0.2d', '38dd619b2e77cbac69b99f52a053d25a')
version('1.0.2e', '5262bfa25b60ed9de9f28d5d52d77fc5')
version('1.0.2f', 'b3bf73f507172be9292ea2a8c28b659d')
version('1.0.2g', 'f3c710c045cdee5fd114feb69feba7aa')
version('1.0.2h', '9392e65072ce4b614c1392eefc1f23d0')

depends_on("zlib")
parallel = False
@ -30,26 +33,14 @@ def url_for_version(self, version):
# Same idea, but just to avoid issuing the same message multiple times
warnings_given_to_user = getattr(Openssl, '_warnings_given', {})
if openssl_url is None:
latest = 'http://www.openssl.org/source/openssl-{version}.tar.gz'
older = 'http://www.openssl.org/source/old/{version_number}/openssl-{version_full}.tar.gz'
# Try to use the url where the latest tarballs are stored. If the url does not exist (404), then
# return the url for the older format
version_number = '.'.join([str(x) for x in version[:-1]])
older_url = older.format(version_number=version_number, version_full=version)
latest_url = latest.format(version=version)
response = urllib.urlopen(latest.format(version=version))
if response.getcode() == 404:
openssl_url = older_url
# Check if we already warned the user for this particular version of OpenSSL.
# If not, we display a warning message and mark this version
if self.spec.satisfies('@external'):
# The version @external is reserved for system openssl. In that case return a fake url and exit
openssl_url = '@external (reserved version for system openssl)'
if not warnings_given_to_user.get(version, False):
tty.warn('This installation depends on an old version of OpenSSL, which may have known security issues. ')
tty.warn('Consider updating to the latest version of this package.')
tty.warn('More details at {homepage}'.format(homepage=Openssl.homepage))
tty.msg('Using openssl@external : the version @external is reserved for system openssl')
warnings_given_to_user[version] = True
else:
openssl_url = latest_url
# Store the computed URL
openssl_url = self.check_for_outdated_release(version, warnings_given_to_user)  # Store the computed URL
openssl_urls[version] = openssl_url
# Store the updated dictionary of URLS
Openssl._openssl_url = openssl_urls
@ -58,6 +49,28 @@ def url_for_version(self, version):

return openssl_url

def check_for_outdated_release(self, version, warnings_given_to_user):
latest = 'ftp://ftp.openssl.org/source/openssl-{version}.tar.gz'
older = 'http://www.openssl.org/source/old/{version_number}/openssl-{version_full}.tar.gz'
# Try to use the url where the latest tarballs are stored. If the url does not exist (404), then
# return the url for the older format
version_number = '.'.join([str(x) for x in version[:-1]])
try:
openssl_url = latest.format(version=version)
urllib.urlopen(openssl_url)
except IOError:
openssl_url = older.format(version_number=version_number, version_full=version)
# Check if we already warned the user for this particular version of OpenSSL.
# If not, we display a warning message and mark this version
if not warnings_given_to_user.get(version, False):
tty.warn(
'This installation depends on an old version of OpenSSL, which may have known security issues. ')
tty.warn('Consider updating to the latest version of this package.')
tty.warn('More details at {homepage}'.format(homepage=Openssl.homepage))
warnings_given_to_user[version] = True

return openssl_url

def install(self, spec, prefix):
# OpenSSL uses a variable APPS in its Makefile. If it happens to be set
# in the environment, then this will override what is set in the

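To make the flow of the new check_for_outdated_release easier to follow outside the package class, here is a stand-alone sketch of the same idea: probe the current-release location, fall back to the old/ archive when the fetch fails, and warn only once per version. It assumes Python 2 (matching the urllib.urlopen call in the package) and a plain version string such as '1.0.1h'; openssl_url_for and the module-level _warned dict are invented for the example, and Python's warnings.warn stands in for Spack's tty.warn.

import urllib
from warnings import warn

LATEST = 'ftp://ftp.openssl.org/source/openssl-{version}.tar.gz'
OLDER = 'http://www.openssl.org/source/old/{version_number}/openssl-{version_full}.tar.gz'
_warned = {}  # version -> True once the user has already been warned


def openssl_url_for(version):
    """Return a download URL for a version string such as '1.0.1h'."""
    # '1.0.1h' -> '1.0.1': the old/ archive is grouped by numeric release
    version_number = version.rstrip('abcdefghijklmnopqrstuvwxyz')
    url = LATEST.format(version=version)
    try:
        urllib.urlopen(url)  # current releases sit at the top level
    except IOError:
        # Anything no longer served there lives under old/<x.y.z>/
        url = OLDER.format(version_number=version_number, version_full=version)
        if not _warned.get(version, False):
            warn('OpenSSL %s is an outdated release; consider updating' % version)
            _warned[version] = True
    return url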
@ -0,0 +1,38 @@
from spack import *

class OsuMicroBenchmarks(Package):
"""The Ohio MicroBenchmark suite is a collection of independent MPI
message passing performance microbenchmarks developed and written at
The Ohio State University. It includes traditional benchmarks and
performance measures such as latency, bandwidth and host overhead
and can be used for both traditional and GPU-enhanced nodes."""

homepage = "http://mvapich.cse.ohio-state.edu/benchmarks/"
url = "http://mvapich.cse.ohio-state.edu/download/mvapich/osu-micro-benchmarks-5.3.tar.gz"

version('5.3', '42e22b931d451e8bec31a7424e4adfc2')

variant('cuda', default=False, description="Enable CUDA support")

depends_on('mpi')
depends_on('cuda', when='+cuda')

def install(self, spec, prefix):
config_args = [
'CC=%s' % spec['mpi'].prefix.bin + '/mpicc',
'CXX=%s' % spec['mpi'].prefix.bin + '/mpicxx',
'LDFLAGS=-lrt',
'--prefix=%s' % prefix
]

if '+cuda' in spec:
config_args.extend([
'--enable-cuda',
'--with-cuda=%s' % spec['cuda'].prefix,
])

configure(*config_args)

make()
make('install')
@ -7,10 +7,17 @@ class P4est(Package):

version('1.1', '37ba7f4410958cfb38a2140339dbf64f')

# disabled by default so that it works on the front end of clusters
variant('tests', default=False, description='Run small tests')
variant('tests', default=True, description='Run small tests')

# build dependencies
depends_on('automake')
depends_on('autoconf')
depends_on('libtool@2.4.2:')

# other dependencies
depends_on('lua') # Needed for the submodule sc
depends_on('mpi')
depends_on('zlib')

def install(self, spec, prefix):
options = ['--enable-mpi',
@ -19,16 +26,20 @@ def install(self, spec, prefix):
'--without-blas',
'CPPFLAGS=-DSC_LOG_PRIORITY=SC_LP_ESSENTIAL',
'CFLAGS=-O2',
'CC=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpicc'), # TODO: use ENV variables or MPI class wrappers
'CXX=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpic++'),
'FC=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpif90'),
'F77=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpif77'),
'CC=%s' % self.spec['mpi'].mpicc,
'CXX=%s' % self.spec['mpi'].mpicxx,
'FC=%s' % self.spec['mpi'].mpifc,
'F77=%s' % self.spec['mpi'].mpif77
]

configure('--prefix=%s' % prefix, *options)

make()
# Make the tests optional, since mpiexec sometimes refuses to run them, failing with an error:
# mpiexec has detected an attempt to run as root.
# Running at root is *strongly* discouraged as any mistake (e.g., in
# defining TMPDIR) or bug can result in catastrophic damage to the OS
# file system, leaving your system in an unusable state.
if '+tests' in self.spec:
make("check")

make("check")
make("install")
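The compiler change in this p4est hunk replaces hand-built join_path(self.spec['mpi'].prefix.bin, ...) paths with the MPI provider's wrapper attributes (mpicc, mpicxx, mpifc, mpif77), which also addresses the TODO on the removed line. As a hedged illustration of that pattern (the helper name is invented; a concretized spec with an 'mpi' dependency is assumed), the four configure entries could be collected in one place:

def mpi_compiler_args(spec):
    # Ask the MPI provider for its compiler wrappers instead of composing
    # paths under prefix.bin by hand.
    mpi = spec['mpi']
    return ['CC=%s' % mpi.mpicc,
            'CXX=%s' % mpi.mpicxx,
            'FC=%s' % mpi.mpifc,
            'F77=%s' % mpi.mpif77]

A call like options.extend(mpi_compiler_args(self.spec)) would then replace the four hard-coded entries above.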
Some files were not shown because too many files have changed in this diff.