Merge branch 'develop' into openmpi_variants

This commit is contained in:
Massimiliano Culpo 2016-05-02 18:26:05 +02:00
commit cfc25d0a92
71 changed files with 1723 additions and 417 deletions


@ -372,25 +372,32 @@ how this is done is in :ref:`sec-specs`.
``spack compiler add``
~~~~~~~~~~~~~~~~~~~~~~~
An alias for ``spack compiler find``.
.. _spack-compiler-find:
``spack compiler find``
~~~~~~~~~~~~~~~~~~~~~~~
If you do not see a compiler in this list, but you want to use it with
Spack, you can simply run ``spack compiler add`` with the path to
Spack, you can simply run ``spack compiler find`` with the path to
where the compiler is installed. For example::
$ spack compiler add /usr/local/tools/ic-13.0.079
$ spack compiler find /usr/local/tools/ic-13.0.079
==> Added 1 new compiler to /Users/gamblin2/.spack/compilers.yaml
intel@13.0.079
Or you can run ``spack compiler add`` with no arguments to force
Or you can run ``spack compiler find`` with no arguments to force
auto-detection. This is useful if you do not know where compilers are
installed, but you know that new compilers have been added to your
``PATH``. For example, after loading a new compiler with environment modules, you might do this::
$ module load gcc-4.9.0
$ spack compiler add
$ spack compiler find
==> Added 1 new compiler to /Users/gamblin2/.spack/compilers.yaml
gcc@4.9.0
This loads the environment module for gcc-4.9.0 to get it into the
This loads the environment module for gcc-4.9.0 to add it to
``PATH``, and then it adds the compiler to Spack.
.. _spack-compiler-info:
@ -807,17 +814,22 @@ Environment Modules, you can get it with Spack:
1. Install with::
.. code-block:: sh
spack install environment-modules
2. Activate with::
MODULES_HOME=`spack location -i environment-modules`
MODULES_VERSION=`ls -1 $MODULES_HOME/Modules | head -1`
${MODULES_HOME}/Modules/${MODULES_VERSION}/bin/add.modules
Add the following two lines to your ``.bashrc`` profile (or similar):
.. code-block:: sh
MODULES_HOME=`spack location -i environment-modules`
source ${MODULES_HOME}/Modules/init/bash
If you use a Unix shell other than bash, replace ``bash`` with the
appropriate init file in ``${MODULES_HOME}/Modules/init/``.
This adds the necessary setup to your ``.bashrc`` (or similar) file,
enabling Environment Modules when you log in. The ``add.modules`` script
will ask for your permission before changing any files.
Spack and Environment Modules
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

lib/spack/env/cc (vendored, 2 changes)

@ -162,7 +162,7 @@ fi
# It doesn't work with -rpath.
# This variable controls whether they are added.
add_rpaths=true
if [[ mode == ld && $OSTYPE == darwin* ]]; then
if [[ $mode == ld && "$SPACK_SHORT_SPEC" =~ "darwin" ]]; then
for arg in "$@"; do
if [[ $arg == -r ]]; then
add_rpaths=false


@ -136,9 +136,7 @@
# don't add a second username if it's already unique by user.
if not _tmp_user in path:
tmp_dirs.append(join_path(path, '%u', 'spack-stage'))
for path in _tmp_candidates:
if not path in tmp_dirs:
else:
tmp_dirs.append(join_path(path, 'spack-stage'))
# Whether spack should allow installation of unsafe versions of


@ -44,10 +44,10 @@ def setup_parser(subparser):
scopes = spack.config.config_scopes
# Add
add_parser = sp.add_parser('add', help='Add compilers to the Spack configuration.')
add_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
add_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope,
# Find
find_parser = sp.add_parser('find', aliases=['add'], help='Search the system for compilers to add to the Spack configuration.')
find_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
find_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope,
help="Configuration scope to modify.")
# Remove
@ -70,7 +70,7 @@ def setup_parser(subparser):
help="Configuration scope to read from.")
def compiler_add(args):
def compiler_find(args):
"""Search either $PATH or a list of paths for compilers and add them
to Spack's configuration."""
paths = args.add_paths
@ -136,7 +136,8 @@ def compiler_list(args):
def compiler(parser, args):
action = { 'add' : compiler_add,
action = { 'add' : compiler_find,
'find' : compiler_find,
'remove' : compiler_remove,
'rm' : compiler_remove,
'info' : compiler_info,

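Aside: the hunk above leans on two things — argparse subparser aliases (so "spack compiler add" and "spack compiler find" share one parser) and a dispatch dict that maps both names to compiler_find. Below is a minimal, self-contained sketch of that pattern with toy names, written against Python 3's argparse (which supports aliases= directly); it is illustrative only, not Spack's actual command code.

    import argparse

    def find(args):
        # Toy stand-in for compiler_find(): just echo what would be searched.
        print('searching', args.add_paths or ['$PATH'])

    parser = argparse.ArgumentParser(prog='toy-compiler')
    sp = parser.add_subparsers(dest='compiler_command')

    # 'add' is registered as an alias of 'find', so both spellings parse identically...
    find_parser = sp.add_parser('find', aliases=['add'],
                                help='Search the system for compilers.')
    find_parser.add_argument('add_paths', nargs=argparse.REMAINDER)

    # ...and the dispatch table maps both spellings onto the same function,
    # because argparse records whichever name the user actually typed.
    action = {'find': find, 'add': find}

    args = parser.parse_args(['add', '/usr/local/tools/ic-13.0.079'])
    action[args.compiler_command](args)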

@ -23,87 +23,106 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import argparse
import xml.etree.ElementTree as ET
import itertools
import re
import os
import codecs
import os
import time
import xml.dom.minidom
import xml.etree.ElementTree as ET
import llnl.util.tty as tty
from llnl.util.filesystem import *
import spack
import spack.cmd
from llnl.util.filesystem import *
from spack.build_environment import InstallError
from spack.fetch_strategy import FetchError
import spack.cmd
description = "Run package installation as a unit test, output formatted results."
def setup_parser(subparser):
subparser.add_argument(
'-j', '--jobs', action='store', type=int,
help="Explicitly set number of make jobs. Default is #cpus.")
subparser.add_argument('-j',
'--jobs',
action='store',
type=int,
help="Explicitly set number of make jobs. Default is #cpus.")
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check packages against checksum")
subparser.add_argument('-n',
'--no-checksum',
action='store_true',
dest='no_checksum',
help="Do not check packages against checksum")
subparser.add_argument(
'-o', '--output', action='store', help="test output goes in this file")
subparser.add_argument('-o', '--output', action='store', help="test output goes in this file")
subparser.add_argument(
'package', nargs=argparse.REMAINDER, help="spec of package to install")
class JunitResultFormat(object):
def __init__(self):
self.root = ET.Element('testsuite')
self.tests = []
def add_test(self, buildId, testResult, buildInfo=None):
self.tests.append((buildId, testResult, buildInfo))
def write_to(self, stream):
self.root.set('tests', '{0}'.format(len(self.tests)))
for buildId, testResult, buildInfo in self.tests:
testcase = ET.SubElement(self.root, 'testcase')
testcase.set('classname', buildId.name)
testcase.set('name', buildId.stringId())
if testResult == TestResult.FAILED:
failure = ET.SubElement(testcase, 'failure')
failure.set('type', "Build Error")
failure.text = buildInfo
elif testResult == TestResult.SKIPPED:
skipped = ET.SubElement(testcase, 'skipped')
skipped.set('type', "Skipped Build")
skipped.text = buildInfo
ET.ElementTree(self.root).write(stream)
subparser.add_argument('package', nargs=argparse.REMAINDER, help="spec of package to install")
class TestResult(object):
PASSED = 0
FAILED = 1
SKIPPED = 2
ERRORED = 3
class BuildId(object):
def __init__(self, spec):
self.name = spec.name
self.version = spec.version
self.hashId = spec.dag_hash()
class TestSuite(object):
def __init__(self, filename):
self.filename = filename
self.root = ET.Element('testsuite')
self.tests = []
def stringId(self):
return "-".join(str(x) for x in (self.name, self.version, self.hashId))
def __enter__(self):
return self
def __hash__(self):
return hash((self.name, self.version, self.hashId))
def append(self, item):
if not isinstance(item, TestCase):
raise TypeError('only TestCase instances may be appended to a TestSuite instance')
self.tests.append(item) # Append the item to the list of tests
def __eq__(self, other):
if not isinstance(other, BuildId):
return False
def __exit__(self, exc_type, exc_val, exc_tb):
# Prepare the header for the entire test suite
number_of_errors = sum(x.result_type == TestResult.ERRORED for x in self.tests)
self.root.set('errors', str(number_of_errors))
number_of_failures = sum(x.result_type == TestResult.FAILED for x in self.tests)
self.root.set('failures', str(number_of_failures))
self.root.set('tests', str(len(self.tests)))
return ((self.name, self.version, self.hashId) ==
(other.name, other.version, other.hashId))
for item in self.tests:
self.root.append(item.element)
with open(self.filename, 'wb') as file:
xml_string = ET.tostring(self.root)
xml_string = xml.dom.minidom.parseString(xml_string).toprettyxml()
file.write(xml_string)
class TestCase(object):
results = {
TestResult.PASSED: None,
TestResult.SKIPPED: 'skipped',
TestResult.FAILED: 'failure',
TestResult.ERRORED: 'error',
}
def __init__(self, classname, name, time=None):
self.element = ET.Element('testcase')
self.element.set('classname', str(classname))
self.element.set('name', str(name))
if time is not None:
self.element.set('time', str(time))
self.result_type = None
def set_result(self, result_type, message=None, error_type=None, text=None):
self.result_type = result_type
result = TestCase.results[self.result_type]
if result is not None and result is not TestResult.PASSED:
subelement = ET.SubElement(self.element, result)
if error_type is not None:
subelement.set('type', error_type)
if message is not None:
subelement.set('message', str(message))
if text is not None:
subelement.text = text
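A minimal usage sketch of the new TestSuite and TestCase classes defined above (illustrative only; the spec strings and output path are invented):

    # Build two test cases and let the suite write the JUnit-style XML on exit.
    with TestSuite(join_path('test-output', 'test-libelf.xml')) as suite:
        ok = TestCase('libelf', 'libelf@0.8.13=linux-x86_64', time=12.3)
        ok.set_result(TestResult.PASSED)
        suite.append(ok)

        bad = TestCase('libdwarf', 'libdwarf@20130729=linux-x86_64', time=3.4)
        bad.set_result(TestResult.FAILED,
                       message='Installation failure',
                       text='...tail of the build log...')
        suite.append(bad)
    # TestSuite.__exit__ has now written a report with tests="2", failures="1", errors="0".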
def fetch_log(path):
@ -114,46 +133,76 @@ def fetch_log(path):
def failed_dependencies(spec):
return set(childSpec for childSpec in spec.dependencies.itervalues() if not
spack.repo.get(childSpec).installed)
return set(item for item in spec.dependencies.itervalues() if not spack.repo.get(item).installed)
def create_test_output(topSpec, newInstalls, output, getLogFunc=fetch_log):
# Post-order traversal is not strictly required but it makes sense to output
# tests for dependencies first.
for spec in topSpec.traverse(order='post'):
if spec not in newInstalls:
continue
def get_top_spec_or_die(args):
specs = spack.cmd.parse_specs(args.package, concretize=True)
if len(specs) > 1:
tty.die("Only 1 top-level package can be specified")
top_spec = iter(specs).next()
return top_spec
failedDeps = failed_dependencies(spec)
package = spack.repo.get(spec)
if failedDeps:
result = TestResult.SKIPPED
dep = iter(failedDeps).next()
depBID = BuildId(dep)
errOutput = "Skipped due to failed dependency: {0}".format(
depBID.stringId())
elif (not package.installed) and (not package.stage.source_path):
result = TestResult.FAILED
errOutput = "Failure to fetch package resources."
elif not package.installed:
result = TestResult.FAILED
lines = getLogFunc(package.build_log_path)
errMessages = list(line for line in lines if
re.search('error:', line, re.IGNORECASE))
errOutput = errMessages if errMessages else lines[-10:]
errOutput = '\n'.join(itertools.chain(
[spec.to_yaml(), "Errors:"], errOutput,
["Build Log:", package.build_log_path]))
else:
result = TestResult.PASSED
errOutput = None
bId = BuildId(spec)
output.add_test(bId, result, errOutput)
def install_single_spec(spec, number_of_jobs):
package = spack.repo.get(spec)
# If it is already installed, skip the test
if spack.repo.get(spec).installed:
testcase = TestCase(package.name, package.spec.short_spec, time=0.0)
testcase.set_result(TestResult.SKIPPED, message='Skipped [already installed]', error_type='already_installed')
return testcase
# If it relies on dependencies that did not install, skip
if failed_dependencies(spec):
testcase = TestCase(package.name, package.spec.short_spec, time=0.0)
testcase.set_result(TestResult.SKIPPED, message='Skipped [failed dependencies]', error_type='dep_failed')
return testcase
# Otherwise try to install the spec
try:
start_time = time.time()
package.do_install(keep_prefix=False,
keep_stage=True,
ignore_deps=False,
make_jobs=number_of_jobs,
verbose=True,
fake=False)
duration = time.time() - start_time
testcase = TestCase(package.name, package.spec.short_spec, duration)
testcase.set_result(TestResult.PASSED)
except InstallError:
# An InstallError is considered a failure (the recipe didn't work correctly)
duration = time.time() - start_time
# Try to get the log
lines = fetch_log(package.build_log_path)
text = '\n'.join(lines)
testcase = TestCase(package.name, package.spec.short_spec, duration)
testcase.set_result(TestResult.FAILED, message='Installation failure', text=text)
except FetchError:
# A FetchError is considered an error (we didn't even start building)
duration = time.time() - start_time
testcase = TestCase(package.name, package.spec.short_spec, duration)
testcase.set_result(TestResult.ERRORED, message='Unable to fetch package')
return testcase
def get_filename(args, top_spec):
if not args.output:
fname = 'test-{x.name}-{x.version}-{hash}.xml'.format(x=top_spec, hash=top_spec.dag_hash())
output_directory = join_path(os.getcwd(), 'test-output')
if not os.path.exists(output_directory):
os.mkdir(output_directory)
output_filename = join_path(output_directory, fname)
else:
output_filename = args.output
return output_filename
def test_install(parser, args):
# Check the input
if not args.package:
tty.die("install requires a package argument")
@ -162,50 +211,15 @@ def test_install(parser, args):
tty.die("The -j option must be a positive integer!")
if args.no_checksum:
spack.do_checksum = False # TODO: remove this global.
spack.do_checksum = False # TODO: remove this global.
specs = spack.cmd.parse_specs(args.package, concretize=True)
if len(specs) > 1:
tty.die("Only 1 top-level package can be specified")
topSpec = iter(specs).next()
newInstalls = set()
for spec in topSpec.traverse():
package = spack.repo.get(spec)
if not package.installed:
newInstalls.add(spec)
if not args.output:
bId = BuildId(topSpec)
outputDir = join_path(os.getcwd(), "test-output")
if not os.path.exists(outputDir):
os.mkdir(outputDir)
outputFpath = join_path(outputDir, "test-{0}.xml".format(bId.stringId()))
else:
outputFpath = args.output
for spec in topSpec.traverse(order='post'):
# Calling do_install for the top-level package would be sufficient but
# this attempts to keep going if any package fails (other packages which
# are not dependents may succeed)
package = spack.repo.get(spec)
if (not failed_dependencies(spec)) and (not package.installed):
try:
package.do_install(
keep_prefix=False,
keep_stage=True,
ignore_deps=False,
make_jobs=args.jobs,
verbose=True,
fake=False)
except InstallError:
pass
except FetchError:
pass
jrf = JunitResultFormat()
handled = {}
create_test_output(topSpec, newInstalls, jrf)
with open(outputFpath, 'wb') as F:
jrf.write_to(F)
# Get the one and only top spec
top_spec = get_top_spec_or_die(args)
# Get the filename of the test
output_filename = get_filename(args, top_spec)
# TEST SUITE
with TestSuite(output_filename) as test_suite:
# Traverse in post order : each spec is a test case
for spec in top_spec.traverse(order='post'):
test_case = install_single_spec(spec, args.jobs)
test_suite.append(test_case)


@ -157,12 +157,26 @@ def fetch(self):
tty.msg("Already downloaded %s" % self.archive_file)
return
possible_files = self.stage.expected_archive_files
save_file = None
partial_file = None
if possible_files:
save_file = self.stage.expected_archive_files[0]
partial_file = self.stage.expected_archive_files[0] + '.part'
tty.msg("Trying to fetch from %s" % self.url)
curl_args = ['-O', # save file to disk
if partial_file:
save_args = ['-C', '-', # continue partial downloads
'-o', partial_file] # use a .part file
else:
save_args = ['-O']
curl_args = save_args + [
'-f', # fail on >400 errors
'-D', '-', # print out HTML headers
'-L', self.url, ]
'-L', # resolve 3xx redirects
self.url, ]
if sys.stdout.isatty():
curl_args.append('-#') # status bar when using a tty
@ -178,6 +192,9 @@ def fetch(self):
if self.archive_file:
os.remove(self.archive_file)
if partial_file and os.path.exists(partial_file):
os.remove(partial_file)
if spack.curl.returncode == 22:
# This is a 404. Curl will print the error.
raise FailedDownloadError(
@ -209,6 +226,9 @@ def fetch(self):
"'spack clean <package>' to remove the bad archive, then fix",
"your internet gateway issue and install again.")
if save_file:
os.rename(partial_file, save_file)
if not self.archive_file:
raise FailedDownloadError(self.url)
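Read together, the hunks above make downloads resumable: curl writes into an "<archive>.part" file with "-C -", the error paths clean up the partial file when appropriate, and only a successful fetch is renamed to the final archive name. A standalone sketch of that pattern follows (a hypothetical helper under those assumptions, not Spack's actual fetch strategy):

    import os
    import subprocess
    import sys

    def fetch_with_resume(url, save_file):
        """Download url to save_file, resuming a previous partial download if present."""
        partial_file = save_file + '.part'
        curl_args = ['curl',
                     '-C', '-',           # continue a partial download if one exists
                     '-o', partial_file,  # write to the .part file, not the final name
                     '-f',                # fail on HTTP errors >= 400
                     '-D', '-',           # print response headers
                     '-L',                # follow 3xx redirects
                     url]
        if sys.stdout.isatty():
            curl_args.append('-#')        # progress bar when attached to a terminal

        # The real code inspects curl's return code and may remove the partial
        # file on certain failures; here any failure simply raises.
        subprocess.check_call(curl_args)
        os.rename(partial_file, save_file)  # promote the file only after curl succeeded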


@ -210,6 +210,18 @@ def _need_to_create_path(self):
return False
@property
def expected_archive_files(self):
"""Possible archive file paths."""
paths = []
if isinstance(self.fetcher, fs.URLFetchStrategy):
paths.append(os.path.join(self.path, os.path.basename(self.fetcher.url)))
if self.mirror_path:
paths.append(os.path.join(self.path, os.path.basename(self.mirror_path)))
return paths
@property
def archive_file(self):
"""Path to the source archive within this stage directory."""


@ -61,14 +61,14 @@
'optional_deps',
'make_executable',
'configure_guess',
'unit_install',
'lock',
'database',
'namespace_trie',
'yaml',
'sbang',
'environment',
'cmd.uninstall']
'cmd.uninstall',
'cmd.test_install']
def list_tests():


@ -219,3 +219,27 @@ def test_ld_deps(self):
' '.join(test_command))
def test_ld_deps_reentrant(self):
"""Make sure ld -r is handled correctly on OS's where it doesn't
support rpaths."""
os.environ['SPACK_DEPENDENCIES'] = ':'.join([self.dep1])
os.environ['SPACK_SHORT_SPEC'] = "foo@1.2=linux-x86_64"
reentrant_test_command = ['-r'] + test_command
self.check_ld('dump-args', reentrant_test_command,
'ld ' +
'-rpath ' + self.prefix + '/lib ' +
'-rpath ' + self.prefix + '/lib64 ' +
'-L' + self.dep1 + '/lib ' +
'-rpath ' + self.dep1 + '/lib ' +
'-r ' +
' '.join(test_command))
os.environ['SPACK_SHORT_SPEC'] = "foo@1.2=darwin-x86_64"
self.check_ld('dump-args', reentrant_test_command,
'ld ' +
'-L' + self.dep1 + '/lib ' +
'-r ' +
' '.join(test_command))


@ -0,0 +1,190 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import collections
from contextlib import contextmanager
import StringIO
FILE_REGISTRY = collections.defaultdict(StringIO.StringIO)
# Monkey-patch open to write module files to a StringIO instance
@contextmanager
def mock_open(filename, mode):
if not mode == 'wb':
raise RuntimeError('test.test_install : unexpected opening mode for monkey-patched open')
FILE_REGISTRY[filename] = StringIO.StringIO()
try:
yield FILE_REGISTRY[filename]
finally:
handle = FILE_REGISTRY[filename]
FILE_REGISTRY[filename] = handle.getvalue()
handle.close()
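For clarity, a short usage illustration of the fixture above (the filename is invented; in the tests below, open is monkey-patched onto the test-install module rather than called directly like this):

    # Write through the mocked open, then read back the captured text.
    with mock_open('junit-report.xml', 'wb') as f:
        f.write('<testsuite tests="2"/>')
    # After the with-block, FILE_REGISTRY holds the captured string, not a file handle.
    assert FILE_REGISTRY['junit-report.xml'] == '<testsuite tests="2"/>'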
import os
import itertools
import unittest
import spack
import spack.cmd
# The use of __import__ is necessary to maintain a name with hyphen (which cannot be an identifier in python)
test_install = __import__("spack.cmd.test-install", fromlist=['test_install'])
class MockSpec(object):
def __init__(self, name, version, hashStr=None):
self.dependencies = {}
self.name = name
self.version = version
self.hash = hashStr if hashStr else hash((name, version))
def traverse(self, order=None):
for _, spec in self.dependencies.items():
yield spec
yield self
#allDeps = itertools.chain.from_iterable(i.traverse() for i in self.dependencies.itervalues())
#return set(itertools.chain([self], allDeps))
def dag_hash(self):
return self.hash
@property
def short_spec(self):
return '-'.join([self.name, str(self.version), str(self.hash)])
class MockPackage(object):
def __init__(self, spec, buildLogPath):
self.name = spec.name
self.spec = spec
self.installed = False
self.build_log_path = buildLogPath
def do_install(self, *args, **kwargs):
self.installed = True
class MockPackageDb(object):
def __init__(self, init=None):
self.specToPkg = {}
if init:
self.specToPkg.update(init)
def get(self, spec):
return self.specToPkg[spec]
def mock_fetch_log(path):
return []
specX = MockSpec('X', "1.2.0")
specY = MockSpec('Y', "2.3.8")
specX.dependencies['Y'] = specY
pkgX = MockPackage(specX, 'logX')
pkgY = MockPackage(specY, 'logY')
class MockArgs(object):
def __init__(self, package):
self.package = package
self.jobs = None
self.no_checksum = False
self.output = None
# TODO: add test(s) where Y fails to install
class TestInstallTest(unittest.TestCase):
"""
Tests test-install where X->Y
"""
def setUp(self):
super(TestInstallTest, self).setUp()
# Monkey patch parse specs
def monkey_parse_specs(x, concretize):
if x == 'X':
return [specX]
elif x == 'Y':
return [specY]
return []
self.parse_specs = spack.cmd.parse_specs
spack.cmd.parse_specs = monkey_parse_specs
# Monkey patch os.mkdir
self.os_mkdir = os.mkdir
os.mkdir = lambda x: True
# Monkey patch open
test_install.open = mock_open
# Clean FILE_REGISTRY
FILE_REGISTRY = collections.defaultdict(StringIO.StringIO)
pkgX.installed = False
pkgY.installed = False
# Monkey patch pkgDb
self.saved_db = spack.repo
pkgDb = MockPackageDb({specX: pkgX, specY: pkgY})
spack.repo = pkgDb
def tearDown(self):
# Remove the monkey patched test_install.open
test_install.open = open
# Remove the monkey patched os.mkdir
os.mkdir = self.os_mkdir
del self.os_mkdir
# Remove the monkey patched parse_specs
spack.cmd.parse_specs = self.parse_specs
del self.parse_specs
super(TestInstallTest, self).tearDown()
spack.repo = self.saved_db
def test_installing_both(self):
test_install.test_install(None, MockArgs('X') )
self.assertEqual(len(FILE_REGISTRY), 1)
for _, content in FILE_REGISTRY.items():
self.assertTrue('tests="2"' in content)
self.assertTrue('failures="0"' in content)
self.assertTrue('errors="0"' in content)
def test_dependency_already_installed(self):
pkgX.installed = True
pkgY.installed = True
test_install.test_install(None, MockArgs('X'))
self.assertEqual(len(FILE_REGISTRY), 1)
for _, content in FILE_REGISTRY.items():
self.assertTrue('tests="2"' in content)
self.assertTrue('failures="0"' in content)
self.assertTrue('errors="0"' in content)
self.assertEqual(sum('skipped' in line for line in content.split('\n')), 2)


@ -1,126 +0,0 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import itertools
import unittest
import spack
test_install = __import__("spack.cmd.test-install",
fromlist=["BuildId", "create_test_output", "TestResult"])
class MockOutput(object):
def __init__(self):
self.results = {}
def add_test(self, buildId, passed=True, buildInfo=None):
self.results[buildId] = passed
def write_to(self, stream):
pass
class MockSpec(object):
def __init__(self, name, version, hashStr=None):
self.dependencies = {}
self.name = name
self.version = version
self.hash = hashStr if hashStr else hash((name, version))
def traverse(self, order=None):
allDeps = itertools.chain.from_iterable(i.traverse() for i in
self.dependencies.itervalues())
return set(itertools.chain([self], allDeps))
def dag_hash(self):
return self.hash
def to_yaml(self):
return "<<<MOCK YAML {0}>>>".format(test_install.BuildId(self).stringId())
class MockPackage(object):
def __init__(self, buildLogPath):
self.installed = False
self.build_log_path = buildLogPath
specX = MockSpec("X", "1.2.0")
specY = MockSpec("Y", "2.3.8")
specX.dependencies['Y'] = specY
pkgX = MockPackage('logX')
pkgY = MockPackage('logY')
bIdX = test_install.BuildId(specX)
bIdY = test_install.BuildId(specY)
class UnitInstallTest(unittest.TestCase):
"""Tests test-install where X->Y"""
def setUp(self):
super(UnitInstallTest, self).setUp()
pkgX.installed = False
pkgY.installed = False
self.saved_db = spack.repo
pkgDb = MockPackageDb({specX:pkgX, specY:pkgY})
spack.repo = pkgDb
def tearDown(self):
super(UnitInstallTest, self).tearDown()
spack.repo = self.saved_db
def test_installing_both(self):
mo = MockOutput()
pkgX.installed = True
pkgY.installed = True
test_install.create_test_output(specX, [specX, specY], mo, getLogFunc=mock_fetch_log)
self.assertEqual(mo.results,
{bIdX:test_install.TestResult.PASSED,
bIdY:test_install.TestResult.PASSED})
def test_dependency_already_installed(self):
mo = MockOutput()
pkgX.installed = True
pkgY.installed = True
test_install.create_test_output(specX, [specX], mo, getLogFunc=mock_fetch_log)
self.assertEqual(mo.results, {bIdX:test_install.TestResult.PASSED})
#TODO: add test(s) where Y fails to install
class MockPackageDb(object):
def __init__(self, init=None):
self.specToPkg = {}
if init:
self.specToPkg.update(init)
def get(self, spec):
return self.specToPkg[spec]
def mock_fetch_log(path):
return []


@ -1,4 +1,4 @@
##############################################################################
#####################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
@ -84,7 +84,10 @@ function spack {
if [ "$_sp_arg" = "-h" ]; then
command spack cd -h
else
cd $(spack location $_sp_arg "$@")
LOC="$(spack location $_sp_arg "$@")"
if [[ -d "$LOC" ]] ; then
cd "$LOC"
fi
fi
return
;;


@ -14,4 +14,5 @@ def install(self, spec, prefix):
make('-f',
join_path(self.stage.source_path,'build','clang','Makefile'),
parallel=False)
mkdirp(self.prefix.bin)
install(join_path(self.stage.source_path, 'src','bin','astyle'), self.prefix.bin)


@ -12,6 +12,10 @@ class Binutils(Package):
version('2.23.2', '4f8fa651e35ef262edc01d60fb45702e')
version('2.20.1', '2b9dc8f2b7dbd5ec5992c6e29de0b764')
depends_on('m4')
depends_on('flex')
depends_on('bison')
# Add a patch that creates binutils libiberty_pic.a which is preferred by OpenSpeedShop and cbtf-krell
variant('krellpatch', default=False, description="build with openspeedshop based patch.")
variant('gold', default=True, description="build the gold linker")


@ -1,4 +1,5 @@
from spack import *
import os
import shutil
class Cereal(Package):
@ -30,5 +31,8 @@ def install(self, spec, prefix):
# Install
shutil.rmtree(join_path(prefix, 'doc'), ignore_errors=True)
shutil.rmtree(join_path(prefix, 'include'), ignore_errors=True)
shutil.rmtree(join_path(prefix, 'lib'), ignore_errors=True)
shutil.copytree('doc', join_path(prefix, 'doc'), symlinks=True)
shutil.copytree('include', join_path(prefix, 'include'), symlinks=True)
# Create empty directory to avoid linker warnings later
os.mkdir(join_path(prefix, 'lib'))


@ -30,6 +30,7 @@ class Cmake(Package):
homepage = 'https://www.cmake.org'
url = 'https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz'
version('3.5.2', '701386a1b5ec95f8d1075ecf96383e02')
version('3.5.1', 'ca051f4a66375c89d1a524e726da0296')
version('3.5.0', '33c5d09d4c33d4ffcc63578a6ba8777e')
version('3.4.3', '4cb3ff35b2472aae70f542116d616e63')


@ -50,8 +50,8 @@ class Dealii(Package):
depends_on ("trilinos", when='+trilinos+mpi')
# developer dependencies
#depends_on ("numdiff") #FIXME
#depends_on ("astyle") #FIXME
depends_on ("numdiff", when='@dev')
depends_on ("astyle@2.04", when='@dev')
def install(self, spec, prefix):
options = []
@ -251,3 +251,6 @@ def install(self, spec, prefix):
cmake('.')
make('release')
make('run',parallel=False)
def setup_environment(self, spack_env, env):
env.set('DEAL_II_DIR', self.prefix)


@ -7,6 +7,7 @@ class Dia(Package):
version('0.97.3', '0e744a0f6a6c4cb6a089e4d955392c3c')
depends_on('intltool')
depends_on('gtkplus@2.6.0:')
depends_on('cairo')
#depends_on('libart') # optional dependency, not yet supported by spack.


@ -45,6 +45,7 @@ class Eigen(Package):
# TODO : dependency on googlehash, superlu, adolc missing
depends_on('cmake')
depends_on('metis@5:', when='+metis')
depends_on('scotch', when='+scotch')
depends_on('fftw', when='+fftw')


@ -0,0 +1,122 @@
from spack import *
import spack
class Elk(Package):
'''An all-electron full-potential linearised augmented-plane wave
(FP-LAPW) code with many advanced features.'''
homepage = 'http://elk.sourceforge.net/'
url = 'https://sourceforge.net/projects/elk/files/elk-3.3.17.tgz'
version('3.3.17', 'f57f6230d14f3b3b558e5c71f62f0592')
# Elk provides these libraries, but allows you to specify your own
variant('blas', default=True, description='Build with custom BLAS library')
variant('lapack', default=True, description='Build with custom LAPACK library')
variant('fft', default=True, description='Build with custom FFT library')
# Elk does not provide these libraries, but allows you to use them
variant('mpi', default=True, description='Enable MPI parallelism')
variant('openmp', default=True, description='Enable OpenMP support')
variant('libxc', default=True, description='Link to Libxc functional library')
depends_on('blas', when='+blas')
depends_on('lapack', when='+lapack')
depends_on('fftw', when='+fft')
depends_on('mpi', when='+mpi')
depends_on('libxc', when='+libxc')
# Cannot be built in parallel
parallel = False
def configure(self, spec):
# Dictionary of configuration options
config = {
'MAKE': 'make',
'F90': join_path(spack.build_env_path, 'f90'),
'F77': join_path(spack.build_env_path, 'f77'),
'AR': 'ar',
'LIB_FFT': 'fftlib.a',
'SRC_MPI': 'mpi_stub.f90',
'SRC_OMP': 'omp_stub.f90',
'SRC_libxc': 'libxcifc_stub.f90',
'SRC_FFT': 'zfftifc.f90'
}
# Compiler-specific flags
flags = ''
if self.compiler.name == 'intel':
flags = '-O3 -ip -unroll -no-prec-div -openmp'
elif self.compiler.name == 'gcc':
flags = '-O3 -ffast-math -funroll-loops -fopenmp'
elif self.compiler.name == 'pgi':
flags = '-O3 -mp -lpthread'
elif self.compiler.name == 'g95':
flags = '-O3 -fno-second-underscore'
elif self.compiler.name == 'nag':
flags = '-O4 -kind=byte -dusty -dcfuns'
elif self.compiler.name == 'xl':
flags = '-O3 -qsmp=omp'
config['F90_OPTS'] = flags
config['F77_OPTS'] = flags
# BLAS/LAPACK support
blas = 'blas.a'
lapack = 'lapack.a'
if '+blas' in spec:
blas = join_path(spec['blas'].prefix.lib, 'libblas.so')
if '+lapack' in spec:
lapack = join_path(spec['lapack'].prefix.lib, 'liblapack.so')
config['LIB_LPK'] = ' '.join([lapack, blas]) # lapack must come before blas
# FFT support
if '+fft' in spec:
config['LIB_FFT'] = join_path(spec['fftw'].prefix.lib, 'libfftw3.so')
config['SRC_FFT'] = 'zfftifc_fftw.f90'
# MPI support
if '+mpi' in spec:
config.pop('SRC_MPI')
config['F90'] = join_path(spec['mpi'].prefix.bin, 'mpif90')
config['F77'] = join_path(spec['mpi'].prefix.bin, 'mpif77')
# OpenMP support
if '+openmp' in spec:
config.pop('SRC_OMP')
# Libxc support
if '+libxc' in spec:
config['LIB_libxc'] = ' '.join([
join_path(spec['libxc'].prefix.lib, 'libxcf90.so'),
join_path(spec['libxc'].prefix.lib, 'libxc.so')
])
config['SRC_libxc'] = ' '.join([
'libxc_funcs.f90',
'libxc.f90',
'libxcifc.f90'
])
# Write configuration options to include file
with open('make.inc', 'w') as inc:
for key in config:
inc.write('{0} = {1}\n'.format(key, config[key]))
def install(self, spec, prefix):
# Elk only provides an interactive setup script
self.configure(spec)
make()
make('test')
# The Elk Makefile does not provide an install target
mkdirp(prefix.bin)
install('src/elk', prefix.bin)
install('src/eos/eos', prefix.bin)
install('src/spacegroup/spacegroup', prefix.bin)
install_tree('examples', join_path(prefix, 'examples'))
install_tree('species', join_path(prefix, 'species'))


@ -7,7 +7,8 @@ class Git(Package):
homepage = "http://git-scm.com"
url = "https://github.com/git/git/tarball/v2.7.1"
version('2.8.0-rc2', 'c2cf9f2cc70e35f2fafbaf9258f82e4c')
version('2.8.1', '1308448d95afa41a4135903f22262fc8')
version('2.8.0', 'eca687e46e9750121638f258cff8317b')
version('2.7.3', 'fa1c008b56618c355a32ba4a678305f6')
version('2.7.1', 'bf0706b433a8dedd27a63a72f9a66060')
@ -23,18 +24,10 @@ class Git(Package):
#version('2.2.1', 'ff41fdb094eed1ec430aed8ee9b9849c')
# Git compiles with curl support by default on but if your system
# does not have it you will not be able to clone https repos
variant("curl", default=False, description="Add the internal support of curl for https clone")
# Git compiles with expat support by default on but if your system
# does not have it you will not be able to push https repos
variant("expat", default=False, description="Add the internal support of expat for https push")
depends_on("openssl")
depends_on("autoconf")
depends_on("curl", when="+curl")
depends_on("expat", when="+expat")
depends_on("curl")
depends_on("expat")
# Also depends_on gettext: apt-get install gettext (Ubuntu)
@ -49,23 +42,12 @@ def install(self, spec, prefix):
"--prefix=%s" % prefix,
"--without-pcre",
"--with-openssl=%s" % spec['openssl'].prefix,
"--with-zlib=%s" % spec['zlib'].prefix
"--with-zlib=%s" % spec['zlib'].prefix,
"--with-curl=%s" % spec['curl'].prefix,
"--with-expat=%s" % spec['expat'].prefix,
]
if '+curl' in spec:
configure_args.append("--with-curl=%s" % spec['curl'].prefix)
if '+expat' in spec:
configure_args.append("--with-expat=%s" % spec['expat'].prefix)
which('autoreconf')('-i')
configure(*configure_args)
make()
make("install")


@ -35,6 +35,8 @@ class Gmp(Package):
version('6.0.0a', 'b7ff2d88cae7f8085bd5006096eed470')
version('6.0.0' , '6ef5869ae735db9995619135bd856b84')
depends_on("m4")
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()


@ -38,7 +38,7 @@ class Hdf5(Package):
list_depth = 3
version('1.10.0', 'bdc935337ee8282579cd6bc4270ad199')
version('1.8.16', 'b8ed9a36ae142317f88b0c7ef4b9c618')
version('1.8.16', 'b8ed9a36ae142317f88b0c7ef4b9c618', preferred=True)
version('1.8.15', '03cccb5b33dbe975fdcd8ae9dc021f24')
version('1.8.13', 'c03426e9e77d7766944654280b467289')
@ -101,10 +101,10 @@ def install(self, spec, prefix):
extra_args.append('--enable-cxx')
if '+fortran' in spec:
extra_args.extend([
'--enable-fortran',
'--enable-fortran2003'
])
extra_args.append('--enable-fortran')
# '--enable-fortran2003' no longer exists as of version 1.10.0
if spec.satisfies('@:1.8.16'):
extra_args.append('--enable-fortran2003')
if '+mpi' in spec:
# The HDF5 configure script warns if cxx and mpi are enabled


@ -0,0 +1,21 @@
from spack import *
class Hydra(Package):
"""Hydra is a process management system for starting parallel jobs.
Hydra is designed to natively work with existing launcher daemons
(such as ssh, rsh, fork), as well as natively integrate with resource
management systems (such as slurm, pbs, sge)."""
homepage = "http://www.mpich.org"
url = "http://www.mpich.org/static/downloads/3.2/hydra-3.2.tar.gz"
list_url = "http://www.mpich.org/static/downloads/"
list_depth = 2
version('3.2', '4d670916695bf7e3a869cc336a881b39')
def install(self, spec, prefix):
configure('--prefix=%s' % prefix)
make()
make("install")


@ -0,0 +1,19 @@
from spack import *
class Intltool(Package):
"""intltool is a set of tools to centralize translation of many different file formats using GNU gettext-compatible PO files."""
homepage = 'https://freedesktop.org/wiki/Software/intltool/'
version('0.51.0', '12e517cac2b57a0121cda351570f1e63')
def url_for_version(self, version):
"""Handle version-based custom URLs."""
return 'https://launchpad.net/intltool/trunk/%s/+download/intltool-%s.tar.gz' % (version, version)
def install(self, spec, prefix):
# configure, build, install:
options = ['--prefix=%s' % prefix ]
configure(*options)
make()
make('install')


@ -0,0 +1,42 @@
from spack import *
import os
class Ior(Package):
"""The IOR software is used for benchmarking parallel file systems
using POSIX, MPI-IO, or HDF5 interfaces."""
homepage = "https://github.com/LLNL/ior"
url = "https://github.com/LLNL/ior/archive/3.0.1.tar.gz"
version('3.0.1', '71150025e0bb6ea1761150f48b553065')
variant('hdf5', default=False, description='support IO with HDF5 backend')
variant('ncmpi', default=False, description='support IO with NCMPI backend')
depends_on('mpi')
depends_on('hdf5+mpi', when='+hdf5')
depends_on('netcdf+mpi', when='+ncmpi')
def install(self, spec, prefix):
os.system('./bootstrap')
config_args = [
'MPICC=%s' % spec['mpi'].prefix.bin + '/mpicc',
'--prefix=%s' % prefix,
]
if '+hdf5' in spec:
config_args.append('--with-hdf5')
else:
config_args.append('--without-hdf5')
if '+ncmpi' in spec:
config_args.append('--with-ncmpi')
else:
config_args.append('--without-ncmpi')
configure(*config_args)
make()
make('install')


@ -5,6 +5,7 @@ class Jemalloc(Package):
homepage = "http://www.canonware.com/jemalloc/"
url = "https://github.com/jemalloc/jemalloc/releases/download/4.0.4/jemalloc-4.0.4.tar.bz2"
version('4.1.0', 'c4e53c947905a533d5899e5cc3da1f94')
version('4.0.4', '687c5cc53b9a7ab711ccd680351ff988')
variant('stats', default=False, description='Enable heap statistics')
@ -20,5 +21,8 @@ def install(self, spec, prefix):
configure(*configure_args)
# Don't use -Werror
filter_file(r'-Werror=\S*', '', 'Makefile')
make()
make("install")


@ -0,0 +1,68 @@
diff --git a/deps/Makefile b/deps/Makefile
index 6cb73be..bcd8520 100644
--- a/deps/Makefile
+++ b/deps/Makefile
@@ -1049,7 +1049,7 @@ OPENBLAS_BUILD_OPTS += NO_AFFINITY=1
# Build for all architectures - required for distribution
ifeq ($(OPENBLAS_DYNAMIC_ARCH), 1)
-OPENBLAS_BUILD_OPTS += DYNAMIC_ARCH=1
+OPENBLAS_BUILD_OPTS += DYNAMIC_ARCH=1 MAKE_NO_J=1
endif
# 64-bit BLAS interface
@@ -1085,6 +1085,7 @@ OPENBLAS_BUILD_OPTS += NO_AVX2=1
endif
$(OPENBLAS_SRC_DIR)/config.status: $(OPENBLAS_SRC_DIR)/Makefile
+ cd $(dir $@) && patch -p1 < ../openblas-make.patch
ifeq ($(OS),WINNT)
cd $(dir $@) && patch -p1 < ../openblas-win64.patch
endif
diff --git a/deps/openblas.version b/deps/openblas.version
index 7c97e1b..58b9467 100644
--- a/deps/openblas.version
+++ b/deps/openblas.version
@@ -1,2 +1,2 @@
-OPENBLAS_BRANCH=v0.2.15
-OPENBLAS_SHA1=53e849f4fcae4363a64576de00e982722c7304f9
+OPENBLAS_BRANCH=v0.2.17
+OPENBLAS_SHA1=a71e8c82f6a9f73093b631e5deab1e8da716b61f
--- a/deps/openblas-make.patch
+++ b/deps/openblas-make.patch
@@ -0,0 +1,35 @@
+diff --git a/Makefile.system b/Makefile.system
+index b89f60e..2dbdad0 100644
+--- a/Makefile.system
++++ b/Makefile.system
+@@ -139,6 +139,10 @@ NO_PARALLEL_MAKE=0
+ endif
+ GETARCH_FLAGS += -DNO_PARALLEL_MAKE=$(NO_PARALLEL_MAKE)
+
++ifdef MAKE_NO_J
++GETARCH_FLAGS += -DMAKE_NO_J=$(MAKE_NO_J)
++endif
++
+ ifdef MAKE_NB_JOBS
+ GETARCH_FLAGS += -DMAKE_NB_JOBS=$(MAKE_NB_JOBS)
+ endif
+diff --git a/getarch.c b/getarch.c
+index f9c49e6..dffad70 100644
+--- a/getarch.c
++++ b/getarch.c
+@@ -1012,6 +1012,7 @@ int main(int argc, char *argv[]){
+ #endif
+ #endif
+
++#ifndef MAKE_NO_J
+ #ifdef MAKE_NB_JOBS
+ printf("MAKE += -j %d\n", MAKE_NB_JOBS);
+ #elif NO_PARALLEL_MAKE==1
+@@ -1021,6 +1022,7 @@ int main(int argc, char *argv[]){
+ printf("MAKE += -j %d\n", get_num_cores());
+ #endif
+ #endif
++#endif
+
+ break;
+


@ -4,43 +4,56 @@
class Julia(Package):
"""The Julia Language: A fresh approach to technical computing"""
homepage = "http://julialang.org"
url = "http://github.com/JuliaLang/julia/releases/download/v0.4.2/julia-0.4.2.tar.gz"
url = "https://github.com/JuliaLang/julia/releases/download/v0.4.3/julia-0.4.3-full.tar.gz"
version('0.4.3', '7b9f096798fca4bef262a64674bc2b52')
version('0.4.2', 'ccfeb4f4090c8b31083f5e1ccb03eb06')
version('master',
git='https://github.com/JuliaLang/julia.git', branch='master')
version('0.4.5', '69141ff5aa6cee7c0ec8c85a34aa49a6')
version('0.4.3', '8a4a59fd335b05090dd1ebefbbe5aaac')
patch('gc.patch')
patch('openblas.patch', when='@0.4:0.4.5')
# Build-time dependencies
depends_on("cmake @2.8:")
# Build-time dependencies:
# depends_on("awk")
# depends_on("m4")
# depends_on("pkg-config")
depends_on("python @2.6:2.9")
# I think that Julia requires the dependencies above, but it builds find (on
# my system) without these. We should enable them as necessary.
# Combined build-time and run-time dependencies:
depends_on("binutils")
depends_on("cmake @2.8:")
depends_on("git")
depends_on("openssl")
depends_on("python @2.7:2.999")
# Run-time dependencies
# I think that Julia requires the dependencies above, but it
# builds fine (on my system) without these. We should enable them
# as necessary.
# Run-time dependencies:
# depends_on("arpack")
# depends_on("fftw +float")
# depends_on("gmp")
# depends_on("libgit")
# depends_on("mpfr")
# depends_on("openblas")
# depends_on("pcre2")
# ARPACK: Requires BLAS and LAPACK; needs to use the same version as Julia.
# ARPACK: Requires BLAS and LAPACK; needs to use the same version
# as Julia.
# BLAS and LAPACK: Julia prefers 64-bit versions on 64-bit systems. OpenBLAS
# has an option for this; make it available as variant.
# BLAS and LAPACK: Julia prefers 64-bit versions on 64-bit
# systems. OpenBLAS has an option for this; make it available as
# variant.
# FFTW: Something doesn't work when using a pre-installed FFTW library; need
# to investigate.
# FFTW: Something doesn't work when using a pre-installed FFTW
# library; need to investigate.
# GMP, MPFR: Something doesn't work when using a pre-installed FFTW library;
# need to investigate.
# GMP, MPFR: Something doesn't work when using a pre-installed
# FFTW library; need to investigate.
# LLVM: Julia works only with specific versions, and might require patches.
# Thus we let Julia install its own LLVM.
# LLVM: Julia works only with specific versions, and might require
# patches. Thus we let Julia install its own LLVM.
# Other possible dependencies:
# USE_SYSTEM_OPENLIBM=0
@ -50,11 +63,21 @@ class Julia(Package):
# USE_SYSTEM_UTF8PROC=0
# USE_SYSTEM_LIBGIT2=0
# Run-time dependencies for Julia packages:
depends_on("hdf5")
depends_on("mpi")
def install(self, spec, prefix):
# Explicitly setting CC, CXX, or FC breaks building libuv, one of
# Julia's dependencies. This might be a Darwin-specific problem. Given
# how Spack sets up compilers, Julia should still use Spack's compilers,
# even if we don't specify them explicitly.
if '@master' in spec:
# Julia needs to know the offset from a specific commit
git = which('git')
git('fetch', '--unshallow')
# Explicitly setting CC, CXX, or FC breaks building libuv, one
# of Julia's dependencies. This might be a Darwin-specific
# problem. Given how Spack sets up compilers, Julia should
# still use Spack's compilers, even if we don't specify them
# explicitly.
options = [#"CC=cc",
#"CXX=c++",
#"FC=fc",


@ -8,6 +8,9 @@ class Libpng(Package):
version('1.6.16', '1a4ad377919ab15b54f6cb6a3ae2622d')
version('1.6.15', '829a256f3de9307731d4f52dc071916d')
version('1.6.14', '2101b3de1d5f348925990f9aa8405660')
version('1.5.26', '3ca98347a5541a2dad55cd6d07ee60a9')
version('1.4.19', '89bcbc4fc8b31f4a403906cf4f662330')
version('1.2.56', '9508fc59d10a1ffadd9aae35116c19ee')
depends_on('zlib')


@ -0,0 +1,18 @@
from spack import *
class Libxc(Package):
"""Libxc is a library of exchange-correlation functionals for
density-functional theory."""
homepage = "http://www.tddft.org/programs/octopus/wiki/index.php/Libxc"
url = "http://www.tddft.org/programs/octopus/down.php?file=libxc/libxc-2.2.2.tar.gz"
version('2.2.2', 'd9f90a0d6e36df6c1312b6422280f2ec')
def install(self, spec, prefix):
configure('--prefix=%s' % prefix,
'--enable-shared')
make()
make("install")


@ -136,6 +136,7 @@ def install(self, spec, prefix):
source_directory = self.stage.source_path
options.append('-DGKLIB_PATH:PATH={metis_source}/GKlib'.format(metis_source=source_directory))
options.append('-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib' % prefix)
if '+shared' in spec:
options.append('-DSHARED:BOOL=ON')
@ -184,7 +185,3 @@ def install(self, spec, prefix):
fs = glob.glob(join_path(source_directory,'GKlib',"*.h"))
for f in fs:
install(f, GKlib_dist)
# The shared library is not installed correctly on Darwin; correct this
if (sys.platform == 'darwin') and ('+shared' in spec):
fix_darwin_install_name(prefix.lib)


@ -0,0 +1,125 @@
from spack import *
import glob, string
class Mfem(Package):
"""Free, lightweight, scalable C++ library for finite element methods."""
homepage = 'http://www.mfem.org'
url = 'https://github.com/mfem/mfem'
# version('3.1', git='https://github.com/mfem/mfem.git',
# commit='dbae60fe32e071989b52efaaf59d7d0eb2a3b574')
version('3.1', '841ea5cf58de6fae4de0f553b0e01ebaab9cd9c67fa821e8a715666ecf18fc57',
url='http://goo.gl/xrScXn', expand=False)
variant('metis', default=False, description='Activate support for metis')
variant('hypre', default=False, description='Activate support for hypre')
variant('suite-sparse', default=False,
description='Activate support for SuiteSparse')
variant('mpi', default=False, description='Activate support for MPI')
variant('lapack', default=False, description='Activate support for LAPACK')
variant('debug', default=False, description='Build debug version')
depends_on('blas', when='+lapack')
depends_on('lapack', when='+lapack')
depends_on('mpi', when='+mpi')
depends_on('metis', when='+mpi')
depends_on('hypre', when='+mpi')
depends_on('hypre', when='+hypre')
depends_on('metis@4:', when='+metis')
depends_on('suite-sparse', when='+suite-sparse')
depends_on('blas', when='+suite-sparse')
depends_on('lapack', when='+suite-sparse')
depends_on('metis@5:', when='+suite-sparse ^suite-sparse@4.5:')
depends_on('cmake', when='^metis@5:')
def check_variants(self, spec):
if '+mpi' in spec and ('+hypre' not in spec or '+metis' not in spec):
raise InstallError('mfem+mpi must be built with +hypre ' +
'and +metis!')
if '+suite-sparse' in spec and ('+metis' not in spec or
'+lapack' not in spec):
raise InstallError('mfem+suite-sparse must be built with ' +
'+metis and +lapack!')
if 'metis@5:' in spec and '%clang' in spec and ('^cmake %gcc' not in spec):
raise InstallError('To work around CMake bug with clang, must ' +
'build mfem with mfem[+variants] %clang ' +
'^cmake %gcc to force CMake to build with gcc')
return
def install(self, spec, prefix):
self.check_variants(spec)
options = ['PREFIX=%s' % prefix]
if '+lapack' in spec:
lapack_lib = '-L{0} -llapack -L{1} -lblas'.format(
spec['lapack'].prefix.lib, spec['blas'].prefix.lib)
options.extend(['MFEM_USE_LAPACK=YES',
'LAPACK_OPT=-I%s' % spec['lapack'].prefix.include,
'LAPACK_LIB=%s' % lapack_lib])
if '+hypre' in spec:
options.extend(['HYPRE_DIR=%s' % spec['hypre'].prefix,
'HYPRE_OPT=-I%s' % spec['hypre'].prefix.include,
'HYPRE_LIB=-L%s' % spec['hypre'].prefix.lib +
' -lHYPRE'])
if '+metis' in spec:
metis_lib = '-L%s -lmetis' % spec['metis'].prefix.lib
if spec['metis'].satisfies('@5:'):
metis_str = 'MFEM_USE_METIS_5=YES'
else:
metis_str = 'MFEM_USE_METIS_5=NO'
options.extend([metis_str,
'METIS_DIR=%s' % spec['metis'].prefix,
'METIS_OPT=-I%s' % spec['metis'].prefix.include,
'METIS_LIB=%s' % metis_lib])
if '+mpi' in spec: options.extend(['MFEM_USE_MPI=YES'])
if '+suite-sparse' in spec:
ssp = spec['suite-sparse'].prefix
ss_lib = '-L%s' % ssp.lib
ss_lib += (' -lumfpack -lcholmod -lcolamd -lamd -lcamd' +
' -lccolamd -lsuitesparseconfig')
no_librt_archs = ['darwin-i686', 'darwin-x86_64']
no_rt = any(map(lambda a: spec.satisfies('='+a), no_librt_archs))
if not no_rt: ss_lib += ' -lrt'
ss_lib += (' ' + metis_lib + ' ' + lapack_lib)
options.extend(['MFEM_USE_SUITESPARSE=YES',
'SUITESPARSE_DIR=%s' % ssp,
'SUITESPARSE_OPT=-I%s' % ssp.include,
'SUITESPARSE_LIB=%s' % ss_lib])
if '+debug' in spec: options.extend(['MFEM_DEBUG=YES'])
# Dirty hack to cope with URL redirect
tgz_file = string.split(self.url,'/')[-1]
tar = which('tar')
tar('xzvf', tgz_file)
cd(glob.glob('mfem*')[0])
# End dirty hack to cope with URL redirect
make('config', *options)
make('all')
# Run a small test before installation
args = ['-m', join_path('data','star.mesh'), '--no-visualization']
if '+mpi' in spec:
Executable(join_path(spec['mpi'].prefix.bin,
'mpirun'))('-np',
'4',
join_path('examples','ex1p'),
*args)
else:
Executable(join_path('examples', 'ex1'))(*args)
make('install')


@ -0,0 +1,154 @@
--- mrnet-3093918/include/mrnet/Types.h 2015-12-10 09:32:24.000000000 -0800
+++ mrnet_top_of_tree/include/mrnet/Types.h 2016-03-16 12:29:33.986132302 -0700
@@ -23,7 +23,7 @@
#ifndef MRNET_VERSION_MAJOR
# define MRNET_VERSION_MAJOR 5
# define MRNET_VERSION_MINOR 0
-# define MRNET_VERSION_REV 0
+# define MRNET_VERSION_REV 1
#endif
namespace MRN
--- mrnet-3093918/include/mrnet_lightweight/Types.h 2015-12-10 09:32:24.000000000 -0800
+++ mrnet_top_of_tree/include/mrnet_lightweight/Types.h 2016-03-16 12:29:33.987132302 -0700
@@ -30,7 +30,7 @@
#ifndef MRNET_VERSION_MAJOR
#define MRNET_VERSION_MAJOR 5
#define MRNET_VERSION_MINOR 0
-#define MRNET_VERSION_REV 0
+#define MRNET_VERSION_REV 1
#endif
void get_Version(int* major,
int* minor,
--- mrnet-3093918/src/lightweight/SerialGraph.c 2015-12-10 09:32:24.000000000 -0800
+++ mrnet_top_of_tree/src/lightweight/SerialGraph.c 2016-03-16 12:29:33.995132302 -0700
@@ -59,7 +59,7 @@
mrn_dbg_func_begin();
- sprintf(hoststr, "[%s:%hu:%u:", ihostname, iport, irank);
+ sprintf(hoststr, "[%s:%05hu:%u:", ihostname, iport, irank);
mrn_dbg(5, mrn_printf(FLF, stderr, "looking for SubTreeRoot: '%s'\n", hoststr));
byte_array = sg->byte_array;
@@ -110,7 +110,7 @@
mrn_dbg_func_begin();
- len = (size_t) sprintf(hoststr, "[%s:%hu:%u:0]", ihostname, iport, irank);
+ len = (size_t) sprintf(hoststr, "[%s:%05hu:%u:0]", ihostname, iport, irank);
mrn_dbg(5, mrn_printf(FLF, stderr, "adding sub tree leaf: %s\n", hoststr));
len += strlen(sg->byte_array) + 1;
@@ -139,7 +139,7 @@
mrn_dbg_func_begin();
- len = (size_t) sprintf(hoststr, "[%s:%hu:%u:1", ihostname, iport, irank);
+ len = (size_t) sprintf(hoststr, "[%s:%05hu:%u:1", ihostname, iport, irank);
mrn_dbg(5, mrn_printf(FLF, stderr, "adding sub tree root: %s\n", hoststr));
len += strlen(sg->byte_array) + 1;
@@ -360,8 +360,8 @@
char old_hoststr[256];
char new_hoststr[256];
- sprintf(old_hoststr, "[%s:%hu:%u:", hostname, UnknownPort, irank);
- sprintf(new_hoststr, "[%s:%hu:%u:", hostname, port, irank);
+ sprintf(old_hoststr, "[%s:%05hu:%u:", hostname, UnknownPort, irank);
+ sprintf(new_hoststr, "[%s:%05hu:%u:", hostname, port, irank);
old_byte_array = sg->byte_array;
new_byte_array = (char*) malloc( strlen(old_byte_array) + 10 );
--- mrnet-3093918/xplat/src/lightweight/SocketUtils.c 2015-12-10 09:32:24.000000000 -0800
+++ mrnet_top_of_tree/xplat/src/lightweight/SocketUtils.c 2016-03-16 12:29:34.006132303 -0700
@@ -15,7 +15,7 @@
#else
const XPlat_Socket InvalidSocket = INVALID_SOCKET;
#endif
-const XPlat_Port InvalidPort = (XPlat_Port)-1;
+const XPlat_Port InvalidPort = (XPlat_Port)0;
static bool_t SetTcpNoDelay( XPlat_Socket sock )
{
--- mrnet-3093918/conf/configure.in 2015-12-10 09:32:24.000000000 -0800
+++ mrnet_top_of_tree/conf/configure.in 2016-03-16 12:45:54.573196781 -0700
@@ -107,6 +107,18 @@
AC_SUBST(PURIFY)
+AC_ARG_WITH(expat,
+ [AS_HELP_STRING([--with-expat=PATH],
+ [Absolute path to installation of EXPAT libraries (note: specify the path to the directory containing "include" and "lib" sub-directories)])],
+ [EXPAT_DIR="${withval}"],
+ [EXPAT_DIR=""])
+
+if test "x$EXPAT_DIR" = "x" ; then
+ EXPAT_LIB=""
+else
+ EXPAT_LIB="-L$EXPAT_DIR/lib"
+fi
+
dnl === Checks for header files.
AC_CHECK_HEADERS([assert.h errno.h fcntl.h limits.h netdb.h signal.h stddef.h stdlib.h stdio.h string.h unistd.h arpa/inet.h netinet/in.h sys/ioctl.h sys/socket.h sys/sockio.h sys/time.h])
AC_HEADER_STDBOOL
@@ -432,7 +444,7 @@
CRAYXT_ATH_LIBS_SO="$CRAYXT_ATH_LIBS -lalps"
CRAYXT_ATH_LIBS="$CRAYXT_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc -Wl,-Bdynamic"
CRAYXE_ATH_LIBS_SO="$CRAYXE_ATH_LIBS -lalps"
- CRAYXE_ATH_LIBS="$CRAYXE_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc-epi -lexpat -Wl,-Bdynamic"
+ CRAYXE_ATH_LIBS="$CRAYXE_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc-epi $EXPAT_LIB -lexpat -Wl,-Bdynamic"
AC_CHECK_LIB( [alps], [alps_launch_tool_helper],
[HAVE_ATH_LIBS="yes"; EXTRA_LIBS="$CRAYXT_ATH_LIBS $EXTRA_LIBS"; EXTRA_LIBS_SO="$CRAYXT_ATH_LIBS_SO $EXTRA_LIBS_SO"],
--- mrnet-3093918/configure 2015-12-10 09:32:24.000000000 -0800
+++ mrnet_top_of_tree/configure 2016-03-16 13:47:20.386439143 -0700
@@ -742,6 +742,7 @@
enable_debug
enable_ltwt_threadsafe
with_purify
+with_expat
'
ac_precious_vars='build_alias
host_alias
@@ -1399,6 +1400,9 @@
containing "include" and "lib" sub-directories)
--with-launchmon=PATH Absolute path to installation of LaunchMON
--with-purify Use purify for memory debugging
+ --with-expat=PATH Absolute path to installation of EXPAT libraries
+ (note: specify the path to the directory containing
+ "include" and "lib" sub-directories)
Some influential environment variables:
CC C compiler command
@@ -3541,6 +3545,21 @@
+# Check whether --with-expat was given.
+if test "${with_expat+set}" = set; then :
+ withval=$with_expat; EXPAT_DIR="${withval}"
+else
+ EXPAT_DIR=""
+fi
+
+
+if test "x$EXPAT_DIR" = "x" ; then
+ EXPAT_LIB=""
+else
+ EXPAT_LIB="-L$EXPAT_DIR/lib"
+fi
+
+
ac_ext=cpp
ac_cpp='$CXXCPP $CPPFLAGS'
ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
@@ -5473,7 +5492,7 @@
CRAYXT_ATH_LIBS_SO="$CRAYXT_ATH_LIBS -lalps"
CRAYXT_ATH_LIBS="$CRAYXT_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc -Wl,-Bdynamic"
CRAYXE_ATH_LIBS_SO="$CRAYXE_ATH_LIBS -lalps"
- CRAYXE_ATH_LIBS="$CRAYXE_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc-epi -lexpat -Wl,-Bdynamic"
+ CRAYXE_ATH_LIBS="$CRAYXE_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc-epi $EXPAT_LIB -lexpat -Wl,-Bdynamic"
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for alps_launch_tool_helper in -lalps" >&5
$as_echo_n "checking for alps_launch_tool_helper in -lalps... " >&6; }


@ -3,11 +3,17 @@
class Mrnet(Package):
"""The MRNet Multi-Cast Reduction Network."""
homepage = "http://paradyn.org/mrnet"
url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_4.0.0.tar.gz"
url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_5.0.1.tar.gz"
list_url = "http://ftp.cs.wisc.edu/paradyn/mrnet"
version('4.0.0', 'd00301c078cba57ef68613be32ceea2f')
version('4.1.0', '5a248298b395b329e2371bf25366115c')
version('5.0.1', '17f65738cf1b9f9b95647ff85f69ecdd')
version('4.1.0', '5a248298b395b329e2371bf25366115c')
version('4.0.0', 'd00301c078cba57ef68613be32ceea2f')
# Add a patch that brings mrnet-5.0.1 up to date with the current development tree
# The development tree contains fixes needed for the krell based tools
variant('krellpatch', default=False, description="Build MRNet with krell openspeedshop based patch.")
patch('krell-5.0.1.patch', when='@5.0.1+krellpatch')
variant('lwthreads', default=False, description="Also build the MRNet LW threadsafe libraries")
parallel = False


@ -0,0 +1,20 @@
from spack import *
class Ncview(Package):
"""Simple viewer for NetCDF files."""
homepage = "http://meteora.ucsd.edu/~pierce/ncview_home_page.html"
url = "ftp://cirrus.ucsd.edu/pub/ncview/ncview-2.1.7.tar.gz"
version('2.1.7', 'debd6ca61410aac3514e53122ab2ba07')
depends_on("netcdf")
depends_on("udunits2")
# OS Dependencies
# Ubuntu: apt-get install libxaw7-dev
# CentOS 7: yum install libXaw-devel
def install(self, spec, prefix):
configure('--prefix=%s' % prefix)
make()
make("install")


@ -1,6 +1,7 @@
from spack import *
import sys
import os
import shutil
class Openblas(Package):
"""OpenBLAS: An optimized BLAS library"""
@ -12,6 +13,8 @@ class Openblas(Package):
version('0.2.15', 'b1190f3d3471685f17cfd1ec1d252ac9')
variant('shared', default=True, description="Build shared libraries as well as static libs.")
variant('openmp', default=True, description="Enable OpenMP support.")
variant('fpic', default=True, description="Build position independent code")
# virtual dependency
provides('blas')
@ -31,12 +34,19 @@ def install(self, spec, prefix):
if '+shared' in spec:
make_targets += ['shared']
else:
if '+fpic' in spec:
make_defs.extend(['CFLAGS=-fPIC', 'FFLAGS=-fPIC'])
make_defs += ['NO_SHARED=1']
# fix missing _dggsvd_ and _sggsvd_
if spec.satisfies('@0.2.16'):
make_defs += ['BUILD_LAPACK_DEPRECATED=1']
# Add support for OpenMP
# Note: Make sure your compiler supports OpenMP
if '+openmp' in spec:
make_defs += ['USE_OPENMP=1']
make_args = make_defs + make_targets
make(*make_args)
@@ -58,6 +68,10 @@ def install(self, spec, prefix):
if '+shared' in spec:
symlink('libopenblas.%s' % dso_suffix, 'liblapack.%s' % dso_suffix)
# Openblas may pass its own test but still fail to compile Lapack
# symbols. To make sure we get working Blas and Lapack, do a small test.
self.check_install(spec)
def setup_dependent_package(self, module, dspec):
# This is WIP for a prototype interface for virtual packages.
@@ -70,3 +84,60 @@ def setup_dependent_package(self, module, dspec):
if '+shared' in self.spec:
self.spec.blas_shared_lib = join_path(libdir, 'libopenblas.%s' % dso_suffix)
self.spec.lapack_shared_lib = self.spec.blas_shared_lib
def check_install(self, spec):
"Build and run a small program to test that we have Lapack symbols"
print "Checking Openblas installation..."
checkdir = "spack-check"
with working_dir(checkdir, create=True):
source = r"""
#include <cblas.h>
#include <stdio.h>
int main(void) {
int i=0;
double A[6] = {1.0, 2.0, 1.0, -3.0, 4.0, -1.0};
double B[6] = {1.0, 2.0, 1.0, -3.0, 4.0, -1.0};
double C[9] = {.5, .5, .5, .5, .5, .5, .5, .5, .5};
cblas_dgemm(CblasColMajor, CblasNoTrans, CblasTrans,
3, 3, 2, 1, A, 3, B, 3, 2, C, 3);
for (i = 0; i < 9; i++)
printf("%f\n", C[i]);
return 0;
}
"""
expected = """\
11.000000
-9.000000
5.000000
-9.000000
21.000000
-1.000000
5.000000
-1.000000
3.000000
"""
with open("check.c", 'w') as f:
f.write(source)
cc = which('cc')
# TODO: Automate these path and library settings
cc('-c', "-I%s" % join_path(spec.prefix, "include"), "check.c")
cc('-o', "check", "check.o",
"-L%s" % join_path(spec.prefix, "lib"), "-llapack", "-lblas", "-lpthread")
try:
check = Executable('./check')
output = check(return_output=True)
except:
output = ""
success = output == expected
if not success:
print "Produced output does not match expected output."
print "Expected output:"
print '-'*80
print expected
print '-'*80
print "Produced output:"
print '-'*80
print output
print '-'*80
raise RuntimeError("Openblas install check failed")
shutil.rmtree(checkdir)
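
For reference, the numbers in expected follow directly from the dgemm call: with the column-major layouts used in check.c, A and B are the same 3x2 matrix, alpha is 1 and beta is 2, so the routine computes C = A*B^T + 2*C. A quick pure-Python cross-check (illustrative only, not part of the package):

    # Recompute the expected output of the cblas_dgemm call in check.c.
    # A (and B) is 3x2; C starts as 3x3 filled with 0.5; alpha=1, beta=2.
    A = [[1.0, -3.0], [2.0, 4.0], [1.0, -1.0]]
    C = [[0.5] * 3 for _ in range(3)]
    for i in range(3):
        for j in range(3):
            C[i][j] = sum(A[i][k] * A[j][k] for k in range(2)) + 2.0 * C[i][j]
    # Print in column-major order, matching the loop in check.c.
    for j in range(3):
        for i in range(3):
            print("%f" % C[i][j])

This prints the nine values listed in expected above.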

View File

@@ -0,0 +1,13 @@
#include <cblas.h>
#include <stdio.h>
int main(void) {
int i=0;
double A[6] = {1.0, 2.0, 1.0, -3.0, 4.0, -1.0};
double B[6] = {1.0, 2.0, 1.0, -3.0, 4.0, -1.0};
double C[9] = {.5, .5, .5, .5, .5, .5, .5, .5, .5};
cblas_dgemm(CblasColMajor, CblasNoTrans, CblasTrans,
3, 3, 2, 1, A, 3, B, 3, 2, C, 3);
for (i = 0; i < 9; i++)
printf("%f\n", C[i]);
return 0;
}

View File

@@ -0,0 +1,9 @@
11.000000
-9.000000
5.000000
-9.000000
21.000000
-1.000000
5.000000
-1.000000
3.000000

View File

@@ -1,7 +1,5 @@
import os
from spack import *
import os
class Openmpi(Package):
"""Open MPI is a project combining technologies and resources from
@@ -46,7 +44,6 @@ class Openmpi(Package):
provides('mpi@:2.2', when='@1.6.5')
provides('mpi@:3.0', when='@1.7.5:')
depends_on('hwloc')
def url_for_version(self, version):
return "http://www.open-mpi.org/software/ompi/v%s/downloads/openmpi-%s.tar.bz2" % (version.up_to(2), version)
@@ -69,7 +66,6 @@ def verbs(self):
def install(self, spec, prefix):
config_args = ["--prefix=%s" % prefix,
"--with-hwloc=%s" % spec['hwloc'].prefix,
"--enable-shared",
"--enable-static"]
# Variant based arguments

View File

@@ -3,6 +3,7 @@
from spack import *
class Openssl(Package):
"""The OpenSSL Project is a collaborative effort to develop a
robust, commercial-grade, full-featured, and Open Source
@@ -30,26 +31,14 @@ def url_for_version(self, version):
# Same idea, but just to avoid issuing the same message multiple times
warnings_given_to_user = getattr(Openssl, '_warnings_given', {})
if openssl_url is None:
latest = 'http://www.openssl.org/source/openssl-{version}.tar.gz'
older = 'http://www.openssl.org/source/old/{version_number}/openssl-{version_full}.tar.gz'
# Try to use the url where the latest tarballs are stored. If the url does not exist (404), then
# return the url for older format
version_number = '.'.join([str(x) for x in version[:-1]])
older_url = older.format(version_number=version_number, version_full=version)
latest_url = latest.format(version=version)
response = urllib.urlopen(latest.format(version=version))
if response.getcode() == 404:
openssl_url = older_url
# Checks if we already warned the user for this particular version of OpenSSL.
# If not we display a warning message and mark this version
if self.spec.satisfies('@external'):
# The version @external is reserved to system openssl. In that case return a fake url and exit
openssl_url = '@external (reserved version for system openssl)'
if not warnings_given_to_user.get(version, False):
tty.warn('This installation depends on an old version of OpenSSL, which may have known security issues. ')
tty.warn('Consider updating to the latest version of this package.')
tty.warn('More details at {homepage}'.format(homepage=Openssl.homepage))
tty.msg('Using openssl@external : the version @external is reserved for system openssl')
warnings_given_to_user[version] = True
else:
openssl_url = latest_url
# Store the computed URL
openssl_url = self.check_for_outdated_release(version, warnings_given_to_user) # Store the computed URL
openssl_urls[version] = openssl_url
# Store the updated dictionary of URLS
Openssl._openssl_url = openssl_urls
@@ -58,6 +47,28 @@ def url_for_version(self, version):
return openssl_url
def check_for_outdated_release(self, version, warnings_given_to_user):
latest = 'ftp://ftp.openssl.org/source/openssl-{version}.tar.gz'
older = 'http://www.openssl.org/source/old/{version_number}/openssl-{version_full}.tar.gz'
# Try to use the url where the latest tarballs are stored. If the url does not exist (404), then
# return the url for older format
version_number = '.'.join([str(x) for x in version[:-1]])
try:
openssl_url = latest.format(version=version)
urllib.urlopen(openssl_url)
except IOError:
openssl_url = older.format(version_number=version_number, version_full=version)
# Checks if we already warned the user for this particular version of OpenSSL.
# If not we display a warning message and mark this version
if not warnings_given_to_user.get(version, False):
tty.warn(
'This installation depends on an old version of OpenSSL, which may have known security issues. ')
tty.warn('Consider updating to the latest version of this package.')
tty.warn('More details at {homepage}'.format(homepage=Openssl.homepage))
warnings_given_to_user[version] = True
return openssl_url
def install(self, spec, prefix):
# OpenSSL uses a variable APPS in its Makefile. If it happens to be set
# in the environment, then this will override what is set in the

View File

@@ -0,0 +1,38 @@
from spack import *
class OsuMicroBenchmarks(Package):
"""The Ohio MicroBenchmark suite is a collection of independent MPI
message passing performance microbenchmarks developed and written at
The Ohio State University. It includes traditional benchmarks and
performance measures such as latency, bandwidth and host overhead
and can be used for both traditional and GPU-enhanced nodes."""
homepage = "http://mvapich.cse.ohio-state.edu/benchmarks/"
url = "http://mvapich.cse.ohio-state.edu/download/mvapich/osu-micro-benchmarks-5.3.tar.gz"
version('5.3', '42e22b931d451e8bec31a7424e4adfc2')
variant('cuda', default=False, description="Enable CUDA support")
depends_on('mpi')
depends_on('cuda', when='+cuda')
def install(self, spec, prefix):
config_args = [
'CC=%s' % spec['mpi'].prefix.bin + '/mpicc',
'CXX=%s' % spec['mpi'].prefix.bin + '/mpicxx',
'LDFLAGS=-lrt',
'--prefix=%s' % prefix
]
if '+cuda' in spec:
config_args.extend([
'--enable-cuda',
'--with-cuda=%s' % spec['cuda'].prefix,
])
configure(*config_args)
make()
make('install')

View File

@@ -7,10 +7,15 @@ class P4est(Package):
version('1.1', '37ba7f4410958cfb38a2140339dbf64f')
# disable by default to make it work on frontend of clusters
variant('tests', default=False, description='Run small tests')
# build dependencies
depends_on('automake')
depends_on('autoconf')
depends_on('libtool@2.4.2:')
# other dependencies
depends_on('lua') # Needed for the submodule sc
depends_on('mpi')
depends_on('zlib')
def install(self, spec, prefix):
options = ['--enable-mpi',
@@ -28,7 +33,5 @@ def install(self, spec, prefix):
configure('--prefix=%s' % prefix, *options)
make()
if '+tests' in self.spec:
make("check")
make("check")
make("install")

View File

@@ -1,5 +1,8 @@
from spack import *
import glob
import os
import sys
from llnl.util.filesystem import fix_darwin_install_name
class Papi(Package):
"""PAPI provides the tool designer and application engineer with a
@@ -18,17 +21,27 @@ class Papi(Package):
version('5.3.0', '367961dd0ab426e5ae367c2713924ffb')
def install(self, spec, prefix):
os.chdir("src/")
with working_dir("src"):
configure_args=["--prefix=%s" % prefix]
configure_args=["--prefix=%s" % prefix]
# PAPI uses MPI if MPI is present; since we don't require an
# MPI package, we ensure that all attempts to use MPI fail, so
# that PAPI does not get confused
configure_args.append('MPICC=:')
# PAPI uses MPI if MPI is present; since we don't require
# an MPI package, we ensure that all attempts to use MPI
# fail, so that PAPI does not get confused
configure_args.append('MPICC=:')
configure(*configure_args)
configure(*configure_args)
make()
make("install")
# Don't use <malloc.h>
for level in [".", "*", "*/*"]:
files = glob.iglob(join_path(level, "*.[ch]"))
filter_file(r"\<malloc\.h\>", "<stdlib.h>", *files)
make()
make("install")
# The shared library is not installed correctly on Darwin
if sys.platform == 'darwin':
os.rename(join_path(prefix.lib, 'libpapi.so'),
join_path(prefix.lib, 'libpapi.dylib'))
fix_darwin_install_name(prefix.lib)
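
A note on fix_darwin_install_name: it is imported above from llnl.util.filesystem and, roughly speaking, rewrites the install name stored in each dylib so that dependents resolve the library at its installed path. A simplified, hypothetical sketch of the same idea using install_name_tool directly (not the actual helper):

    # Illustrative approximation of the Darwin install-name fix-up.
    import glob
    import os
    import subprocess

    def set_install_names(libdir):
        for lib in glob.glob(os.path.join(libdir, '*.dylib')):
            # Stamp each dylib with its absolute installed path as its id.
            subprocess.check_call(['install_name_tool', '-id',
                                   os.path.abspath(lib), lib])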

View File

@@ -0,0 +1,14 @@
diff --git a/eo/src/CMakeLists.txt b/eo/src/CMakeLists.txt
index b2b445a..d45ddc7 100644
--- a/eo/src/CMakeLists.txt
+++ b/eo/src/CMakeLists.txt
@@ -47,7 +47,7 @@ install(DIRECTORY do es ga gp other utils
add_subdirectory(es)
add_subdirectory(ga)
add_subdirectory(utils)
-#add_subdirectory(serial)
+add_subdirectory(serial) # Required when including <paradiseo/eo/utils/eoTimer.h>, which is needed by <paradiseo/eo/mpi/eoMpi.h>
if(ENABLE_PYEO)
add_subdirectory(pyeo)

View File

@@ -0,0 +1,13 @@
diff --git a/cmake/Config.cmake b/cmake/Config.cmake
index 02593ba..d198ca9 100644
--- a/cmake/Config.cmake
+++ b/cmake/Config.cmake
@@ -6,7 +6,7 @@ if(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
# detect OS X version. (use '/usr/bin/sw_vers -productVersion' to extract V from '10.V.x'.)
execute_process (COMMAND /usr/bin/sw_vers -productVersion OUTPUT_VARIABLE MACOSX_VERSION_RAW)
- string(REGEX REPLACE "10\\.([0-9]).*" "\\1" MACOSX_VERSION "${MACOSX_VERSION_RAW}")
+ string(REGEX REPLACE "10\\.([0-9]+).*" "\\1" MACOSX_VERSION "${MACOSX_VERSION_RAW}")
if(${MACOSX_VERSION} LESS 5)
message(FATAL_ERROR "Unsupported version of OS X : ${MACOSX_VERSION_RAW}")
return()

View File

@@ -0,0 +1,13 @@
diff --git a/moeo/test/t-moeo2DMinHypervolumeArchive.cpp b/moeo/test/t-moeo2DMinHypervolumeArchive.cpp
index 994a9a4..c4ba77b 100644
--- a/moeo/test/t-moeo2DMinHypervolumeArchive.cpp
+++ b/moeo/test/t-moeo2DMinHypervolumeArchive.cpp
@@ -41,7 +41,7 @@
#include <moeo>
#include <cassert>
-#include<archive/moeo2DMinHyperVolumeArchive.h>
+#include<archive/moeo2DMinHypervolumeArchive.h>
//-----------------------------------------------------------------------------

View File

@@ -0,0 +1,13 @@
diff --git a/eo/tutorial/Lesson3/exercise3.1.cpp b/eo/tutorial/Lesson3/exercise3.1.cpp
index dc37479..d178941 100644
--- a/eo/tutorial/Lesson3/exercise3.1.cpp
+++ b/eo/tutorial/Lesson3/exercise3.1.cpp
@@ -289,7 +289,7 @@ void main_function(int argc, char **argv)
checkpoint.add(fdcStat);
// The Stdout monitor will print parameters to the screen ...
- eoStdoutMonitor monitor(false);
+ eoStdoutMonitor monitor;
// when called by the checkpoint (i.e. at every generation)
checkpoint.add(monitor);

View File

@@ -0,0 +1,59 @@
from spack import *
import sys
class Paradiseo(Package):
"""A C++ white-box object-oriented framework dedicated to the reusable design of metaheuristics."""
homepage = "http://paradiseo.gforge.inria.fr/"
# Installing from the development version is a better option at this
# point than using the very old supplied packages
version('head', git='https://gforge.inria.fr/git/paradiseo/paradiseo.git')
# This is a version that the package formula author has tested successfully.
# However, the clone is very large (~1Gb git history). The history in the
# head version has been trimmed significantly.
version('dev-safe', git='https://gforge.inria.fr/git/paradiseo/paradiseo.git',
commit='dbb8fbe9a786efd4d1c26408ac1883442e7643a6')
variant('mpi', default=True, description='Compile with parallel and distributed metaheuristics module')
variant('smp', default=True, description='Compile with symmetric multi-processing module ')
variant('edo', default=True, description='Compile with (Experimental) EDO module')
#variant('tests', default=False, description='Compile with build tests')
#variant('doc', default=False, description='Compile with documentation')
variant('debug', default=False, description='Builds a debug version of the libraries')
# Required dependencies
depends_on ("cmake")
depends_on ("eigen")
# Optional dependencies
depends_on ("mpi", when="+mpi")
depends_on ("doxygen", when='+doc')
# Patches
patch('enable_eoserial.patch')
patch('fix_osx_detection.patch')
patch('fix_tests.patch')
patch('fix_tutorials.patch')
def install(self, spec, prefix):
options = []
options.extend(std_cmake_args)
options.extend([
'-DCMAKE_BUILD_TYPE:STRING=%s' % ('Debug' if '+debug' in spec else 'Release'),
'-DINSTALL_TYPE:STRING=MIN',
'-DMPI:BOOL=%s' % ('TRUE' if '+mpi' in spec else 'FALSE'),
'-DSMP:BOOL=%s' % ('TRUE' if '+smp' in spec else 'FALSE'), # Note: This requires a C++11 compatible compiler
'-DEDO:BOOL=%s' % ('TRUE' if '+edo' in spec else 'FALSE'),
'-DENABLE_CMAKE_TESTING:BOOL=%s' % ('TRUE' if '+tests' in spec else 'FALSE')
])
with working_dir('spack-build', create=True):
# Configure
cmake('..', *options)
# Build, test and install
make("VERBOSE=1")
if '+tests' in spec:
make("test")
make("install")

View File

@@ -8,6 +8,7 @@ class ParallelNetcdf(Package):
homepage = "https://trac.mcs.anl.gov/projects/parallel-netcdf"
url = "http://cucis.ece.northwestern.edu/projects/PnetCDF/Release/parallel-netcdf-1.6.1.tar.gz"
version('1.7.0', '267eab7b6f9dc78c4d0e6def2def3aea4bc7c9f0')
version('1.6.1', '62a094eb952f9d1e15f07d56e535052604f1ac34')
depends_on("m4")

View File

@@ -27,13 +27,14 @@ class Paraview(Package):
depends_on('bzip2')
depends_on('freetype')
depends_on('hdf5+mpi', when='+mpi')
depends_on('hdf5~mpi', when='~mpi')
#depends_on('hdf5+mpi', when='+mpi')
#depends_on('hdf5~mpi', when='~mpi')
depends_on('jpeg')
depends_on('libpng')
depends_on('libtiff')
depends_on('libxml2')
depends_on('netcdf')
#depends_on('netcdf')
#depends_on('netcdf-cxx')
#depends_on('protobuf') # version mismatches?
#depends_on('sqlite') # external version not supported
depends_on('zlib')
@@ -75,13 +76,13 @@ def nfeature_to_bool(feature):
cmake('..',
'-DCMAKE_INSTALL_PREFIX:PATH=%s' % prefix,
'-DBUILD_TESTING:BOOL=OFF',
'-DVTK_USER_SYSTEM_FREETYPE:BOOL=ON',
'-DVTK_USER_SYSTEM_HDF5:BOOL=ON',
'-DVTK_USER_SYSTEM_JPEG:BOOL=ON',
'-DVTK_USER_SYSTEM_LIBXML2:BOOL=ON',
'-DVTK_USER_SYSTEM_NETCDF:BOOL=ON',
'-DVTK_USER_SYSTEM_TIFF:BOOL=ON',
'-DVTK_USER_SYSTEM_ZLIB:BOOL=ON',
'-DVTK_USE_SYSTEM_FREETYPE:BOOL=ON',
'-DVTK_USE_SYSTEM_HDF5:BOOL=OFF',
'-DVTK_USE_SYSTEM_JPEG:BOOL=ON',
'-DVTK_USE_SYSTEM_LIBXML2:BOOL=ON',
'-DVTK_USE_SYSTEM_NETCDF:BOOL=OFF',
'-DVTK_USE_SYSTEM_TIFF:BOOL=ON',
'-DVTK_USE_SYSTEM_ZLIB:BOOL=ON',
*feature_args)
make()
make('install')

View File

@@ -10,7 +10,12 @@ class PkgConfig(Package):
parallel = False
def install(self, spec, prefix):
configure("--prefix=%s" %prefix, "--enable-shared")
configure("--prefix=%s" %prefix,
"--enable-shared",
"--with-internal-glib") # There's a bootstrapping problem here;
# glib uses pkg-config as well, so
# break the cycle by using the internal
# glib.
make()
make("install")

View File

@@ -0,0 +1,14 @@
from spack import *
class PySqlalchemy(Package):
"""The Python SQL Toolkit and Object Relational Mapper"""
homepage = 'http://www.sqlalchemy.org/'
url = "https://pypi.python.org/packages/source/S/SQLAlchemy/SQLAlchemy-1.0.12.tar.gz"
version('1.0.12', '6d19ef29883bbebdcac6613cf391cac4')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@@ -7,7 +7,7 @@ class PyBottleneck(Package):
version('1.0.0', '380fa6f275bd24f27e7cf0e0d752f5d2')
extends('python', ignore=r'bin/f2py$')
extends('python')
depends_on('py-numpy')
def install(self, spec, prefix):

View File

@@ -0,0 +1,22 @@
from spack import *
class PyCsvkit(Package):
"""A library of utilities for working with CSV, the king of tabular file
formats"""
homepage = 'http://csvkit.rtfd.org/'
url = "https://pypi.python.org/packages/source/c/csvkit/csvkit-0.9.1.tar.gz"
version('0.9.1', '48d78920019d18846933ee969502fff6')
extends('python')
depends_on('py-dateutil')
depends_on('py-dbf')
depends_on('py-xlrd')
depends_on('py-SQLAlchemy')
depends_on('py-six')
depends_on('py-openpyxl')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@@ -0,0 +1,15 @@
from spack import *
class PyDbf(Package):
"""Pure python package for reading/writing dBase, FoxPro, and Visual FoxPro
.dbf files (including memos)"""
homepage = 'https://pypi.python.org/pypi/dbf'
url = "https://pypi.python.org/packages/source/d/dbf/dbf-0.96.005.tar.gz"
version('0.96.005', 'bce1a1ed8b454a30606e7e18dd2f8277')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@@ -0,0 +1,14 @@
from spack import *
class PyJdcal(Package):
"""Julian dates from proleptic Gregorian and Julian calendars"""
homepage = 'http://github.com/phn/jdcal'
url = "https://pypi.python.org/packages/source/j/jdcal/jdcal-1.2.tar.gz"
version('1.2', 'ab8d5ba300fd1eb01514f363d19b1eb9')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@@ -12,7 +12,7 @@ class PyMatplotlib(Package):
variant('gui', default=False, description='Enable GUI')
variant('ipython', default=False, description='Enable ipython support')
extends('python', ignore=r'bin/nosetests.*$|bin/pbr$|bin/f2py$')
extends('python', ignore=r'bin/nosetests.*$|bin/pbr$')
depends_on('py-pyside', when='+gui')
depends_on('py-ipython', when='+ipython')
@@ -26,6 +26,7 @@ class PyMatplotlib(Package):
depends_on('py-pbr')
depends_on('py-funcsigs')
depends_on('pkg-config')
depends_on('freetype')
depends_on('qt', when='+gui')
depends_on('bzip2')

View File

@@ -9,7 +9,7 @@ class PyNumexpr(Package):
version('2.4.6', '17ac6fafc9ea1ce3eb970b9abccb4fbd')
version('2.5', '84f66cced45ba3e30dcf77a937763aaa')
extends('python', ignore=r'bin/f2py$')
extends('python')
depends_on('py-numpy')
def install(self, spec, prefix):

View File

@@ -0,0 +1,17 @@
from spack import *
class PyOpenpyxl(Package):
"""A Python library to read/write Excel 2007 xlsx/xlsm files"""
homepage = 'http://openpyxl.readthedocs.org/'
url = "https://pypi.python.org/packages/source/o/openpyxl/openpyxl-2.4.0-a1.tar.gz"
version('2.4.0-a1', 'e5ca6d23ceccb15115d45cdf26e736fc')
extends('python')
depends_on('py-jdcal')
depends_on('py-setuptools')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@@ -10,7 +10,7 @@ class PyPandas(Package):
version('0.16.1', 'fac4f25748f9610a3e00e765474bdea8')
version('0.18.0', 'f143762cd7a59815e348adf4308d2cf6')
extends('python', ignore=r'bin/f2py$')
extends('python')
depends_on('py-dateutil')
depends_on('py-numpy')
depends_on('py-setuptools')

View File

@@ -7,7 +7,7 @@ class PyScikitImage(Package):
version('0.12.3', '04ea833383e0b6ad5f65da21292c25e1')
extends('python', ignore=r'bin/.*\.py$|bin/f2py$')
extends('python', ignore=r'bin/.*\.py$')
depends_on('py-dask')
depends_on('py-pillow')

View File

@@ -5,11 +5,13 @@ class PySetuptools(Package):
homepage = "https://pypi.python.org/pypi/setuptools"
url = "https://pypi.python.org/packages/source/s/setuptools/setuptools-11.3.tar.gz"
version('11.3.1', '01f69212e019a2420c1693fb43593930')
version('16.0', '0ace0b96233516fc5f7c857d086aa3ad')
version('18.1', 'f72e87f34fbf07f299f6cb46256a0b06')
version('19.2', '78353b1f80375ca5e088f4b4627ffe03')
version('20.7.0', '5d12b39bf3e75e80fdce54e44b255615')
version('20.6.7', '45d6110f3ec14924e44c33411db64fe6')
version('20.5', 'fadc1e1123ddbe31006e5e43e927362b')
version('19.2', '78353b1f80375ca5e088f4b4627ffe03')
version('18.1', 'f72e87f34fbf07f299f6cb46256a0b06')
version('16.0', '0ace0b96233516fc5f7c857d086aa3ad')
version('11.3.1', '01f69212e019a2420c1693fb43593930')
extends('python')

View File

@@ -0,0 +1,15 @@
from spack import *
class PyXlrd(Package):
"""Library for developers to extract data from Microsoft Excel (tm)
spreadsheet files"""
homepage = 'http://www.python-excel.org/'
url = "https://pypi.python.org/packages/source/x/xlrd/xlrd-0.9.4.tar.gz"
version('0.9.4', '911839f534d29fe04525ef8cd88fe865')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@@ -151,6 +151,8 @@ def python_ignore(self, ext_pkg, args):
patterns.append(r'setuptools\.pth')
patterns.append(r'bin/easy_install[^/]*$')
patterns.append(r'setuptools.*egg$')
if ext_pkg.name != 'py-numpy':
patterns.append(r'bin/f2py$')
return match_predicate(ignore_arg, patterns)
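
This central rule is why the per-package ignore=r'bin/f2py$' arguments are being dropped from the py-* packages elsewhere in this commit: the f2py script is now skipped for every python extension except py-numpy, which provides it. A tiny standalone illustration of the intended behaviour (hypothetical helper name, not Spack's match_predicate):

    import re

    def ignores_f2py(ext_pkg_name, path):
        # Every extension other than py-numpy should skip the f2py script.
        return ext_pkg_name != 'py-numpy' and re.search(r'bin/f2py$', path) is not None

    assert ignores_f2py('py-pandas', 'bin/f2py')
    assert not ignores_f2py('py-numpy', 'bin/f2py')
    assert not ignores_f2py('py-pandas', 'bin/nosetests')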

View File

@@ -29,7 +29,8 @@ class Qt(Package):
depends_on("zlib")
depends_on("dbus", when='@4:')
depends_on("libtiff")
depends_on("libpng")
depends_on("libpng@1.2.56", when='@3')
depends_on("libpng", when='@4:')
depends_on("libmng")
depends_on("jpeg")
@@ -120,6 +121,8 @@ def common_config_args(self):
@when('@3')
def configure(self):
# A user reported that this was necessary to link Qt3 on Ubuntu
os.environ['LD_LIBRARY_PATH'] = os.getcwd()+'/lib'
configure('-prefix', self.prefix,
'-v',
'-thread',

View File

@@ -22,6 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Swig(Package):
@@ -33,14 +34,19 @@ class Swig(Package):
code. In addition, SWIG provides a variety of customization
features that let you tailor the wrapping process to suit your
application."""
homepage = "http://www.swig.org"
url = "http://prdownloads.sourceforge.net/swig/swig-3.0.2.tar.gz"
homepage = "http://www.swig.org"
url = "http://prdownloads.sourceforge.net/swig/swig-3.0.8.tar.gz"
version('3.0.8', 'c96a1d5ecb13d38604d7e92148c73c97')
version('3.0.2', '62f9b0d010cef36a13a010dc530d0d41')
version('2.0.12', 'c3fb0b2d710cc82ed0154b91e43085a4')
version('2.0.2', 'eaf619a4169886923e5f828349504a29')
version('1.3.40', '2df766c9e03e02811b1ab4bba1c7b9cc')
depends_on('pcre')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
configure('--prefix=%s' % prefix)
make()
make("install")
make('install')

View File

@@ -0,0 +1,124 @@
from spack import *
import os
import subprocess
class Turbomole(Package):
"""TURBOMOLE: Program Package for ab initio Electronic Structure
Calculations. NB: Requires a license to download."""
# NOTE: Turbomole requires purchase of a license to download. Go to the
# NOTE: Turbomole home page, http://www.turbomole-gmbh.com, for details.
# NOTE: Spack will search the current directory for this file. It is
# NOTE: probably best to add this file to a Spack mirror so that it can be
# NOTE: found from anywhere. For information on setting up a Spack mirror
# NOTE: see http://software.llnl.gov/spack/mirrors.html
homepage = "http://www.turbomole-gmbh.com/"
version('7.0.2', '92b97e1e52e8dcf02a4d9ac0147c09d6',
url="file://%s/turbolinux702.tar.gz" % os.getcwd())
variant('mpi', default=False, description='Set up MPI environment')
variant('smp', default=False, description='Set up SMP environment')
# Turbomole's install is odd. There are three variants
# - serial
# - parallel, MPI
# - parallel, SMP
#
# Only one of these can be active at a time. MPI and SMP are set as
# variants so there could be up to 3 installs per version. Switching
# between them would be accomplished with `module swap` commands.
def do_fetch(self, mirror_only=True):
if '+mpi' in self.spec and '+smp' in self.spec:
raise InstallError('Can not have both SMP and MPI enabled in the same build.')
super(Turbomole, self).do_fetch(mirror_only)
def get_tm_arch(self):
# For python-2.7 we could use `tm_arch = subprocess.check_output()`
# Use the following for compatibility with python 2.6
if 'TURBOMOLE' in os.getcwd():
tm_arch = subprocess.Popen(['sh', 'scripts/sysname'],
stdout=subprocess.PIPE).communicate()[0]
return tm_arch.rstrip('\n')
else:
return
def install(self, spec, prefix):
if spec.satisfies('@:7.0.2'):
calculate_version = 'calculate_2.4_linux64'
molecontrol_version = 'MoleControl_2.5'
tm_arch=self.get_tm_arch()
tar = which('tar')
dst = join_path(prefix, 'TURBOMOLE')
tar('-x', '-z', '-f', 'thermocalc.tar.gz')
with working_dir('thermocalc'):
cmd = 'sh install <<<y'
subprocess.call(cmd, shell=True)
install_tree('basen', join_path(dst, 'basen'))
install_tree('cabasen', join_path(dst, 'cabasen'))
install_tree(calculate_version, join_path(dst, calculate_version))
install_tree('cbasen', join_path(dst, 'cbasen'))
install_tree('DOC', join_path(dst, 'DOC'))
install_tree('jbasen', join_path(dst, 'jbasen'))
install_tree('jkbasen', join_path(dst, 'jkbasen'))
install_tree(molecontrol_version, join_path(dst, molecontrol_version))
install_tree('parameter', join_path(dst, 'parameter'))
install_tree('perlmodules', join_path(dst, 'perlmodules'))
install_tree('scripts', join_path(dst, 'scripts'))
install_tree('smprun_scripts', join_path(dst, 'smprun_scripts'))
install_tree('structures', join_path(dst, 'structures'))
install_tree('thermocalc', join_path(dst, 'thermocalc'))
install_tree('TURBOTEST', join_path(dst, 'TURBOTEST'))
install_tree('xbasen', join_path(dst, 'xbasen'))
install('Config_turbo_env', dst)
install('Config_turbo_env.tcsh', dst)
install('README', dst)
install('README_LICENSES', dst)
install('TURBOMOLE_702_LinuxPC', dst)
if '+mpi' in spec:
install_tree('bin/%s_mpi' % tm_arch, join_path(dst, 'bin', '%s_mpi' % tm_arch))
install_tree('libso/%s_mpi' % tm_arch, join_path(dst, 'libso', '%s_mpi' % tm_arch))
install_tree('mpirun_scripts/%s_mpi' % tm_arch, join_path(dst, 'mpirun_scripts', '%s_mpi' % tm_arch))
elif '+smp' in spec:
install_tree('bin/%s_smp' % tm_arch, join_path(dst, 'bin', '%s_smp' % tm_arch))
install_tree('libso/%s_smp' % tm_arch, join_path(dst, 'libso', '%s_smp' % tm_arch))
install_tree('mpirun_scripts/%s_smp' % tm_arch, join_path(dst, 'mpirun_scripts', '%s_smp' % tm_arch))
else:
install_tree('bin/%s' % tm_arch, join_path(dst, 'bin', tm_arch))
if '+mpi' in spec or '+smp' in spec:
install('mpirun_scripts/ccsdf12', join_path(dst, 'mpirun_scripts'))
install('mpirun_scripts/dscf', join_path(dst, 'mpirun_scripts'))
install('mpirun_scripts/grad', join_path(dst, 'mpirun_scripts'))
install('mpirun_scripts/mpgrad', join_path(dst, 'mpirun_scripts'))
install('mpirun_scripts/pnoccsd', join_path(dst, 'mpirun_scripts'))
install('mpirun_scripts/rdgrad', join_path(dst, 'mpirun_scripts'))
install('mpirun_scripts/ricc2', join_path(dst, 'mpirun_scripts'))
install('mpirun_scripts/ridft', join_path(dst, 'mpirun_scripts'))
def setup_environment(self, spack_env, run_env):
if self.spec.satisfies('@:7.0.2'):
molecontrol_version = 'MoleControl_2.5'
tm_arch=self.get_tm_arch()
run_env.set('TURBODIR', join_path(self.prefix, 'TURBOMOLE'))
run_env.set('MOLE_CONTROL', join_path(self.prefix, 'TURBOMOLE', molecontrol_version))
run_env.prepend_path('PATH', join_path(self.prefix, 'TURBOMOLE', 'thermocalc'))
run_env.prepend_path('PATH', join_path(self.prefix, 'TURBOMOLE', 'scripts'))
if '+mpi' in self.spec:
run_env.set('PARA_ARCH', 'MPI')
run_env.prepend_path('PATH', join_path(self.prefix, 'TURBOMOLE', 'bin', '%s_mpi' % tm_arch))
elif '+smp' in self.spec:
run_env.set('PARA_ARCH', 'SMP')
run_env.prepend_path('PATH', join_path(self.prefix, 'TURBOMOLE', 'bin', '%s_smp' % tm_arch))
else:
run_env.prepend_path('PATH', join_path(self.prefix, 'TURBOMOLE', 'bin', tm_arch))
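
As the comment in get_tm_arch notes, the Popen call is only there for Python 2.6 compatibility; on Python 2.7 the same lookup could use subprocess.check_output. A minimal sketch of that alternative, assuming the same scripts/sysname helper is present in the unpacked source:

    import subprocess

    # Python 2.7+ only: check_output returns the command's stdout directly.
    tm_arch = subprocess.check_output(['sh', 'scripts/sysname']).rstrip('\n')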

View File

@@ -17,6 +17,8 @@ class Wget(Package):
def install(self, spec, prefix):
configure("--prefix=%s" % prefix,
"--with-ssl=openssl")
"--with-ssl=openssl",
"OPENSSL_CFLAGS=-I%s" % spec['openssl'].prefix.include,
"OPENSSL_LIBS=-L%s -lssl -lcrypto -lz" % spec['openssl'].prefix.lib)
make()
make("install")

View File

@@ -24,8 +24,8 @@ class XercesC(Package):
"""
homepage = "https://xerces.apache.org/xerces-c"
url = "https://www.apache.org/dist/xerces/c/3/sources/xerces-c-3.1.2.tar.gz"
version('3.1.2', '9eb1048939e88d6a7232c67569b23985')
url = "https://www.apache.org/dist/xerces/c/3/sources/xerces-c-3.1.3.tar.bz2"
version('3.1.3', '5e333b55cb43e6b025ddf0e5d0f0fb0d')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix,

View File

@@ -1,3 +1,4 @@
import re, os, glob
from spack import *
class Zoltan(Package):
@@ -12,8 +13,13 @@ class Zoltan(Package):
base_url = "http://www.cs.sandia.gov/~kddevin/Zoltan_Distributions"
version('3.83', '1ff1bc93f91e12f2c533ddb01f2c095f')
version('3.8', '9d8fba8a990896881b85351d4327c4a9')
version('3.6', '9cce794f7241ecd8dbea36c3d7a880f9')
version('3.3', '5eb8f00bda634b25ceefa0122bd18d65')
variant('debug', default=False, description='Builds a debug version of the library')
variant('shared', default=True, description='Builds a shared version of the library')
variant('fortran', default=True, description='Enable Fortran support')
variant('mpi', default=False, description='Enable MPI support')
@@ -24,28 +30,49 @@ def install(self, spec, prefix):
'--enable-f90interface' if '+fortran' in spec else '--disable-f90interface',
'--enable-mpi' if '+mpi' in spec else '--disable-mpi',
]
config_cflags = [
'-O0' if '+debug' in spec else '-O3',
'-g' if '+debug' in spec else '-g0',
]
if '+shared' in spec:
config_args.append('--with-ar=$(CXX) -shared $(LDFLAGS) -o')
config_args.append('RANLIB=echo')
config_cflags.append('-fPIC')
if '+mpi' in spec:
config_args.append('--with-mpi=%s' % spec['mpi'].prefix)
config_args.append('--with-mpi-compilers=%s' % spec['mpi'].prefix.bin)
config_args.append('CC=%s/mpicc' % spec['mpi'].prefix.bin)
config_args.append('CXX=%s/mpicxx' % spec['mpi'].prefix.bin)
config_args.append('--with-mpi=%s' % spec['mpi'].prefix)
config_args.append('--with-mpi-compilers=%s' % spec['mpi'].prefix.bin)
# NOTE: Early versions of Zoltan come packaged with a few embedded
# library packages (e.g. ParMETIS, Scotch), which messes with Spack's
# ability to descend directly into the package's source directory.
if spec.satisfies('@:3.3'):
if spec.satisfies('@:3.6'):
cd('Zoltan_v%s' % self.version)
mkdirp('build')
cd('build')
config_zoltan = Executable('../configure')
config_zoltan('--prefix=%s' % pwd(), *config_args)
config_zoltan(
'--prefix=%s' % pwd(),
'--with-cflags=%s' % ' '.join(config_cflags),
'--with-cxxflags=%s' % ' '.join(config_cflags),
*config_args)
make()
make('install')
# NOTE: Unfortunately, Zoltan doesn't provide any configuration options for
# the extension of the output library files, so this script must change these
# extensions as a post-processing step.
if '+shared' in spec:
for libpath in glob.glob('lib/*.a'):
libdir, libname = (os.path.dirname(libpath), os.path.basename(libpath))
move(libpath, os.path.join(libdir, re.sub(r'\.a$', '.so', libname)))
mkdirp(prefix)
move('include', prefix)
move('lib', prefix)
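
The post-processing loop above is just a regex-driven extension swap; for a single (hypothetical) archive it amounts to:

    import os
    import re

    libpath = 'lib/libzoltan.a'   # hypothetical file name produced by the build
    libdir, libname = os.path.split(libpath)
    print(os.path.join(libdir, re.sub(r'\.a$', '.so', libname)))   # -> lib/libzoltan.so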