Merge branch 'features/gcc' into develop

Conflicts:
	lib/spack/spack/package.py
This commit is contained in:
Todd Gamblin 2014-11-08 22:30:46 -08:00
commit 79414947ae
22 changed files with 737 additions and 206 deletions

View File

@ -113,4 +113,5 @@ except SpackError, e:
tty.die(e.message) tty.die(e.message)
except KeyboardInterrupt: except KeyboardInterrupt:
sys.stderr.write('\n')
tty.die("Keyboard interrupt.") tty.die("Keyboard interrupt.")

10
lib/spack/env/cc vendored
View File

@ -275,9 +275,15 @@ for dir in "${libraries[@]}"; do args+=("-L$dir"); done
for lib in "${libs[@]}"; do args+=("-l$lib"); done for lib in "${libs[@]}"; do args+=("-l$lib"); done
if [ "$mode" = ccld ]; then if [ "$mode" = ccld ]; then
for dir in "${rpaths[@]}"; do args+=("-Wl,-rpath=$dir"); done for dir in "${rpaths[@]}"; do
args+=("-Wl,-rpath")
args+=("-Wl,$dir");
done
elif [ "$mode" = ld ]; then elif [ "$mode" = ld ]; then
for dir in "${rpaths[@]}"; do args+=("-rpath=$dir"); done for dir in "${rpaths[@]}"; do
args+=("-rpath")
args+=("$dir");
done
fi fi
# #

View File

@ -22,8 +22,9 @@
# along with this program; if not, write to the Free Software Foundation, # along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
__all__ = ['install', 'expand_user', 'working_dir', 'touch', 'mkdirp', __all__ = ['set_install_permissions', 'install', 'expand_user', 'working_dir',
'join_path', 'ancestor', 'can_access', 'filter_file', 'change_sed_delimiter'] 'touch', 'mkdirp', 'force_remove', 'join_path', 'ancestor',
'can_access', 'filter_file', 'change_sed_delimiter']
import os import os
import sys import sys
@ -127,10 +128,19 @@ def change_sed_delimiter(old_delim, new_delim, *filenames):
filter_file(double_quoted, '"%s"' % repl, f) filter_file(double_quoted, '"%s"' % repl, f)
def set_install_permissions(path):
"""Set appropriate permissions on the installed file."""
if os.path.isdir(path):
os.chmod(path, 0755)
else:
os.chmod(path, 0644)
def install(src, dest): def install(src, dest):
"""Manually install a file to a particular location.""" """Manually install a file to a particular location."""
tty.info("Installing %s to %s" % (src, dest)) tty.info("Installing %s to %s" % (src, dest))
shutil.copy(src, dest) shutil.copy(src, dest)
set_install_permissions(dest)
def expand_user(path): def expand_user(path):
@ -152,6 +162,15 @@ def mkdirp(*paths):
raise OSError(errno.EEXIST, "File already exists", path) raise OSError(errno.EEXIST, "File already exists", path)
def force_remove(*paths):
"""Remove files without printing errors. Like rm -f, does NOT
remove directories."""
for path in paths:
try:
os.remove(path)
except OSError, e:
pass
@contextmanager @contextmanager
def working_dir(dirname, **kwargs): def working_dir(dirname, **kwargs):
if kwargs.get('create', False): if kwargs.get('create', False):

View File

@ -65,7 +65,7 @@ def get_mac_sys_type():
if not mac_ver: if not mac_ver:
return None return None
return "macosx_{}_{}".format( return "macosx_%s_%s" % (
Version(mac_ver).up_to(2), py_platform.machine()) Version(mac_ver).up_to(2), py_platform.machine())

View File

@ -38,7 +38,7 @@
from spack.stage import Stage, FailedDownloadError from spack.stage import Stage, FailedDownloadError
from spack.version import * from spack.version import *
description ="Checksum available versions of a package to update a package file." description ="Checksum available versions of a package."
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument( subparser.add_argument(
@ -85,24 +85,24 @@ def checksum(parser, args):
pkg = spack.db.get(args.package) pkg = spack.db.get(args.package)
# If the user asked for specific versions, use those. # If the user asked for specific versions, use those.
versions = [ver(v) for v in args.versions] if args.versions:
versions = {}
if not all(type(v) == Version for v in versions): for v in args.versions:
tty.die("Cannot generate checksums for version lists or " + v = ver(v)
"version ranges. Use unambiguous versions.") if not isinstance(v, Version):
tty.die("Cannot generate checksums for version lists or " +
if not versions: "version ranges. Use unambiguous versions.")
versions = pkg.fetch_available_versions() versions[v] = pkg.url_for_version(v)
else:
versions = pkg.fetch_remote_versions()
if not versions: if not versions:
tty.die("Could not fetch any available versions for %s." % pkg.name) tty.die("Could not fetch any versions for %s." % pkg.name)
versions = list(reversed(sorted(versions))) sorted_versions = list(reversed(sorted(versions)))
urls = [pkg.url_for_version(v) for v in versions]
tty.msg("Found %s versions of %s." % (len(versions), pkg.name),
tty.msg("Found %s versions of %s." % (len(urls), pkg.name),
*spack.cmd.elide_list( *spack.cmd.elide_list(
["%-10s%s" % (v,u) for v, u in zip(versions, urls)])) ["%-10s%s" % (v, versions[v]) for v in sorted_versions]))
print print
archives_to_fetch = tty.get_number( archives_to_fetch = tty.get_number(
"How many would you like to checksum?", default=5, abort='q') "How many would you like to checksum?", default=5, abort='q')
@ -112,10 +112,12 @@ def checksum(parser, args):
return return
version_hashes = get_checksums( version_hashes = get_checksums(
versions[:archives_to_fetch], urls[:archives_to_fetch], keep_stage=args.keep_stage) sorted_versions[:archives_to_fetch],
[versions[v] for v in sorted_versions[:archives_to_fetch]],
keep_stage=args.keep_stage)
if not version_hashes: if not version_hashes:
tty.die("Could not fetch any available versions for %s." % pkg.name) tty.die("Could not fetch any versions for %s." % pkg.name)
version_lines = [" version('%s', '%s')" % (v, h) for v, h in version_hashes] version_lines = [" version('%s', '%s')" % (v, h) for v, h in version_hashes]
tty.msg("Checksummed new versions of %s:" % pkg.name, *version_lines) tty.msg("Checksummed new versions of %s:" % pkg.name, *version_lines)

View File

@ -159,13 +159,12 @@ def create(parser, args):
else: else:
mkdirp(os.path.dirname(pkg_path)) mkdirp(os.path.dirname(pkg_path))
versions = list(reversed(spack.package.find_versions_of_archive(url))) versions = spack.package.find_versions_of_archive(url)
archives_to_fetch = 1 archives_to_fetch = 1
if not versions: if not versions:
# If the fetch failed for some reason, revert to what the user provided # If the fetch failed for some reason, revert to what the user provided
versions = [version] versions = { version : url }
urls = [url]
else: else:
urls = [spack.url.substitute_version(url, v) for v in versions] urls = [spack.url.substitute_version(url, v) for v in versions]
if len(urls) > 1: if len(urls) > 1:
@ -181,6 +180,8 @@ def create(parser, args):
tty.msg("Aborted.") tty.msg("Aborted.")
return return
sorted_versions = list(reversed(versions))
guesser = ConfigureGuesser() guesser = ConfigureGuesser()
ver_hash_tuples = spack.cmd.checksum.get_checksums( ver_hash_tuples = spack.cmd.checksum.get_checksums(
versions[:archives_to_fetch], urls[:archives_to_fetch], versions[:archives_to_fetch], urls[:archives_to_fetch],

View File

@ -31,7 +31,7 @@
import spack import spack
from spack.util.executable import * from spack.util.executable import *
description = "Query packages associated with particular git revisions in spack." description = "Query packages associated with particular git revisions."
def setup_parser(subparser): def setup_parser(subparser):
sp = subparser.add_subparsers( sp = subparser.add_subparsers(

View File

@ -0,0 +1,58 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
import spack
import spack.url
description = "Inspect urls used by packages in spack."
def setup_parser(subparser):
subparser.add_argument(
'-c', '--color', action='store_true',
help="Color the parsed version and name in the urls shown. "
"Version will be cyan, name red.")
subparser.add_argument(
'-e', '--extrapolation', action='store_true',
help="Color the versions used for extrapolation as well. "
"Additional versions are green, names magenta.")
def urls(parser, args):
urls = set()
for pkg in spack.db.all_packages():
url = getattr(pkg.__class__, 'url', None)
if url:
urls.add(url)
for params in pkg.versions.values():
url = params.get('url', None)
if url:
urls.add(url)
for url in sorted(urls):
if args.color or args.extrapolation:
print spack.url.color_url(url, subs=args.extrapolation, errors=True)
else:
print url

View File

@ -24,6 +24,7 @@
############################################################################## ##############################################################################
import os import os
from llnl.util.tty.colify import colify from llnl.util.tty.colify import colify
import llnl.util.tty as tty
import spack import spack
description ="List available versions of a package" description ="List available versions of a package"
@ -34,4 +35,21 @@ def setup_parser(subparser):
def versions(parser, args): def versions(parser, args):
pkg = spack.db.get(args.package) pkg = spack.db.get(args.package)
colify(reversed(pkg.fetch_available_versions()))
safe_versions = pkg.versions
fetched_versions = pkg.fetch_remote_versions()
remote_versions = set(fetched_versions).difference(safe_versions)
tty.msg("Safe versions (already checksummed):")
colify(sorted(safe_versions, reverse=True), indent=2)
tty.msg("Remote versions (not yet checksummed):")
if not remote_versions:
if not fetched_versions:
print " Found no versions for %s" % pkg.name
tty.debug("Check the list_url and list_depth attribute on the "
"package to help Spack find versions.")
else:
print "  Found no unchecksummed versions for %s" % pkg.name
else:
colify(sorted(remote_versions, reverse=True), indent=2)

View File

@ -68,7 +68,7 @@ def concretize_version(self, spec):
# If there are known available versions, return the most recent # If there are known available versions, return the most recent
# version that satisfies the spec # version that satisfies the spec
pkg = spec.package pkg = spec.package
valid_versions = [v for v in pkg.available_versions valid_versions = [v for v in pkg.versions
if any(v.satisfies(sv) for sv in spec.versions)] if any(v.satisfies(sv) for sv in spec.versions)]
if valid_versions: if valid_versions:

View File

@ -39,7 +39,7 @@
import subprocess import subprocess
import platform as py_platform import platform as py_platform
import multiprocessing import multiprocessing
from urlparse import urlparse from urlparse import urlparse, urljoin
import textwrap import textwrap
from StringIO import StringIO from StringIO import StringIO
@ -335,9 +335,6 @@ def __init__(self, spec):
if '.' in self.name: if '.' in self.name:
self.name = self.name[self.name.rindex('.') + 1:] self.name = self.name[self.name.rindex('.') + 1:]
# This is set by scraping a web page.
self._available_versions = None
# Sanity check some required variables that could be # Sanity check some required variables that could be
# overridden by package authors. # overridden by package authors.
def ensure_has_dict(attr_name): def ensure_has_dict(attr_name):
@ -372,14 +369,15 @@ def ensure_has_dict(attr_name):
# Init fetch strategy and url to None # Init fetch strategy and url to None
self._fetcher = None self._fetcher = None
self.url = None self.url = getattr(self.__class__, 'url', None)
# Fix up self.url if this package fetches with a URLFetchStrategy. # Fix up self.url if this package fetches with a URLFetchStrategy.
# This makes self.url behave sanely. # This makes self.url behave sanely.
if self.spec.versions.concrete: if self.spec.versions.concrete:
# TODO: this is a really roundabout way of determining the type of fetch to do. # TODO: this is a really roundabout way of determining the type
# TODO: figure out a more sane fetch strategy/package init order # TODO: of fetch to do. figure out a more sane fetch strategy/package
# TODO: (right now it's conflated with stage, package, and the tests make assumptions) # TODO: init order (right now it's conflated with stage, package, and
# TODO: the tests make assumptions)
f = fs.for_package_version(self, self.version) f = fs.for_package_version(self, self.version)
if isinstance(f, fs.URLFetchStrategy): if isinstance(f, fs.URLFetchStrategy):
self.url = self.url_for_version(self.spec.version) self.url = self.url_for_version(self.spec.version)
@ -869,73 +867,70 @@ def format_doc(self, **kwargs):
return results.getvalue() return results.getvalue()
@property
def all_urls(self):
urls = []
if self.url:
urls.append(self.url)
for args in self.versions.values():
if 'url' in args:
urls.append(args['url'])
return urls
def fetch_available_versions(self): def fetch_remote_versions(self):
if not hasattr(self, 'url'): """Try to find remote versions of this package using the
list_url and any other URLs described in the package file."""
if not self.all_urls:
raise VersionFetchError(self.__class__) raise VersionFetchError(self.__class__)
# If not, then try to fetch using list_url try:
if not self._available_versions: return find_versions_of_archive(
try: *self.all_urls, list_url=self.list_url, list_depth=self.list_depth)
self._available_versions = find_versions_of_archive( except spack.error.NoNetworkConnectionError, e:
self.url, tty.die("Package.fetch_versions couldn't connect to:",
list_url=self.list_url, e.url, e.message)
list_depth=self.list_depth)
if not self._available_versions:
tty.warn("Found no versions for %s" % self.name,
"Check the list_url and list_depth attribute on the "
+ self.name + " package.",
"Use them to tell Spack where to look for versions.")
except spack.error.NoNetworkConnectionError, e:
tty.die("Package.fetch_available_versions couldn't connect to:",
e.url, e.message)
return self._available_versions
@property def find_versions_of_archive(*archive_urls, **kwargs):
def available_versions(self):
# If the package overrode available_versions, then use that.
if self.versions is not None:
return VersionList(self.versions.keys())
else:
vlist = self.fetch_available_versions()
if not vlist:
vlist = ver([self.version])
return vlist
def find_versions_of_archive(archive_url, **kwargs):
list_url = kwargs.get('list_url', None) list_url = kwargs.get('list_url', None)
list_depth = kwargs.get('list_depth', 1) list_depth = kwargs.get('list_depth', 1)
if not list_url: # Generate a list of list_urls based on archive urls and any
list_url = url.find_list_url(archive_url) # explicitly listed list_url in the package
list_urls = set()
# This creates a regex from the URL with a capture group for the if list_url:
# version part of the URL. The capture group is converted to a list_urls.add(list_url)
# generic wildcard, so we can use this to extract things on a page for aurl in archive_urls:
# that look like archive URLs. list_urls.add(url.find_list_url(aurl))
url_regex = url.wildcard_version(archive_url)
# We'll be a bit more liberal and just look for the archive part,
# not the full path.
archive_regex = os.path.basename(url_regex)
# Grab some web pages to scrape. # Grab some web pages to scrape.
page_map = get_pages(list_url, depth=list_depth) page_map = {}
for lurl in list_urls:
page_map.update(get_pages(lurl, depth=list_depth))
# Scrape them for archive URLs
regexes = []
for aurl in archive_urls:
# This creates a regex from the URL with a capture group for
# the version part of the URL. The capture group is converted
# to a generic wildcard, so we can use this to extract things
# on a page that look like archive URLs.
url_regex = url.wildcard_version(aurl)
# We'll be a bit more liberal and just look for the archive
# part, not the full path.
regexes.append(os.path.basename(url_regex))
# Build a version list from all the matches we find # Build a version list from all the matches we find
versions = VersionList() versions = {}
for site, page in page_map.iteritems(): for page_url, content in page_map.iteritems():
# extract versions from matches. # extract versions from matches.
matches = re.finditer(archive_regex, page) for regex in regexes:
version_strings = set(m.group(1) for m in matches) versions.update(
for v in version_strings: (Version(m.group(1)), urljoin(page_url, m.group(0)))
versions.add(Version(v)) for m in re.finditer(regex, content))
return versions return versions
@ -998,8 +993,8 @@ class VersionFetchError(PackageError):
"""Raised when a version URL cannot automatically be determined.""" """Raised when a version URL cannot automatically be determined."""
def __init__(self, cls): def __init__(self, cls):
super(VersionFetchError, self).__init__( super(VersionFetchError, self).__init__(
"Cannot fetch version for package %s " % cls.__name__ + "Cannot fetch versions for package %s " % cls.__name__ +
"because it does not define a default url.") "because it does not define any URLs to fetch.")
class NoURLError(PackageError): class NoURLError(PackageError):

View File

@ -1096,8 +1096,9 @@ def __getitem__(self, name):
def __contains__(self, spec): def __contains__(self, spec):
"""True if this spec has any dependency that satisfies the supplied """True if this spec satisfies the provided spec, or if any dependency
spec.""" does. If the spec has no name, then we parse this one first.
"""
spec = self._autospec(spec) spec = self._autospec(spec)
for s in self.traverse(): for s in self.traverse():
if s.satisfies(spec): if s.satisfies(spec):

View File

@ -32,60 +32,69 @@
from spack.test.mock_packages_test import * from spack.test.mock_packages_test import *
class UrlExtrapolateTest(MockPackagesTest): class UrlExtrapolateTest(unittest.TestCase):
def test_known_version(self): def check_url(self, base, version, new_url):
d = spack.db.get('dyninst') self.assertEqual(url.substitute_version(base, version), new_url)
self.assertEqual(
d.url_for_version('8.2'), 'http://www.paradyn.org/release8.2/DyninstAPI-8.2.tgz')
self.assertEqual(
d.url_for_version('8.1.2'), 'http://www.paradyn.org/release8.1.2/DyninstAPI-8.1.2.tgz')
self.assertEqual(
d.url_for_version('8.1.1'), 'http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.tgz')
def test_extrapolate_version(self): def test_libelf_version(self):
d = spack.db.get('dyninst') base = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
self.check_url(base, '0.8.13', base)
# Nearest URL for 8.1.1.5 is 8.1.1, and the URL there is self.check_url(base, '0.8.12', "http://www.mr511.de/software/libelf-0.8.12.tar.gz")
# release8.1/DyninstAPI-8.1.1.tgz. Only the last part matches self.check_url(base, '0.3.1', "http://www.mr511.de/software/libelf-0.3.1.tar.gz")
# the version, so only extrapolate the last part. Obviously self.check_url(base, '1.3.1b', "http://www.mr511.de/software/libelf-1.3.1b.tar.gz")
# dyninst has ambiguous URL versions, but we want to make sure
# extrapolation works in a well-defined way.
self.assertEqual(
d.url_for_version('8.1.1.5'), 'http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.5.tgz')
# 8.2 matches both the release8.2 component and the DyninstAPI-8.2 component.
# Extrapolation should replace both with the new version.
# TODO: figure out a consistent policy for this.
# self.assertEqual(
# d.url_for_version('8.2.3'), 'http://www.paradyn.org/release8.2.3/DyninstAPI-8.2.3.tgz')
def test_with_package(self): def test_libdwarf_version(self):
d = spack.db.get('dyninst@8.2') base = "http://www.prevanders.net/libdwarf-20130729.tar.gz"
self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.2/DyninstAPI-8.2.tgz') self.check_url(base, '20130729', base)
self.check_url(base, '8.12', "http://www.prevanders.net/libdwarf-8.12.tar.gz")
d = spack.db.get('dyninst@8.1.2')
self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.1.2/DyninstAPI-8.1.2.tgz')
d = spack.db.get('dyninst@8.1.1')
self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.tgz')
def test_concrete_package(self): def test_dyninst_version(self):
s = Spec('dyninst@8.2') # Dyninst has a version twice in the URL.
s.concretize() base = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz"
d = spack.db.get(s) self.check_url(base, '8.1.2', base)
self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.2/DyninstAPI-8.2.tgz') self.check_url(base, '8.2',
"http://www.dyninst.org/sites/default/files/downloads/dyninst/8.2/DyninstAPI-8.2.tgz")
self.check_url(base, '8.3.1',
"http://www.dyninst.org/sites/default/files/downloads/dyninst/8.3.1/DyninstAPI-8.3.1.tgz")
s = Spec('dyninst@8.1.2')
s.concretize()
d = spack.db.get(s)
self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.1.2/DyninstAPI-8.1.2.tgz')
s = Spec('dyninst@8.1.1') def test_partial_version_prefix(self):
s.concretize() # Test now with a partial prefix earlier in the URL -- this is
d = spack.db.get(s) # hard to figure out so Spack only substitutes the last
self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.tgz') # instance of the version.
base = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.1.2.tgz"
self.check_url(base, '8.1.2', base)
self.check_url(base, '8.1.4',
"http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.1.4.tgz")
self.check_url(base, '8.2',
"http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.2.tgz")
self.check_url(base, '8.3.1',
"http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.3.1.tgz")
def test_scalasca_partial_version(self):
# Note that this probably doesn't actually work, but sites are
# inconsistent about their directory structure, so it's not
# clear what is right. This test is for consistency and to
# document behavior. If you figure out a good way to handle
# this case, fix the tests too.
self.check_url('http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz', '8.3.1',
'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-8.3.1.tar.gz')
self.check_url('http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz', '8.3.1',
'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-8.3.1.tar.gz')
def test_mpileaks_version(self):
self.check_url('https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz', '2.1.3',
'https://github.com/hpc/mpileaks/releases/download/v2.1.3/mpileaks-2.1.3.tar.gz')
def test_gcc(self):
self.check_url('http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2', '4.7',
'http://open-source-box.org/gcc/gcc-4.7/gcc-4.7.tar.bz2')
self.check_url('http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2', '4.4.7',
'http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2')

View File

@ -295,3 +295,30 @@ def test_hdf5_version(self):
self.check( self.check(
'hdf5', '1.8.13', 'hdf5', '1.8.13',
'http://www.hdfgroup.org/ftp/HDF5/current/src/hdf5-1.8.13.tar.bz2') 'http://www.hdfgroup.org/ftp/HDF5/current/src/hdf5-1.8.13.tar.bz2')
def test_scalasca_version(self):
self.check(
'cube', '4.2.3',
'http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz')
self.check(
'cube', '4.3-TP1',
'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz')
def test_mpileaks_version(self):
self.check(
'mpileaks', '1.0',
'https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz')
self.check(
'mpileaks', '1.0',
'https://github.com/hpc/mpileaks/releases/download/1.0/mpileaks-1.0.tar.gz')
def test_gcc_version(self):
self.check(
'gcc', '4.4.7',
'http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2')
def test_gcc_version_precedence(self):
# prefer the version in the tarball, not in the url prefix.
self.check(
'gcc', '4.4.7',
'http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.4.7.tar.bz2')

View File

@ -46,6 +46,9 @@
""" """
import os import os
import re import re
from StringIO import StringIO
from llnl.util.tty.color import *
import spack.error import spack.error
import spack.util.compression as comp import spack.util.compression as comp
@ -57,27 +60,6 @@
# "path" seemed like the most generic term. # "path" seemed like the most generic term.
# #
class UrlParseError(spack.error.SpackError):
"""Raised when the URL module can't parse something correctly."""
def __init__(self, msg, path):
super(UrlParseError, self).__init__(msg)
self.path = path
class UndetectableVersionError(UrlParseError):
"""Raised when we can't parse a version from a string."""
def __init__(self, path):
super(UndetectableVersionError, self).__init__(
"Couldn't detect version in: " + path, path)
class UndetectableNameError(UrlParseError):
"""Raised when we can't parse a package name from a string."""
def __init__(self, path):
super(UndetectableNameError, self).__init__(
"Couldn't parse package name in: " + path, path)
def find_list_url(url): def find_list_url(url):
"""Finds a good list URL for the supplied URL. This depends on """Finds a good list URL for the supplied URL. This depends on
the site. By default, just assumes that a good list URL is the the site. By default, just assumes that a good list URL is the
@ -98,7 +80,7 @@ def find_list_url(url):
return os.path.dirname(url) return os.path.dirname(url)
def parse_version_string_with_indices(path): def parse_version_offset(path):
"""Try to extract a version string from a filename or URL. This is taken """Try to extract a version string from a filename or URL. This is taken
largely from Homebrew's Version class.""" largely from Homebrew's Version class."""
@ -112,6 +94,7 @@ def parse_version_string_with_indices(path):
# Take basename to avoid including parent dirs in version name # Take basename to avoid including parent dirs in version name
# Remember the offset of the stem in the full path. # Remember the offset of the stem in the full path.
stem = os.path.basename(path) stem = os.path.basename(path)
offset = len(path) - len(stem)
version_types = [ version_types = [
# GitHub tarballs, e.g. v1.2.3 # GitHub tarballs, e.g. v1.2.3
@ -132,6 +115,10 @@ def parse_version_string_with_indices(path):
# e.g. https://github.com/erlang/otp/tarball/OTP_R15B01 (erlang style) # e.g. https://github.com/erlang/otp/tarball/OTP_R15B01 (erlang style)
(r'[-_](R\d+[AB]\d*(-\d+)?)', path), (r'[-_](R\d+[AB]\d*(-\d+)?)', path),
# e.g., https://github.com/hpc/libcircle/releases/download/0.2.1-rc.1/libcircle-0.2.1-rc.1.tar.gz
# e.g., https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
(r'github.com/[^/]+/[^/]+/releases/download/v?([^/]+)/.*$', path),
# e.g. boost_1_39_0 # e.g. boost_1_39_0
(r'((\d+_)+\d+)$', stem), (r'((\d+_)+\d+)$', stem),
@ -146,7 +133,7 @@ def parse_version_string_with_indices(path):
(r'-((\d+\.)*\d+)$', stem), (r'-((\d+\.)*\d+)$', stem),
# e.g. foobar-4.5.1b # e.g. foobar-4.5.1b
(r'-((\d+\.)*\d+([a-z]|rc|RC)\d*)$', stem), (r'-((\d+\.)*\d+\-?([a-z]|rc|RC|tp|TP)\d*)$', stem),
# e.g. foobar-4.5.0-beta1, or foobar-4.50-beta # e.g. foobar-4.5.0-beta1, or foobar-4.50-beta
(r'-((\d+\.)*\d+-beta(\d+)?)$', stem), (r'-((\d+\.)*\d+-beta(\d+)?)$', stem),
@ -172,13 +159,18 @@ def parse_version_string_with_indices(path):
# e.g. http://www.ijg.org/files/jpegsrc.v8d.tar.gz # e.g. http://www.ijg.org/files/jpegsrc.v8d.tar.gz
(r'\.v(\d+[a-z]?)', stem)] (r'\.v(\d+[a-z]?)', stem)]
for vtype in version_types: for i, vtype in enumerate(version_types):
regex, match_string = vtype[:2] regex, match_string = vtype
match = re.search(regex, match_string) match = re.search(regex, match_string)
if match and match.group(1) is not None: if match and match.group(1) is not None:
version = match.group(1) version = match.group(1)
start = path.index(version) start = match.start(1)
return version, start, start+len(version)
# if we matched from the basename, then add offset in.
if match_string is stem:
start += offset
return version, start, len(version)
raise UndetectableVersionError(path) raise UndetectableVersionError(path)
@ -187,30 +179,58 @@ def parse_version(path):
"""Given a URL or archive name, extract a version from it and return """Given a URL or archive name, extract a version from it and return
a version object. a version object.
""" """
ver, start, end = parse_version_string_with_indices(path) ver, start, l = parse_version_offset(path)
return Version(ver) return Version(ver)
def parse_name(path, ver=None): def parse_name_offset(path, v=None):
if ver is None: if v is None:
ver = parse_version(path) v = parse_version(path)
ntypes = (r'/sourceforge/([^/]+)/', # Strip archive extension
r'/([^/]+)/(tarball|zipball)/', path = comp.strip_extension(path)
r'/([^/]+)[_.-](bin|dist|stable|src|sources)[_.-]%s' % ver,
r'github.com/[^/]+/([^/]+)/archive',
r'/([^/]+)[_.-]v?%s' % ver,
r'/([^/]+)%s' % ver,
r'^([^/]+)[_.-]v?%s' % ver,
r'^([^/]+)%s' % ver)
for nt in ntypes: # Allow matching with either path or stem, as with the version.
match = re.search(nt, path) stem = os.path.basename(path)
offset = len(path) - len(stem)
name_types = [
(r'/sourceforge/([^/]+)/', path),
(r'github.com/[^/]+/[^/]+/releases/download/%s/(.*)-%s$' % (v, v), path),
(r'/([^/]+)/(tarball|zipball)/', path),
(r'/([^/]+)[_.-](bin|dist|stable|src|sources)[_.-]%s' % v, path),
(r'github.com/[^/]+/([^/]+)/archive', path),
(r'([^/]+)[_.-]v?%s' % v, stem), # prefer the stem
(r'([^/]+)%s' % v, stem),
(r'/([^/]+)[_.-]v?%s' % v, path), # accept the path if name is not in stem.
(r'/([^/]+)%s' % v, path),
(r'^([^/]+)[_.-]v?%s' % v, path),
(r'^([^/]+)%s' % v, path)]
for i, name_type in enumerate(name_types):
regex, match_string = name_type
match = re.search(regex, match_string)
if match: if match:
return match.group(1) name = match.group(1)
start = match.start(1)
# if we matched from the basename, then add offset in.
if match_string is stem:
start += offset
return name, start, len(name)
raise UndetectableNameError(path) raise UndetectableNameError(path)
def parse_name(path, ver=None):
name, start, l = parse_name_offset(path, ver)
return name
def parse_name_and_version(path): def parse_name_and_version(path):
ver = parse_version(path) ver = parse_version(path)
name = parse_name(path, ver) name = parse_name(path, ver)
@ -218,7 +238,7 @@ def parse_name_and_version(path):
def insensitize(string): def insensitize(string):
"""Chagne upper and lowercase letters to be case insensitive in """Change upper and lowercase letters to be case insensitive in
the provided string. e.g., 'a' because '[Aa]', 'B' becomes the provided string. e.g., 'a' because '[Aa]', 'B' becomes
'[bB]', etc. Use for building regexes.""" '[bB]', etc. Use for building regexes."""
def to_ins(match): def to_ins(match):
@ -227,12 +247,53 @@ def to_ins(match):
return re.sub(r'([a-zA-Z])', to_ins, string) return re.sub(r'([a-zA-Z])', to_ins, string)
def cumsum(elts, init=0, fn=lambda x: x):
    """Return the exclusive cumulative sums of fn over elts.

       sums[i] == init + sum(fn(e) for e in elts[:i]); the result has
       the same length as elts, and elts[-1]'s contribution is dropped.
       Used to turn the lengths of string parts into start offsets.
    """
    sums = []
    s = init
    # Note: append BEFORE adding, so each entry is the offset where
    # that element begins, not where it ends.
    for e in elts:
        sums.append(s)
        s += fn(e)
    return sums
def substitution_offsets(path):
    """This returns offsets for substituting versions and names in the provided path.
       It is a helper for substitute_version().

       Returns a tuple:
         (name, name_start, name_len, name_offsets,
          ver,  name_start, ver_len,  ver_offsets)
       where the *_offsets tuples hold the position of every occurrence
       that is eligible for substitution.
    """
    # Get name and version offsets
    try:
        ver, vs, vl = parse_version_offset(path)
        name, ns, nl = parse_name_offset(path, ver)
    except UndetectableNameError, e:
        # Version was found but the name wasn't: still report the
        # version's single occurrence so it can be substituted.
        return (None, -1, -1, (), ver, vs, vl, (vs,))
    except UndetectableVersionError, e:
        # No version at all -- nothing can be substituted.
        return (None, -1, -1, (), None, -1, -1, ())

    # protect extensions like bz2 from getting inadvertently
    # considered versions.
    ext = comp.extension(path)
    path = comp.strip_extension(path)

    # Construct a case-insensitive regular expression for the package name.
    name_re = '(%s)' % insensitize(name)

    # Split the string apart by things that match the name so that if the
    # name contains numbers or things that look like versions, we don't
    # accidentally substitute them with a version.
    name_parts = re.split(name_re, path)

    # name_re has a capturing group, so re.split keeps the separators:
    # odd indices of name_parts are name matches, even indices are the
    # text in between.  cumsum of lengths gives each part's start offset.
    offsets = cumsum(name_parts, 0, len)
    name_offsets = offsets[1::2]

    ver_offsets = []
    for i in xrange(0, len(name_parts), 2):
        # Search only the non-name chunks for version occurrences.
        # NOTE(review): `ver` here has no capturing group, so re.split
        # drops the separators; verify that voffsets[1::2] really lands
        # on each version occurrence when a chunk contains several.
        vparts = re.split(ver, name_parts[i])
        voffsets = cumsum(vparts, offsets[i], len)
        ver_offsets.extend(voffsets[1::2])

    return (name, ns, nl, tuple(name_offsets),
            ver, vs, vl, tuple(ver_offsets))
def wildcard_version(path): def wildcard_version(path):
@ -242,6 +303,10 @@ def wildcard_version(path):
# Get name and version, so we can treat them specially # Get name and version, so we can treat them specially
name, v = parse_name_and_version(path) name, v = parse_name_and_version(path)
# protect extensions like bz2 from wildcarding.
ext = comp.extension(path)
path = comp.strip_extension(path)
# Construct a case-insensitive regular expression for the package name. # Construct a case-insensitive regular expression for the package name.
name_re = '(%s)' % insensitize(name) name_re = '(%s)' % insensitize(name)
@ -261,4 +326,107 @@ def wildcard_version(path):
name_parts[i] = vgroup.join(re.escape(vp) for vp in vparts) name_parts[i] = vgroup.join(re.escape(vp) for vp in vparts)
# Put it all back together with original name matches intact. # Put it all back together with original name matches intact.
return ''.join(name_parts) return ''.join(name_parts) + '.' + ext
def substitute_version(path, new_version):
    """Given a URL or archive name, find the version in the path and
       substitute the new version for it.  Replace all occurrences of
       the version *if* they don't overlap with the package name.

       Simple example::
          substitute_version('http://www.mr511.de/software/libelf-0.8.13.tar.gz', '2.9.3')
          ->'http://www.mr511.de/software/libelf-2.9.3.tar.gz'

       Complex examples::
          substitute_version('http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.0.tar.gz', 2.1)
          -> 'http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.1.tar.gz'

          # In this string, the "2" in mvapich2 is NOT replaced.
          substitute_version('http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.tar.gz', 2.1)
          -> 'http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.1.tar.gz'
    """
    # Only the version length and occurrence offsets are needed here;
    # name occurrences were already excluded by substitution_offsets().
    (name, ns, nl, noffs,
     ver, vs, vl, voffs) = substitution_offsets(path)

    # Rebuild the path, splicing the new version in at each offset.
    pieces = []
    cursor = 0
    for vo in voffs:
        pieces.append(path[cursor:vo])
        pieces.append(str(new_version))
        cursor = vo + vl
    pieces.append(path[cursor:])
    return ''.join(pieces)
def color_url(path, **kwargs):
    """Color the parts of the url according to Spack's parsing.

       Colors are:
          Cyan: The version found by parse_version_offset().
          Red:  The name found by parse_name_offset().

          Green:   Instances of version string substituted by substitute_version().
          Magenta: Instances of the name (protected from substitution).

       Optional args:
          errors=True    Append parse errors at end of string.
          subs=True      Color substitutions as well as parsed name/version.
    """
    errors = kwargs.get('errors', False)
    subs = kwargs.get('subs', False)

    (name, ns, nl, noffs,
     ver, vs, vl, voffs) = substitution_offsets(path)

    # Last character index of each name/version occurrence, so we know
    # where to emit the color-reset escape.
    nends = [no + nl - 1 for no in noffs]
    vends = [vo + vl - 1 for vo in voffs]

    # nerr/verr count how many of the open/close markers were emitted
    # for the primary name/version: 0 = not found, 1 = opened but never
    # closed (incomplete), 2 = fully marked.
    nerr = verr = 0
    out = StringIO()
    for i in range(len(path)):
        # Open a color span before writing the character at a start offset.
        if i == vs: out.write('@c'); verr += 1
        elif i == ns: out.write('@r'); nerr += 1
        elif subs:
            if i in voffs: out.write('@g')
            elif i in noffs: out.write('@m')

        out.write(path[i])

        # Close the color span after the last character of a match.
        if i == vs + vl - 1: out.write('@.'); verr += 1
        elif i == ns + nl - 1: out.write('@.'); nerr += 1
        elif subs:
            if i in vends or i in nends:
                out.write('@.')

    if errors:
        if nerr == 0: out.write(" @r{[no name]}")
        if verr == 0: out.write(" @r{[no version]}")
        if nerr == 1: out.write(" @r{[incomplete name]}")
        if verr == 1: out.write(" @r{[incomplete version]}")

    # colorize() translates the @-escapes into terminal color codes.
    return colorize(out.getvalue())
class UrlParseError(spack.error.SpackError):
    """Base error for failures while parsing names/versions out of URLs."""
    def __init__(self, msg, path):
        super(UrlParseError, self).__init__(msg)
        # Keep the path/URL that failed to parse for callers to inspect.
        self.path = path
class UndetectableVersionError(UrlParseError):
    """Raised when we can't parse a version from a string."""
    def __init__(self, path):
        msg = "Couldn't detect version in: " + path
        super(UndetectableVersionError, self).__init__(msg, path)
class UndetectableNameError(UrlParseError):
    """Raised when we can't parse a package name from a string."""
    def __init__(self, path):
        msg = "Couldn't parse package name in: " + path
        super(UndetectableNameError, self).__init__(msg, path)

View File

@ -25,7 +25,7 @@
import re import re
import sys import sys
import subprocess import subprocess
import urllib2 import urllib2, cookielib
import urlparse import urlparse
from multiprocessing import Pool from multiprocessing import Pool
from HTMLParser import HTMLParser, HTMLParseError from HTMLParser import HTMLParser, HTMLParseError
@ -68,7 +68,7 @@ def _spider(args):
pool. Firing off all the child links at once makes the fetch MUCH pool. Firing off all the child links at once makes the fetch MUCH
faster for pages with lots of children. faster for pages with lots of children.
""" """
url, depth, max_depth, raise_on_error = args url, visited, root, opener, depth, max_depth, raise_on_error = args
pages = {} pages = {}
try: try:
@ -82,12 +82,12 @@ def _spider(args):
resp = urllib2.urlopen(req, timeout=TIMEOUT) resp = urllib2.urlopen(req, timeout=TIMEOUT)
if not "Content-type" in resp.headers: if not "Content-type" in resp.headers:
tty.warn("ignoring page " + url) tty.debug("ignoring page " + url)
return pages return pages
if not resp.headers["Content-type"].startswith('text/html'): if not resp.headers["Content-type"].startswith('text/html'):
tty.warn("ignoring page " + url + " with content type " + tty.debug("ignoring page " + url + " with content type " +
resp.headers["Content-type"]) resp.headers["Content-type"])
return pages return pages
# Do the real GET request when we know it's just HTML. # Do the real GET request when we know it's just HTML.
@ -114,15 +114,30 @@ def _spider(args):
# Evaluate the link relative to the page it came from. # Evaluate the link relative to the page it came from.
abs_link = urlparse.urljoin(response_url, raw_link) abs_link = urlparse.urljoin(response_url, raw_link)
subcalls.append((abs_link, depth+1, max_depth, raise_on_error))
# Skip things outside the root directory
if not abs_link.startswith(root):
continue
# Skip already-visited links
if abs_link in visited:
continue
subcalls.append((abs_link, visited, root, None, depth+1, max_depth, raise_on_error))
visited.add(abs_link)
if subcalls: if subcalls:
pool = Pool(processes=len(subcalls)) try:
dicts = pool.map(_spider, subcalls) pool = Pool(processes=len(subcalls))
for d in dicts: dicts = pool.map(_spider, subcalls)
pages.update(d) for d in dicts:
pages.update(d)
finally:
pool.terminate()
pool.join()
except urllib2.URLError, e: except urllib2.URLError, e:
tty.debug(e)
if raise_on_error: if raise_on_error:
raise spack.error.NoNetworkConnectionError(str(e), url) raise spack.error.NoNetworkConnectionError(str(e), url)
@ -137,7 +152,8 @@ def _spider(args):
tty.warn(msg, url, "HTMLParseError: " + str(e)) tty.warn(msg, url, "HTMLParseError: " + str(e))
except Exception, e: except Exception, e:
pass # Other types of errors are completely ignored. # Other types of errors are completely ignored, except in debug mode.
tty.debug("Error in _spider: %s" % e)
return pages return pages
@ -151,5 +167,5 @@ def get_pages(root_url, **kwargs):
performance over a sequential fetch. performance over a sequential fetch.
""" """
max_depth = kwargs.setdefault('depth', 1) max_depth = kwargs.setdefault('depth', 1)
pages = _spider((root_url, 1, max_depth, False)) pages = _spider((root_url, set(), root_url, None, 1, max_depth, False))
return pages return pages

View File

@ -0,0 +1,90 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
from contextlib import closing
from glob import glob
class Gcc(Package):
    """The GNU Compiler Collection includes front ends for C, C++,
       Objective-C, Fortran, and Java."""
    homepage = "https://gcc.gnu.org"

    # Spack scrapes this listing page (two directory levels deep)
    # when looking for new available versions.
    list_url = 'http://open-source-box.org/gcc/'
    list_depth = 2

    version('4.9.2', '4df8ee253b7f3863ad0b86359cd39c43',
            url="http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2")
    version('4.9.1', 'fddf71348546af523353bd43d34919c1',
            url="http://open-source-box.org/gcc/gcc-4.9.1/gcc-4.9.1.tar.bz2")

    # GCC's build requires these math/ELF support libraries.
    depends_on("mpc")
    depends_on("mpfr")
    depends_on("gmp")
    depends_on("libelf")

    def install(self, spec, prefix):
        # libjava/configure needs a minor fix to install into spack paths.
        filter_file(r"'@.*@'", "'@[[:alnum:]]*@'", 'libjava/configure', string=True)

        # Rest of install is straightforward.
        configure("--prefix=%s" % prefix,
                  "--libdir=%s/lib64" % prefix,
                  "--disable-multilib",
                  "--enable-languages=c,c++,fortran,java,objc,go",
                  "--enable-lto",
                  "--with-quad")
        make()
        make("install")

        self.write_rpath_specs()

    @property
    def spec_dir(self):
        # e.g. lib64/gcc/x86_64-unknown-linux-gnu/4.9.2
        # Returns None when no such directory exists under the prefix.
        spec_dir = glob("%s/lib64/gcc/*/*" % self.prefix)
        return spec_dir[0] if spec_dir else None

    def write_rpath_specs(self):
        """Generate a spec file so the linker adds a rpath to the libs
           the compiler used to build the executable."""
        if not self.spec_dir:
            # NOTE(review): relies on `tty` being in scope (presumably via
            # `from spack import *`) -- confirm it is exported there.
            tty.warn("Could not install specs for %s." % self.spec.format('$_$@'))
            return

        # Dump the built compiler's default specs and rewrite the *link
        # section to embed an rpath to this install's lib dirs.
        gcc = Executable(join_path(self.prefix.bin, 'gcc'))
        lines = gcc('-dumpspecs', return_output=True).split("\n")
        for i, line in enumerate(lines):
            if line.startswith("*link:"):
                specs_file = join_path(self.spec_dir, 'specs')
                with closing(open(specs_file, 'w')) as out:
                    out.write(lines[i] + "\n")
                    out.write("-rpath %s/lib:%s/lib64 \\\n"
                              % (self.prefix, self.prefix))
                    out.write(lines[i+1] + "\n")
                set_install_permissions(specs_file)

View File

@ -0,0 +1,40 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Gmp(Package):
    """GMP is a free library for arbitrary precision arithmetic,
       operating on signed integers, rational numbers, and
       floating-point numbers."""
    homepage = "https://gmplib.org"
    url = "https://gmplib.org/download/gmp/gmp-6.0.0a.tar.bz2"

    # Second argument is the tarball checksum (32-hex digest; presumably
    # md5, per spack convention at the time -- confirm).
    version('6.0.0a', 'b7ff2d88cae7f8085bd5006096eed470')
    version('6.0.0' , '6ef5869ae735db9995619135bd856b84')

    def install(self, spec, prefix):
        # Standard autotools build into the spack-assigned prefix.
        configure("--prefix=%s" % prefix)
        make()
        make("install")

View File

@ -5,7 +5,7 @@ class Jpeg(Package):
homepage = "http://www.ijg.org" homepage = "http://www.ijg.org"
url = "http://www.ijg.org/files/jpegsrc.v9a.tar.gz" url = "http://www.ijg.org/files/jpegsrc.v9a.tar.gz"
version('9', 'b397211ddfd506b92cd5e02a22ac924d') version('9a', 'b397211ddfd506b92cd5e02a22ac924d')
def install(self, spec, prefix): def install(self, spec, prefix):
configure("--prefix=%s" % prefix) configure("--prefix=%s" % prefix)

View File

@ -0,0 +1,42 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Mpc(Package):
    """Gnu Mpc is a C library for the arithmetic of complex numbers
       with arbitrarily high precision and correct rounding of the
       result."""
    homepage = "http://www.multiprecision.org"
    url = "ftp://ftp.gnu.org/gnu/mpc/mpc-1.0.2.tar.gz"

    version('1.0.2', '68fadff3358fb3e7976c7a398a0af4c3')

    # MPC is built on top of GMP and MPFR.
    depends_on("gmp")
    depends_on("mpfr")

    def install(self, spec, prefix):
        # Standard autotools build into the spack-assigned prefix.
        configure("--prefix=%s" % prefix)
        make()
        make("install")

View File

@ -0,0 +1,38 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Mpfr(Package):
    """The MPFR library is a C library for multiple-precision
       floating-point computations with correct rounding."""
    homepage = "http://www.mpfr.org"
    url = "http://www.mpfr.org/mpfr-current/mpfr-3.1.2.tar.bz2"

    version('3.1.2', 'ee2c3ac63bf0c2359bf08fc3ee094c19')

    def install(self, spec, prefix):
        # Standard autotools build into the spack-assigned prefix.
        configure("--prefix=%s" % prefix)
        make()
        make("install")

View File

@ -13,9 +13,9 @@ class Openmpi(Package):
version('1.8.2', 'ab538ed8e328079d566fc797792e016e', version('1.8.2', 'ab538ed8e328079d566fc797792e016e',
url='http://www.open-mpi.org/software/ompi/v1.8/downloads/openmpi-1.8.2.tar.gz') url='http://www.open-mpi.org/software/ompi/v1.8/downloads/openmpi-1.8.2.tar.gz')
version('1.6.5', '03aed2a4aa4d0b27196962a2a65fc475', version('1.6.5', '03aed2a4aa4d0b27196962a2a65fc475',
url = "http://www.open-mpi.org/software/ompi/v1.6/downloads/openmpi-1.6.5.tar.bz2") url = "http://www.open-mpi.org/software/ompi/v1.6/downloads/openmpi-1.6.5.tar.bz2")
patch('ad_lustre_rwcontig_open_source.patch', when="@1.6.5") patch('ad_lustre_rwcontig_open_source.patch', when="@1.6.5")
patch('llnl-platforms.patch', when="@1.6.5") patch('llnl-platforms.patch', when="@1.6.5")
@ -27,8 +27,8 @@ def install(self, spec, prefix):
# TODO: use variants for this, e.g. +lanl, +llnl, etc. # TODO: use variants for this, e.g. +lanl, +llnl, etc.
# use this for LANL builds, but for LLNL builds, we need: # use this for LANL builds, but for LLNL builds, we need:
# "--with-platform=contrib/platform/llnl/optimized" # "--with-platform=contrib/platform/llnl/optimized"
if self.version == ver("1.6.5"): if self.version == ver("1.6.5") and '+lanl' in spec:
confg_args.append("--with-platform=contrib/platform/lanl/tlcc2/optimized-nopanasas") config_args.append("--with-platform=contrib/platform/lanl/tlcc2/optimized-nopanasas")
# TODO: Spack should make it so that you can't actually find # TODO: Spack should make it so that you can't actually find
# these compilers if they're "disabled" for the current # these compilers if they're "disabled" for the current