Merge branch 'features/gcc' into develop

Conflicts:
	lib/spack/spack/package.py
Todd Gamblin 2014-11-08 22:30:46 -08:00
commit 79414947ae
22 changed files with 737 additions and 206 deletions

View File

@ -113,4 +113,5 @@ except SpackError, e:
    tty.die(e.message)
except KeyboardInterrupt:
    sys.stderr.write('\n')
    tty.die("Keyboard interrupt.")

lib/spack/env/cc
View File

@ -275,9 +275,15 @@ for dir in "${libraries[@]}"; do args+=("-L$dir"); done
for lib in "${libs[@]}"; do args+=("-l$lib"); done
if [ "$mode" = ccld ]; then
for dir in "${rpaths[@]}"; do args+=("-Wl,-rpath=$dir"); done
for dir in "${rpaths[@]}"; do
args+=("-Wl,-rpath")
args+=("-Wl,$dir");
done
elif [ "$mode" = ld ]; then
for dir in "${rpaths[@]}"; do args+=("-rpath=$dir"); done
for dir in "${rpaths[@]}"; do
args+=("-rpath")
args+=("$dir");
done
fi
#
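The split form above exists because not every linker accepts the single-token
-rpath=dir spelling (OS X's ld, for one, wants -rpath and the directory as two
arguments). A minimal Python sketch of the argument shapes the wrapper now
produces (illustrative only; the real logic is the shell above):

    def rpath_args(mode, rpaths):
        args = []
        for d in rpaths:
            if mode == 'ccld':    # linking through the compiler driver
                args += ['-Wl,-rpath', '-Wl,' + d]
            elif mode == 'ld':    # invoking the linker directly
                args += ['-rpath', d]
        return args

    # rpath_args('ccld', ['/opt/lib'])  ->  ['-Wl,-rpath', '-Wl,/opt/lib']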

View File

@ -22,8 +22,9 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
__all__ = ['install', 'expand_user', 'working_dir', 'touch', 'mkdirp',
           'join_path', 'ancestor', 'can_access', 'filter_file', 'change_sed_delimiter']
__all__ = ['set_install_permissions', 'install', 'expand_user', 'working_dir',
           'touch', 'mkdirp', 'force_remove', 'join_path', 'ancestor',
           'can_access', 'filter_file', 'change_sed_delimiter']
import os
import sys
@ -127,10 +128,19 @@ def change_sed_delimiter(old_delim, new_delim, *filenames):
        filter_file(double_quoted, '"%s"' % repl, f)


def set_install_permissions(path):
    """Set appropriate permissions on the installed file."""
    if os.path.isdir(path):
        os.chmod(path, 0755)
    else:
        os.chmod(path, 0644)


def install(src, dest):
    """Manually install a file to a particular location."""
    tty.info("Installing %s to %s" % (src, dest))
    shutil.copy(src, dest)
    set_install_permissions(dest)


def expand_user(path):
@ -152,6 +162,15 @@ def mkdirp(*paths):
            raise OSError(errno.EEXIST, "File already exists", path)


def force_remove(*paths):
    """Remove files without printing errors.  Like rm -f, does NOT
       remove directories."""
    for path in paths:
        try:
            os.remove(path)
        except OSError, e:
            pass


@contextmanager
def working_dir(dirname, **kwargs):
    if kwargs.get('create', False):
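A short usage sketch of the new helpers (paths here are hypothetical; the
functions come from this module, llnl.util.filesystem):

    from llnl.util.filesystem import install, force_remove, set_install_permissions

    install('spack.spec', '/usr/local/etc/spack.spec')   # copies, then chmods to 0644
    set_install_permissions('/usr/local/etc')            # directories get 0755 instead
    force_remove('/tmp/a.lock', '/tmp/b.lock')           # like rm -f; missing files are ignored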

View File

@ -65,7 +65,7 @@ def get_mac_sys_type():
    if not mac_ver:
        return None

    return "macosx_{}_{}".format(
    return "macosx_%s_%s" % (
        Version(mac_ver).up_to(2), py_platform.machine())
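The switch back to %-formatting is presumably for Python 2.6 compatibility:
implicit positional "{}" fields in str.format() only arrived in Python 2.7.
A quick illustration:

    "macosx_{}_{}".format("10", "9")   # ValueError on 2.6: zero length field name
    "macosx_%s_%s" % ("10", "9")       # 'macosx_10_9' on 2.6 and later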

View File

@ -38,7 +38,7 @@
from spack.stage import Stage, FailedDownloadError
from spack.version import *
description ="Checksum available versions of a package to update a package file."
description ="Checksum available versions of a package."
def setup_parser(subparser):
    subparser.add_argument(
@ -85,24 +85,24 @@ def checksum(parser, args):
    pkg = spack.db.get(args.package)

    # If the user asked for specific versions, use those.
    versions = [ver(v) for v in args.versions]

    if not all(type(v) == Version for v in versions):
        tty.die("Cannot generate checksums for version lists or " +
                "version ranges. Use unambiguous versions.")

    if not versions:
        versions = pkg.fetch_available_versions()
    if args.versions:
        versions = {}
        for v in args.versions:
            v = ver(v)
            if not isinstance(v, Version):
                tty.die("Cannot generate checksums for version lists or " +
                        "version ranges. Use unambiguous versions.")
            versions[v] = pkg.url_for_version(v)
    else:
        versions = pkg.fetch_remote_versions()

    if not versions:
        tty.die("Could not fetch any available versions for %s." % pkg.name)
        tty.die("Could not fetch any versions for %s." % pkg.name)

    versions = list(reversed(sorted(versions)))
    urls = [pkg.url_for_version(v) for v in versions]
    sorted_versions = list(reversed(sorted(versions)))

    tty.msg("Found %s versions of %s." % (len(urls), pkg.name),
    tty.msg("Found %s versions of %s." % (len(versions), pkg.name),
            *spack.cmd.elide_list(
                ["%-10s%s" % (v,u) for v, u in zip(versions, urls)]))
                ["%-10s%s" % (v, versions[v]) for v in sorted_versions]))
    print
    archives_to_fetch = tty.get_number(
        "How many would you like to checksum?", default=5, abort='q')
@ -112,10 +112,12 @@ def checksum(parser, args):
        return

    version_hashes = get_checksums(
        versions[:archives_to_fetch], urls[:archives_to_fetch], keep_stage=args.keep_stage)
        sorted_versions[:archives_to_fetch],
        [versions[v] for v in sorted_versions[:archives_to_fetch]],
        keep_stage=args.keep_stage)

    if not version_hashes:
        tty.die("Could not fetch any available versions for %s." % pkg.name)
        tty.die("Could not fetch any versions for %s." % pkg.name)

    version_lines = ["    version('%s', '%s')" % (v, h) for v, h in version_hashes]
    tty.msg("Checksummed new versions of %s:" % pkg.name, *version_lines)

View File

@ -159,13 +159,12 @@ def create(parser, args):
    else:
        mkdirp(os.path.dirname(pkg_path))

    versions = list(reversed(spack.package.find_versions_of_archive(url)))
    versions = spack.package.find_versions_of_archive(url)

    archives_to_fetch = 1
    if not versions:
        # If the fetch failed for some reason, revert to what the user provided
        versions = [version]
        urls = [url]
        versions = { version : url }
    else:
        urls = [spack.url.substitute_version(url, v) for v in versions]
        if len(urls) > 1:
@ -181,6 +180,8 @@ def create(parser, args):
tty.msg("Aborted.")
return
sorted_versions = list(reversed(versions))
guesser = ConfigureGuesser()
ver_hash_tuples = spack.cmd.checksum.get_checksums(
versions[:archives_to_fetch], urls[:archives_to_fetch],

View File

@ -31,7 +31,7 @@
import spack
from spack.util.executable import *
description = "Query packages associated with particular git revisions in spack."
description = "Query packages associated with particular git revisions."
def setup_parser(subparser):
    sp = subparser.add_subparsers(

View File

@ -0,0 +1,58 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
import spack
import spack.url
description = "Inspect urls used by packages in spack."
def setup_parser(subparser):
    subparser.add_argument(
        '-c', '--color', action='store_true',
        help="Color the parsed version and name in the urls shown. "
             "Version will be cyan, name red.")
    subparser.add_argument(
        '-e', '--extrapolation', action='store_true',
        help="Color the versions used for extrapolation as well. "
             "Additional versions are green, names magenta.")


def urls(parser, args):
    urls = set()
    for pkg in spack.db.all_packages():
        url = getattr(pkg.__class__, 'url', None)
        if url:
            urls.add(url)

        for params in pkg.versions.values():
            url = params.get('url', None)
            if url:
                urls.add(url)

    for url in sorted(urls):
        if args.color or args.extrapolation:
            print spack.url.color_url(url, subs=args.extrapolation, errors=True)
        else:
            print url
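A sketch of what the new command prints per URL, assuming the color_url()
helper added to spack.url later in this diff; the markup uses Spack's @c/@r
color escapes:

    import spack.url

    u = 'http://www.mr511.de/software/libelf-0.8.13.tar.gz'
    print spack.url.color_url(u, subs=True, errors=True)
    # The version (0.8.13) renders cyan and the name (libelf) red; with
    # subs=True, other substitutable occurrences render green/magenta.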

View File

@ -24,6 +24,7 @@
##############################################################################
import os
from llnl.util.tty.colify import colify
import llnl.util.tty as tty
import spack
description ="List available versions of a package"
@ -34,4 +35,21 @@ def setup_parser(subparser):
def versions(parser, args):
    pkg = spack.db.get(args.package)

    colify(reversed(pkg.fetch_available_versions()))

    safe_versions = pkg.versions
    fetched_versions = pkg.fetch_remote_versions()
    remote_versions = set(fetched_versions).difference(safe_versions)

    tty.msg("Safe versions (already checksummed):")
    colify(sorted(safe_versions, reverse=True), indent=2)

    tty.msg("Remote versions (not yet checksummed):")
    if not remote_versions:
        if not fetched_versions:
            print "  Found no versions for %s" % pkg.name
            tty.debug("Check the list_url and list_depth attributes on the "
                      "package to help Spack find versions.")
        else:
            print "  Found no unchecksummed versions for %s" % pkg.name
    else:
        colify(sorted(remote_versions, reverse=True), indent=2)

View File

@ -68,7 +68,7 @@ def concretize_version(self, spec):
        # If there are known available versions, return the most recent
        # version that satisfies the spec
        pkg = spec.package
        valid_versions = [v for v in pkg.available_versions
        valid_versions = [v for v in pkg.versions
                          if any(v.satisfies(sv) for sv in spec.versions)]

        if valid_versions:

View File

@ -39,7 +39,7 @@
import subprocess
import platform as py_platform
import multiprocessing
from urlparse import urlparse
from urlparse import urlparse, urljoin
import textwrap
from StringIO import StringIO
@ -335,9 +335,6 @@ def __init__(self, spec):
        if '.' in self.name:
            self.name = self.name[self.name.rindex('.') + 1:]

        # This is set by scraping a web page.
        self._available_versions = None

        # Sanity check some required variables that could be
        # overridden by package authors.
        def ensure_has_dict(attr_name):
@ -372,14 +369,15 @@ def ensure_has_dict(attr_name):
        # Init fetch strategy and url to None
        self._fetcher = None
        self.url = None
        self.url = getattr(self.__class__, 'url', None)

        # Fix up self.url if this package fetches with a URLFetchStrategy.
        # This makes self.url behave sanely.
        if self.spec.versions.concrete:
            # TODO: this is a really roundabout way of determining the type of fetch to do.
            # TODO: figure out a more sane fetch strategy/package init order
            # TODO: (right now it's conflated with stage, package, and the tests make assumptions)
            # TODO: this is a really roundabout way of determining the type
            # TODO: of fetch to do. figure out a more sane fetch strategy/package
            # TODO: init order (right now it's conflated with stage, package, and
            # TODO: the tests make assumptions)
            f = fs.for_package_version(self, self.version)
            if isinstance(f, fs.URLFetchStrategy):
                self.url = self.url_for_version(self.spec.version)
@ -869,73 +867,70 @@ def format_doc(self, **kwargs):
        return results.getvalue()

    @property
    def all_urls(self):
        urls = []
        if self.url:
            urls.append(self.url)

        for args in self.versions.values():
            if 'url' in args:
                urls.append(args['url'])
        return urls

    def fetch_available_versions(self):
        if not hasattr(self, 'url'):
    def fetch_remote_versions(self):
        """Try to find remote versions of this package using the
           list_url and any other URLs described in the package file."""
        if not self.all_urls:
            raise VersionFetchError(self.__class__)

        # If not, then try to fetch using list_url
        if not self._available_versions:
            try:
                self._available_versions = find_versions_of_archive(
                    self.url,
                    list_url=self.list_url,
                    list_depth=self.list_depth)

                if not self._available_versions:
                    tty.warn("Found no versions for %s" % self.name,
                             "Check the list_url and list_depth attribute on the "
                             + self.name + " package.",
                             "Use them to tell Spack where to look for versions.")
            except spack.error.NoNetworkConnectionError, e:
                tty.die("Package.fetch_available_versions couldn't connect to:",
                        e.url, e.message)
        return self._available_versions

        try:
            return find_versions_of_archive(
                *self.all_urls, list_url=self.list_url, list_depth=self.list_depth)
        except spack.error.NoNetworkConnectionError, e:
            tty.die("Package.fetch_versions couldn't connect to:",
                    e.url, e.message)

    @property
    def available_versions(self):
        # If the package overrode available_versions, then use that.
        if self.versions is not None:
            return VersionList(self.versions.keys())
        else:
            vlist = self.fetch_available_versions()
            if not vlist:
                vlist = ver([self.version])
            return vlist
def find_versions_of_archive(archive_url, **kwargs):
def find_versions_of_archive(*archive_urls, **kwargs):
    list_url = kwargs.get('list_url', None)
    list_depth = kwargs.get('list_depth', 1)

    if not list_url:
        list_url = url.find_list_url(archive_url)

    # This creates a regex from the URL with a capture group for the
    # version part of the URL.  The capture group is converted to a
    # generic wildcard, so we can use this to extract things on a page
    # that look like archive URLs.
    url_regex = url.wildcard_version(archive_url)

    # We'll be a bit more liberal and just look for the archive part,
    # not the full path.
    archive_regex = os.path.basename(url_regex)

    # Generate a list of list_urls based on archive urls and any
    # explicitly listed list_url in the package
    list_urls = set()
    if list_url:
        list_urls.add(list_url)
    for aurl in archive_urls:
        list_urls.add(url.find_list_url(aurl))

    # Grab some web pages to scrape.
    page_map = get_pages(list_url, depth=list_depth)
    page_map = {}
    for lurl in list_urls:
        page_map.update(get_pages(lurl, depth=list_depth))

    # Scrape them for archive URLs
    regexes = []
    for aurl in archive_urls:
        # This creates a regex from the URL with a capture group for
        # the version part of the URL. The capture group is converted
        # to a generic wildcard, so we can use this to extract things
        # on a page that look like archive URLs.
        url_regex = url.wildcard_version(aurl)

        # We'll be a bit more liberal and just look for the archive
        # part, not the full path.
        regexes.append(os.path.basename(url_regex))

    # Build a version list from all the matches we find
    versions = VersionList()
    for site, page in page_map.iteritems():
    versions = {}
    for page_url, content in page_map.iteritems():
        # extract versions from matches.
        matches = re.finditer(archive_regex, page)
        version_strings = set(m.group(1) for m in matches)
        for v in version_strings:
            versions.add(Version(v))
        for regex in regexes:
            versions.update(
                (Version(m.group(1)), urljoin(page_url, m.group(0)))
                for m in re.finditer(regex, content))

    return versions
@ -998,8 +993,8 @@ class VersionFetchError(PackageError):
"""Raised when a version URL cannot automatically be determined."""
def __init__(self, cls):
super(VersionFetchError, self).__init__(
"Cannot fetch version for package %s " % cls.__name__ +
"because it does not define a default url.")
"Cannot fetch versions for package %s " % cls.__name__ +
"because it does not define any URLs to fetch.")
class NoURLError(PackageError):
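With this change, find_versions_of_archive() accepts any number of archive
URLs and returns a dict of Version -> archive URL scraped from the union of
the list pages. A hedged usage sketch, using the gcc mirror from this branch:

    from spack.package import find_versions_of_archive

    versions = find_versions_of_archive(
        'http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2',
        list_url='http://open-source-box.org/gcc/', list_depth=2)
    # e.g. {Version('4.9.2'): 'http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2', ...}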

View File

@ -1096,8 +1096,9 @@ def __getitem__(self, name):
    def __contains__(self, spec):
        """True if this spec has any dependency that satisfies the supplied
           spec."""
        """True if this spec satisfies the provided spec, or if any dependency
           does.  If the spec has no name, then we parse this one first.
        """
        spec = self._autospec(spec)
        for s in self.traverse():
            if s.satisfies(spec):

View File

@ -32,60 +32,69 @@
from spack.test.mock_packages_test import *


class UrlExtrapolateTest(MockPackagesTest):
class UrlExtrapolateTest(unittest.TestCase):

    def test_known_version(self):
        d = spack.db.get('dyninst')
        self.assertEqual(
            d.url_for_version('8.2'), 'http://www.paradyn.org/release8.2/DyninstAPI-8.2.tgz')
        self.assertEqual(
            d.url_for_version('8.1.2'), 'http://www.paradyn.org/release8.1.2/DyninstAPI-8.1.2.tgz')
        self.assertEqual(
            d.url_for_version('8.1.1'), 'http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.tgz')

    def check_url(self, base, version, new_url):
        self.assertEqual(url.substitute_version(base, version), new_url)

    def test_extrapolate_version(self):
        d = spack.db.get('dyninst')

        # Nearest URL for 8.1.1.5 is 8.1.1, and the URL there is
        # release8.1/DyninstAPI-8.1.1.tgz.  Only the last part matches
        # the version, so only extrapolate the last part.  Obviously
        # dyninst has ambiguous URL versions, but we want to make sure
        # extrapolation works in a well-defined way.
        self.assertEqual(
            d.url_for_version('8.1.1.5'), 'http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.5.tgz')

        # 8.2 matches both the release8.2 component and the DyninstAPI-8.2 component.
        # Extrapolation should replace both with the new version.
        # TODO: figure out a consistent policy for this.
        # self.assertEqual(
        #     d.url_for_version('8.2.3'), 'http://www.paradyn.org/release8.2.3/DyninstAPI-8.2.3.tgz')

    def test_libelf_version(self):
        base = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
        self.check_url(base, '0.8.13', base)
        self.check_url(base, '0.8.12', "http://www.mr511.de/software/libelf-0.8.12.tar.gz")
        self.check_url(base, '0.3.1', "http://www.mr511.de/software/libelf-0.3.1.tar.gz")
        self.check_url(base, '1.3.1b', "http://www.mr511.de/software/libelf-1.3.1b.tar.gz")

    def test_with_package(self):
        d = spack.db.get('dyninst@8.2')
        self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.2/DyninstAPI-8.2.tgz')
        d = spack.db.get('dyninst@8.1.2')
        self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.1.2/DyninstAPI-8.1.2.tgz')
        d = spack.db.get('dyninst@8.1.1')
        self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.tgz')

    def test_libdwarf_version(self):
        base = "http://www.prevanders.net/libdwarf-20130729.tar.gz"
        self.check_url(base, '20130729', base)
        self.check_url(base, '8.12', "http://www.prevanders.net/libdwarf-8.12.tar.gz")

    def test_concrete_package(self):
        s = Spec('dyninst@8.2')
        s.concretize()
        d = spack.db.get(s)
        self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.2/DyninstAPI-8.2.tgz')

    def test_dyninst_version(self):
        # Dyninst has a version twice in the URL.
        base = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz"
        self.check_url(base, '8.1.2', base)
        self.check_url(base, '8.2',
                       "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.2/DyninstAPI-8.2.tgz")
        self.check_url(base, '8.3.1',
                       "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.3.1/DyninstAPI-8.3.1.tgz")

        s = Spec('dyninst@8.1.2')
        s.concretize()
        d = spack.db.get(s)
        self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.1.2/DyninstAPI-8.1.2.tgz')
        s = Spec('dyninst@8.1.1')
        s.concretize()
        d = spack.db.get(s)
        self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.tgz')

    def test_partial_version_prefix(self):
        # Test now with a partial prefix earlier in the URL -- this is
        # hard to figure out so Spack only substitutes the last
        # instance of the version.
        base = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.1.2.tgz"
        self.check_url(base, '8.1.2', base)
        self.check_url(base, '8.1.4',
                       "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.1.4.tgz")
        self.check_url(base, '8.2',
                       "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.2.tgz")
        self.check_url(base, '8.3.1',
                       "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.3.1.tgz")

    def test_scalasca_partial_version(self):
        # Note that this probably doesn't actually work, but sites are
        # inconsistent about their directory structure, so it's not
        # clear what is right.  This test is for consistency and to
        # document behavior.  If you figure out a good way to handle
        # this case, fix the tests too.
        self.check_url('http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz', '8.3.1',
                       'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-8.3.1.tar.gz')
    def test_mpileaks_version(self):
        self.check_url('https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz', '2.1.3',
                       'https://github.com/hpc/mpileaks/releases/download/v2.1.3/mpileaks-2.1.3.tar.gz')

    def test_gcc(self):
        self.check_url('http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2', '4.7',
                       'http://open-source-box.org/gcc/gcc-4.7/gcc-4.7.tar.bz2')
        self.check_url('http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2', '4.4.7',
                       'http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2')

View File

@ -295,3 +295,30 @@ def test_hdf5_version(self):
        self.check(
            'hdf5', '1.8.13',
            'http://www.hdfgroup.org/ftp/HDF5/current/src/hdf5-1.8.13.tar.bz2')

    def test_scalasca_version(self):
        self.check(
            'cube', '4.2.3',
            'http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz')
        self.check(
            'cube', '4.3-TP1',
            'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz')

    def test_mpileaks_version(self):
        self.check(
            'mpileaks', '1.0',
            'https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz')
        self.check(
            'mpileaks', '1.0',
            'https://github.com/hpc/mpileaks/releases/download/1.0/mpileaks-1.0.tar.gz')

    def test_gcc_version(self):
        self.check(
            'gcc', '4.4.7',
            'http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2')

    def test_gcc_version_precedence(self):
        # prefer the version in the tarball, not in the url prefix.
        self.check(
            'gcc', '4.4.7',
            'http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.4.7.tar.bz2')
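A sketch of the parsing these tests exercise, assuming check() asserts on
spack.url.parse_name_and_version(); note the precedence case, where the
version embedded in the tarball name beats the one in the directory prefix:

    import spack.url

    spack.url.parse_name_and_version(
        'http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.4.7.tar.bz2')
    # -> ('gcc', Version('4.4.7'))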

View File

@ -46,6 +46,9 @@
"""
import os
import re
from StringIO import StringIO
from llnl.util.tty.color import *
import spack.error
import spack.util.compression as comp
@ -57,27 +60,6 @@
# "path" seemed like the most generic term.
#
class UrlParseError(spack.error.SpackError):
    """Raised when the URL module can't parse something correctly."""
    def __init__(self, msg, path):
        super(UrlParseError, self).__init__(msg)
        self.path = path


class UndetectableVersionError(UrlParseError):
    """Raised when we can't parse a version from a string."""
    def __init__(self, path):
        super(UndetectableVersionError, self).__init__(
            "Couldn't detect version in: " + path, path)


class UndetectableNameError(UrlParseError):
    """Raised when we can't parse a package name from a string."""
    def __init__(self, path):
        super(UndetectableNameError, self).__init__(
            "Couldn't parse package name in: " + path, path)


def find_list_url(url):
    """Finds a good list URL for the supplied URL.  This depends on
       the site.  By default, just assumes that a good list URL is the
@ -98,7 +80,7 @@ def find_list_url(url):
    return os.path.dirname(url)


def parse_version_string_with_indices(path):
def parse_version_offset(path):
    """Try to extract a version string from a filename or URL.  This is taken
       largely from Homebrew's Version class."""
@ -112,6 +94,7 @@ def parse_version_string_with_indices(path):
    # Take basename to avoid including parent dirs in version name
    # Remember the offset of the stem in the full path.
    stem = os.path.basename(path)
    offset = len(path) - len(stem)

    version_types = [
        # GitHub tarballs, e.g. v1.2.3
@ -132,6 +115,10 @@ def parse_version_string_with_indices(path):
        # e.g. https://github.com/erlang/otp/tarball/OTP_R15B01 (erlang style)
        (r'[-_](R\d+[AB]\d*(-\d+)?)', path),

        # e.g., https://github.com/hpc/libcircle/releases/download/0.2.1-rc.1/libcircle-0.2.1-rc.1.tar.gz
        # e.g., https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
        (r'github.com/[^/]+/[^/]+/releases/download/v?([^/]+)/.*$', path),

        # e.g. boost_1_39_0
        (r'((\d+_)+\d+)$', stem),
@ -146,7 +133,7 @@ def parse_version_string_with_indices(path):
        (r'-((\d+\.)*\d+)$', stem),

        # e.g. foobar-4.5.1b
        (r'-((\d+\.)*\d+([a-z]|rc|RC)\d*)$', stem),
        (r'-((\d+\.)*\d+\-?([a-z]|rc|RC|tp|TP)\d*)$', stem),

        # e.g. foobar-4.5.0-beta1, or foobar-4.50-beta
        (r'-((\d+\.)*\d+-beta(\d+)?)$', stem),
@ -172,13 +159,18 @@ def parse_version_string_with_indices(path):
        # e.g. http://www.ijg.org/files/jpegsrc.v8d.tar.gz
        (r'\.v(\d+[a-z]?)', stem)]

    for vtype in version_types:
        regex, match_string = vtype[:2]
    for i, vtype in enumerate(version_types):
        regex, match_string = vtype
        match = re.search(regex, match_string)
        if match and match.group(1) is not None:
            version = match.group(1)
            start = path.index(version)
            return version, start, start+len(version)
            start = match.start(1)

            # if we matched from the basename, then add offset in.
            if match_string is stem:
                start += offset

            return version, start, len(version)

    raise UndetectableVersionError(path)
@ -187,30 +179,58 @@ def parse_version(path):
"""Given a URL or archive name, extract a version from it and return
a version object.
"""
ver, start, end = parse_version_string_with_indices(path)
ver, start, l = parse_version_offset(path)
return Version(ver)
def parse_name(path, ver=None):
if ver is None:
ver = parse_version(path)
def parse_name_offset(path, v=None):
if v is None:
v = parse_version(path)
ntypes = (r'/sourceforge/([^/]+)/',
r'/([^/]+)/(tarball|zipball)/',
r'/([^/]+)[_.-](bin|dist|stable|src|sources)[_.-]%s' % ver,
r'github.com/[^/]+/([^/]+)/archive',
r'/([^/]+)[_.-]v?%s' % ver,
r'/([^/]+)%s' % ver,
r'^([^/]+)[_.-]v?%s' % ver,
r'^([^/]+)%s' % ver)
# Strip archive extension
path = comp.strip_extension(path)
for nt in ntypes:
match = re.search(nt, path)
# Allow matching with either path or stem, as with the version.
stem = os.path.basename(path)
offset = len(path) - len(stem)
name_types = [
(r'/sourceforge/([^/]+)/', path),
(r'github.com/[^/]+/[^/]+/releases/download/%s/(.*)-%s$' % (v, v), path),
(r'/([^/]+)/(tarball|zipball)/', path),
(r'/([^/]+)[_.-](bin|dist|stable|src|sources)[_.-]%s' % v, path),
(r'github.com/[^/]+/([^/]+)/archive', path),
(r'([^/]+)[_.-]v?%s' % v, stem), # prefer the stem
(r'([^/]+)%s' % v, stem),
(r'/([^/]+)[_.-]v?%s' % v, path), # accept the path if name is not in stem.
(r'/([^/]+)%s' % v, path),
(r'^([^/]+)[_.-]v?%s' % v, path),
(r'^([^/]+)%s' % v, path)]
for i, name_type in enumerate(name_types):
regex, match_string = name_type
match = re.search(regex, match_string)
if match:
return match.group(1)
name = match.group(1)
start = match.start(1)
# if we matched from the basename, then add offset in.
if match_string is stem:
start += offset
return name, start, len(name)
raise UndetectableNameError(path)
def parse_name(path, ver=None):
name, start, l = parse_name_offset(path, ver)
return name
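# A quick illustration of the offset-returning API (values are schematic):
# both functions return a (text, start, length) triple, where start is the
# offset of the match within the full path, e.g.:
#
#   parse_version_offset('http://www.mr511.de/software/libelf-0.8.13.tar.gz')
#       -> ('0.8.13', <offset of '0.8.13' in path>, 6)
#   parse_name_offset('http://www.mr511.de/software/libelf-0.8.13.tar.gz')
#       -> ('libelf', <offset of 'libelf' in path>, 6)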
def parse_name_and_version(path):
    ver = parse_version(path)
    name = parse_name(path, ver)
@ -218,7 +238,7 @@ def parse_name_and_version(path):
def insensitize(string):
    """Chagne upper and lowercase letters to be case insensitive in
    """Change upper and lowercase letters to be case insensitive in
       the provided string.  e.g., 'a' becomes '[Aa]', 'B' becomes
       '[bB]', etc.  Use for building regexes."""
    def to_ins(match):
@ -227,12 +247,53 @@ def to_ins(match):
    return re.sub(r'([a-zA-Z])', to_ins, string)
def substitute_version(path, new_version):
    """Given a URL or archive name, find the version in the path and substitute
       the new version for it.
def cumsum(elts, init=0, fn=lambda x: x):
    """Return cumulative sum of result of fn on each element in elts."""
    sums = []
    s = init
    for i, e in enumerate(elts):
        sums.append(s)
        s += fn(e)
    return sums
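# Illustration: the running sums are each element's offset in the
# concatenation, e.g. cumsum(['ab', 'cde', 'f'], 0, len) -> [0, 2, 5].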
def substitution_offsets(path):
    """This returns offsets for substituting versions and names in the
       provided path.  It is a helper for substitute_version().
    """
    ver, start, end = parse_version_string_with_indices(path)
    return path[:start] + str(new_version) + path[end:]
    # Get name and version offsets
    try:
        ver,  vs, vl = parse_version_offset(path)
        name, ns, nl = parse_name_offset(path, ver)
    except UndetectableNameError, e:
        return (None, -1, -1, (), ver, vs, vl, (vs,))
    except UndetectableVersionError, e:
        return (None, -1, -1, (), None, -1, -1, ())

    # protect extensions like bz2 from getting inadvertently
    # considered versions.
    ext = comp.extension(path)
    path = comp.strip_extension(path)

    # Construct a case-insensitive regular expression for the package name.
    name_re = '(%s)' % insensitize(name)

    # Split the string apart by things that match the name so that if the
    # name contains numbers or things that look like versions, we don't
    # accidentally substitute them with a version.
    name_parts = re.split(name_re, path)

    offsets = cumsum(name_parts, 0, len)
    name_offsets = offsets[1::2]

    ver_offsets = []
    for i in xrange(0, len(name_parts), 2):
        vparts = re.split(ver, name_parts[i])
        voffsets = cumsum(vparts, offsets[i], len)
        ver_offsets.extend(voffsets[1::2])

    return (name, ns, nl, tuple(name_offsets),
            ver,  vs, vl, tuple(ver_offsets))
def wildcard_version(path):
@ -242,6 +303,10 @@ def wildcard_version(path):
    # Get name and version, so we can treat them specially
    name, v = parse_name_and_version(path)

    # protect extensions like bz2 from wildcarding.
    ext = comp.extension(path)
    path = comp.strip_extension(path)

    # Construct a case-insensitive regular expression for the package name.
    name_re = '(%s)' % insensitize(name)
@ -261,4 +326,107 @@ def wildcard_version(path):
        name_parts[i] = vgroup.join(re.escape(vp) for vp in vparts)

    # Put it all back together with original name matches intact.
    return ''.join(name_parts)
    return ''.join(name_parts) + '.' + ext
def substitute_version(path, new_version):
    """Given a URL or archive name, find the version in the path and
       substitute the new version for it.  Replace all occurrences of
       the version *if* they don't overlap with the package name.

       Simple example::
           substitute_version('http://www.mr511.de/software/libelf-0.8.13.tar.gz', '2.9.3')
           -> 'http://www.mr511.de/software/libelf-2.9.3.tar.gz'

       Complex examples::
           substitute_version('http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.0.tar.gz', 2.1)
           -> 'http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.1.tar.gz'

           # In this string, the "2" in mvapich2 is NOT replaced.
           substitute_version('http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.tar.gz', 2.1)
           -> 'http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.1.tar.gz'
    """
    (name, ns, nl, noffs,
     ver,  vs, vl, voffs) = substitution_offsets(path)

    new_path = ''
    last = 0
    for vo in voffs:
        new_path += path[last:vo]
        new_path += str(new_version)
        last = vo + vl

    new_path += path[last:]
    return new_path
def color_url(path, **kwargs):
    """Color the parts of the url according to Spack's parsing.

       Colors are:
          Cyan:    The version found by parse_version_offset().
          Red:     The name found by parse_name_offset().

          Green:   Instances of version string substituted by substitute_version().
          Magenta: Instances of the name (protected from substitution).

       Optional args:
          errors=True    Append parse errors at end of string.
          subs=True      Color substitutions as well as parsed name/version.
    """
    errors = kwargs.get('errors', False)
    subs = kwargs.get('subs', False)

    (name, ns, nl, noffs,
     ver,  vs, vl, voffs) = substitution_offsets(path)

    nends = [no + nl - 1 for no in noffs]
    vends = [vo + vl - 1 for vo in voffs]

    nerr = verr = 0
    out = StringIO()
    for i in range(len(path)):
        if i == vs:    out.write('@c'); verr += 1
        elif i == ns:  out.write('@r'); nerr += 1
        elif subs:
            if i in voffs: out.write('@g')
            elif i in noffs: out.write('@m')

        out.write(path[i])

        if i == vs + vl - 1:   out.write('@.'); verr += 1
        elif i == ns + nl - 1: out.write('@.'); nerr += 1
        elif subs:
            if i in vends or i in nends:
                out.write('@.')

    if errors:
        if nerr == 0: out.write(" @r{[no name]}")
        if verr == 0: out.write(" @r{[no version]}")
        if nerr == 1: out.write(" @r{[incomplete name]}")
        if verr == 1: out.write(" @r{[incomplete version]}")

    return colorize(out.getvalue())
class UrlParseError(spack.error.SpackError):
    """Raised when the URL module can't parse something correctly."""
    def __init__(self, msg, path):
        super(UrlParseError, self).__init__(msg)
        self.path = path


class UndetectableVersionError(UrlParseError):
    """Raised when we can't parse a version from a string."""
    def __init__(self, path):
        super(UndetectableVersionError, self).__init__(
            "Couldn't detect version in: " + path, path)


class UndetectableNameError(UrlParseError):
    """Raised when we can't parse a package name from a string."""
    def __init__(self, path):
        super(UndetectableNameError, self).__init__(
            "Couldn't parse package name in: " + path, path)

View File

@ -25,7 +25,7 @@
import re
import sys
import subprocess
import urllib2
import urllib2, cookielib
import urlparse
from multiprocessing import Pool
from HTMLParser import HTMLParser, HTMLParseError
@ -68,7 +68,7 @@ def _spider(args):
       pool.  Firing off all the child links at once makes the fetch MUCH
       faster for pages with lots of children.
    """
    url, depth, max_depth, raise_on_error = args
    url, visited, root, opener, depth, max_depth, raise_on_error = args

    pages = {}
    try:
@ -82,12 +82,12 @@ def _spider(args):
        resp = urllib2.urlopen(req, timeout=TIMEOUT)

        if not "Content-type" in resp.headers:
            tty.warn("ignoring page " + url)
            tty.debug("ignoring page " + url)
            return pages

        if not resp.headers["Content-type"].startswith('text/html'):
            tty.warn("ignoring page " + url + " with content type " +
                     resp.headers["Content-type"])
            tty.debug("ignoring page " + url + " with content type " +
                      resp.headers["Content-type"])
            return pages

        # Do the real GET request when we know it's just HTML.
@ -114,15 +114,30 @@ def _spider(args):
            # Evaluate the link relative to the page it came from.
            abs_link = urlparse.urljoin(response_url, raw_link)
            subcalls.append((abs_link, depth+1, max_depth, raise_on_error))

            # Skip things outside the root directory
            if not abs_link.startswith(root):
                continue

            # Skip already-visited links
            if abs_link in visited:
                continue

            subcalls.append((abs_link, visited, root, None, depth+1, max_depth, raise_on_error))
            visited.add(abs_link)

        if subcalls:
            pool = Pool(processes=len(subcalls))
            dicts = pool.map(_spider, subcalls)
            for d in dicts:
                pages.update(d)
            try:
                pool = Pool(processes=len(subcalls))
                dicts = pool.map(_spider, subcalls)
                for d in dicts:
                    pages.update(d)
            finally:
                pool.terminate()
                pool.join()

    except urllib2.URLError, e:
        tty.debug(e)
        if raise_on_error:
            raise spack.error.NoNetworkConnectionError(str(e), url)
@ -137,7 +152,8 @@ def _spider(args):
        tty.warn(msg, url, "HTMLParseError: " + str(e))

    except Exception, e:
        pass  # Other types of errors are completely ignored.
        # Other types of errors are completely ignored, except in debug mode.
        tty.debug("Error in _spider: %s" % e)

    return pages
@ -151,5 +167,5 @@ def get_pages(root_url, **kwargs):
       performance over a sequential fetch.
    """
    max_depth = kwargs.setdefault('depth', 1)
    pages = _spider((root_url, 1, max_depth, False))
    pages = _spider((root_url, set(), root_url, None, 1, max_depth, False))
    return pages
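A minimal usage sketch of the updated spider; the new visited set and root
arguments keep the crawl under the root URL and off already-seen pages:

    from spack.util.web import get_pages

    page_map = get_pages('http://open-source-box.org/gcc/', depth=2)
    for page_url in sorted(page_map):
        print page_url   # every page fetched beneath the root, keyed by URL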

View File

@ -0,0 +1,90 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *

from contextlib import closing
from glob import glob


class Gcc(Package):
    """The GNU Compiler Collection includes front ends for C, C++,
       Objective-C, Fortran, and Java."""
    homepage = "https://gcc.gnu.org"

    list_url = 'http://open-source-box.org/gcc/'
    list_depth = 2

    version('4.9.2', '4df8ee253b7f3863ad0b86359cd39c43',
            url="http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2")
    version('4.9.1', 'fddf71348546af523353bd43d34919c1',
            url="http://open-source-box.org/gcc/gcc-4.9.1/gcc-4.9.1.tar.bz2")

    depends_on("mpc")
    depends_on("mpfr")
    depends_on("gmp")
    depends_on("libelf")

    def install(self, spec, prefix):
        # libjava/configure needs a minor fix to install into spack paths.
        filter_file(r"'@.*@'", "'@[[:alnum:]]*@'", 'libjava/configure', string=True)

        # Rest of install is straightforward.
        configure("--prefix=%s" % prefix,
                  "--libdir=%s/lib64" % prefix,
                  "--disable-multilib",
                  "--enable-languages=c,c++,fortran,java,objc,go",
                  "--enable-lto",
                  "--with-quad")
        make()
        make("install")

        self.write_rpath_specs()

    @property
    def spec_dir(self):
        # e.g. lib64/gcc/x86_64-unknown-linux-gnu/4.9.2
        spec_dir = glob("%s/lib64/gcc/*/*" % self.prefix)
        return spec_dir[0] if spec_dir else None

    def write_rpath_specs(self):
        """Generate a spec file so the linker adds a rpath to the libs
           the compiler used to build the executable."""
        if not self.spec_dir:
            tty.warn("Could not install specs for %s." % self.spec.format('$_$@'))
            return

        gcc = Executable(join_path(self.prefix.bin, 'gcc'))
        lines = gcc('-dumpspecs', return_output=True).split("\n")
        for i, line in enumerate(lines):
            if line.startswith("*link:"):
                specs_file = join_path(self.spec_dir, 'specs')
                with closing(open(specs_file, 'w')) as out:
                    out.write(lines[i] + "\n")
                    out.write("-rpath %s/lib:%s/lib64 \\\n"
                              % (self.prefix, self.prefix))
                    out.write(lines[i+1] + "\n")
                set_install_permissions(specs_file)

View File

@ -0,0 +1,40 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *


class Gmp(Package):
    """GMP is a free library for arbitrary precision arithmetic,
       operating on signed integers, rational numbers, and
       floating-point numbers."""
    homepage = "https://gmplib.org"
    url = "https://gmplib.org/download/gmp/gmp-6.0.0a.tar.bz2"

    version('6.0.0a', 'b7ff2d88cae7f8085bd5006096eed470')
    version('6.0.0' , '6ef5869ae735db9995619135bd856b84')

    def install(self, spec, prefix):
        configure("--prefix=%s" % prefix)
        make()
        make("install")

View File

@ -5,7 +5,7 @@ class Jpeg(Package):
homepage = "http://www.ijg.org"
url = "http://www.ijg.org/files/jpegsrc.v9a.tar.gz"
version('9', 'b397211ddfd506b92cd5e02a22ac924d')
version('9a', 'b397211ddfd506b92cd5e02a22ac924d')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)

View File

@ -0,0 +1,42 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *


class Mpc(Package):
    """Gnu Mpc is a C library for the arithmetic of complex numbers
       with arbitrarily high precision and correct rounding of the
       result."""
    homepage = "http://www.multiprecision.org"
    url = "ftp://ftp.gnu.org/gnu/mpc/mpc-1.0.2.tar.gz"

    version('1.0.2', '68fadff3358fb3e7976c7a398a0af4c3')

    depends_on("gmp")
    depends_on("mpfr")

    def install(self, spec, prefix):
        configure("--prefix=%s" % prefix)
        make()
        make("install")

View File

@ -0,0 +1,38 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *


class Mpfr(Package):
    """The MPFR library is a C library for multiple-precision
       floating-point computations with correct rounding."""
    homepage = "http://www.mpfr.org"
    url = "http://www.mpfr.org/mpfr-current/mpfr-3.1.2.tar.bz2"

    version('3.1.2', 'ee2c3ac63bf0c2359bf08fc3ee094c19')

    def install(self, spec, prefix):
        configure("--prefix=%s" % prefix)
        make()
        make("install")

View File

@ -13,9 +13,9 @@ class Openmpi(Package):
    version('1.8.2', 'ab538ed8e328079d566fc797792e016e',
            url='http://www.open-mpi.org/software/ompi/v1.8/downloads/openmpi-1.8.2.tar.gz')
    version('1.6.5', '03aed2a4aa4d0b27196962a2a65fc475',
            url = "http://www.open-mpi.org/software/ompi/v1.6/downloads/openmpi-1.6.5.tar.bz2")

    patch('ad_lustre_rwcontig_open_source.patch', when="@1.6.5")
    patch('llnl-platforms.patch', when="@1.6.5")
@ -27,8 +27,8 @@ def install(self, spec, prefix):
        # TODO: use variants for this, e.g. +lanl, +llnl, etc.
        # use this for LANL builds, but for LLNL builds, we need:
        #     "--with-platform=contrib/platform/llnl/optimized"
        if self.version == ver("1.6.5"):
            confg_args.append("--with-platform=contrib/platform/lanl/tlcc2/optimized-nopanasas")
        if self.version == ver("1.6.5") and '+lanl' in spec:
            config_args.append("--with-platform=contrib/platform/lanl/tlcc2/optimized-nopanasas")

        # TODO: Spack should make it so that you can't actually find
        # these compilers if they're "disabled" for the current