Merge branch 'develop' into eschnett/openssl-darwin

This commit is contained in:
Erik Schnetter 2016-01-05 13:09:57 -05:00
commit 25934200de
26 changed files with 668 additions and 388 deletions

View File

@ -34,8 +34,8 @@
import spack import spack
import spack.cmd import spack.cmd
import spack.cmd.checksum import spack.cmd.checksum
import spack.package
import spack.url import spack.url
import spack.util.web
from spack.util.naming import * from spack.util.naming import *
import spack.util.crypto as crypto import spack.util.crypto as crypto
@ -166,7 +166,7 @@ def create(parser, args):
tty.msg("This looks like a URL for %s version %s." % (name, version)) tty.msg("This looks like a URL for %s version %s." % (name, version))
tty.msg("Creating template for package %s" % name) tty.msg("Creating template for package %s" % name)
versions = spack.package.find_versions_of_archive(url) versions = spack.util.web.find_versions_of_archive(url)
rkeys = sorted(versions.keys(), reverse=True) rkeys = sorted(versions.keys(), reverse=True)
versions = OrderedDict(zip(rkeys, (versions[v] for v in rkeys))) versions = OrderedDict(zip(rkeys, (versions[v] for v in rkeys)))

View File

@ -0,0 +1,75 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
import llnl.util.tty as tty
import spack
import spack.url
from spack.util.web import find_versions_of_archive
description = "Show parsing of a URL, optionally spider web for other versions."
def setup_parser(subparser):
subparser.add_argument('url', help="url of a package archive")
subparser.add_argument(
'-s', '--spider', action='store_true', help="Spider the source page for versions.")
def print_name_and_version(url):
name, ns, nl, ntup, ver, vs, vl, vtup = spack.url.substitution_offsets(url)
underlines = [" "] * max(ns+nl, vs+vl)
for i in range(ns, ns+nl):
underlines[i] = '-'
for i in range(vs, vs+vl):
underlines[i] = '~'
print " %s" % url
print " %s" % ''.join(underlines)
def url_parse(parser, args):
url = args.url
ver, vs, vl = spack.url.parse_version_offset(url)
name, ns, nl = spack.url.parse_name_offset(url, ver)
tty.msg("Parsing URL:")
try:
print_name_and_version(url)
except spack.url.UrlParseError as e:
tty.error(str(e))
print
tty.msg("Substituting version 9.9.9b:")
newurl = spack.url.substitute_version(url, '9.9.9b')
print_name_and_version(newurl)
if args.spider:
print
tty.msg("Spidering for versions:")
versions = find_versions_of_archive(url)
for v in sorted(versions):
print "%-20s%s" % (v, versions[v])

View File

@ -687,7 +687,7 @@ def for_package_version(pkg, version):
class FetchError(spack.error.SpackError): class FetchError(spack.error.SpackError):
def __init__(self, msg, long_msg): def __init__(self, msg, long_msg=None):
super(FetchError, self).__init__(msg, long_msg) super(FetchError, self).__init__(msg, long_msg)
@ -705,7 +705,7 @@ def __init__(self, msg, long_msg):
class NoDigestError(FetchError): class NoDigestError(FetchError):
def __init__(self, msg, long_msg): def __init__(self, msg, long_msg=None):
super(NoDigestError, self).__init__(msg, long_msg) super(NoDigestError, self).__init__(msg, long_msg)

View File

@ -733,9 +733,10 @@ def do_patch(self):
# Construct paths to special files in the archive dir used to # Construct paths to special files in the archive dir used to
# keep track of whether patches were successfully applied. # keep track of whether patches were successfully applied.
archive_dir = self.stage.source_path archive_dir = self.stage.source_path
good_file = join_path(archive_dir, '.spack_patched') good_file = join_path(archive_dir, '.spack_patched')
bad_file = join_path(archive_dir, '.spack_patch_failed') no_patches_file = join_path(archive_dir, '.spack_no_patches')
bad_file = join_path(archive_dir, '.spack_patch_failed')
# If we encounter an archive that failed to patch, restage it # If we encounter an archive that failed to patch, restage it
# so that we can apply all the patches again. # so that we can apply all the patches again.
@ -749,29 +750,46 @@ def do_patch(self):
if os.path.isfile(good_file): if os.path.isfile(good_file):
tty.msg("Already patched %s" % self.name) tty.msg("Already patched %s" % self.name)
return return
elif os.path.isfile(no_patches_file):
tty.msg("No patches needed for %s." % self.name)
return
# Apply all the patches for specs that match this one # Apply all the patches for specs that match this one
patched = False
for spec, patch_list in self.patches.items(): for spec, patch_list in self.patches.items():
if self.spec.satisfies(spec): if self.spec.satisfies(spec):
for patch in patch_list: for patch in patch_list:
tty.msg('Applying patch %s' % patch.path_or_url)
try: try:
patch.apply(self.stage) patch.apply(self.stage)
tty.msg('Applied patch %s' % patch.path_or_url)
patched = True
except: except:
# Touch bad file if anything goes wrong. # Touch bad file if anything goes wrong.
tty.msg('Patch %s failed.' % patch.path_or_url)
touch(bad_file) touch(bad_file)
raise raise
# patch succeeded. Get rid of failed file & touch good file so we if has_patch_fun:
# don't try to patch again again next time. try:
self.patch()
tty.msg("Ran patch() for %s." % self.name)
patched = True
except:
tty.msg("patch() function failed for %s." % self.name)
touch(bad_file)
raise
# Get rid of any old failed file -- patches have either succeeded
# or are not needed. This is mostly defensive -- it's needed
# if the restage() method doesn't clean *everything* (e.g., for a repo)
if os.path.isfile(bad_file): if os.path.isfile(bad_file):
os.remove(bad_file) os.remove(bad_file)
touch(good_file)
if has_patch_fun: # touch good or no patches file so that we skip next time.
self.patch() if patched:
touch(good_file)
tty.msg("Patched %s" % self.name) else:
touch(no_patches_file)
def do_fake_install(self): def do_fake_install(self):
@ -1164,7 +1182,7 @@ def fetch_remote_versions(self):
raise VersionFetchError(self.__class__) raise VersionFetchError(self.__class__)
try: try:
return find_versions_of_archive( return spack.util.web.find_versions_of_archive(
*self.all_urls, list_url=self.list_url, list_depth=self.list_depth) *self.all_urls, list_url=self.list_url, list_depth=self.list_depth)
except spack.error.NoNetworkConnectionError, e: except spack.error.NoNetworkConnectionError, e:
tty.die("Package.fetch_versions couldn't connect to:", tty.die("Package.fetch_versions couldn't connect to:",
@ -1188,49 +1206,6 @@ def rpath_args(self):
return " ".join("-Wl,-rpath=%s" % p for p in self.rpath) return " ".join("-Wl,-rpath=%s" % p for p in self.rpath)
def find_versions_of_archive(*archive_urls, **kwargs):
list_url = kwargs.get('list_url', None)
list_depth = kwargs.get('list_depth', 1)
# Generate a list of list_urls based on archive urls and any
# explicitly listed list_url in the package
list_urls = set()
if list_url:
list_urls.add(list_url)
for aurl in archive_urls:
list_urls.add(spack.url.find_list_url(aurl))
# Grab some web pages to scrape.
page_map = {}
for lurl in list_urls:
pages = spack.util.web.get_pages(lurl, depth=list_depth)
page_map.update(pages)
# Scrape them for archive URLs
regexes = []
for aurl in archive_urls:
# This creates a regex from the URL with a capture group for
# the version part of the URL. The capture group is converted
# to a generic wildcard, so we can use this to extract things
# on a page that look like archive URLs.
url_regex = spack.url.wildcard_version(aurl)
# We'll be a bit more liberal and just look for the archive
# part, not the full path.
regexes.append(os.path.basename(url_regex))
# Build a version list from all the matches we find
versions = {}
for page_url, content in page_map.iteritems():
# extract versions from matches.
for regex in regexes:
versions.update(
(Version(m.group(1)), urljoin(page_url, m.group(0)))
for m in re.finditer(regex, content))
return versions
def validate_package_url(url_string): def validate_package_url(url_string):
"""Determine whether spack can handle a particular URL or not.""" """Determine whether spack can handle a particular URL or not."""
url = urlparse(url_string) url = urlparse(url_string)

View File

@ -82,14 +82,18 @@ def __init__(self, url_or_fetch_strategy, **kwargs):
stage object later). If name is not provided, then this stage object later). If name is not provided, then this
stage will be given a unique name automatically. stage will be given a unique name automatically.
""" """
# TODO: fetch/stage coupling needs to be reworked -- the logic
# TODO: here is convoluted and not modular enough.
if isinstance(url_or_fetch_strategy, basestring): if isinstance(url_or_fetch_strategy, basestring):
self.fetcher = fs.from_url(url_or_fetch_strategy) self.fetcher = fs.from_url(url_or_fetch_strategy)
elif isinstance(url_or_fetch_strategy, fs.FetchStrategy): elif isinstance(url_or_fetch_strategy, fs.FetchStrategy):
self.fetcher = url_or_fetch_strategy self.fetcher = url_or_fetch_strategy
else: else:
raise ValueError("Can't construct Stage without url or fetch strategy") raise ValueError("Can't construct Stage without url or fetch strategy")
self.fetcher.set_stage(self) self.fetcher.set_stage(self)
self.default_fetcher = self.fetcher # self.fetcher can change with mirrors.
self.skip_checksum_for_mirror = True # used for mirrored archives of repositories.
self.name = kwargs.get('name') self.name = kwargs.get('name')
self.mirror_path = kwargs.get('mirror_path') self.mirror_path = kwargs.get('mirror_path')
@ -198,17 +202,18 @@ def _setup(self):
@property @property
def archive_file(self): def archive_file(self):
"""Path to the source archive within this stage directory.""" """Path to the source archive within this stage directory."""
if not isinstance(self.fetcher, fs.URLFetchStrategy): paths = []
return None if isinstance(self.fetcher, fs.URLFetchStrategy):
paths.append(os.path.join(self.path, os.path.basename(self.fetcher.url)))
paths = [os.path.join(self.path, os.path.basename(self.fetcher.url))]
if self.mirror_path: if self.mirror_path:
paths.append(os.path.join(self.path, os.path.basename(self.mirror_path))) paths.append(os.path.join(self.path, os.path.basename(self.mirror_path)))
for path in paths: for path in paths:
if os.path.exists(path): if os.path.exists(path):
return path return path
return None else:
return None
@property @property
@ -238,23 +243,34 @@ def fetch(self):
"""Downloads an archive or checks out code from a repository.""" """Downloads an archive or checks out code from a repository."""
self.chdir() self.chdir()
fetchers = [self.fetcher] fetchers = [self.default_fetcher]
# TODO: move mirror logic out of here and clean it up! # TODO: move mirror logic out of here and clean it up!
# TODO: Or @alalazo may have some ideas about how to use a
# TODO: CompositeFetchStrategy here.
self.skip_checksum_for_mirror = True
if self.mirror_path: if self.mirror_path:
urls = ["%s/%s" % (m, self.mirror_path) for m in _get_mirrors()] urls = ["%s/%s" % (m, self.mirror_path) for m in _get_mirrors()]
# If this archive is normally fetched from a tarball URL,
# then use the same digest. `spack mirror` ensures that
# the checksum will be the same.
digest = None digest = None
if isinstance(self.fetcher, fs.URLFetchStrategy): if isinstance(self.default_fetcher, fs.URLFetchStrategy):
digest = self.fetcher.digest digest = self.default_fetcher.digest
fetchers = [fs.URLFetchStrategy(url, digest)
for url in urls] + fetchers # Have to skip the checkesum for things archived from
for f in fetchers: # repositories. How can this be made safer?
f.set_stage(self) self.skip_checksum_for_mirror = not bool(digest)
for url in urls:
fetchers.insert(0, fs.URLFetchStrategy(url, digest))
for fetcher in fetchers: for fetcher in fetchers:
try: try:
fetcher.fetch() fetcher.set_stage(self)
self.fetcher = fetcher
self.fetcher.fetch()
break break
except spack.error.SpackError, e: except spack.error.SpackError, e:
tty.msg("Fetching from %s failed." % fetcher) tty.msg("Fetching from %s failed." % fetcher)
@ -262,13 +278,22 @@ def fetch(self):
continue continue
else: else:
errMessage = "All fetchers failed for %s" % self.name errMessage = "All fetchers failed for %s" % self.name
self.fetcher = self.default_fetcher
raise fs.FetchError(errMessage, None) raise fs.FetchError(errMessage, None)
def check(self): def check(self):
"""Check the downloaded archive against a checksum digest. """Check the downloaded archive against a checksum digest.
No-op if this stage checks code out of a repository.""" No-op if this stage checks code out of a repository."""
self.fetcher.check() if self.fetcher is not self.default_fetcher and self.skip_checksum_for_mirror:
tty.warn("Fetching from mirror without a checksum!",
"This package is normally checked out from a version "
"control system, but it has been archived on a spack "
"mirror. This means we cannot know a checksum for the "
"tarball in advance. Be sure that your connection to "
"this mirror is secure!.")
else:
self.fetcher.check()
def expand_archive(self): def expand_archive(self):

View File

@ -23,6 +23,7 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
import re import re
import os
import sys import sys
import subprocess import subprocess
import urllib2, cookielib import urllib2, cookielib
@ -70,7 +71,9 @@ def _spider(args):
""" """
url, visited, root, opener, depth, max_depth, raise_on_error = args url, visited, root, opener, depth, max_depth, raise_on_error = args
pages = {} pages = {} # dict from page URL -> text content.
links = set() # set of all links seen on visited pages.
try: try:
# Make a HEAD request first to check the content type. This lets # Make a HEAD request first to check the content type. This lets
# us ignore tarballs and gigantic files. # us ignore tarballs and gigantic files.
@ -99,42 +102,45 @@ def _spider(args):
page = response.read() page = response.read()
pages[response_url] = page pages[response_url] = page
# If we're not at max depth, parse out the links in the page # Parse out the links in the page
link_parser = LinkParser()
subcalls = []
link_parser.feed(page)
while link_parser.links:
raw_link = link_parser.links.pop()
abs_link = urlparse.urljoin(response_url, raw_link)
links.add(abs_link)
# Skip stuff that looks like an archive
if any(raw_link.endswith(suf) for suf in ALLOWED_ARCHIVE_TYPES):
continue
# Skip things outside the root directory
if not abs_link.startswith(root):
continue
# Skip already-visited links
if abs_link in visited:
continue
# If we're not at max depth, follow links.
if depth < max_depth: if depth < max_depth:
link_parser = LinkParser() subcalls.append((abs_link, visited, root, None,
subcalls = [] depth+1, max_depth, raise_on_error))
link_parser.feed(page) visited.add(abs_link)
while link_parser.links: if subcalls:
raw_link = link_parser.links.pop() try:
pool = Pool(processes=len(subcalls))
# Skip stuff that looks like an archive results = pool.map(_spider, subcalls)
if any(raw_link.endswith(suf) for suf in ALLOWED_ARCHIVE_TYPES): for sub_pages, sub_links in results:
continue pages.update(sub_pages)
links.update(sub_links)
# Evaluate the link relative to the page it came from. finally:
abs_link = urlparse.urljoin(response_url, raw_link) pool.terminate()
pool.join()
# Skip things outside the root directory
if not abs_link.startswith(root):
continue
# Skip already-visited links
if abs_link in visited:
continue
subcalls.append((abs_link, visited, root, None, depth+1, max_depth, raise_on_error))
visited.add(abs_link)
if subcalls:
try:
pool = Pool(processes=len(subcalls))
dicts = pool.map(_spider, subcalls)
for d in dicts:
pages.update(d)
finally:
pool.terminate()
pool.join()
except urllib2.URLError, e: except urllib2.URLError, e:
tty.debug(e) tty.debug(e)
@ -155,10 +161,10 @@ def _spider(args):
# Other types of errors are completely ignored, except in debug mode. # Other types of errors are completely ignored, except in debug mode.
tty.debug("Error in _spider: %s" % e) tty.debug("Error in _spider: %s" % e)
return pages return pages, links
def get_pages(root_url, **kwargs): def spider(root_url, **kwargs):
"""Gets web pages from a root URL. """Gets web pages from a root URL.
If depth is specified (e.g., depth=2), then this will also fetches pages If depth is specified (e.g., depth=2), then this will also fetches pages
linked from the root and its children up to depth. linked from the root and its children up to depth.
@ -167,5 +173,69 @@ def get_pages(root_url, **kwargs):
performance over a sequential fetch. performance over a sequential fetch.
""" """
max_depth = kwargs.setdefault('depth', 1) max_depth = kwargs.setdefault('depth', 1)
pages = _spider((root_url, set(), root_url, None, 1, max_depth, False)) pages, links = _spider((root_url, set(), root_url, None, 1, max_depth, False))
return pages return pages, links
def find_versions_of_archive(*archive_urls, **kwargs):
"""Scrape web pages for new versions of a tarball.
Arguments:
archive_urls:
URLs for different versions of a package. Typically these
are just the tarballs from the package file itself. By
default, this searches the parent directories of archives.
Keyword Arguments:
list_url:
URL for a listing of archives. Spack wills scrape these
pages for download links that look like the archive URL.
list_depth:
Max depth to follow links on list_url pages.
"""
list_url = kwargs.get('list_url', None)
list_depth = kwargs.get('list_depth', 1)
# Generate a list of list_urls based on archive urls and any
# explicitly listed list_url in the package
list_urls = set()
if list_url:
list_urls.add(list_url)
for aurl in archive_urls:
list_urls.add(spack.url.find_list_url(aurl))
# Grab some web pages to scrape.
pages = {}
links = set()
for lurl in list_urls:
p, l = spider(lurl, depth=list_depth)
pages.update(p)
links.update(l)
# Scrape them for archive URLs
regexes = []
for aurl in archive_urls:
# This creates a regex from the URL with a capture group for
# the version part of the URL. The capture group is converted
# to a generic wildcard, so we can use this to extract things
# on a page that look like archive URLs.
url_regex = spack.url.wildcard_version(aurl)
# We'll be a bit more liberal and just look for the archive
# part, not the full path.
regexes.append(os.path.basename(url_regex))
# Build a dict version -> URL from any links that match the wildcards.
versions = {}
for url in links:
if any(re.search(r, url) for r in regexes):
try:
ver = spack.url.parse_version(url)
versions[ver] = url
except spack.url.UndetectableVersionError as e:
continue
return versions

View File

@ -55,13 +55,11 @@
# avoids the need to come up with a user-friendly naming scheme for # avoids the need to come up with a user-friendly naming scheme for
# spack dotfiles. # spack dotfiles.
######################################################################## ########################################################################
function spack { function spack {
# save raw arguments into an array before butchering them # save raw arguments into an array before butchering them
args=() args=( "$@" )
for a in "$@"; do
# yup, this is awful, blame bash2 compat
args=("${args[@]}" "$a")
done
# accumulate initial flags for main spack command # accumulate initial flags for main spack command
_sp_flags="" _sp_flags=""
while [[ "$1" =~ ^- ]]; do while [[ "$1" =~ ^- ]]; do

View File

@ -14,6 +14,7 @@ class Boost(Package):
list_url = "http://sourceforge.net/projects/boost/files/boost/" list_url = "http://sourceforge.net/projects/boost/files/boost/"
list_depth = 2 list_depth = 2
version('1.60.0', '65a840e1a0b13a558ff19eeb2c4f0cbe')
version('1.59.0', '6aa9a5c6a4ca1016edd0ed1178e3cb87') version('1.59.0', '6aa9a5c6a4ca1016edd0ed1178e3cb87')
version('1.58.0', 'b8839650e61e9c1c0a89f371dd475546') version('1.58.0', 'b8839650e61e9c1c0a89f371dd475546')
version('1.57.0', '1be49befbdd9a5ce9def2983ba3e7b76') version('1.57.0', '1be49befbdd9a5ce9def2983ba3e7b76')
@ -48,11 +49,11 @@ class Boost(Package):
variant('mpi', default=False, description='Activate the component Boost.MPI') variant('mpi', default=False, description='Activate the component Boost.MPI')
variant('compression', default=True, description='Activate the compression Boost.iostreams') variant('compression', default=True, description='Activate the compression Boost.iostreams')
depends_on('mpi', when='+mpi')
depends_on('python', when='+python') depends_on('python', when='+python')
depends_on('zlib', when='+compression') depends_on('mpi', when='+mpi')
depends_on('bzip2', when='+compression') depends_on('bzip2', when='+compression')
depends_on('zlib', when='+compression')
def url_for_version(self, version): def url_for_version(self, version):
"""Handle Boost's weird URLs, which write the version two different ways.""" """Handle Boost's weird URLs, which write the version two different ways."""
parts = [str(p) for p in Version(version)] parts = [str(p) for p in Version(version)]
@ -61,20 +62,23 @@ def url_for_version(self, version):
return "http://downloads.sourceforge.net/project/boost/boost/%s/boost_%s.tar.bz2" % ( return "http://downloads.sourceforge.net/project/boost/boost/%s/boost_%s.tar.bz2" % (
dots, underscores) dots, underscores)
def determine_toolset(self): def determine_toolset(self, spec):
toolsets = {'gcc': 'gcc', if spec.satisfies("=darwin-x86_64"):
return 'darwin'
toolsets = {'g++': 'gcc',
'icpc': 'intel', 'icpc': 'intel',
'clang++': 'clang'} 'clang++': 'clang'}
for cc, toolset in toolsets.iteritems(): for cc, toolset in toolsets.iteritems():
if(cc in self.compiler.cxx_names): if cc in self.compiler.cxx_names:
return toolset return toolset
# fallback to gcc if no toolset found # fallback to gcc if no toolset found
return 'gcc' return 'gcc'
def determine_bootstrap_options(self, spec, options): def determine_bootstrap_options(self, spec, options):
options.append('--with-toolset=%s' % self.determine_toolset()) options.append('--with-toolset=%s' % self.determine_toolset(spec))
without_libs = [] without_libs = []
if '~mpi' in spec: if '~mpi' in spec:
@ -82,17 +86,20 @@ def determine_bootstrap_options(self, spec, options):
if '~python' in spec: if '~python' in spec:
without_libs.append('python') without_libs.append('python')
else: else:
options.append('--with-python=%s' % (spec['python'].prefix.bin + '/python')) options.append('--with-python=%s' %
join_path(spec['python'].prefix.bin, 'python'))
if without_libs: if without_libs:
options.append('--without-libraries=%s' % ','.join(without_libs)) options.append('--without-libraries=%s' % ','.join(without_libs))
with open('user-config.jam', 'w') as f: with open('user-config.jam', 'w') as f:
if '+mpi' in spec: if '+mpi' in spec:
f.write('using mpi : %s ;\n' % (spec['mpi'].prefix.bin + '/mpicxx')) f.write('using mpi : %s ;\n' %
joinpath(spec['mpi'].prefix.bin, 'mpicxx'))
if '+python' in spec: if '+python' in spec:
f.write('using python : %s : %s ;\n' % (spec['python'].version, f.write('using python : %s : %s ;\n' %
(spec['python'].prefix.bin + '/python'))) (spec['python'].version,
joinpath(spec['python'].prefix.bin, 'python')))
def determine_b2_options(self, spec, options): def determine_b2_options(self, spec, options):
if '+debug' in spec: if '+debug' in spec:
@ -101,22 +108,26 @@ def determine_b2_options(self, spec, options):
options.append('variant=release') options.append('variant=release')
if '~compression' in spec: if '~compression' in spec:
options.extend(['-s NO_BZIP2=1', options.extend([
'-s NO_ZLIB=1', '-s', 'NO_BZIP2=1',
]) '-s', 'NO_ZLIB=1'])
if '+compression' in spec: if '+compression' in spec:
options.extend(['-s BZIP2_INCLUDE=%s' % spec['bzip2'].prefix.include, options.extend([
'-s BZIP2_LIBPATH=%s' % spec['bzip2'].prefix.lib, '-s', 'BZIP2_INCLUDE=%s' % spec['bzip2'].prefix.include,
'-s ZLIB_INCLUDE=%s' % spec['zlib'].prefix.include, '-s', 'BZIP2_LIBPATH=%s' % spec['bzip2'].prefix.lib,
'-s ZLIB_LIBPATH=%s' % spec['zlib'].prefix.lib]) '-s', 'ZLIB_INCLUDE=%s' % spec['zlib'].prefix.include,
'-s', 'ZLIB_LIBPATH=%s' % spec['zlib'].prefix.lib,
])
options.extend(['toolset=%s' % self.determine_toolset(), options.extend([
'link=static,shared', 'toolset=%s' % self.determine_toolset(spec),
'--layout=tagged']) 'link=static,shared',
'threading=single,multi',
'--layout=tagged'])
def install(self, spec, prefix): def install(self, spec, prefix):
# to make him find the user-config.jam # to make Boost find the user-config.jam
env['BOOST_BUILD_PATH'] = './' env['BOOST_BUILD_PATH'] = './'
bootstrap = Executable('./bootstrap.sh') bootstrap = Executable('./bootstrap.sh')
@ -130,9 +141,8 @@ def install(self, spec, prefix):
b2name = './b2' if spec.satisfies('@1.47:') else './bjam' b2name = './b2' if spec.satisfies('@1.47:') else './bjam'
b2 = Executable(b2name) b2 = Executable(b2name)
b2_options = ['-j %s' % make_jobs] b2_options = ['-j', '%s' % make_jobs]
self.determine_b2_options(spec, b2_options) self.determine_b2_options(spec, b2_options)
b2('install', 'threading=single', *b2_options) b2('install', *b2_options)
b2('install', 'threading=multi', *b2_options)

View File

@ -1,58 +1,55 @@
# FIXME: Add copyright statement ##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
# #
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import * from spack import *
from contextlib import closing
class Cube(Package): class Cube(Package):
"""Cube the profile viewer for Score-P and Scalasca profiles. It """
displays a multi-dimensional performance space consisting Cube the profile viewer for Score-P and Scalasca profiles. It displays a multi-dimensional performance space
of the dimensions (i) performance metric, (ii) call path, consisting of the dimensions:
and (iii) system resource.""" - performance metric
- call path
- system resource
"""
homepage = "http://www.scalasca.org/software/cube-4.x/download.html" homepage = "http://www.scalasca.org/software/cube-4.x/download.html"
url = "http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz" url = "http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz"
version('4.3.3', '07e109248ed8ffc7bdcce614264a2909', version('4.3.3', '07e109248ed8ffc7bdcce614264a2909',
url='http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3.3.tar.gz') url='http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3.3.tar.gz')
version('4.2.3', '8f95b9531f5a8f8134f279c2767c9b20') version('4.2.3', '8f95b9531f5a8f8134f279c2767c9b20',
url="http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz")
version('4.3TP1', 'a2090fbc7b2ba394bd5c09ba971e237f', # TODO : add variant that builds GUI on top of Qt
url = 'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz')
# Using CC as C++ compiler provides quirky workaround for a Score-P build system attempt
# to guess a matching C compiler when configuring scorep-score
backend_user_provided = """\
CC=cc
CXX=CC
F77=f77
FC=f90
#CFLAGS=-fPIC
#CXXFLAGS=-fPIC
"""
frontend_user_provided = """\
CC_FOR_BUILD=cc
CXX_FOR_BUILD=CC
F77_FOR_BUILD=f70
FC_FOR_BUILD=f90
"""
def install(self, spec, prefix): def install(self, spec, prefix):
# Use a custom compiler configuration, otherwise the score-p
# build system messes with spack's compiler settings.
# Create these three files in the build directory
with closing(open("vendor/common/build-config/platforms/platform-backend-user-provided", "w")) as backend_file:
backend_file.write(self.backend_user_provided)
with closing(open("vendor/common/build-config/platforms/platform-frontend-user-provided", "w")) as frontend_file:
frontend_file.write(self.frontend_user_provided)
configure_args = ["--prefix=%s" % prefix, configure_args = ["--prefix=%s" % prefix,
"--with-custom-compilers", "--without-paraver",
"--without-paraver",
"--without-gui"] "--without-gui"]
configure(*configure_args) configure(*configure_args)
make(parallel=False) make(parallel=False)
make("install", parallel=False) make("install", parallel=False)

View File

@ -39,54 +39,21 @@ class Fftw(Package):
version('3.3.4', '2edab8c06b24feeb3b82bbb3ebf3e7b3') version('3.3.4', '2edab8c06b24feeb3b82bbb3ebf3e7b3')
########## variant('float', default=True, description='Produces a single precision version of the library')
# Floating point precision variant('long_double', default=True, description='Produces a long double precision version of the library')
FLOAT = 'float' variant('quad', default=False, description='Produces a quad precision version of the library (works only with GCC and libquadmath)')
LONG_DOUBLE = 'long_double'
QUAD_PRECISION = 'quad'
PRECISION_OPTIONS = {
FLOAT: '--enable-float',
LONG_DOUBLE: '--enable--long-double',
QUAD_PRECISION: '--enable-quad-precision'
}
variant(FLOAT, default=False, description='Produces a single precision version of the library')
variant(LONG_DOUBLE, default=False, description='Produces a long double precision version of the library')
variant(QUAD_PRECISION, default=False, description='Produces a quad precision version of the library (works only with GCC and libquadmath)')
##########
variant('mpi', default=False, description='Activate MPI support') variant('mpi', default=False, description='Activate MPI support')
depends_on('mpi', when='+mpi') depends_on('mpi', when='+mpi')
@staticmethod
def enabled(x):
"""
Given a variant name returns the string that means the variant is enabled
:param x: variant name
"""
# FIXME : duplicated from MVAPICH2
return '+' + x
def check_fortran_availability(self, options):
if not self.compiler.f77 or not self.compiler.fc:
options.append("--disable-fortran")
def set_floating_point_precision(self, spec, options):
l = [option for variant, option in Fftw.PRECISION_OPTIONS.iteritems() if self.enabled(variant) in spec]
if len(l) > 1:
raise RuntimeError('At most one floating point precision variant may activated per build.')
options.extend(l)
def install(self, spec, prefix): def install(self, spec, prefix):
options = ['--prefix=%s' % prefix, options = ['--prefix=%s' % prefix,
'--enable-shared', '--enable-shared',
'--enable-threads', '--enable-threads',
'--enable-openmp'] '--enable-openmp']
self.check_fortran_availability(options) if not self.compiler.f77 or not self.compiler.fc:
self.set_floating_point_precision(spec, options) options.append("--disable-fortran")
if '+mpi' in spec: if '+mpi' in spec:
options.append('--enable-mpi') options.append('--enable-mpi')
@ -94,3 +61,15 @@ def install(self, spec, prefix):
make() make()
make("install") make("install")
if '+float' in spec:
configure('--enable-float', *options)
make()
make("install")
if '+long_double' in spec:
configure('--enable-long-double', *options)
make()
make("install")
if '+quad' in spec:
configure('--enable-quad-precision', *options)
make()
make("install")

View File

@ -15,6 +15,8 @@ class Hwloc(Package):
homepage = "http://www.open-mpi.org/projects/hwloc/" homepage = "http://www.open-mpi.org/projects/hwloc/"
url = "http://www.open-mpi.org/software/hwloc/v1.9/downloads/hwloc-1.9.tar.gz" url = "http://www.open-mpi.org/software/hwloc/v1.9/downloads/hwloc-1.9.tar.gz"
version('1.11.2', '486169cbe111cdea57be12638828ebbf',
url='http://www.open-mpi.org/software/hwloc/v1.11/downloads/hwloc-1.11.2.tar.bz2')
version('1.11.1', '002742efd3a8431f98d6315365a2b543', version('1.11.1', '002742efd3a8431f98d6315365a2b543',
url='http://www.open-mpi.org/software/hwloc/v1.11/downloads/hwloc-1.11.1.tar.bz2') url='http://www.open-mpi.org/software/hwloc/v1.11/downloads/hwloc-1.11.1.tar.bz2')
version('1.9', '1f9f9155682fe8946a97c08896109508') version('1.9', '1f9f9155682fe8946a97c08896109508')
@ -26,4 +28,3 @@ def install(self, spec, prefix):
make() make()
make("install") make("install")

View File

@ -0,0 +1,66 @@
from spack import *
import os
class Julia(Package):
    """The Julia Language: A fresh approach to technical computing"""
    homepage = "http://julialang.org"
    url      = "http://github.com/JuliaLang/julia/releases/download/v0.4.2/julia-0.4.2.tar.gz"

    version('0.4.2', 'ccfeb4f4090c8b31083f5e1ccb03eb06')

    # Build-time dependencies that Julia probably wants, but the build
    # succeeds without them (at least on some systems). Enable as needed:
    # depends_on("cmake")
    # depends_on("awk")
    # depends_on("m4")
    # depends_on("pkg-config")

    # Run-time dependencies, currently disabled pending investigation:
    # depends_on("arpack")
    # depends_on("fftw +float")
    # depends_on("gmp")
    # depends_on("mpfr")
    # depends_on("pcre2")
    #
    # ARPACK: requires BLAS and LAPACK; must match the versions Julia uses.
    # BLAS/LAPACK: Julia prefers 64-bit versions on 64-bit systems; OpenBLAS
    #   has an option for this — expose it as a variant.
    # FFTW: a pre-installed FFTW does not work; needs investigation.
    # GMP, MPFR: same problem as FFTW; needs investigation.
    # LLVM: Julia only works with specific (possibly patched) versions, so
    #   we let Julia build its own LLVM.
    #
    # Other possible dependencies:
    # USE_SYSTEM_OPENLIBM=0
    # USE_SYSTEM_OPENSPECFUN=0
    # USE_SYSTEM_DSFMT=0
    # USE_SYSTEM_SUITESPARSE=0
    # USE_SYSTEM_UTF8PROC=0
    # USE_SYSTEM_LIBGIT2=0

    def install(self, spec, prefix):
        """Write a Make.user with our settings, then build and install."""
        # Explicitly setting CC, CXX, or FC breaks building libuv, one of
        # Julia's dependencies. This might be a Darwin-specific problem.
        # Given how Spack sets up compilers, Julia should still pick up
        # Spack's compilers even without naming them here.
        build_settings = [
            # "CC=cc",
            # "CXX=c++",
            # "FC=fc",
            # "USE_SYSTEM_ARPACK=1",
            # "USE_SYSTEM_FFTW=1",
            # "USE_SYSTEM_GMP=1",
            # "USE_SYSTEM_MPFR=1",
            # TODO "USE_SYSTEM_PCRE=1",
            "prefix=%s" % prefix,
        ]
        # Julia reads build configuration from Make.user in the source tree.
        with open('Make.user', 'w') as make_user:
            for setting in build_settings:
                make_user.write(setting + '\n')
        make()
        make("install")

View File

@ -0,0 +1,14 @@
from spack import *
class Libedit(Package):
    """An autotools compatible port of the NetBSD editline library"""
    homepage = "http://thrysoee.dk/editline/"
    url      = "http://thrysoee.dk/editline/libedit-20150325-3.1.tar.gz"

    version('3.1', '43cdb5df3061d78b5e9d59109871b4f6', url="http://thrysoee.dk/editline/libedit-20150325-3.1.tar.gz")

    def install(self, spec, prefix):
        """Standard autotools build into the Spack prefix."""
        configure('--prefix=%s' % prefix)
        make()
        make("install")

View File

@ -1,4 +1,5 @@
from spack import * from spack import *
import os.path
class Libpciaccess(Package): class Libpciaccess(Package):
"""Generic PCI access library.""" """Generic PCI access library."""
@ -13,6 +14,12 @@ class Libpciaccess(Package):
depends_on('libtool') depends_on('libtool')
def install(self, spec, prefix): def install(self, spec, prefix):
# libpciaccess does not support OS X
if spec.satisfies('=darwin-x86_64'):
# create a dummy directory
mkdir(prefix.lib)
return
from subprocess import call from subprocess import call
call(["./autogen.sh"]) call(["./autogen.sh"])
configure("--prefix=%s" % prefix) configure("--prefix=%s" % prefix)

View File

@ -0,0 +1,22 @@
from spack import *
import os
class Ninja(Package):
    """ A small, fast Make alternative """
    homepage = "https://martine.github.io/ninja/"
    url      = "https://github.com/martine/ninja/archive/v1.6.0.tar.gz"

    version('1.6.0', '254133059f2da79d8727f654d7198f43')

    extends('python')

    def install(self, spec, prefix):
        """Bootstrap ninja with its own configure.py, then copy the
        binary and the misc/ directory into the install prefix.

        Fix: removed the unused ``sh = which('sh')`` lookup — it was
        dead code (the result was never called).
        """
        # configure.py --bootstrap compiles ninja with itself; no Makefile.
        python('configure.py', '--bootstrap')

        cp = which('cp')
        bindir = os.path.join(prefix, 'bin')
        mkdir(bindir)
        # -a preserves attributes; -t names the destination directory.
        cp('-a', '-t', bindir, 'ninja')
        # misc/ holds editor integration and the ninja_syntax helper module.
        cp('-ra', 'misc', prefix)

View File

@ -1,18 +1,38 @@
# FIXME: Add copyright statement here ##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import * from spack import *
from contextlib import closing
class Opari2(Package): class Opari2(Package):
"""OPARI2 is a source-to-source instrumentation tool for OpenMP and """
hybrid codes. It surrounds OpenMP directives and runtime library OPARI2 is a source-to-source instrumentation tool for OpenMP and hybrid codes. It surrounds OpenMP directives and
calls with calls to the POMP2 measurement interface. runtime library calls with calls to the POMP2 measurement interface. OPARI2 will provide you with a new
OPARI2 will provide you with a new initialization method that allows initialization method that allows for multi-directory and parallel builds as well as the usage of pre-instrumented
for multi-directory and parallel builds as well as the usage of libraries. Furthermore, an efficient way of tracking parent-child relationships was added. Additionally, we extended
pre-instrumented libraries. Furthermore, an efficient way of OPARI2 to support instrumentation of OpenMP 3.0 tied tasks.
tracking parent-child relationships was added. Additionally, we """
extended OPARI2 to support instrumentation of OpenMP 3.0
tied tasks. """
homepage = "http://www.vi-hps.org/projects/score-p" homepage = "http://www.vi-hps.org/projects/score-p"
url = "http://www.vi-hps.org/upload/packages/opari2/opari2-1.1.2.tar.gz" url = "http://www.vi-hps.org/upload/packages/opari2/opari2-1.1.2.tar.gz"
@ -21,47 +41,8 @@ class Opari2(Package):
url='http://www.vi-hps.org/upload/packages/opari2/opari2-1.1.4.tar.gz') url='http://www.vi-hps.org/upload/packages/opari2/opari2-1.1.4.tar.gz')
version('1.1.2', '9a262c7ca05ff0ab5f7775ae96f3539e') version('1.1.2', '9a262c7ca05ff0ab5f7775ae96f3539e')
backend_user_provided = """\
CC=cc
CXX=c++
F77=f77
FC=f90
CFLAGS=-fPIC
CXXFLAGS=-fPIC
"""
frontend_user_provided = """\
CC_FOR_BUILD=cc
CXX_FOR_BUILD=c++
F77_FOR_BUILD=f70
FC_FOR_BUILD=f90
CFLAGS_FOR_BUILD=-fPIC
CXXFLAGS_FOR_BUILD=-fPIC
"""
mpi_user_provided = """\
MPICC=mpicc
MPICXX=mpicxx
MPIF77=mpif77
MPIFC=mpif90
MPI_CFLAGS=-fPIC
MPI_CXXFLAGS=-fPIC
"""
def install(self, spec, prefix): def install(self, spec, prefix):
# Use a custom compiler configuration, otherwise the score-p
# build system messes with spack's compiler settings.
# Create these three files in the build directory
with closing(open("platform-backend-user-provided", "w")) as backend_file:
backend_file.write(self.backend_user_provided)
with closing(open("platform-frontend-user-provided", "w")) as frontend_file:
frontend_file.write(self.frontend_user_provided)
with closing(open("platform-mpi-user-provided", "w")) as mpi_file:
mpi_file.write(self.mpi_user_provided)
# FIXME: Modify the configure line to suit your build system here.
configure("--prefix=%s" % prefix, configure("--prefix=%s" % prefix,
"--with-custom-compilers",
"--enable-shared") "--enable-shared")
# FIXME: Add logic to build and install here
make() make()
make("install") make("install")

View File

@ -1,12 +1,35 @@
# FIXME: Add copyright ##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import * from spack import *
from contextlib import closing
import os
class Otf2(Package): class Otf2(Package):
"""The Open Trace Format 2 is a highly scalable, memory efficient event """
trace data format plus support library.""" The Open Trace Format 2 is a highly scalable, memory efficient event trace data format plus support library.
"""
homepage = "http://www.vi-hps.org/score-p" homepage = "http://www.vi-hps.org/score-p"
url = "http://www.vi-hps.org/upload/packages/otf2/otf2-1.4.tar.gz" url = "http://www.vi-hps.org/upload/packages/otf2/otf2-1.4.tar.gz"
@ -22,57 +45,11 @@ class Otf2(Package):
version('1.2.1', '8fb3e11fb7489896596ae2c7c83d7fc8', version('1.2.1', '8fb3e11fb7489896596ae2c7c83d7fc8',
url="http://www.vi-hps.org/upload/packages/otf2/otf2-1.2.1.tar.gz") url="http://www.vi-hps.org/upload/packages/otf2/otf2-1.2.1.tar.gz")
backend_user_provided = """\
CC=cc
CXX=c++
F77=f77
FC=f90
CFLAGS=-fPIC
CXXFLAGS=-fPIC
"""
frontend_user_provided = """\
CC_FOR_BUILD=cc
CXX_FOR_BUILD=c++
F77_FOR_BUILD=f70
FC_FOR_BUILD=f90
CFLAGS_FOR_BUILD=-fPIC
CXXFLAGS_FOR_BUILD=-fPIC
"""
mpi_user_provided = """\
MPICC=cc
MPICXX=c++
MPIF77=f77
MPIFC=f90
MPI_CFLAGS=-fPIC
MPI_CXXFLAGS=-fPIC
"""
@when('@:1.2.1')
def version_specific_args(self):
return ["--with-platform=disabled", "CC=cc", "CXX=c++", "F77=f77", "F90=f90", "CFLAGS=-fPIC", "CXXFLAGS=-fPIC"]
@when('@1.3:')
def version_specific_args(self):
# TODO: figure out what scorep's build does as of otf2 1.3
return ["--with-custom-compilers"]
def install(self, spec, prefix): def install(self, spec, prefix):
# Use a custom compiler configuration, otherwise the score-p
# build system messes with spack's compiler settings.
# Create these three files in the build directory
with closing(open("platform-backend-user-provided", "w")) as backend_file:
backend_file.write(self.backend_user_provided)
with closing(open("platform-frontend-user-provided", "w")) as frontend_file:
frontend_file.write(self.frontend_user_provided)
with closing(open("platform-mpi-user-provided", "w")) as mpi_file:
mpi_file.write(self.mpi_user_provided)
configure_args=["--prefix=%s" % prefix, configure_args=["--prefix=%s" % prefix,
"--enable-shared"] "--enable-shared",
"CFLAGS=-fPIC",
configure_args.extend(self.version_specific_args()) "CXXFLAGS=-fPIC"]
configure(*configure_args) configure(*configure_args)
make() make()
make("install") make("install")

View File

@ -0,0 +1,15 @@
from spack import *
class Pcre2(Package):
    """The PCRE2 package contains Perl Compatible Regular Expression
    libraries. These are useful for implementing regular expression
    pattern matching using the same syntax and semantics as Perl 5."""

    # Fix: the original read ``"http://www.pcre.org"""`` — the stray
    # trailing quotes formed an implicit concatenation with an empty
    # string literal (a typo). The value is unchanged.
    homepage = "http://www.pcre.org"
    url      = "ftp://ftp.csx.cam.ac.uk/pub/software/programming/pcre/pcre2-10.20.tar.bz2"

    version('10.20', 'dcd027c57ecfdc8a6c3af9d0acf5e3f7')

    def install(self, spec, prefix):
        """Standard autotools build into the Spack prefix."""
        configure("--prefix=%s" % prefix)
        make()
        make("install")

View File

@ -0,0 +1,15 @@
from spack import *
class PyBlessings(Package):
    """A nicer, kinder way to write to the terminal """
    homepage = "https://github.com/erikrose/blessings"
    url      = "https://pypi.python.org/packages/source/b/blessings/blessings-1.6.tar.gz"

    version('1.6', '4f552a8ebcd4982693c92571beb99394')

    depends_on('py-setuptools')
    extends("python")

    def install(self, spec, prefix):
        # Plain setuptools install into the Spack prefix.
        python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,16 @@
from spack import *
class PyCoverage(Package):
    """ Testing coverage checker for python """
    homepage = "http://nedbatchelder.com/code/coverage/"
    url      = "https://pypi.python.org/packages/source/c/coverage/coverage-4.0a6.tar.gz"

    version('4.0a6', '1bb4058062646148965bef0796b61efc')

    depends_on('py-setuptools')
    extends('python')

    def install(self, spec, prefix):
        # Plain setuptools install into the Spack prefix.
        python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,15 @@
from spack import *
class PyMysqldb1(Package):
    """Legacy mysql bindings for python"""
    homepage = "https://github.com/farcepest/MySQLdb1"
    url      = "https://github.com/farcepest/MySQLdb1/archive/MySQLdb-1.2.5.tar.gz"

    version('1.2.5', '332c8f4955b6bc0c79ea15170bf7321b')

    extends('python')
    depends_on('py-setuptools')

    def install(self, spec, prefix):
        # Plain setuptools install into the Spack prefix.
        python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,15 @@
from spack import *
class PyTappy(Package):
    """Python TAP interface module for unit tests"""
    homepage = "https://github.com/mblayman/tappy"
    # Distributed on PyPI under the name "tap.py".
    url      = "https://pypi.python.org/packages/source/t/tap.py/tap.py-1.6.tar.gz"

    version('1.6', 'c8bdb93ad66e05f939905172a301bedf')

    extends('python')
    depends_on('py-setuptools')

    def install(self, spec, prefix):
        # Plain setuptools install into the Spack prefix.
        python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,16 @@
from spack import *
class PyUrwid(Package):
    """A full-featured console UI library"""
    homepage = "http://urwid.org/"
    url      = "https://pypi.python.org/packages/source/u/urwid/urwid-1.3.0.tar.gz"

    version('1.3.0', 'a989acd54f4ff1a554add464803a9175')

    depends_on('py-setuptools')
    extends("python")

    def install(self, spec, prefix):
        # Plain setuptools install into the Spack prefix.
        python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -5,6 +5,7 @@ class Rsync(Package):
homepage = "https://rsync.samba.org" homepage = "https://rsync.samba.org"
url = "https://download.samba.org/pub/rsync/rsync-3.1.1.tar.gz" url = "https://download.samba.org/pub/rsync/rsync-3.1.1.tar.gz"
version('3.1.2', '0f758d7e000c0f7f7d3792610fad70cb')
version('3.1.1', '43bd6676f0b404326eee2d63be3cdcfe') version('3.1.1', '43bd6676f0b404326eee2d63be3cdcfe')
def install(self, spec, prefix): def install(self, spec, prefix):

View File

@ -1,65 +1,63 @@
# FIXME: Add copyright ##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import * from spack import *
class Scalasca(Package): class Scalasca(Package):
"""Scalasca is a software tool that supports the performance optimization """
of parallel programs by measuring and analyzing their runtime behavior. Scalasca is a software tool that supports the performance optimization of parallel programs by measuring and
The analysis identifies potential performance bottlenecks - in analyzing their runtime behavior. The analysis identifies potential performance bottlenecks - in particular those
particular those concerning communication and synchronization - and concerning communication and synchronization - and offers guidance in exploring their causes.
offers guidance in exploring their causes.""" """
# FIXME: add a proper url for your package's homepage here.
homepage = "http://www.scalasca.org" homepage = "http://www.scalasca.org"
url = "http://apps.fz-juelich.de/scalasca/releases/scalasca/2.1/dist/scalasca-2.1.tar.gz" url = "http://apps.fz-juelich.de/scalasca/releases/scalasca/2.1/dist/scalasca-2.1.tar.gz"
version('2.1', 'bab9c2b021e51e2ba187feec442b96e6', version('2.2.2', '2bafce988b0522d18072f7771e491ab9',
url = 'http://apps.fz-juelich.de/scalasca/releases/scalasca/2.1/dist/scalasca-2.1.tar.gz' ) url='http://apps.fz-juelich.de/scalasca/releases/scalasca/2.2/dist/scalasca-2.2.2.tar.gz')
version('2.1', 'bab9c2b021e51e2ba187feec442b96e6',
url='http://apps.fz-juelich.de/scalasca/releases/scalasca/2.1/dist/scalasca-2.1.tar.gz')
depends_on("mpi") depends_on("mpi")
depends_on("otf2@1.4") ##########
depends_on("cube@4.2.3") # Hard-code dependencies for Scalasca according to what stated in the release page
# The OTF2 library path should be detected automatically from SCOREP
backend_user_provided = """\ # SCALASCA 2.2.2
CC=cc depends_on("scorep@1.4:", when='@2.2.2')
CXX=c++ depends_on("cube@4.3:", when='@2.2.2')
F77=f77 # SCALASCA 2.1
FC=f90 depends_on("scorep@1.3", when='@2.1')
CFLAGS=-fPIC depends_on("cube@4.2:", when='@2.1')
CXXFLAGS=-fPIC ##########
"""
frontend_user_provided = """\
CC_FOR_BUILD=cc
CXX_FOR_BUILD=c++
F77_FOR_BUILD=f70
FC_FOR_BUILD=f90
CFLAGS_FOR_BUILD=-fPIC
CXXFLAGS_FOR_BUILD=-fPIC
"""
mpi_user_provided = """\
MPICC=mpicc
MPICXX=mpicxx
MPIF77=mpif77
MPIFC=mpif90
MPI_CFLAGS=-fPIC
MPI_CXXFLAGS=-fPIC
"""
def install(self, spec, prefix): def install(self, spec, prefix):
configure_args = ["--prefix=%s" % prefix, configure_args = ["--prefix=%s" % prefix,
"--with-custom-compilers",
"--with-otf2=%s" % spec['otf2'].prefix.bin,
"--with-cube=%s" % spec['cube'].prefix.bin, "--with-cube=%s" % spec['cube'].prefix.bin,
"--enable-shared"] "--enable-shared"]
configure(*configure_args) configure(*configure_args)
make() make()
make("install") make("install")
# FIXME: Modify the configure line to suit your build system here.
configure("--prefix=%s" % prefix)
# FIXME: Add logic to build and install here
make()
make("install")

View File

@ -28,9 +28,9 @@
class Scorep(Package): class Scorep(Package):
""" """
The Score-P measurement infrastructure is a highly scalable and The Score-P measurement infrastructure is a highly scalable and easy-to-use tool suite for profiling, event
easy-to-use tool suite for profiling, event tracing, and online tracing, and online analysis of HPC applications.
analysis of HPC applications.""" """
homepage = "http://www.vi-hps.org/projects/score-p" homepage = "http://www.vi-hps.org/projects/score-p"
url = "http://www.vi-hps.org/upload/packages/scorep/scorep-1.2.3.tar.gz" url = "http://www.vi-hps.org/upload/packages/scorep/scorep-1.2.3.tar.gz"
@ -55,15 +55,6 @@ class Scorep(Package):
depends_on("mpi") depends_on("mpi")
depends_on("papi") depends_on("papi")
def get_compiler_config_line(self):
backend_user_provided = ['CC=%s' % self.compiler.cc_names[0],
'CXX=%s' % self.compiler.cxx_names[0],
'F77=%s' % self.compiler.f77_names[0] if len(self.compiler.f77_names) else "",
'FC=%s' % self.compiler.fc_names[0] if len(self.compiler.fc_names) else "",
'CFLAGS=-fPIC %s' % self.rpath_args,
'CXXFLAGS=-fPIC %s'% self.rpath_args]
return backend_user_provided
def install(self, spec, prefix): def install(self, spec, prefix):
configure = Executable( join_path(self.stage.source_path, 'configure') ) configure = Executable( join_path(self.stage.source_path, 'configure') )
with working_dir('spack-build', create=True): with working_dir('spack-build', create=True):
@ -73,8 +64,9 @@ def install(self, spec, prefix):
"--with-cube=%s" % spec['cube'].prefix.bin, "--with-cube=%s" % spec['cube'].prefix.bin,
"--with-papi-header=%s" % spec['papi'].prefix.include, "--with-papi-header=%s" % spec['papi'].prefix.include,
"--with-papi-lib=%s" % spec['papi'].prefix.lib, "--with-papi-lib=%s" % spec['papi'].prefix.lib,
"--enable-shared"] "--enable-shared",
configure_args.extend(self.get_compiler_config_line()) "CFLAGS=-fPIC",
"CXXFLAGS=-fPIC"]
configure(*configure_args) configure(*configure_args)
make() make()
make("install") make("install")