From a73d91a2e58a040d488bec1368212d93e058ed4d Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Tue, 12 Jul 2016 16:59:08 -0400 Subject: [PATCH 001/284] Add URL and md5 for jdk-8u92. Installed on Centos7.2 system, `java -version` works. --- var/spack/repos/builtin/packages/jdk/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/jdk/package.py b/var/spack/repos/builtin/packages/jdk/package.py index 2ec86f05e3c..f0b871742ca 100644 --- a/var/spack/repos/builtin/packages/jdk/package.py +++ b/var/spack/repos/builtin/packages/jdk/package.py @@ -40,6 +40,8 @@ class Jdk(Package): version('8u66-linux-x64', '88f31f3d642c3287134297b8c10e61bf', url="http://download.oracle.com/otn-pub/java/jdk/8u66-b17/jdk-8u66-linux-x64.tar.gz") + version('8u92-linux-x64', '65a1cc17ea362453a6e0eb4f13be76e4', + url="http://download.oracle.com/otn-pub/java/jdk/8u92-b14/jdk-8u92-linux-x64.tar.gz") # Oracle requires that you accept their License Agreement in order # to access the Java packages in download.oracle.com. In order to From b8bd02c5b2f3813b325d96d364b406b2f13df48f Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Wed, 13 Jul 2016 09:48:53 +0200 Subject: [PATCH 002/284] p4est: use run_tests instead of a variant --- .../repos/builtin/packages/p4est/package.py | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/var/spack/repos/builtin/packages/p4est/package.py b/var/spack/repos/builtin/packages/p4est/package.py index d0db4f7f207..642f9231d89 100644 --- a/var/spack/repos/builtin/packages/p4est/package.py +++ b/var/spack/repos/builtin/packages/p4est/package.py @@ -24,22 +24,22 @@ ############################################################################## from spack import * + class P4est(Package): - """Dynamic management of a collection (a forest) of adaptive octrees in parallel""" + """Dynamic management of a collection (a forest) of adaptive octrees in + parallel""" homepage = "http://www.p4est.org" url = "http://p4est.github.io/release/p4est-1.1.tar.gz" version('1.1', '37ba7f4410958cfb38a2140339dbf64f') - variant('tests', default=True, description='Run small tests') - # build dependencies depends_on('automake') depends_on('autoconf') depends_on('libtool@2.4.2:') # other dependencies - depends_on('lua') # Needed for the submodule sc + depends_on('lua') # Needed for the submodule sc depends_on('mpi') depends_on('zlib') @@ -59,11 +59,6 @@ def install(self, spec, prefix): configure('--prefix=%s' % prefix, *options) make() - # Make tests optional as sometimes mpiexec can't be run with an error: - # mpiexec has detected an attempt to run as root. - # Running at root is *strongly* discouraged as any mistake (e.g., in - # defining TMPDIR) or bug can result in catastrophic damage to the OS - # file system, leaving your system in an unusable state. 
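The hunk below swaps the per-package variant for Spack's built-in run-time test switch: self.run_tests is a plain boolean attribute available on every package, true only when tests are requested at install time. A minimal sketch of the tail of the resulting install(), assuming 'options' is the argument list assembled just above and that the install-time switch is spelled --run-tests:

    def install(self, spec, prefix):
        configure('--prefix=%s' % prefix, *options)
        make()
        # run 'make check' only when tests were asked for,
        # e.g. spack install --run-tests p4est
        if self.run_tests:
            make('check')
        make('install')
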
- if '+tests' in self.spec: - make("check") + if self.run_tests: + make("check") make("install") From de9cce3e80e32380b9a7153fc4a143150e1c00cd Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Wed, 13 Jul 2016 11:45:33 +0200 Subject: [PATCH 003/284] p4est: flake8 --- .../repos/builtin/packages/p4est/package.py | 23 ++++++++++--------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/var/spack/repos/builtin/packages/p4est/package.py b/var/spack/repos/builtin/packages/p4est/package.py index 642f9231d89..1a97c3e90ac 100644 --- a/var/spack/repos/builtin/packages/p4est/package.py +++ b/var/spack/repos/builtin/packages/p4est/package.py @@ -44,17 +44,18 @@ class P4est(Package): depends_on('zlib') def install(self, spec, prefix): - options = ['--enable-mpi', - '--enable-shared', - '--disable-vtk-binary', - '--without-blas', - 'CPPFLAGS=-DSC_LOG_PRIORITY=SC_LP_ESSENTIAL', - 'CFLAGS=-O2', - 'CC=%s' % self.spec['mpi'].mpicc, - 'CXX=%s' % self.spec['mpi'].mpicxx, - 'FC=%s' % self.spec['mpi'].mpifc, - 'F77=%s' % self.spec['mpi'].mpif77 - ] + options = [ + '--enable-mpi', + '--enable-shared', + '--disable-vtk-binary', + '--without-blas', + 'CPPFLAGS=-DSC_LOG_PRIORITY=SC_LP_ESSENTIAL', + 'CFLAGS=-O2', + 'CC=%s' % self.spec['mpi'].mpicc, + 'CXX=%s' % self.spec['mpi'].mpicxx, + 'FC=%s' % self.spec['mpi'].mpifc, + 'F77=%s' % self.spec['mpi'].mpif77 + ] configure('--prefix=%s' % prefix, *options) From 64b1bafd41ee5f9770e7082d45a95059a48a6aaa Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Fri, 15 Jul 2016 12:39:40 -0400 Subject: [PATCH 004/284] Make flake8 happy with .../jdk/package.py My PR for adding a new jdk version failed its travis run because of existing flake8 violoations. This package cleans them up. I'm not particularly pythonic, but most of the complaints were obvious. HOWEVER, I'm not particularly confident about handling long lines by taking an flake8 exception comment onto the end. Feedback welcome... --- .../repos/builtin/packages/jdk/package.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/var/spack/repos/builtin/packages/jdk/package.py b/var/spack/repos/builtin/packages/jdk/package.py index f0b871742ca..593a6d83404 100644 --- a/var/spack/repos/builtin/packages/jdk/package.py +++ b/var/spack/repos/builtin/packages/jdk/package.py @@ -22,21 +22,20 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -#------------------------------------------------------------------------------ +# # Author: Justin Too -#------------------------------------------------------------------------------ +# import distutils -from distutils import dir_util -from subprocess import call import spack from spack import * import llnl.util.tty as tty + class Jdk(Package): """The Java Development Kit (JDK) released by Oracle Corporation in the form of a binary product aimed at Java developers.""" - homepage = "http://www.oracle.com/technetwork/java/javase/downloads/index.html" + homepage = "http://www.oracle.com/technetwork/java/javase/downloads/index.html" # noqa: E501 version('8u66-linux-x64', '88f31f3d642c3287134297b8c10e61bf', url="http://download.oracle.com/otn-pub/java/jdk/8u66-b17/jdk-8u66-linux-x64.tar.gz") @@ -48,10 +47,10 @@ class Jdk(Package): # automate this process, we need to utilize these additional curl # commandline options. 
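    # As a rough illustration only: fetched by hand with these options, the
    # 8u92 tarball added above would be downloaded roughly as
    #
    #   curl -j -H "Cookie: oraclelicense=accept-securebackup-cookie" \
    #        -O http://download.oracle.com/otn-pub/java/jdk/8u92-b14/jdk-8u92-linux-x64.tar.gz
    #
    # where -j discards session cookies and -H supplies the cookie header
    # that records acceptance of Oracle's license agreement.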
# - # See http://stackoverflow.com/questions/10268583/how-to-automate-download-and-installation-of-java-jdk-on-linux - curl_options=[ - '-j', # junk cookies - '-H', # specify required License Agreement cookie + # See http://stackoverflow.com/questions/10268583/how-to-automate-download-and-installation-of-java-jdk-on-linux # noqa: E501 + curl_options = [ + '-j', # junk cookies + '-H', # specify required License Agreement cookie 'Cookie: oraclelicense=accept-securebackup-cookie'] def do_fetch(self, mirror_only=False): @@ -67,6 +66,5 @@ def do_fetch(self, mirror_only=False): # Now perform the actual fetch super(Jdk, self).do_fetch(mirror_only) - def install(self, spec, prefix): distutils.dir_util.copy_tree(".", prefix) From 4dec06e4f48044977acf484b95532b6f682ad960 Mon Sep 17 00:00:00 2001 From: Erik Schnetter Date: Mon, 18 Jul 2016 20:14:44 -0400 Subject: [PATCH 005/284] Make HDF5 1.10 the default version Apply this only after NetCDF 4.4.1 is supported. --- var/spack/repos/builtin/packages/hdf5/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py index 51a5823aa5c..d169940c863 100644 --- a/var/spack/repos/builtin/packages/hdf5/package.py +++ b/var/spack/repos/builtin/packages/hdf5/package.py @@ -40,7 +40,7 @@ class Hdf5(Package): version('1.10.0-patch1', '9180ff0ef8dc2ef3f61bd37a7404f295') version('1.10.0', 'bdc935337ee8282579cd6bc4270ad199') - version('1.8.16', 'b8ed9a36ae142317f88b0c7ef4b9c618', preferred=True) + version('1.8.16', 'b8ed9a36ae142317f88b0c7ef4b9c618') version('1.8.15', '03cccb5b33dbe975fdcd8ae9dc021f24') version('1.8.13', 'c03426e9e77d7766944654280b467289') From afa4cdc2d876fa9d4743813f7961661875430df1 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Tue, 19 Jul 2016 12:33:04 -0500 Subject: [PATCH 006/284] Updates to Armadillo package --- var/spack/repos/builtin/packages/armadillo/package.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/armadillo/package.py b/var/spack/repos/builtin/packages/armadillo/package.py index b3e5994e305..5043a6a2123 100644 --- a/var/spack/repos/builtin/packages/armadillo/package.py +++ b/var/spack/repos/builtin/packages/armadillo/package.py @@ -33,11 +33,13 @@ class Armadillo(Package): homepage = "http://arma.sourceforge.net/" url = "http://sourceforge.net/projects/arma/files/armadillo-7.200.1.tar.xz" + version('7.200.2', 'b21585372d67a8876117fd515d8cf0a2') version('7.200.1', 'ed86d6df0058979e107502e1fe3e469e') variant('hdf5', default=False, description='Include HDF5 support') - depends_on('arpack') + depends_on('cmake@2.8:', type='build') + depends_on('arpack-ng') # old arpack causes undefined symbols depends_on('blas') depends_on('lapack') depends_on('superlu@5.2:') @@ -46,8 +48,8 @@ class Armadillo(Package): def install(self, spec, prefix): cmake_args = [ # ARPACK support - '-DARPACK_LIBRARY={0}/libarpack.a'.format( - spec['arpack'].prefix.lib), + '-DARPACK_LIBRARY={0}/libarpack.so'.format( + spec['arpack-ng'].prefix.lib), # BLAS support '-DBLAS_LIBRARY={0}'.format(spec['blas'].blas_shared_lib), # LAPACK support From 8034536054b4fcd94809bde379ecc5e2d6a61647 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Tue, 19 Jul 2016 12:37:34 -0500 Subject: [PATCH 007/284] Fix shared object suffix for macOS --- var/spack/repos/builtin/packages/armadillo/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/armadillo/package.py b/var/spack/repos/builtin/packages/armadillo/package.py index 5043a6a2123..4356f60aca2 100644 --- a/var/spack/repos/builtin/packages/armadillo/package.py +++ b/var/spack/repos/builtin/packages/armadillo/package.py @@ -48,8 +48,8 @@ class Armadillo(Package): def install(self, spec, prefix): cmake_args = [ # ARPACK support - '-DARPACK_LIBRARY={0}/libarpack.so'.format( - spec['arpack-ng'].prefix.lib), + '-DARPACK_LIBRARY={0}/libarpack.{1}'.format( + spec['arpack-ng'].prefix.lib, dso_suffix), # BLAS support '-DBLAS_LIBRARY={0}'.format(spec['blas'].blas_shared_lib), # LAPACK support From 1315753e704615a37cde4e3a6b342cd253bfd95b Mon Sep 17 00:00:00 2001 From: Ben Boeckel Date: Mon, 18 Jul 2016 14:23:29 -0400 Subject: [PATCH 008/284] deptypes: support special deptypes by string --- lib/spack/docs/packaging_guide.rst | 6 +++--- lib/spack/spack/directives.py | 2 +- lib/spack/spack/spec.py | 7 ++++++- 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 6bafaecc7db..70def5c39a0 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -1307,9 +1307,9 @@ The dependency types are: If not specified, ``type`` is assumed to be ``("build", "link")``. This is the common case for compiled language usage. Also available are the aliases -``alldeps`` for all dependency types and ``nolink`` (``("build", "run")``) for -use by dependencies which are not expressed via a linker (e.g., Python or Lua -module loading). +``"alldeps"`` for all dependency types and ``"nolink"`` (``("build", "run")``) +for use by dependencies which are not expressed via a linker (e.g., Python or +Lua module loading). .. _setup-dependent-environment: diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py index 88d2aaf4728..e92dd6fb679 100644 --- a/lib/spack/spack/directives.py +++ b/lib/spack/spack/directives.py @@ -189,7 +189,7 @@ def _depends_on(pkg, spec, when=None, type=None): type = ('build', 'link') if isinstance(type, str): - type = (type,) + type = spack.spec.special_types.get(type, (type,)) for deptype in type: if deptype not in spack.spec.alldeps: diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index e694f2b2da0..8bdae0445e6 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -155,6 +155,10 @@ # Special types of dependencies. alldeps = ('build', 'link', 'run') nolink = ('build', 'run') +special_types = { + 'alldeps': alldeps, + 'nolink': nolink, +} def index_specs(specs): @@ -542,7 +546,8 @@ def _deptype_norm(self, deptype): return alldeps # Force deptype to be a set object so that we can do set intersections. if isinstance(deptype, str): - return (deptype,) + # Support special deptypes. + return special_types.get(deptype, (deptype,)) return deptype def _find_deps(self, where, deptype): From f4fb9a07718068fb96e22a43be6066daada92d10 Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Wed, 20 Jul 2016 11:35:13 -0700 Subject: [PATCH 009/284] Set default link type to dynamic on cray. 
Includes hooks for platform-based environment changes --- lib/spack/spack/architecture.py | 6 ++++++ lib/spack/spack/build_environment.py | 6 ++++++ lib/spack/spack/platforms/cray_xc.py | 7 +++++++ 3 files changed, 19 insertions(+) diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py index a7cda2bf681..2701fa9a0c5 100644 --- a/lib/spack/spack/architecture.py +++ b/lib/spack/spack/architecture.py @@ -190,6 +190,12 @@ def operating_system(self, name): return self.operating_sys.get(name, None) + @classmethod + def setup_platform_environment(self, env): + """ Subclass can override this method if it requires any + platform-specific build environment modifications. + """ + pass @classmethod def detect(self): diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 93fb0690f7f..4e799eeea17 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -248,6 +248,11 @@ def set_build_environment_variables(pkg, env, dirty=False): ci = join_path(item, 'case-insensitive') if os.path.isdir(ci): env_paths.append(ci) + +# for item in pkg.spec.platform.env_paths: +# env_paths.append(item) + # TODO: move platform-specific knowledge to platform. + # (join_path(spack.build_env_path, 'cray')) for item in reversed(env_paths): env.prepend_path('PATH', item) @@ -444,6 +449,7 @@ def setup_package(pkg, dirty=False): set_compiler_environment_variables(pkg, spack_env) set_build_environment_variables(pkg, spack_env, dirty) + pkg.spec.architecture.platform.setup_platform_environment(spack_env) load_external_modules(pkg) # traverse in postorder so package can use vars from its dependencies spec = pkg.spec diff --git a/lib/spack/spack/platforms/cray_xc.py b/lib/spack/spack/platforms/cray_xc.py index e3c7761a945..aee375ac897 100644 --- a/lib/spack/spack/platforms/cray_xc.py +++ b/lib/spack/spack/platforms/cray_xc.py @@ -44,6 +44,13 @@ def __init__(self): self.add_operating_system(str(linux_dist), linux_dist) self.add_operating_system('CNL10', Cnl()) + @classmethod + def setup_platform_environment(self, env): + """ Change the linker to default dynamic to be more + similar to linux/standard linker behavior + """ + env.set('CRAYPE_LINK_TYPE', 'dynamic') + @classmethod def detect(self): try: From 7db41700623f33a4266c8265e45b511bc1aed24a Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Wed, 20 Jul 2016 11:36:47 -0700 Subject: [PATCH 010/284] removed commented-out code --- lib/spack/spack/build_environment.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 4e799eeea17..8e40e9883dc 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -249,11 +249,6 @@ def set_build_environment_variables(pkg, env, dirty=False): if os.path.isdir(ci): env_paths.append(ci) -# for item in pkg.spec.platform.env_paths: -# env_paths.append(item) - # TODO: move platform-specific knowledge to platform. 
- # (join_path(spack.build_env_path, 'cray')) - for item in reversed(env_paths): env.prepend_path('PATH', item) env.set_path(SPACK_ENV_PATH, env_paths) From 412618d53108d6417c88c62d85377f69f8bf1829 Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Wed, 20 Jul 2016 11:52:16 -0700 Subject: [PATCH 011/284] fixed flake errors --- lib/spack/spack/architecture.py | 13 ++++--------- lib/spack/spack/build_environment.py | 2 +- 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py index 2701fa9a0c5..6c3c478f91e 100644 --- a/lib/spack/spack/architecture.py +++ b/lib/spack/spack/architecture.py @@ -76,7 +76,6 @@ will be responsible for compiler detection. """ import os -import imp import inspect from llnl.util.lang import memoized, list_modules, key_ordering @@ -206,15 +205,12 @@ def detect(self): """ raise NotImplementedError() - def __repr__(self): return self.__str__() - def __str__(self): return self.name - def _cmp_key(self): t_keys = ''.join(str(t._cmp_key()) for t in sorted(self.targets.values())) @@ -285,7 +281,7 @@ def find_compilers(self, *paths): # ensure all the version calls we made are cached in the parent # process, as well. This speeds up Spack a lot. - clist = reduce(lambda x, y: x+y, compiler_lists) + clist = reduce(lambda x, y: x + y, compiler_lists) return clist def find_compiler(self, cmp_cls, *path): @@ -326,7 +322,7 @@ def find_compiler(self, cmp_cls, *path): # prefer the one with more compilers. prev_paths = [prev.cc, prev.cxx, prev.f77, prev.fc] - newcount = len([p for p in paths if p is not None]) + newcount = len([p for p in paths if p is not None]) prevcount = len([p for p in prev_paths if p is not None]) # Don't add if it's not an improvement over prev compiler. @@ -343,6 +339,7 @@ def to_dict(self): d['version'] = self.version return d + @key_ordering class Arch(object): """Architecture is now a class to help with setting attributes. @@ -383,11 +380,9 @@ def __str__(self): else: return '' - def __contains__(self, string): return string in str(self) - def _cmp_key(self): if isinstance(self.platform, Platform): platform = self.platform.name @@ -430,7 +425,7 @@ def _operating_system_from_dict(os_name, plat=None): if isinstance(os_name, dict): name = os_name['name'] version = os_name['version'] - return plat.operating_system(name+version) + return plat.operating_system(name + version) else: return plat.operating_system(os_name) diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 8e40e9883dc..fbcba08367b 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -248,7 +248,7 @@ def set_build_environment_variables(pkg, env, dirty=False): ci = join_path(item, 'case-insensitive') if os.path.isdir(ci): env_paths.append(ci) - + for item in reversed(env_paths): env.prepend_path('PATH', item) env.set_path(SPACK_ENV_PATH, env_paths) From 6983c1d30d6e4f93ee8d8a0df87e3c9ea8cde103 Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Wed, 20 Jul 2016 12:04:00 -0700 Subject: [PATCH 012/284] Documented linker default --- lib/spack/docs/basic_usage.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst index 50a161a1750..948092047bd 100644 --- a/lib/spack/docs/basic_usage.rst +++ b/lib/spack/docs/basic_usage.rst @@ -1866,6 +1866,10 @@ to call the Cray compiler wrappers during build time. For more on compiler configuration, check out :ref:`compiler-config`. 
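As a concrete, illustrative sketch (the version string and module names are placeholders, and the exact compilers.yaml schema differs between Spack releases), a Cray compiler entry simply points the compiler paths at the wrapper names rather than at real compiler binaries:

    compilers:
    - compiler:
        spec: gcc@5.3.0
        modules:
          - PrgEnv-gnu
          - gcc/5.3.0
        paths:
          cc: cc
          cxx: CC
          f77: ftn
          fc: ftn

Use whatever programming environment modules your site actually provides.
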
+Spack sets the default Cray link type to dynamic, to better match other +other platforms. Individual packages can enable static linking (which is the +default outside of Spack on cray systems) using the -static flag. + Setting defaults and using Cray modules ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From c9f4e8ce5aac7fbcc1934606c1a330cdb5e7a36c Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Wed, 20 Jul 2016 13:13:47 -0600 Subject: [PATCH 013/284] ilmbase : Add new IlmBase package --- .../repos/builtin/packages/ilmbase/package.py | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 var/spack/repos/builtin/packages/ilmbase/package.py diff --git a/var/spack/repos/builtin/packages/ilmbase/package.py b/var/spack/repos/builtin/packages/ilmbase/package.py new file mode 100644 index 00000000000..873c830623f --- /dev/null +++ b/var/spack/repos/builtin/packages/ilmbase/package.py @@ -0,0 +1,42 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Ilmbase(Package): + """OpenEXR ILM Base libraries (high dynamic-range image file format)""" + + homepage = "http://www.openexr.com/" + url = "http://download.savannah.nongnu.org/releases/openexr/ilmbase-2.2.0.tar.gz" + + version('2.2.0', 'b540db502c5fa42078249f43d18a4652') + version('2.1.0', 'af1115f4d759c574ce84efcde9845d29') + version('2.0.1', '74c0d0d2873960bd0dc1993f8e03f0ae') + version('1.0.2', '26c133ee8ca48e1196fbfb3ffe292ab4') + version('0.9.0', '4df45f8116cb7a013b286caf6da30a2e') + + def install(self, spec, prefix): + configure('--prefix={0}'.format(prefix)) + make('install') From eda1176ba7d70327ca847d6b17afa02f8cca0d5b Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Wed, 20 Jul 2016 12:26:31 -0700 Subject: [PATCH 014/284] added package as argument to setup_platform_environment --- lib/spack/spack/architecture.py | 2 +- lib/spack/spack/build_environment.py | 2 +- lib/spack/spack/platforms/cray_xc.py | 5 ++++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py index 6c3c478f91e..974505ee3a0 100644 --- a/lib/spack/spack/architecture.py +++ b/lib/spack/spack/architecture.py @@ -190,7 +190,7 @@ def operating_system(self, name): return self.operating_sys.get(name, None) @classmethod - def setup_platform_environment(self, env): + def setup_platform_environment(self, pkg, env): """ Subclass can override this method if it requires any platform-specific build environment modifications. 
""" diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index fbcba08367b..5affd3c7c56 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -444,7 +444,7 @@ def setup_package(pkg, dirty=False): set_compiler_environment_variables(pkg, spack_env) set_build_environment_variables(pkg, spack_env, dirty) - pkg.spec.architecture.platform.setup_platform_environment(spack_env) + pkg.spec.architecture.platform.setup_platform_environment(pkg, spack_env) load_external_modules(pkg) # traverse in postorder so package can use vars from its dependencies spec = pkg.spec diff --git a/lib/spack/spack/platforms/cray_xc.py b/lib/spack/spack/platforms/cray_xc.py index aee375ac897..2b065d5bbd1 100644 --- a/lib/spack/spack/platforms/cray_xc.py +++ b/lib/spack/spack/platforms/cray_xc.py @@ -45,11 +45,14 @@ def __init__(self): self.add_operating_system('CNL10', Cnl()) @classmethod - def setup_platform_environment(self, env): + def setup_platform_environment(self, pkg, env): """ Change the linker to default dynamic to be more similar to linux/standard linker behavior """ env.set('CRAYPE_LINK_TYPE', 'dynamic') + cray_wrapper_names = join_path(spack.build_env_path, 'cray') + if os.path.isdir(cray_wrapper_names): + env.prepend_path('PATH', cray_wrapper_names) @classmethod def detect(self): From 24c14ff7a84af509c1592ab44419e2d7cb3cd6e6 Mon Sep 17 00:00:00 2001 From: Samuel Knight Date: Wed, 20 Jul 2016 16:38:47 +0000 Subject: [PATCH 015/284] Added gnu packages datamash, parallel, and screen --- .../builtin/packages/datamash/package.py | 45 ++++++++++++++ .../builtin/packages/parallel/package.py | 45 ++++++++++++++ .../repos/builtin/packages/screen/package.py | 59 +++++++++++++++++++ 3 files changed, 149 insertions(+) create mode 100644 var/spack/repos/builtin/packages/datamash/package.py create mode 100644 var/spack/repos/builtin/packages/parallel/package.py create mode 100644 var/spack/repos/builtin/packages/screen/package.py diff --git a/var/spack/repos/builtin/packages/datamash/package.py b/var/spack/repos/builtin/packages/datamash/package.py new file mode 100644 index 00000000000..ef30c50ebe1 --- /dev/null +++ b/var/spack/repos/builtin/packages/datamash/package.py @@ -0,0 +1,45 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Datamash(Package): + """ + GNU datamash is a command-line program which performs basic numeric, + textual and statistical operations on input textual data files. + """ + + homepage = "https://www.gnu.org/software/datamash/" + url = "http://ftp.gnu.org/gnu/datamash/datamash-1.0.5.tar.gz" + + version('1.1.0', '79a6affca08107a095e97e4237fc8775') + version('1.0.7', '9f317bab07454032ba9c068e7f17b04b') + version('1.0.6', 'ff26fdef0f343cb695cf1853e14a1a5b') + version('1.0.5', '9a29549dc7feca49fdc5fab696614e11') + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/parallel/package.py b/var/spack/repos/builtin/packages/parallel/package.py new file mode 100644 index 00000000000..2306dace554 --- /dev/null +++ b/var/spack/repos/builtin/packages/parallel/package.py @@ -0,0 +1,45 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Parallel(Package): + """ + GNU parallel is a shell tool for executing jobs in parallel using + one or more computers. A job can be a single command or a small + script that has to be run for each of the lines in the input. + """ + + homepage = "http://www.gnu.org/software/parallel/" + url = "http://ftp.gnu.org/gnu/parallel/parallel-20160422.tar.bz2" + + version('20160422', '24621f684130472694333709bd4454cb') + version('20160322', '4e81e0d36902ab4c4e969ee6f35e6e57') + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/screen/package.py b/var/spack/repos/builtin/packages/screen/package.py new file mode 100644 index 00000000000..17335603e6f --- /dev/null +++ b/var/spack/repos/builtin/packages/screen/package.py @@ -0,0 +1,59 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. 
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Screen(Package): + """ + Screen is a full-screen window manager that multiplexes a physical + terminal between several processes, typically interactive shells. + """ + + homepage = "https://www.gnu.org/software/screen/" + url = "http://ftp.gnu.org/gnu/screen/screen-4.3.1.tar.gz" + + version('4.3.1', '5bb3b0ff2674e29378c31ad3411170ad') + version('4.3.0', 'f76d28eadc4caaf6cdff00685ae6ad46') + version('4.2.1', '419a0594e2b25039239af8b90eda7d92') + version('4.2.0', 'e5199156a8ac863bbf92495a7638b612') + version('4.0.3', '8506fd205028a96c741e4037de6e3c42') + version('4.0.2', 'ed68ea9b43d9fba0972cb017a24940a1') + version('3.9.15', '0dff6fdc3fbbceabf25a43710fbfe75f') + version('3.9.11', '19572f92404995e7b2dea8117204dd67') + version('3.9.10', 'bbe271715d1dee038b3cd72d6d2f05fb') + version('3.9.9', '9a8b1d6c7438c64b884c4f7d7662afdc') + version('3.9.8', '8ddfebe32c2d45410ce89ea9779bb1cf') + version('3.9.4', '7de72cd18f7adcdf993ecc6764d0478a') + version('3.7.6', '9a353b828d79c3c143109265cae663a7') + version('3.7.4', 'c5ab40b068968075e41e25607dfce543') + version('3.7.2', '2d6db5de7fb0cf849cc5a6f94203f029') + version('3.7.1', '27cdd29318446561ef7c966041cbd2c9') + + depends_on('ncurses') + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix) + make() + make("install") From f9137f606f68392cca56bd19810c3c86addbae7a Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Wed, 20 Jul 2016 15:37:31 -0500 Subject: [PATCH 016/284] Set environment variables This commit adds `setup_environment` to export the libraries and headers. Fixes #1314. --- var/spack/repos/builtin/packages/R/package.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/var/spack/repos/builtin/packages/R/package.py b/var/spack/repos/builtin/packages/R/package.py index 11c5909efaf..5e7c8492f6c 100644 --- a/var/spack/repos/builtin/packages/R/package.py +++ b/var/spack/repos/builtin/packages/R/package.py @@ -136,6 +136,14 @@ def setup_dependent_environment(self, spack_env, run_env, extension_spec): run_env.prepend_path('R_LIBS', os.path.join( extension_spec.prefix, self.r_lib_dir)) + def setup_environment(self, spack_env, run_env): + run_env.prepend_path('LIBRARY_PATH', + join_path(self.prefix, 'rlib', 'R', 'lib')) + run_env.prepend_path('LD_LIBRARY_PATH', + join_path(self.prefix, 'rlib', 'R', 'lib')) + run_env.prepend_path('CPATH', + join_path(self.prefix, 'rlib', 'R', 'include')) + def setup_dependent_package(self, module, ext_spec): """Called before R modules' install() methods. 
In most cases, extensions will only need to have one line: From e973adf84b9c431e95f792d24c5b15941780017e Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Wed, 20 Jul 2016 14:49:03 -0600 Subject: [PATCH 017/284] openexr : Add new package --- .../repos/builtin/packages/openexr/package.py | 54 +++++++++++++++++++ 1 file changed, 54 insertions(+) create mode 100644 var/spack/repos/builtin/packages/openexr/package.py diff --git a/var/spack/repos/builtin/packages/openexr/package.py b/var/spack/repos/builtin/packages/openexr/package.py new file mode 100644 index 00000000000..3619bd063ca --- /dev/null +++ b/var/spack/repos/builtin/packages/openexr/package.py @@ -0,0 +1,54 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Openexr(Package): + """OpenEXR Graphics Tools (high dynamic-range image file format)""" + + homepage = "http://www.openexr.com/" + url = "https://savannah.nongnu.org/download/openexr/openexr-2.2.0.tar.gz" + + version('2.2.0', 'b64e931c82aa3790329c21418373db4e') + version('2.1.0', '33735d37d2ee01c6d8fbd0df94fb8b43') + version('2.0.1', '4387e6050d2faa65dd5215618ff2ddce') + version('1.7.0', '27113284f7d26a58f853c346e0851d7a') + version('1.6.1', '11951f164f9c872b183df75e66de145a') + version('1.5.0', '55342d2256ab3ae99da16f16b2e12ce9') + version('1.4.0a', 'd0a4b9a930c766fa51561b05fb204afe') + version('1.3.2', '1522fe69135016c52eb88fc7d8514409') + + variant('debug', default=False, + description='Builds a debug version of the libraries') + + depends_on('pkg-config', type='build') + depends_on('ilmbase') + + def install(self, spec, prefix): + configure_options = ['--prefix={0}'.format(prefix)] + if '+debug' not in spec: + configure_options.append('--disable-debug') + configure(*configure_options) + make('install') From 0c75174ec323757400e9095e1cb9fa66e557a723 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Wed, 20 Jul 2016 15:56:20 -0500 Subject: [PATCH 018/284] Fix indent/flake8 error. 
--- var/spack/repos/builtin/packages/R/package.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/R/package.py b/var/spack/repos/builtin/packages/R/package.py index 5e7c8492f6c..ad06c2ca48f 100644 --- a/var/spack/repos/builtin/packages/R/package.py +++ b/var/spack/repos/builtin/packages/R/package.py @@ -138,11 +138,11 @@ def setup_dependent_environment(self, spack_env, run_env, extension_spec): def setup_environment(self, spack_env, run_env): run_env.prepend_path('LIBRARY_PATH', - join_path(self.prefix, 'rlib', 'R', 'lib')) + join_path(self.prefix, 'rlib', 'R', 'lib')) run_env.prepend_path('LD_LIBRARY_PATH', - join_path(self.prefix, 'rlib', 'R', 'lib')) + join_path(self.prefix, 'rlib', 'R', 'lib')) run_env.prepend_path('CPATH', - join_path(self.prefix, 'rlib', 'R', 'include')) + join_path(self.prefix, 'rlib', 'R', 'include')) def setup_dependent_package(self, module, ext_spec): """Called before R modules' install() methods. In most cases, From 2344ffd0c7e5a0c495f0000eaa66c9bea1f81c38 Mon Sep 17 00:00:00 2001 From: Mitchell Devlin Date: Wed, 20 Jul 2016 15:24:23 -0500 Subject: [PATCH 019/284] add libxsmm package --- .../repos/builtin/packages/libxsmm/package.py | 53 +++++++++++++++++++ 1 file changed, 53 insertions(+) create mode 100644 var/spack/repos/builtin/packages/libxsmm/package.py diff --git a/var/spack/repos/builtin/packages/libxsmm/package.py b/var/spack/repos/builtin/packages/libxsmm/package.py new file mode 100644 index 00000000000..b8adeed406f --- /dev/null +++ b/var/spack/repos/builtin/packages/libxsmm/package.py @@ -0,0 +1,53 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Libxsmm(Package): + '''LIBXSMM is a library for small dense and small sparse matrix-matrix + multiplications targeting Intel Architecture (x86).''' + + homepage = 'https://github.com/hfp/libxsmm' + url = 'https://github.com/xianyi/libxsmm/archive/1.4.3.tar.gz' + + version('1.4.3', '9839bf0fb8be7badf1e97ce4c817149b') + version('1.4.2', 'ea025761437f3b5c936821b9ca21ec31') + version('1.4.1', '71648500ea4510529845d329091917df') + version('1.4', 'b42f91bf5285e7ad0463446e55ebdc2b') + + def manual_install(self, prefix): + install_tree('include', prefix.include) + install_tree('lib', prefix.lib) + install_tree('documentation', prefix.share + '/libxsmm') + + def install(self, spec, prefix): + make_args = [ + 'ROW_MAJOR=0', + 'INDICES_M=$(echo $(seq 1 24))', + 'INDICES_N=$(echo $(seq 1 24))', + 'INDICES_K=$(echo $(seq 1 24))' + ] + make(*make_args) + self.manual_install(prefix) From 9e1d26e973336bcfa5f21abee7ffc9ec4fd30bb7 Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Tue, 12 Jul 2016 20:15:12 -0600 Subject: [PATCH 020/284] opencv : Add CUDA extensions --- var/spack/repos/builtin/packages/opencv/package.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index 989c66316c3..9c17cff5916 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -45,6 +45,7 @@ class Opencv(Package): variant('eigen', default=True, description='Activates support for eigen') variant('ipp', default=True, description='Activates support for IPP') + variant('cuda', default=False, description='Activates support for CUDA') depends_on('zlib') depends_on('libpng') @@ -55,9 +56,9 @@ class Opencv(Package): depends_on('py-numpy') depends_on('eigen', when='+eigen') + depends_on('cuda', when='+cuda') # FIXME : GUI extensions missing - # FIXME : CUDA extensions still missing def install(self, spec, prefix): cmake_options = [] @@ -66,7 +67,8 @@ def install(self, spec, prefix): cmake_options.extend(['-DCMAKE_BUILD_TYPE:STRING=%s' % ('Debug' if '+debug' in spec else 'Release'), '-DBUILD_SHARED_LIBS:BOOL=%s' % ('ON' if '+shared' in spec else 'OFF'), '-DENABLE_PRECOMPILED_HEADERS:BOOL=OFF', - '-DWITH_IPP:BOOL=%s' % ('ON' if '+ipp' in spec else 'OFF')]) + '-DWITH_IPP:BOOL=%s' % ('ON' if '+ipp' in spec else 'OFF'), + '-DWITH_CUDA:BOOL=%s' % ('ON' if '+cuda' in spec else 'OFF')]) with working_dir('spack_build', create=True): cmake('..', *cmake_options) From cf0f5756f55a0bb3390975950a7f12b18833262d Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Tue, 12 Jul 2016 20:27:21 -0600 Subject: [PATCH 021/284] opencv : Fix python extensions issues --- .../repos/builtin/packages/opencv/package.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index 9c17cff5916..3d887f07750 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -58,6 +58,8 @@ class Opencv(Package): depends_on('eigen', when='+eigen') depends_on('cuda', when='+cuda') + 
extends('python') + # FIXME : GUI extensions missing def install(self, spec, prefix): @@ -70,6 +72,19 @@ def install(self, spec, prefix): '-DWITH_IPP:BOOL=%s' % ('ON' if '+ipp' in spec else 'OFF'), '-DWITH_CUDA:BOOL=%s' % ('ON' if '+cuda' in spec else 'OFF')]) + python_prefix = spec['python'].prefix + python_lib = python_prefix.lib + if spec.satisfies('^python@3:'): + python = join_path(python_prefix.bin, 'python3') + cmake_options.extend(['-DBUILD_opencv_python3=ON', + '-DPYTHON_EXECUTABLE={0}'.format(python), + '-DPYTHON_LIBRARIES={0}'.format(python_lib)]) + elif spec.satisfies('^python@2:3'): + python = join_path(python_prefix.bin, 'python2') + cmake_options.extend(['-DBUILD_opencv_python2=ON', + '-DPYTHON_EXECUTABLE={0}'.format(python), + '-DPYTHON_LIBRARIES={0}'.format(python_lib)]) + with working_dir('spack_build', create=True): cmake('..', *cmake_options) make('VERBOSE=1') From 578ddea418efe744862e0f5d22aaa65e521e9f9f Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Tue, 12 Jul 2016 20:29:04 -0600 Subject: [PATCH 022/284] opencv : Fix pep8 style issues --- .../repos/builtin/packages/opencv/package.py | 49 ++++++++++++------- 1 file changed, 30 insertions(+), 19 deletions(-) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index 3d887f07750..301ac46c0ed 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -26,22 +26,28 @@ class Opencv(Package): + """OpenCV is released under a BSD license and hence it's free for both + academic and commercial use. It has C++, C, Python and Java interfaces and + supports Windows, Linux, Mac OS, iOS and Android. OpenCV was designed for + computational efficiency and with a strong focus on real-time applications. + Written in optimized C/C++, the library can take advantage of multi-core + processing. Enabled with OpenCL, it can take advantage of the hardware + acceleration of the underlying heterogeneous compute platform. Adopted all + around the world, OpenCV has more than 47 thousand people of user community + and estimated number of downloads exceeding 9 million. Usage ranges from + interactive art, to mines inspection, stitching maps on the web or through + advanced robotics. """ - OpenCV is released under a BSD license and hence it's free for both academic and commercial use. It has C++, C, - Python and Java interfaces and supports Windows, Linux, Mac OS, iOS and Android. OpenCV was designed for - computational efficiency and with a strong focus on real-time applications. Written in optimized C/C++, the library - can take advantage of multi-core processing. Enabled with OpenCL, it can take advantage of the hardware - acceleration of the underlying heterogeneous compute platform. Adopted all around the world, OpenCV has more than - 47 thousand people of user community and estimated number of downloads exceeding 9 million. Usage ranges from - interactive art, to mines inspection, stitching maps on the web or through advanced robotics. 
- """ + homepage = 'http://opencv.org/' url = 'https://github.com/Itseez/opencv/archive/3.1.0.tar.gz' version('3.1.0', '70e1dd07f0aa06606f1bc0e3fa15abd3') - variant('shared', default=True, description='Enables the build of shared libraries') - variant('debug', default=False, description='Builds a debug version of the libraries') + variant('shared', default=True, + description='Enables the build of shared libraries') + variant('debug', default=False, + description='Builds a debug version of the libraries') variant('eigen', default=True, description='Activates support for eigen') variant('ipp', default=True, description='Activates support for IPP') @@ -66,24 +72,29 @@ def install(self, spec, prefix): cmake_options = [] cmake_options.extend(std_cmake_args) - cmake_options.extend(['-DCMAKE_BUILD_TYPE:STRING=%s' % ('Debug' if '+debug' in spec else 'Release'), - '-DBUILD_SHARED_LIBS:BOOL=%s' % ('ON' if '+shared' in spec else 'OFF'), - '-DENABLE_PRECOMPILED_HEADERS:BOOL=OFF', - '-DWITH_IPP:BOOL=%s' % ('ON' if '+ipp' in spec else 'OFF'), - '-DWITH_CUDA:BOOL=%s' % ('ON' if '+cuda' in spec else 'OFF')]) + cmake_options.extend( + ['-DCMAKE_BUILD_TYPE:STRING=%s' % ( + 'Debug' if '+debug' in spec else 'Release'), + '-DBUILD_SHARED_LIBS:BOOL=%s' % ( + 'ON' if '+shared' in spec else 'OFF'), + '-DENABLE_PRECOMPILED_HEADERS:BOOL=OFF', + '-DWITH_IPP:BOOL=%s' % ( + 'ON' if '+ipp' in spec else 'OFF'), + '-DWITH_CUDA:BOOL=%s' % ( + 'ON' if '+cuda' in spec else 'OFF')]) python_prefix = spec['python'].prefix python_lib = python_prefix.lib if spec.satisfies('^python@3:'): python = join_path(python_prefix.bin, 'python3') cmake_options.extend(['-DBUILD_opencv_python3=ON', - '-DPYTHON_EXECUTABLE={0}'.format(python), - '-DPYTHON_LIBRARIES={0}'.format(python_lib)]) + '-DPYTHON_EXECUTABLE=%s' % (python), + '-DPYTHON_LIBRARIES=%s' % (python_lib)]) elif spec.satisfies('^python@2:3'): python = join_path(python_prefix.bin, 'python2') cmake_options.extend(['-DBUILD_opencv_python2=ON', - '-DPYTHON_EXECUTABLE={0}'.format(python), - '-DPYTHON_LIBRARIES={0}'.format(python_lib)]) + '-DPYTHON_EXECUTABLE=%s' % (python), + '-DPYTHON_LIBRARIES=%s' % (python_lib)]) with working_dir('spack_build', create=True): cmake('..', *cmake_options) From 2cde6aaabdaefac7777137fddeae16af39aa0527 Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Wed, 13 Jul 2016 16:30:28 -0600 Subject: [PATCH 023/284] opencv : Add GUI support --- .../repos/builtin/packages/opencv/package.py | 21 ++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index 301ac46c0ed..518a5f37ae8 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -52,6 +52,9 @@ class Opencv(Package): variant('eigen', default=True, description='Activates support for eigen') variant('ipp', default=True, description='Activates support for IPP') variant('cuda', default=False, description='Activates support for CUDA') + variant('gtk', default=False, description='Activates support for GTK') + variant('vtk', default=False, description='Activates support for VTK') + variant('qt', default=False, description='Activates support for QT') depends_on('zlib') depends_on('libpng') @@ -63,11 +66,12 @@ class Opencv(Package): depends_on('eigen', when='+eigen') depends_on('cuda', when='+cuda') + depends_on('gtkplus', when='+gtk') + depends_on('vtk', when='+vtk') + depends_on('qt', when='+qt') extends('python') - # 
FIXME : GUI extensions missing - def install(self, spec, prefix): cmake_options = [] cmake_options.extend(std_cmake_args) @@ -81,7 +85,18 @@ def install(self, spec, prefix): '-DWITH_IPP:BOOL=%s' % ( 'ON' if '+ipp' in spec else 'OFF'), '-DWITH_CUDA:BOOL=%s' % ( - 'ON' if '+cuda' in spec else 'OFF')]) + 'ON' if '+cuda' in spec else 'OFF'), + '-DWITH_QT:BOOL=%s' % ( + 'ON' if '+qt' in spec else 'OFF'), + '-DWITH_VTK:BOOL=%s' % ( + 'ON' if '+vtk' in spec else 'OFF')]) + + if '^gtkplus@3:' in spec: + cmake_options.extend(['-DWITH_GTK:BOOL=ON', + '-DWITH_GTK_2_X:BOOL=OFF']) + elif '^gtkplus@2:3' in spec: + cmake_options.extend(['-DWITH_GTK:BOOL=OFF', + '-DWITH_GTK_2_X:BOOL=ON']) python_prefix = spec['python'].prefix python_lib = python_prefix.lib From ce902bf27af230a662890d0e557b4a96df9ae473 Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Wed, 13 Jul 2016 18:33:37 -0600 Subject: [PATCH 024/284] opencv : Minor style changes --- .../repos/builtin/packages/opencv/package.py | 28 ++++++++----------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index 518a5f37ae8..c01b83a8527 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -76,20 +76,16 @@ def install(self, spec, prefix): cmake_options = [] cmake_options.extend(std_cmake_args) - cmake_options.extend( - ['-DCMAKE_BUILD_TYPE:STRING=%s' % ( + cmake_options.extend([ + '-DCMAKE_BUILD_TYPE:STRING=%s' % ( 'Debug' if '+debug' in spec else 'Release'), - '-DBUILD_SHARED_LIBS:BOOL=%s' % ( + '-DBUILD_SHARED_LIBS:BOOL=%s' % ( 'ON' if '+shared' in spec else 'OFF'), - '-DENABLE_PRECOMPILED_HEADERS:BOOL=OFF', - '-DWITH_IPP:BOOL=%s' % ( - 'ON' if '+ipp' in spec else 'OFF'), - '-DWITH_CUDA:BOOL=%s' % ( - 'ON' if '+cuda' in spec else 'OFF'), - '-DWITH_QT:BOOL=%s' % ( - 'ON' if '+qt' in spec else 'OFF'), - '-DWITH_VTK:BOOL=%s' % ( - 'ON' if '+vtk' in spec else 'OFF')]) + '-DENABLE_PRECOMPILED_HEADERS:BOOL=OFF', + '-DWITH_IPP:BOOL=%s' % ('ON' if '+ipp' in spec else 'OFF'), + '-DWITH_CUDA:BOOL=%s' % ('ON' if '+cuda' in spec else 'OFF'), + '-DWITH_QT:BOOL=%s' % ('ON' if '+qt' in spec else 'OFF'), + '-DWITH_VTK:BOOL=%s' % ('ON' if '+vtk' in spec else 'OFF')]) if '^gtkplus@3:' in spec: cmake_options.extend(['-DWITH_GTK:BOOL=ON', @@ -103,13 +99,13 @@ def install(self, spec, prefix): if spec.satisfies('^python@3:'): python = join_path(python_prefix.bin, 'python3') cmake_options.extend(['-DBUILD_opencv_python3=ON', - '-DPYTHON_EXECUTABLE=%s' % (python), - '-DPYTHON_LIBRARIES=%s' % (python_lib)]) + '-DPYTHON_EXECUTABLE=%s' % python, + '-DPYTHON_LIBRARIES=%s' % python_lib]) elif spec.satisfies('^python@2:3'): python = join_path(python_prefix.bin, 'python2') cmake_options.extend(['-DBUILD_opencv_python2=ON', - '-DPYTHON_EXECUTABLE=%s' % (python), - '-DPYTHON_LIBRARIES=%s' % (python_lib)]) + '-DPYTHON_EXECUTABLE=%s' % python, + '-DPYTHON_LIBRARIES=%s' % python_lib]) with working_dir('spack_build', create=True): cmake('..', *cmake_options) From 0080ffcf9f3963ee2e8c869139340bf0384377fc Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Wed, 13 Jul 2016 18:34:40 -0600 Subject: [PATCH 025/284] opencv : Use `in` operator --- var/spack/repos/builtin/packages/opencv/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index c01b83a8527..13d57d9163d 100644 --- 
a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -96,12 +96,12 @@ def install(self, spec, prefix): python_prefix = spec['python'].prefix python_lib = python_prefix.lib - if spec.satisfies('^python@3:'): + if '^python@3:' in spec: python = join_path(python_prefix.bin, 'python3') cmake_options.extend(['-DBUILD_opencv_python3=ON', '-DPYTHON_EXECUTABLE=%s' % python, '-DPYTHON_LIBRARIES=%s' % python_lib]) - elif spec.satisfies('^python@2:3'): + elif '^python@2:3' in spec: python = join_path(python_prefix.bin, 'python2') cmake_options.extend(['-DBUILD_opencv_python2=ON', '-DPYTHON_EXECUTABLE=%s' % python, From 3fac2dcc4196bf17700fa9d99d5847a85f4b89b0 Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Fri, 15 Jul 2016 20:19:23 -0600 Subject: [PATCH 026/284] opencv : Fix cmake not disabling gtk --- var/spack/repos/builtin/packages/opencv/package.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index 13d57d9163d..bc04be8a312 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -87,7 +87,10 @@ def install(self, spec, prefix): '-DWITH_QT:BOOL=%s' % ('ON' if '+qt' in spec else 'OFF'), '-DWITH_VTK:BOOL=%s' % ('ON' if '+vtk' in spec else 'OFF')]) - if '^gtkplus@3:' in spec: + if '+gtk' not in spec: + cmake_options.extend(['-DWITH_GTK:BOOL=OFF', + '-DWITH_GTK_2_X:BOOL=OFF']) + elif '^gtkplus@3:' in spec: cmake_options.extend(['-DWITH_GTK:BOOL=ON', '-DWITH_GTK_2_X:BOOL=OFF']) elif '^gtkplus@2:3' in spec: From c588ce9648766cfd40834aa2131caed9536988ee Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Fri, 15 Jul 2016 20:21:26 -0600 Subject: [PATCH 027/284] opencv : Improve cmake detecting python --- .../repos/builtin/packages/opencv/package.py | 33 +++++++++++++------ 1 file changed, 23 insertions(+), 10 deletions(-) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index bc04be8a312..02237a0efc8 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -97,18 +97,31 @@ def install(self, spec, prefix): cmake_options.extend(['-DWITH_GTK:BOOL=OFF', '-DWITH_GTK_2_X:BOOL=ON']) - python_prefix = spec['python'].prefix - python_lib = python_prefix.lib + python = spec['python'] if '^python@3:' in spec: - python = join_path(python_prefix.bin, 'python3') - cmake_options.extend(['-DBUILD_opencv_python3=ON', - '-DPYTHON_EXECUTABLE=%s' % python, - '-DPYTHON_LIBRARIES=%s' % python_lib]) + python_exe = join_path(python.prefix.bin, 'python3') + cmake_options.extend([ + '-DBUILD_opencv_python3=ON', + '-DPYTHON3_EXECUTABLE=%s' % python_exe, + '-DPYTHON3_LIBRARIES=%s' % python.prefix.lib, + '-DPYTHON3_INCLUDE_DIR=%s' % python.prefix.include, + '-DBUILD_opencv_python2=OFF', + '-DPYTHON2_EXECUTABLE=', + '-DPYTHON2_LIBRARIES=', + '-DPYTHON2_INCLUDE_DIR=', + ]) elif '^python@2:3' in spec: - python = join_path(python_prefix.bin, 'python2') - cmake_options.extend(['-DBUILD_opencv_python2=ON', - '-DPYTHON_EXECUTABLE=%s' % python, - '-DPYTHON_LIBRARIES=%s' % python_lib]) + python_exe = join_path(python.prefix.bin, 'python2') + cmake_options.extend([ + '-DBUILD_opencv_python2=ON', + '-DPYTHON2_EXECUTABLE=%s' % python_exe, + '-DPYTHON2_LIBRARIES=%s' % python.prefix.lib, + '-DPYTHON2_INCLUDE_DIR=%s' % python.prefix.include, + 
'-DBUILD_opencv_python3=OFF', + '-DPYTHON3_EXECUTABLE=', + '-DPYTHON3_LIBRARIES=', + '-DPYTHON3_INCLUDE_DIR=', + ]) with working_dir('spack_build', create=True): cmake('..', *cmake_options) From 8cf03c209f392a887b57f71f217bd0368cbefe43 Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Fri, 15 Jul 2016 20:28:50 -0600 Subject: [PATCH 028/284] opencv : Make extending python optional --- .../repos/builtin/packages/opencv/package.py | 59 +++++++++++-------- 1 file changed, 33 insertions(+), 26 deletions(-) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index 02237a0efc8..508f567659b 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -55,22 +55,23 @@ class Opencv(Package): variant('gtk', default=False, description='Activates support for GTK') variant('vtk', default=False, description='Activates support for VTK') variant('qt', default=False, description='Activates support for QT') + variant('python', default=False, + description='Enables the build of Python extensions') depends_on('zlib') depends_on('libpng') depends_on('libjpeg-turbo') depends_on('libtiff') - depends_on('python') - depends_on('py-numpy') - depends_on('eigen', when='+eigen') depends_on('cuda', when='+cuda') depends_on('gtkplus', when='+gtk') depends_on('vtk', when='+vtk') depends_on('qt', when='+qt') - extends('python') + depends_on('py-numpy', when='+python') + + extends('python', when='+python') def install(self, spec, prefix): cmake_options = [] @@ -97,30 +98,36 @@ def install(self, spec, prefix): cmake_options.extend(['-DWITH_GTK:BOOL=OFF', '-DWITH_GTK_2_X:BOOL=ON']) - python = spec['python'] - if '^python@3:' in spec: - python_exe = join_path(python.prefix.bin, 'python3') + if '+python' in spec: + python = spec['python'] + if '^python@3:' in spec: + python_exe = join_path(python.prefix.bin, 'python3') + cmake_options.extend([ + '-DBUILD_opencv_python3=ON', + '-DPYTHON3_EXECUTABLE=%s' % python_exe, + '-DPYTHON3_LIBRARIES=%s' % python.prefix.lib, + '-DPYTHON3_INCLUDE_DIR=%s' % python.prefix.include, + '-DBUILD_opencv_python2=OFF', + '-DPYTHON2_EXECUTABLE=', + '-DPYTHON2_LIBRARIES=', + '-DPYTHON2_INCLUDE_DIR=', + ]) + elif '^python@2:3' in spec: + python_exe = join_path(python.prefix.bin, 'python2') + cmake_options.extend([ + '-DBUILD_opencv_python2=ON', + '-DPYTHON2_EXECUTABLE=%s' % python_exe, + '-DPYTHON2_LIBRARIES=%s' % python.prefix.lib, + '-DPYTHON2_INCLUDE_DIR=%s' % python.prefix.include, + '-DBUILD_opencv_python3=OFF', + '-DPYTHON3_EXECUTABLE=', + '-DPYTHON3_LIBRARIES=', + '-DPYTHON3_INCLUDE_DIR=', + ]) + else: cmake_options.extend([ - '-DBUILD_opencv_python3=ON', - '-DPYTHON3_EXECUTABLE=%s' % python_exe, - '-DPYTHON3_LIBRARIES=%s' % python.prefix.lib, - '-DPYTHON3_INCLUDE_DIR=%s' % python.prefix.include, '-DBUILD_opencv_python2=OFF', - '-DPYTHON2_EXECUTABLE=', - '-DPYTHON2_LIBRARIES=', - '-DPYTHON2_INCLUDE_DIR=', - ]) - elif '^python@2:3' in spec: - python_exe = join_path(python.prefix.bin, 'python2') - cmake_options.extend([ - '-DBUILD_opencv_python2=ON', - '-DPYTHON2_EXECUTABLE=%s' % python_exe, - '-DPYTHON2_LIBRARIES=%s' % python.prefix.lib, - '-DPYTHON2_INCLUDE_DIR=%s' % python.prefix.include, - '-DBUILD_opencv_python3=OFF', - '-DPYTHON3_EXECUTABLE=', - '-DPYTHON3_LIBRARIES=', - '-DPYTHON3_INCLUDE_DIR=', + '-DBUILD_opencv_python3=OFF' ]) with working_dir('spack_build', create=True): From 0654ee6a10868ff0fb7eed6572829da1358d5f9b Mon Sep 17 00:00:00 2001 From: mwilliammyers 
Date: Fri, 15 Jul 2016 20:30:37 -0600 Subject: [PATCH 029/284] opencv : Fix style inconsistencies --- .../repos/builtin/packages/opencv/package.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index 508f567659b..2e5b427cd0c 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -86,17 +86,22 @@ def install(self, spec, prefix): '-DWITH_IPP:BOOL=%s' % ('ON' if '+ipp' in spec else 'OFF'), '-DWITH_CUDA:BOOL=%s' % ('ON' if '+cuda' in spec else 'OFF'), '-DWITH_QT:BOOL=%s' % ('ON' if '+qt' in spec else 'OFF'), - '-DWITH_VTK:BOOL=%s' % ('ON' if '+vtk' in spec else 'OFF')]) + '-DWITH_VTK:BOOL=%s' % ('ON' if '+vtk' in spec else 'OFF') + ]) if '+gtk' not in spec: cmake_options.extend(['-DWITH_GTK:BOOL=OFF', '-DWITH_GTK_2_X:BOOL=OFF']) elif '^gtkplus@3:' in spec: - cmake_options.extend(['-DWITH_GTK:BOOL=ON', - '-DWITH_GTK_2_X:BOOL=OFF']) + cmake_options.extend([ + '-DWITH_GTK:BOOL=ON', + '-DWITH_GTK_2_X:BOOL=OFF' + ]) elif '^gtkplus@2:3' in spec: - cmake_options.extend(['-DWITH_GTK:BOOL=OFF', - '-DWITH_GTK_2_X:BOOL=ON']) + cmake_options.extend([ + '-DWITH_GTK:BOOL=OFF', + '-DWITH_GTK_2_X:BOOL=ON' + ]) if '+python' in spec: python = spec['python'] From 10c285a774c3218c952f30605b960212bf8dd302 Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Mon, 18 Jul 2016 11:40:39 -0600 Subject: [PATCH 030/284] opencv : Use string.format() --- .../repos/builtin/packages/opencv/package.py | 39 +++++++++++-------- 1 file changed, 23 insertions(+), 16 deletions(-) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index 2e5b427cd0c..bbec27054e0 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -23,6 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * +from glob import glob class Opencv(Package): @@ -78,20 +79,26 @@ def install(self, spec, prefix): cmake_options.extend(std_cmake_args) cmake_options.extend([ - '-DCMAKE_BUILD_TYPE:STRING=%s' % ( - 'Debug' if '+debug' in spec else 'Release'), - '-DBUILD_SHARED_LIBS:BOOL=%s' % ( - 'ON' if '+shared' in spec else 'OFF'), + '-DCMAKE_BUILD_TYPE:STRING={0}'.format(( + 'Debug' if '+debug' in spec else 'Release')), + '-DBUILD_SHARED_LIBS:BOOL={0}'.format(( + 'ON' if '+shared' in spec else 'OFF')), '-DENABLE_PRECOMPILED_HEADERS:BOOL=OFF', - '-DWITH_IPP:BOOL=%s' % ('ON' if '+ipp' in spec else 'OFF'), - '-DWITH_CUDA:BOOL=%s' % ('ON' if '+cuda' in spec else 'OFF'), - '-DWITH_QT:BOOL=%s' % ('ON' if '+qt' in spec else 'OFF'), - '-DWITH_VTK:BOOL=%s' % ('ON' if '+vtk' in spec else 'OFF') + '-DWITH_IPP:BOOL={0}'.format(( + 'ON' if '+ipp' in spec else 'OFF')), + '-DWITH_CUDA:BOOL={0}'.format(( + 'ON' if '+cuda' in spec else 'OFF')), + '-DWITH_QT:BOOL={0}'.format(( + 'ON' if '+qt' in spec else 'OFF')), + '-DWITH_VTK:BOOL={0}'.format(( + 'ON' if '+vtk' in spec else 'OFF')) ]) if '+gtk' not in spec: - cmake_options.extend(['-DWITH_GTK:BOOL=OFF', - '-DWITH_GTK_2_X:BOOL=OFF']) + cmake_options.extend([ + '-DWITH_GTK:BOOL=OFF', + '-DWITH_GTK_2_X:BOOL=OFF' + ]) elif '^gtkplus@3:' in spec: cmake_options.extend([ '-DWITH_GTK:BOOL=ON', @@ -109,9 +116,9 @@ def install(self, spec, prefix): python_exe = join_path(python.prefix.bin, 
'python3') cmake_options.extend([ '-DBUILD_opencv_python3=ON', - '-DPYTHON3_EXECUTABLE=%s' % python_exe, - '-DPYTHON3_LIBRARIES=%s' % python.prefix.lib, - '-DPYTHON3_INCLUDE_DIR=%s' % python.prefix.include, + '-DPYTHON3_EXECUTABLE={0}'.format(python_exe), + '-DPYTHON3_LIBRARIES={0}'.format(python.prefix.lib), + '-DPYTHON3_INCLUDE_DIR={0}'.format(python.prefix.include), '-DBUILD_opencv_python2=OFF', '-DPYTHON2_EXECUTABLE=', '-DPYTHON2_LIBRARIES=', @@ -121,9 +128,9 @@ def install(self, spec, prefix): python_exe = join_path(python.prefix.bin, 'python2') cmake_options.extend([ '-DBUILD_opencv_python2=ON', - '-DPYTHON2_EXECUTABLE=%s' % python_exe, - '-DPYTHON2_LIBRARIES=%s' % python.prefix.lib, - '-DPYTHON2_INCLUDE_DIR=%s' % python.prefix.include, + '-DPYTHON2_EXECUTABLE={0}'.format(python_exe), + '-DPYTHON2_LIBRARIES={0}'.format(python.prefix.lib), + '-DPYTHON2_INCLUDE_DIR={0}'.format(python.prefix.include), '-DBUILD_opencv_python3=OFF', '-DPYTHON3_EXECUTABLE=', '-DPYTHON3_LIBRARIES=', From da2b695f9387c2d7b0345c26efa7ce0c4d75df58 Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Mon, 18 Jul 2016 14:45:34 -0600 Subject: [PATCH 031/284] opencv : Fix lib finding for python --- .../repos/builtin/packages/opencv/package.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index bbec27054e0..89bb97039fa 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -112,29 +112,28 @@ def install(self, spec, prefix): if '+python' in spec: python = spec['python'] + try: + python_lib = glob(join_path(python.prefix.lib, 'libpython*.so'))[0] + except KeyError: + raise InstallError('Cannot find libpython') if '^python@3:' in spec: python_exe = join_path(python.prefix.bin, 'python3') cmake_options.extend([ '-DBUILD_opencv_python3=ON', '-DPYTHON3_EXECUTABLE={0}'.format(python_exe), - '-DPYTHON3_LIBRARIES={0}'.format(python.prefix.lib), + '-DPYTHON3_LIBRARY={0}'.format(python_lib), '-DPYTHON3_INCLUDE_DIR={0}'.format(python.prefix.include), '-DBUILD_opencv_python2=OFF', - '-DPYTHON2_EXECUTABLE=', - '-DPYTHON2_LIBRARIES=', - '-DPYTHON2_INCLUDE_DIR=', ]) elif '^python@2:3' in spec: python_exe = join_path(python.prefix.bin, 'python2') + python_lib = glob(join_path(python.prefix.lib, '*.so')) cmake_options.extend([ '-DBUILD_opencv_python2=ON', '-DPYTHON2_EXECUTABLE={0}'.format(python_exe), - '-DPYTHON2_LIBRARIES={0}'.format(python.prefix.lib), + '-DPYTHON2_LIBRARY={0}'.format(python_lib), '-DPYTHON2_INCLUDE_DIR={0}'.format(python.prefix.include), '-DBUILD_opencv_python3=OFF', - '-DPYTHON3_EXECUTABLE=', - '-DPYTHON3_LIBRARIES=', - '-DPYTHON3_INCLUDE_DIR=', ]) else: cmake_options.extend([ From d3b97227a18dc4135ff674a26aecb0e7eb5de27c Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Mon, 18 Jul 2016 15:26:38 -0600 Subject: [PATCH 032/284] opencv : Add optional jdk dependency --- var/spack/repos/builtin/packages/opencv/package.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index 89bb97039fa..da8d441ca25 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -58,6 +58,8 @@ class Opencv(Package): variant('qt', default=False, description='Activates support for QT') variant('python', default=False, description='Enables the 
build of Python extensions') + variant('java', default=False, + description='Activates support for Java') depends_on('zlib') depends_on('libpng') @@ -69,6 +71,7 @@ class Opencv(Package): depends_on('gtkplus', when='+gtk') depends_on('vtk', when='+vtk') depends_on('qt', when='+qt') + depends_on('jdk', when='+java') depends_on('py-numpy', when='+python') @@ -91,7 +94,9 @@ def install(self, spec, prefix): '-DWITH_QT:BOOL={0}'.format(( 'ON' if '+qt' in spec else 'OFF')), '-DWITH_VTK:BOOL={0}'.format(( - 'ON' if '+vtk' in spec else 'OFF')) + 'ON' if '+vtk' in spec else 'OFF')), + '-DBUILD_opencv_java:BOOL={0}'.format(( + 'ON' if '+java' in spec else 'OFF')), ]) if '+gtk' not in spec: From 97143768c895bb38a66c6c70fb5a6487dbfa291e Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Mon, 18 Jul 2016 16:30:31 -0600 Subject: [PATCH 033/284] opencv : Add libtiff cmake support --- var/spack/repos/builtin/packages/opencv/package.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index da8d441ca25..05c13240ded 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -99,6 +99,14 @@ def install(self, spec, prefix): 'ON' if '+java' in spec else 'OFF')), ]) + libtiff = spec['libtiff'] + cmake_options.extend([ + '-DTIFF_LIBRARY_{0}:FILEPATH={1}'.format(( + 'DEBUG' if '+debug' in spec else 'RELEASE'), + join_path(libtiff.prefix.lib, 'libtiff.so')), + '-DTIFF_INCLUDE_DIR:PATH={0}'.format(libtiff.prefix.include) + ]) + if '+gtk' not in spec: cmake_options.extend([ '-DWITH_GTK:BOOL=OFF', From fc79b104f01f50fcdc7d980e19e689d7eb75a285 Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Mon, 18 Jul 2016 16:35:37 -0600 Subject: [PATCH 034/284] opencv : Add libjpeg-turbo cmake support --- var/spack/repos/builtin/packages/opencv/package.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index 05c13240ded..7664a070006 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -99,6 +99,13 @@ def install(self, spec, prefix): 'ON' if '+java' in spec else 'OFF')), ]) + libjpeg = spec['libjpeg-turbo'] + cmake_options.extend([ + '-DJPEG_LIBRARY:FILEPATH={0}'.format( + join_path(libjpeg.prefix.lib, 'libjpeg.so'), + '-DJPEG_INCLUDE_DIR:PATH={0}'.format(libjpeg.prefix.include) + ]) + libtiff = spec['libtiff'] cmake_options.extend([ '-DTIFF_LIBRARY_{0}:FILEPATH={1}'.format(( From f2e8f27c1578b3850202300520f646cb52d917c8 Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Mon, 18 Jul 2016 16:39:07 -0600 Subject: [PATCH 035/284] opencv : Add libpng cmake support --- var/spack/repos/builtin/packages/opencv/package.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index 7664a070006..b8774d852cc 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -99,6 +99,14 @@ def install(self, spec, prefix): 'ON' if '+java' in spec else 'OFF')), ]) + libpng = spec['libpng'] + cmake_options.extend([ + '-DPNG_LIBRARY_{0}:FILEPATH={1}'.format(( + 'DEBUG' if '+debug' in spec else 'RELEASE'), + join_path(libpng.prefix.lib, 'libpng.so')), + '-DPNG_INCLUDE_DIR:PATH={0}'.format(libpng.prefix.include) + ]) + libjpeg = 
spec['libjpeg-turbo'] cmake_options.extend([ '-DJPEG_LIBRARY:FILEPATH={0}'.format( From 47514d07b12e5e2f995be73952349d0f09106095 Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Mon, 18 Jul 2016 16:49:37 -0600 Subject: [PATCH 036/284] opencv : Use dso_suffix --- .../repos/builtin/packages/opencv/package.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index b8774d852cc..3835a76cfca 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -20,9 +20,7 @@ # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * from glob import glob @@ -52,6 +50,7 @@ class Opencv(Package): variant('eigen', default=True, description='Activates support for eigen') variant('ipp', default=True, description='Activates support for IPP') + variant('jasper', default=True, description='Activates support for JasPer') variant('cuda', default=False, description='Activates support for CUDA') variant('gtk', default=False, description='Activates support for GTK') variant('vtk', default=False, description='Activates support for VTK') @@ -66,7 +65,9 @@ class Opencv(Package): depends_on('libjpeg-turbo') depends_on('libtiff') - depends_on('eigen', when='+eigen') + depends_on('jasper', when='+jasper') + depends_on('cmake', type='build') + depends_on('eigen', when='+eigen', type='build') depends_on('cuda', when='+cuda') depends_on('gtkplus', when='+gtk') depends_on('vtk', when='+vtk') @@ -103,14 +104,16 @@ def install(self, spec, prefix): cmake_options.extend([ '-DPNG_LIBRARY_{0}:FILEPATH={1}'.format(( 'DEBUG' if '+debug' in spec else 'RELEASE'), - join_path(libpng.prefix.lib, 'libpng.so')), + join_path(libpng.prefix.lib, + 'libpng.{0}'.format(dso_suffix))), '-DPNG_INCLUDE_DIR:PATH={0}'.format(libpng.prefix.include) ]) libjpeg = spec['libjpeg-turbo'] cmake_options.extend([ '-DJPEG_LIBRARY:FILEPATH={0}'.format( - join_path(libjpeg.prefix.lib, 'libjpeg.so'), + join_path(libjpeg.prefix.lib, + 'libjpeg.{0}'.format(dso_suffix))), '-DJPEG_INCLUDE_DIR:PATH={0}'.format(libjpeg.prefix.include) ]) @@ -118,7 +121,8 @@ def install(self, spec, prefix): cmake_options.extend([ '-DTIFF_LIBRARY_{0}:FILEPATH={1}'.format(( 'DEBUG' if '+debug' in spec else 'RELEASE'), - join_path(libtiff.prefix.lib, 'libtiff.so')), + join_path(libtiff.prefix.lib, + 'libtiff.{0}'.format(dso_suffix))), '-DTIFF_INCLUDE_DIR:PATH={0}'.format(libtiff.prefix.include) ]) From e5ae48a9da944a3ceda3e314d6502cd289722d7d Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Mon, 18 Jul 2016 17:09:01 -0600 Subject: [PATCH 037/284] opencv : Add jasper cmake support --- var/spack/repos/builtin/packages/opencv/package.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index 3835a76cfca..748de80ccf3 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -126,6 +126,15 @@ 
def install(self, spec, prefix): '-DTIFF_INCLUDE_DIR:PATH={0}'.format(libtiff.prefix.include) ]) + jasper = spec['jasper'] + cmake_options.extend([ + '-DJASPER_LIBRARY_{0}:FILEPATH={1}'.format(( + 'DEBUG' if '+debug' in spec else 'RELEASE'), + join_path(jasper.prefix.lib, + 'libjasper.{0}'.format(dso_suffix))), + '-DJASPER_INCLUDE_DIR:PATH={0}'.format(jasper.prefix.include) + ]) + if '+gtk' not in spec: cmake_options.extend([ '-DWITH_GTK:BOOL=OFF', From 670157b3647c40c7bf7b6c0d693e1ff2c775acd9 Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Mon, 18 Jul 2016 17:50:54 -0600 Subject: [PATCH 038/284] opencv : Add zlib cmake support --- var/spack/repos/builtin/packages/opencv/package.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index 748de80ccf3..cc91627d001 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -100,6 +100,15 @@ def install(self, spec, prefix): 'ON' if '+java' in spec else 'OFF')), ]) + zlib = spec['zlib'] + cmake_options.extend([ + '-DZLIB_LIBRARY_{0}:FILEPATH={1}'.format(( + 'DEBUG' if '+debug' in spec else 'RELEASE'), + join_path(zlib.prefix.lib, + 'libz.{0}'.format(dso_suffix))), + '-DZLIB_INCLUDE_DIR:PATH={0}'.format(zlib.prefix.include) + ]) + libpng = spec['libpng'] cmake_options.extend([ '-DPNG_LIBRARY_{0}:FILEPATH={1}'.format(( From ea9fa81ba5b8a8b9211d129e9bad5b5c146f5ad1 Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Mon, 18 Jul 2016 18:53:36 -0600 Subject: [PATCH 039/284] opencv : Fix python include dir for cmake --- .../repos/builtin/packages/opencv/package.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index cc91627d001..8e5725d43ce 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -162,17 +162,26 @@ def install(self, spec, prefix): if '+python' in spec: python = spec['python'] + try: - python_lib = glob(join_path(python.prefix.lib, 'libpython*.so'))[0] + python_lib = glob(join_path(python.prefix.lib, + 'libpython*.so'))[0] except KeyError: raise InstallError('Cannot find libpython') + + try: + python_include_dir = glob(join_path(python.prefix.include, + 'python*'))[0] + except KeyError: + raise InstallError('Cannot find python include directory') + if '^python@3:' in spec: python_exe = join_path(python.prefix.bin, 'python3') cmake_options.extend([ '-DBUILD_opencv_python3=ON', '-DPYTHON3_EXECUTABLE={0}'.format(python_exe), '-DPYTHON3_LIBRARY={0}'.format(python_lib), - '-DPYTHON3_INCLUDE_DIR={0}'.format(python.prefix.include), + '-DPYTHON3_INCLUDE_DIR={0}'.format(python_include_dir), '-DBUILD_opencv_python2=OFF', ]) elif '^python@2:3' in spec: @@ -182,7 +191,7 @@ def install(self, spec, prefix): '-DBUILD_opencv_python2=ON', '-DPYTHON2_EXECUTABLE={0}'.format(python_exe), '-DPYTHON2_LIBRARY={0}'.format(python_lib), - '-DPYTHON2_INCLUDE_DIR={0}'.format(python.prefix.include), + '-DPYTHON2_INCLUDE_DIR={0}'.format(python_include_dir), '-DBUILD_opencv_python3=OFF', ]) else: From bd02892f7bb36094c9dcfbc5e1553420d9708c96 Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Mon, 18 Jul 2016 21:57:08 -0600 Subject: [PATCH 040/284] opencv : Use dso_suffix for python lib --- var/spack/repos/builtin/packages/opencv/package.py | 5 ++--- 1 file changed, 2 insertions(+), 3 
deletions(-) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index 8e5725d43ce..28da53462b4 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -164,8 +164,8 @@ def install(self, spec, prefix): python = spec['python'] try: - python_lib = glob(join_path(python.prefix.lib, - 'libpython*.so'))[0] + python_lib = glob(join_path( + python.prefix.lib, 'libpython*.{0}'.format(dso_suffix)))[0] except KeyError: raise InstallError('Cannot find libpython') @@ -186,7 +186,6 @@ def install(self, spec, prefix): ]) elif '^python@2:3' in spec: python_exe = join_path(python.prefix.bin, 'python2') - python_lib = glob(join_path(python.prefix.lib, '*.so')) cmake_options.extend([ '-DBUILD_opencv_python2=ON', '-DPYTHON2_EXECUTABLE={0}'.format(python_exe), From a9541997aeb78001792d2bdeb186ff5bcf3d25d2 Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Wed, 20 Jul 2016 18:19:03 -0600 Subject: [PATCH 041/284] opencv : Reorder depends_on statements --- var/spack/repos/builtin/packages/opencv/package.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index 28da53462b4..ad96422ce8c 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -20,7 +20,9 @@ # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * from glob import glob @@ -60,20 +62,20 @@ class Opencv(Package): variant('java', default=False, description='Activates support for Java') + depends_on('cmake', type='build') + depends_on('eigen', when='+eigen', type='build') + depends_on('zlib') depends_on('libpng') depends_on('libjpeg-turbo') depends_on('libtiff') depends_on('jasper', when='+jasper') - depends_on('cmake', type='build') - depends_on('eigen', when='+eigen', type='build') depends_on('cuda', when='+cuda') depends_on('gtkplus', when='+gtk') depends_on('vtk', when='+vtk') depends_on('qt', when='+qt') depends_on('jdk', when='+java') - depends_on('py-numpy', when='+python') extends('python', when='+python') From a09bebcaea24a90072d0650760434be11fcf3f29 Mon Sep 17 00:00:00 2001 From: mwilliammyers Date: Wed, 20 Jul 2016 18:19:59 -0600 Subject: [PATCH 042/284] opencv : Add section comments --- var/spack/repos/builtin/packages/opencv/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index ad96422ce8c..8f592342b08 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -102,6 +102,7 @@ def install(self, spec, prefix): 'ON' if '+java' in spec else 'OFF')), ]) + # Media I/O zlib = spec['zlib'] cmake_options.extend([ '-DZLIB_LIBRARY_{0}:FILEPATH={1}'.format(( @@ -146,6 +147,7 @@ def install(self, spec, prefix): '-DJASPER_INCLUDE_DIR:PATH={0}'.format(jasper.prefix.include) ]) + # GUI if '+gtk' not in spec: 
cmake_options.extend([ '-DWITH_GTK:BOOL=OFF', @@ -162,6 +164,7 @@ def install(self, spec, prefix): '-DWITH_GTK_2_X:BOOL=ON' ]) + # Python if '+python' in spec: python = spec['python'] From 16f67b5bb1b6c8a37022f2a422ef5ae862ead956 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sun, 10 Jul 2016 21:21:43 -0500 Subject: [PATCH 043/284] Setup environment for Intel Parallel Studio Set up the environment for the Intel compilers and tools. This commit does the following: - Unset variables that were incorrect from the auto guess prefix inspections. - Add a RemovePath environment_modifications_formats for dotkit. - Set the module environment variables appropriate for the different variants. - Change the component logic so that the '+all' variant works. It was getting split by letter and leaving COMPONENTS empty. - Added a variant checking function. - Added NONRPM_DB_DIR to the silent.cfg so that the product database goes to the installation directory. - With the product database in prefix the code to remove the product database file from the home directory is no longer needed and was removed. - Reformat the 'tools' variant description. There are probably more variables needed for the '+tools' for the 'professional' product version but I do not have access to that. --- lib/spack/spack/modules.py | 1 + .../packages/intel-parallel-studio/package.py | 139 +++++++++++++++++- .../repos/builtin/packages/intel/package.py | 44 +++++- 3 files changed, 169 insertions(+), 15 deletions(-) diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py index 72656b8ae0c..db8b20ae42e 100644 --- a/lib/spack/spack/modules.py +++ b/lib/spack/spack/modules.py @@ -471,6 +471,7 @@ class Dotkit(EnvModule): path = join_path(spack.share_path, 'dotkit') environment_modifications_formats = { PrependPath: 'dk_alter {name} {value}\n', + RemovePath: 'dk_unalter {name} {value}\n', SetEnv: 'dk_setenv {name} {value}\n' } diff --git a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py index 493ca16417c..068306985f8 100644 --- a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py +++ b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py @@ -42,15 +42,26 @@ class IntelParallelStudio(IntelInstaller): variant('daal', default=True, description="Install the Intel DAAL libraries") variant('ipp', default=True, description="Install the Intel IPP libraries") - variant('tools', default=True, description="""Install the Intel Advisor,\ -VTune Amplifier, and Inspector tools""") + variant('tools', default=True, description="Install the Intel Advisor, " + "VTune Amplifier, and Inspector tools") provides('mpi', when='@cluster:+mpi') provides('mkl', when='+mkl') provides('daal', when='+daal') provides('ipp', when='+ipp') + def check_variants(self, spec): + error_message = '\t{variant} can not be turned off if "+all" is set' + + errors = [error_message.format(variant=x) + for x in ('mpi', 'mkl', 'daal', 'ipp', 'tools') + if ('~' + x) in self.spec] + if errors: + errors = ['incompatible variants given'] + errors + raise InstallError('\n'.join(errors)) + def install(self, spec, prefix): + self.check_variants(spec) base_components = "ALL" # when in doubt, install everything mpi_components = "" @@ -58,9 +69,7 @@ def install(self, spec, prefix): daal_components = "" ipp_components = "" - if spec.satisfies('+all'): - base_components = "ALL" - else: + if not spec.satisfies('+all'): all_components = get_all_components() regex = 
'(comp|openmp|intel-tbb|icc|ifort|psxe|icsxe-pset)' base_components = \ @@ -77,8 +86,8 @@ def install(self, spec, prefix): regex = '(gdb|vtune|inspector|advisor)' tool_components = \ filter_pick(all_components, re.compile(regex).search) + components = base_components - components = base_components if not spec.satisfies('+all'): if spec.satisfies('+mpi') and 'cluster' in str(spec.version): components += mpi_components @@ -92,7 +101,10 @@ def install(self, spec, prefix): spec.satisfies('@professional')): components += tool_components - self.intel_components = ';'.join(components) + if spec.satisfies('+all'): + self.intel_components = 'ALL' + else: + self.intel_components = ';'.join(components) IntelInstaller.install(self, spec, prefix) absbindir = os.path.dirname(os.path.realpath(os.path.join( @@ -142,3 +154,116 @@ def install(self, spec, prefix): os.symlink(os.path.join(self.prefix.man, "common", "man1"), os.path.join(self.prefix.man, "man1")) + + def setup_environment(self, spack_env, run_env): + major_ver = self.version[1] + + # Remove paths that were guessed but are incorrect for this package. + run_env.remove_path('LIBRARY_PATH', + join_path(self.prefix, 'lib')) + run_env.remove_path('LD_LIBRARY_PATH', + join_path(self.prefix, 'lib')) + run_env.remove_path('CPATH', + join_path(self.prefix, 'include')) + + # Add the default set of variables + run_env.prepend_path('LIBRARY_PATH', + join_path(self.prefix, 'lib', 'intel64')) + run_env.prepend_path('LD_LIBRARY_PATH', + join_path(self.prefix, 'lib', 'intel64')) + run_env.prepend_path('LIBRARY_PATH', + join_path(self.prefix, 'tbb', 'lib', + 'intel64', 'gcc4.4')) + run_env.prepend_path('LD_LIBRARY_PATH', + join_path(self.prefix, 'tbb', 'lib', + 'intel64', 'gcc4.4')) + run_env.prepend_path('CPATH', + join_path(self.prefix, 'tbb', 'include')) + run_env.prepend_path('MIC_LIBRARY_PATH', + join_path(self.prefix, 'lib', 'mic')) + run_env.prepend_path('MIC_LD_LIBRARY_PATH', + join_path(self.prefix, 'lib', 'mic')) + run_env.prepend_path('MIC_LIBRARY_PATH', + join_path(self.prefix, 'tbb','lib', 'mic')) + run_env.prepend_path('MIC_LD_LIBRARY_PATH', + join_path(self.prefix, 'tbb','lib', 'mic')) + + if self.spec.satisfies('+all'): + run_env.prepend_path('PATH', + join_path(self.prefix, + 'debugger_{0}'.format(major_ver), + 'gdb', 'intel64_mic', 'bin')) + run_env.prepend_path('LD_LIBRARY_PATH', + join_path(self.prefix, + 'debugger_{0}'.format(major_ver), + 'libipt', 'intel64', 'lib')) + run_env.set('GDBSERVER_MIC', + join_path(self.prefix, + 'debugger_{0}'.format(major_ver), 'gdb', + 'targets', 'mic', 'bin', 'gdbserver')) + run_env.set('GDB_CROSS', + join_path(self.prefix, + 'debugger_{0}'.format(major_ver), + 'gdb', 'intel64_mic', 'bin', 'gdb-mic')) + run_env.set('MPM_LAUNCHER', + join_path(self.prefix, + 'debugger_{0}'.format(major_ver), 'mpm', + 'mic', + 'bin', 'start_mpm.sh')) + run_env.set('INTEL_PYTHONHOME', + join_path(self.prefix, + 'debugger_{0}'.format(major_ver), 'python', + 'intel64')) + + if (self.spec.satisfies('+all') or self.spec.satisfies('+mpi')) and \ + self.spec.satisfies('@cluster'): + run_env.prepend_path('PATH', + join_path(self.prefix, 'mpi', 'intel64', 'bin')) + run_env.prepend_path('LD_LIBRARY_PATH', + join_path(self.prefix, 'mpi', 'intel64', 'lib')) + run_env.prepend_path('LIBRARY_PATH', + join_path(self.prefix, 'mpi', 'intel64', 'lib')) + run_env.prepend_path('LD_LIBRARY_PATH', + join_path(self.prefix, 'mpi', 'mic', 'lib')) + run_env.prepend_path('MIC_LIBRARY_PATH', + join_path(self.prefix, 'mpi', 'mic', 'lib')) + 
run_env.prepend_path('MIC_LD_LIBRARY_PATH', + join_path(self.prefix, 'mpi', 'mic', 'lib')) + run_env.set('I_MPI_ROOT', join_path(self.prefix, 'mpi')) + + if self.spec.satisfies('+all') or self.spec.satisfies('+mkl'): + run_env.prepend_path('LD_LIBRARY_PATH', + join_path(self.prefix, 'mkl', 'lib', 'intel64')) + run_env.prepend_path('LIBRARY_PATH', + join_path(self.prefix, 'mkl', 'lib', 'intel64')) + run_env.prepend_path('CPATH', + join_path(self.prefix, 'mkl', 'include')) + run_env.prepend_path('MIC_LD_LIBRARY_PATH', + join_path(self.prefix, 'mkl','lib', 'mic')) + run_env.set('MKLROOT', join_path(self.prefix, 'mkl')) + + if self.spec.satisfies('+all') or self.spec.satisfies('+daal'): + run_env.prepend_path('LD_LIBRARY_PATH', + join_path(self.prefix, 'daal', 'lib', + 'intel64_lin')) + run_env.prepend_path('LIBRARY_PATH', + join_path(self.prefix, 'daal', 'lib', + 'intel64_lin')) + run_env.prepend_path('CPATH', + join_path(self.prefix, 'daal', 'include')) + run_env.prepend_path('CLASSPATH', + join_path(self.prefix, 'daal', 'lib', + 'daal.jar')) + run_env.set('DAALROOT', join_path(self.prefix, 'daal')) + + if self.spec.satisfies('+all') or self.spec.satisfies('+ipp'): + run_env.prepend_path('LD_LIBRARY_PATH', + join_path(self.prefix, 'ipp', 'lib', 'intel64')) + run_env.prepend_path('LIBRARY_PATH', + join_path(self.prefix, 'ipp', 'lib', 'intel64')) + run_env.prepend_path('CPATH', + join_path(self.prefix, 'ipp', 'include')) + run_env.prepend_path('MIC_LD_LIBRARY_PATH', + join_path(self.prefix, 'ipp','lib', 'mic')) + run_env.set('IPPROOT', join_path(self.prefix, 'ipp')) + diff --git a/var/spack/repos/builtin/packages/intel/package.py b/var/spack/repos/builtin/packages/intel/package.py index 56d9fabddf3..7988926e6de 100644 --- a/var/spack/repos/builtin/packages/intel/package.py +++ b/var/spack/repos/builtin/packages/intel/package.py @@ -49,13 +49,6 @@ def global_license_file(self): def install(self, spec, prefix): - # Remove the installation DB, otherwise it will try to install into - # location of other Intel builds - if os.path.exists(os.path.join(os.environ["HOME"], "intel", - "intel_sdp_products.db")): - os.remove(os.path.join(os.environ["HOME"], "intel", - "intel_sdp_products.db")) - if not hasattr(self, "intel_prefix"): self.intel_prefix = self.prefix @@ -66,12 +59,14 @@ def install(self, spec, prefix): PSET_MODE=install CONTINUE_WITH_INSTALLDIR_OVERWRITE=yes PSET_INSTALL_DIR=%s +NONRPM_DB_DIR=%s ACTIVATION_LICENSE_FILE=%s ACTIVATION_TYPE=license_file PHONEHOME_SEND_USAGE_DATA=no CONTINUE_WITH_OPTIONAL_ERROR=yes COMPONENTS=%s -""" % (self.intel_prefix, self.global_license_file, self.intel_components)) +""" % (self.intel_prefix, self.intel_prefix, self.global_license_file, + self.intel_components)) install_script = Executable("./install.sh") install_script('--silent', silent_config_filename) @@ -123,3 +118,36 @@ def install(self, spec, prefix): os.symlink(os.path.join(self.prefix.man, "common", "man1"), os.path.join(self.prefix.man, "man1")) + + def setup_environment(self, spack_env, run_env): + + # Remove paths that were guessed but are incorrect for this package. 
+ run_env.remove_path('LIBRARY_PATH', + join_path(self.prefix, 'lib')) + run_env.remove_path('LD_LIBRARY_PATH', + join_path(self.prefix, 'lib')) + run_env.remove_path('CPATH', + join_path(self.prefix, 'include')) + + # Add the default set of variables + run_env.prepend_path('LIBRARY_PATH', + join_path(self.prefix, 'lib', 'intel64')) + run_env.prepend_path('LD_LIBRARY_PATH', + join_path(self.prefix, 'lib', 'intel64')) + run_env.prepend_path('LIBRARY_PATH', + join_path(self.prefix, 'tbb', 'lib', + 'intel64', 'gcc4.4')) + run_env.prepend_path('LD_LIBRARY_PATH', + join_path(self.prefix, 'tbb', 'lib', + 'intel64', 'gcc4.4')) + run_env.prepend_path('CPATH', + join_path(self.prefix, 'tbb', 'include')) + run_env.prepend_path('MIC_LIBRARY_PATH', + join_path(self.prefix, 'lib', 'mic')) + run_env.prepend_path('MIC_LD_LIBRARY_PATH', + join_path(self.prefix, 'lib', 'mic')) + run_env.prepend_path('MIC_LIBRARY_PATH', + join_path(self.prefix, 'tbb','lib', 'mic')) + run_env.prepend_path('MIC_LD_LIBRARY_PATH', + join_path(self.prefix, 'tbb','lib', 'mic')) + From 5e5e36a6596e13c60330b674adf1237fc06f512d Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sun, 10 Jul 2016 21:59:46 -0500 Subject: [PATCH 044/284] Fix flake8 errors. --- .../packages/intel-parallel-studio/package.py | 30 +++++++++++-------- .../repos/builtin/packages/intel/package.py | 11 ++++--- 2 files changed, 23 insertions(+), 18 deletions(-) diff --git a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py index 068306985f8..223b0ce6661 100644 --- a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py +++ b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py @@ -184,9 +184,9 @@ def setup_environment(self, spack_env, run_env): run_env.prepend_path('MIC_LD_LIBRARY_PATH', join_path(self.prefix, 'lib', 'mic')) run_env.prepend_path('MIC_LIBRARY_PATH', - join_path(self.prefix, 'tbb','lib', 'mic')) + join_path(self.prefix, 'tbb', 'lib', 'mic')) run_env.prepend_path('MIC_LD_LIBRARY_PATH', - join_path(self.prefix, 'tbb','lib', 'mic')) + join_path(self.prefix, 'tbb', 'lib', 'mic')) if self.spec.satisfies('+all'): run_env.prepend_path('PATH', @@ -218,11 +218,14 @@ def setup_environment(self, spack_env, run_env): if (self.spec.satisfies('+all') or self.spec.satisfies('+mpi')) and \ self.spec.satisfies('@cluster'): run_env.prepend_path('PATH', - join_path(self.prefix, 'mpi', 'intel64', 'bin')) + join_path(self.prefix, 'mpi', 'intel64', + 'bin')) run_env.prepend_path('LD_LIBRARY_PATH', - join_path(self.prefix, 'mpi', 'intel64', 'lib')) + join_path(self.prefix, 'mpi', 'intel64', + 'lib')) run_env.prepend_path('LIBRARY_PATH', - join_path(self.prefix, 'mpi', 'intel64', 'lib')) + join_path(self.prefix, 'mpi', 'intel64', + 'lib')) run_env.prepend_path('LD_LIBRARY_PATH', join_path(self.prefix, 'mpi', 'mic', 'lib')) run_env.prepend_path('MIC_LIBRARY_PATH', @@ -233,13 +236,15 @@ def setup_environment(self, spack_env, run_env): if self.spec.satisfies('+all') or self.spec.satisfies('+mkl'): run_env.prepend_path('LD_LIBRARY_PATH', - join_path(self.prefix, 'mkl', 'lib', 'intel64')) + join_path(self.prefix, 'mkl', 'lib', + 'intel64')) run_env.prepend_path('LIBRARY_PATH', - join_path(self.prefix, 'mkl', 'lib', 'intel64')) + join_path(self.prefix, 'mkl', 'lib', + 'intel64')) run_env.prepend_path('CPATH', join_path(self.prefix, 'mkl', 'include')) run_env.prepend_path('MIC_LD_LIBRARY_PATH', - join_path(self.prefix, 'mkl','lib', 'mic')) + join_path(self.prefix, 'mkl', 
'lib', 'mic')) run_env.set('MKLROOT', join_path(self.prefix, 'mkl')) if self.spec.satisfies('+all') or self.spec.satisfies('+daal'): @@ -258,12 +263,13 @@ def setup_environment(self, spack_env, run_env): if self.spec.satisfies('+all') or self.spec.satisfies('+ipp'): run_env.prepend_path('LD_LIBRARY_PATH', - join_path(self.prefix, 'ipp', 'lib', 'intel64')) + join_path(self.prefix, 'ipp', 'lib', + 'intel64')) run_env.prepend_path('LIBRARY_PATH', - join_path(self.prefix, 'ipp', 'lib', 'intel64')) + join_path(self.prefix, 'ipp', 'lib', + 'intel64')) run_env.prepend_path('CPATH', join_path(self.prefix, 'ipp', 'include')) run_env.prepend_path('MIC_LD_LIBRARY_PATH', - join_path(self.prefix, 'ipp','lib', 'mic')) + join_path(self.prefix, 'ipp', 'lib', 'mic')) run_env.set('IPPROOT', join_path(self.prefix, 'ipp')) - diff --git a/var/spack/repos/builtin/packages/intel/package.py b/var/spack/repos/builtin/packages/intel/package.py index 7988926e6de..d171411946f 100644 --- a/var/spack/repos/builtin/packages/intel/package.py +++ b/var/spack/repos/builtin/packages/intel/package.py @@ -66,7 +66,7 @@ def install(self, spec, prefix): CONTINUE_WITH_OPTIONAL_ERROR=yes COMPONENTS=%s """ % (self.intel_prefix, self.intel_prefix, self.global_license_file, - self.intel_components)) + self.intel_components)) install_script = Executable("./install.sh") install_script('--silent', silent_config_filename) @@ -106,8 +106,8 @@ def install(self, spec, prefix): self.prefix.lib, "intel64", "libimf.a")))[0] # symlink or copy? - os.symlink(self.global_license_file, os.path.join(absbindir, - "license.lic")) + os.symlink(self.global_license_file, + os.path.join(absbindir, "license.lic")) if spec.satisfies('+rpath'): for compiler_command in ["icc", "icpc", "ifort"]: @@ -147,7 +147,6 @@ def setup_environment(self, spack_env, run_env): run_env.prepend_path('MIC_LD_LIBRARY_PATH', join_path(self.prefix, 'lib', 'mic')) run_env.prepend_path('MIC_LIBRARY_PATH', - join_path(self.prefix, 'tbb','lib', 'mic')) + join_path(self.prefix, 'tbb', 'lib', 'mic')) run_env.prepend_path('MIC_LD_LIBRARY_PATH', - join_path(self.prefix, 'tbb','lib', 'mic')) - + join_path(self.prefix, 'tbb', 'lib', 'mic')) From 0be4c7dfbdfd1c84a8d2ed97cdb649fdf4e84ed7 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Tue, 12 Jul 2016 07:37:36 -0500 Subject: [PATCH 045/284] Changes as discussed in PR review - removed unneeded entry for `debugger/gdb/intel64_mic` - set `+mpi` directory name to impi --- .../packages/intel-parallel-studio/package.py | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py index 223b0ce6661..57f03d247fa 100644 --- a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py +++ b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py @@ -189,10 +189,6 @@ def setup_environment(self, spack_env, run_env): join_path(self.prefix, 'tbb', 'lib', 'mic')) if self.spec.satisfies('+all'): - run_env.prepend_path('PATH', - join_path(self.prefix, - 'debugger_{0}'.format(major_ver), - 'gdb', 'intel64_mic', 'bin')) run_env.prepend_path('LD_LIBRARY_PATH', join_path(self.prefix, 'debugger_{0}'.format(major_ver), @@ -218,21 +214,21 @@ def setup_environment(self, spack_env, run_env): if (self.spec.satisfies('+all') or self.spec.satisfies('+mpi')) and \ self.spec.satisfies('@cluster'): run_env.prepend_path('PATH', - join_path(self.prefix, 'mpi', 'intel64', + join_path(self.prefix, 
'impi', 'intel64', 'bin')) run_env.prepend_path('LD_LIBRARY_PATH', - join_path(self.prefix, 'mpi', 'intel64', + join_path(self.prefix, 'impi', 'intel64', 'lib')) run_env.prepend_path('LIBRARY_PATH', - join_path(self.prefix, 'mpi', 'intel64', + join_path(self.prefix, 'impi', 'intel64', 'lib')) run_env.prepend_path('LD_LIBRARY_PATH', - join_path(self.prefix, 'mpi', 'mic', 'lib')) + join_path(self.prefix, 'impi', 'mic', 'lib')) run_env.prepend_path('MIC_LIBRARY_PATH', - join_path(self.prefix, 'mpi', 'mic', 'lib')) + join_path(self.prefix, 'impi', 'mic', 'lib')) run_env.prepend_path('MIC_LD_LIBRARY_PATH', - join_path(self.prefix, 'mpi', 'mic', 'lib')) - run_env.set('I_MPI_ROOT', join_path(self.prefix, 'mpi')) + join_path(self.prefix, 'impi', 'mic', 'lib')) + run_env.set('I_MPI_ROOT', join_path(self.prefix, 'impi')) if self.spec.satisfies('+all') or self.spec.satisfies('+mkl'): run_env.prepend_path('LD_LIBRARY_PATH', From ead1b93f75c3eddcf3f5c6b72ad4dc3ee6e34ee6 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Wed, 13 Jul 2016 16:46:43 -0500 Subject: [PATCH 046/284] MPI related changes - Redid the code for setting the itac symlink for the cluster edition. - Removed the *PATH variables for MPI to avoid a conflict with other MPI environment modules. - Added missing test for `+all` when checking variants. --- .../packages/intel-parallel-studio/package.py | 48 +++++++++---------- 1 file changed, 22 insertions(+), 26 deletions(-) diff --git a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py index 57f03d247fa..fb98b2473c5 100644 --- a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py +++ b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py @@ -53,12 +53,13 @@ class IntelParallelStudio(IntelInstaller): def check_variants(self, spec): error_message = '\t{variant} can not be turned off if "+all" is set' - errors = [error_message.format(variant=x) - for x in ('mpi', 'mkl', 'daal', 'ipp', 'tools') - if ('~' + x) in self.spec] - if errors: - errors = ['incompatible variants given'] + errors - raise InstallError('\n'.join(errors)) + if self.spec.satisfies('+all'): + errors = [error_message.format(variant=x) + for x in ('mpi', 'mkl', 'daal', 'ipp', 'tools') + if ('~' + x) in self.spec] + if errors: + errors = ['incompatible variants given'] + errors + raise InstallError('\n'.join(errors)) def install(self, spec, prefix): self.check_variants(spec) @@ -89,7 +90,7 @@ def install(self, spec, prefix): components = base_components if not spec.satisfies('+all'): - if spec.satisfies('+mpi') and 'cluster' in str(spec.version): + if spec.satisfies('+mpi'): components += mpi_components if spec.satisfies('+mkl'): components += mkl_components @@ -128,8 +129,11 @@ def install(self, spec, prefix): if (spec.satisfies('+all') or spec.satisfies('+mpi')) and \ spec.satisfies('@cluster'): - os.symlink(self.global_license_file, os.path.join( - self.prefix, "itac_latest", "license.lic")) + for ifile in os.listdir(os.path.join(self.prefix, "itac")): + if os.path.isdir(os.path.join(self.prefix, "itac", ifile)): + os.symlink(self.global_license_file, + os.path.join(self.prefix, "itac", ifile, + "license.lic")) if spec.satisfies('~newdtags'): wrappers = ["mpif77", "mpif77", "mpif90", "mpif90", "mpigcc", "mpigcc", "mpigxx", "mpigxx", @@ -156,6 +160,8 @@ def install(self, spec, prefix): os.path.join(self.prefix.man, "man1")) def setup_environment(self, spack_env, run_env): + # TODO: Determine variables needed for the 
professional edition. + major_ver = self.version[1] # Remove paths that were guessed but are incorrect for this package. @@ -211,23 +217,13 @@ def setup_environment(self, spack_env, run_env): 'debugger_{0}'.format(major_ver), 'python', 'intel64')) - if (self.spec.satisfies('+all') or self.spec.satisfies('+mpi')) and \ - self.spec.satisfies('@cluster'): - run_env.prepend_path('PATH', - join_path(self.prefix, 'impi', 'intel64', - 'bin')) - run_env.prepend_path('LD_LIBRARY_PATH', - join_path(self.prefix, 'impi', 'intel64', - 'lib')) - run_env.prepend_path('LIBRARY_PATH', - join_path(self.prefix, 'impi', 'intel64', - 'lib')) - run_env.prepend_path('LD_LIBRARY_PATH', - join_path(self.prefix, 'impi', 'mic', 'lib')) - run_env.prepend_path('MIC_LIBRARY_PATH', - join_path(self.prefix, 'impi', 'mic', 'lib')) - run_env.prepend_path('MIC_LD_LIBRARY_PATH', - join_path(self.prefix, 'impi', 'mic', 'lib')) + if (self.spec.satisfies('+all') or self.spec.satisfies('+mpi')): + # Only I_MPI_ROOT is set here because setting the various PATH + # variables will potentially be in conflict with other MPI + # environment modules. The I_MPI_ROOT environment variable can be + # used as a base to set necessary PATH variables for using Intel + # MPI. It is also possible to set the variables in the modules.yaml + # file if Intel MPI is the dominant, or only, MPI on a system. run_env.set('I_MPI_ROOT', join_path(self.prefix, 'impi')) if self.spec.satisfies('+all') or self.spec.satisfies('+mkl'): From 4e1f86881a82578ab2758b9db7a6190fcaafc3cb Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Thu, 21 Jul 2016 13:15:10 +0200 Subject: [PATCH 047/284] Update documentation for recursive module loading. --- lib/spack/docs/basic_usage.rst | 61 +++++++++++++++++----------------- 1 file changed, 31 insertions(+), 30 deletions(-) diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst index 948092047bd..df9a3901bfc 100644 --- a/lib/spack/docs/basic_usage.rst +++ b/lib/spack/docs/basic_usage.rst @@ -546,7 +546,7 @@ More formally, a spec consists of the following pieces: boolean variants * ``name=`` Optional compiler flag specifiers. Valid flag names are ``cflags``, ``cxxflags``, ``fflags``, ``cppflags``, ``ldflags``, and ``ldlibs``. -* ``target= os=`` Optional architecture specifier +* ``target= os=`` Optional architecture specifier (``target=haswell os=CNL10``) * ``^`` Dependency specs (``^callpath@1.1``) @@ -764,12 +764,12 @@ words ``target`` and/or ``os`` (``target=x86-64 os=debian7``). You can also use the triplet form of platform, operating system and processor. .. code-block:: sh - + spack install libelf arch=cray_xc-CNL10-haswell Users on non-Cray systems won't have to worry about specifying the architecture. Spack will autodetect what kind of operating system is on your machine as well -as the processor. For more information on how the architecture can be +as the processor. For more information on how the architecture can be used on Cray machines, check here :ref:`spack-cray` @@ -1147,11 +1147,12 @@ packages use RPATH to find their dependencies: this can be true in particular for Python extensions, which are currently *not* built with RPATH. -Modules may be loaded recursively with the command: +Modules may be loaded recursively with the ``load`` command's +``--dependencies`` or ``-r`` argument: .. code-block:: sh - $ module load `spack module tcl --dependencies ... + $ spack load --dependencies ... More than one spec may be placed on the command line here. 
@@ -1793,36 +1794,36 @@ A nicer error message is TBD in future versions of Spack. Spack on Cray ----------------------------- -Spack differs slightly when used on a Cray system. The architecture spec +Spack differs slightly when used on a Cray system. The architecture spec can differentiate between the front-end and back-end processor and operating system. -For example, on Edison at NERSC, the back-end target processor +For example, on Edison at NERSC, the back-end target processor is \"Ivy Bridge\", so you can specify to use the back-end this way: .. code-block:: sh - + spack install zlib target=ivybridge You can also use the operating system to build against the back-end: .. code-block:: sh - + spack install zlib os=CNL10 -Notice that the name includes both the operating system name and the major +Notice that the name includes both the operating system name and the major version number concatenated together. -Alternatively, if you want to build something for the front-end, -you can specify the front-end target processor. The processor for a login node +Alternatively, if you want to build something for the front-end, +you can specify the front-end target processor. The processor for a login node on Edison is \"Sandy bridge\" so we specify on the command line like so: .. code-block:: sh - + spack install zlib target=sandybridge And the front-end operating system is: .. code-block:: sh - + spack install zlib os=SuSE11 @@ -1830,13 +1831,13 @@ And the front-end operating system is: Cray compiler detection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Spack can detect compilers using two methods. For the front-end, we treat -everything the same. The difference lies in back-end compiler detection. -Back-end compiler detection is made via the Tcl module avail command. -Once it detects the compiler it writes the appropriate PrgEnv and compiler -module name to compilers.yaml and sets the paths to each compiler with Cray\'s -compiler wrapper names (i.e. cc, CC, ftn). During build time, Spack will load -the correct PrgEnv and compiler module and will call appropriate wrapper. +Spack can detect compilers using two methods. For the front-end, we treat +everything the same. The difference lies in back-end compiler detection. +Back-end compiler detection is made via the Tcl module avail command. +Once it detects the compiler it writes the appropriate PrgEnv and compiler +module name to compilers.yaml and sets the paths to each compiler with Cray\'s +compiler wrapper names (i.e. cc, CC, ftn). During build time, Spack will load +the correct PrgEnv and compiler module and will call appropriate wrapper. The compilers.yaml config file will also differ. There is a modules section that is filled with the compiler\'s Programming Environment @@ -1849,8 +1850,8 @@ and module name. On other systems, this field is empty []:: - intel/15.0.109 ... -As mentioned earlier, the compiler paths will look different on a Cray system. -Since most compilers are invoked using cc, CC and ftn, the paths for each +As mentioned earlier, the compiler paths will look different on a Cray system. +Since most compilers are invoked using cc, CC and ftn, the paths for each compiler are replaced with their respective Cray compiler wrapper names:: ... @@ -1862,7 +1863,7 @@ compiler are replaced with their respective Cray compiler wrapper names:: ... As opposed to an explicit path to the compiler executable. This allows Spack -to call the Cray compiler wrappers during build time. +to call the Cray compiler wrappers during build time. 
For more on compiler configuration, check out :ref:`compiler-config`. @@ -1889,11 +1890,11 @@ Here\'s an example of an external configuration for cray modules: This tells Spack that for whatever package that depends on mpi, load the cray-mpich module into the environment. You can then be able to use whatever environment variables, libraries, etc, that are brought into the environment -via module load. +via module load. -You can set the default compiler that Spack can use for each compiler type. -If you want to use the Cray defaults, then set them under *all:* in packages.yaml. -In the compiler field, set the compiler specs in your order of preference. +You can set the default compiler that Spack can use for each compiler type. +If you want to use the Cray defaults, then set them under *all:* in packages.yaml. +In the compiler field, set the compiler specs in your order of preference. Whenever you build with that compiler type, Spack will concretize to that version. Here is an example of a full packages.yaml used at NERSC @@ -1921,11 +1922,11 @@ Here is an example of a full packages.yaml used at NERSC Here we tell spack that whenever we want to build with gcc use version 5.2.0 or if we want to build with intel compilers, use version 16.0.0.109. We add a spec -for each compiler type for each cray modules. This ensures that for each +for each compiler type for each cray modules. This ensures that for each compiler on our system we can use that external module. -For more on external packages check out the section :ref:`sec-external_packages`. +For more on external packages check out the section :ref:`sec-external_packages`. Getting Help ----------------------- From 732c24f603864ec7dbec425ac4507b47fdb7a641 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Thu, 21 Jul 2016 13:15:53 +0200 Subject: [PATCH 048/284] Fix recursive module loading. --- share/spack/setup-env.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/share/spack/setup-env.sh b/share/spack/setup-env.sh index c6183f990df..2eb1dfecb31 100755 --- a/share/spack/setup-env.sh +++ b/share/spack/setup-env.sh @@ -117,19 +117,19 @@ function spack { # If spack module command comes back with an error, do nothing. 
case $_sp_subcommand in "use") - if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args --module-type dotkit $_sp_spec); then + if _sp_full_spec=$(command spack $_sp_flags module loads --input-only $_sp_subcommand_args --module-type dotkit $_sp_spec); then use $_sp_module_args $_sp_full_spec fi ;; "unuse") - if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args --module-type dotkit $_sp_spec); then + if _sp_full_spec=$(command spack $_sp_flags module loads --input-only $_sp_subcommand_args --module-type dotkit $_sp_spec); then unuse $_sp_module_args $_sp_full_spec fi ;; "load") - if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args --module-type tcl $_sp_spec); then + if _sp_full_spec=$(command spack $_sp_flags module loads --input-only $_sp_subcommand_args --module-type tcl $_sp_spec); then module load $_sp_module_args $_sp_full_spec fi ;; "unload") - if _sp_full_spec=$(command spack $_sp_flags module find $_sp_subcommand_args --module-type tcl $_sp_spec); then + if _sp_full_spec=$(command spack $_sp_flags module loads --input-only $_sp_subcommand_args --module-type tcl $_sp_spec); then module unload $_sp_module_args $_sp_full_spec fi ;; esac From bd91dd9d6d4607fd603cc1f3baaf753a5f4915bf Mon Sep 17 00:00:00 2001 From: Paul Hopkins Date: Mon, 11 Jul 2016 11:44:15 +0100 Subject: [PATCH 049/284] Allow users to supply preferred variants via packages.yaml --- lib/spack/spack/concretize.py | 9 +++++++-- lib/spack/spack/config.py | 8 +++++++- lib/spack/spack/preferred_packages.py | 7 +++++++ 3 files changed, 21 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 386df08b2e5..622a7efde51 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -254,13 +254,18 @@ def concretize_architecture(self, spec): def concretize_variants(self, spec): """If the spec already has variants filled in, return. Otherwise, add - the default variants from the package specification. + the user preferences from packages.yaml or the default variants from + the package specification. 
""" changed = False + preferred_variants = spack.pkgsort.spec_preferred_variants(spec.package_class.name) for name, variant in spec.package_class.variants.items(): if name not in spec.variants: - spec.variants[name] = spack.spec.VariantSpec(name, variant.default) changed = True + if name in preferred_variants: + spec.variants[name] = preferred_variants.get(name) + else: + spec.variants[name] = spack.spec.VariantSpec(name, variant.default) return changed diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index 8b5e96f97d1..e2e7dbc0ee4 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -257,7 +257,13 @@ 'paths': { 'type' : 'object', 'default' : {}, - } + }, + 'variants': { + 'oneOf' : [ + { 'type' : 'string' }, + { 'type' : 'array', + 'items' : { 'type' : 'string' } }, + ], }, },},},},},}, 'modules': { diff --git a/lib/spack/spack/preferred_packages.py b/lib/spack/spack/preferred_packages.py index 1b94f03de7f..5f18e212b6c 100644 --- a/lib/spack/spack/preferred_packages.py +++ b/lib/spack/spack/preferred_packages.py @@ -158,6 +158,13 @@ def spec_has_preferred_provider(self, pkgname, provider_str): return bool(self._order_for_package(pkgname, 'providers', provider_str, False)) + def spec_preferred_variants(self, pkgname): + """Return a VariantMap of preferred variants and their values""" + variants = self.preferred.get(pkgname, {}).get('variants', '') + if not isinstance(variants, basestring): + variants = "".join(variants) + return spack.spec.Spec(pkgname + variants).variants + def version_compare(self, pkgname, a, b): """Return less-than-0, 0, or greater than 0 if version a of pkgname is respectively less-than, equal-to, or greater-than version b of From 480fe9cb9ac2e0c6e2da4557abe9fd99fff6c64d Mon Sep 17 00:00:00 2001 From: Paul Hopkins Date: Mon, 11 Jul 2016 12:03:28 +0100 Subject: [PATCH 050/284] Re-add documentation for variant preferences --- lib/spack/docs/configuration.rst | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/lib/spack/docs/configuration.rst b/lib/spack/docs/configuration.rst index f2ffa072643..d39c9320219 100644 --- a/lib/spack/docs/configuration.rst +++ b/lib/spack/docs/configuration.rst @@ -142,8 +142,9 @@ Here's an example packages.yaml file that sets preferred packages: .. code-block:: sh packages: - dyninst: + opencv: compiler: [gcc@4.9] + variants: +debug gperftools: version: [2.2, 2.4, 2.3] all: @@ -153,17 +154,17 @@ Here's an example packages.yaml file that sets preferred packages: At a high level, this example is specifying how packages should be -concretized. The dyninst package should prefer using gcc 4.9. -The gperftools package should prefer version +concretized. The opencv package should prefer using gcc 4.9 and +be built with debug options. The gperftools package should prefer version 2.2 over 2.4. Every package on the system should prefer mvapich for -its MPI and gcc 4.4.7 (except for Dyninst, which overrides this by preferring gcc 4.9). +its MPI and gcc 4.4.7 (except for opencv, which overrides this by preferring gcc 4.9). These options are used to fill in implicit defaults. Any of them can be overwritten on the command line if explicitly requested. Each packages.yaml file begins with the string ``packages:`` and package names are specified on the next level. The special string ``all`` applies settings to each package. Underneath each package name is -one or more components: ``compiler``, ``version``, +one or more components: ``compiler``, ``variants``, ``version``, or ``providers``. 
Each component has an ordered list of spec ``constraints``, with earlier entries in the list being preferred over later entries. From 00280936da8993b2403af9ce6474bc08e494620b Mon Sep 17 00:00:00 2001 From: Paul Hopkins Date: Mon, 11 Jul 2016 16:14:09 +0100 Subject: [PATCH 051/284] Flake8 fixes to ensure qa tests pass --- lib/spack/spack/concretize.py | 174 ++++++++++++++++++++-------------- 1 file changed, 103 insertions(+), 71 deletions(-) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 622a7efde51..5180f3cf042 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -40,12 +40,12 @@ import spack.error from spack.version import * from functools import partial -from spec import DependencyMap from itertools import chain from spack.config import * class DefaultConcretizer(object): + """This class doesn't have any state, it just provides some methods for concretization. You can subclass it to override just some of the default concretization strategies, or you can override all of them. @@ -61,14 +61,17 @@ def _valid_virtuals_and_externals(self, spec): if not providers: raise UnsatisfiableProviderSpecError(providers[0], spec) spec_w_preferred_providers = find_spec( - spec, lambda(x): spack.pkgsort.spec_has_preferred_provider(x.name, spec.name)) + spec, lambda x: spack.pkgsort.spec_has_preferred_provider(x.name, spec.name)) # NOQA: ignore=E501 if not spec_w_preferred_providers: spec_w_preferred_providers = spec - provider_cmp = partial(spack.pkgsort.provider_compare, spec_w_preferred_providers.name, spec.name) + provider_cmp = partial(spack.pkgsort.provider_compare, + spec_w_preferred_providers.name, + spec.name) candidates = sorted(providers, cmp=provider_cmp) - # For each candidate package, if it has externals, add those to the usable list. - # if it's not buildable, then *only* add the externals. + # For each candidate package, if it has externals, add those + # to the usable list. if it's not buildable, then *only* add + # the externals. usable = [] for cspec in candidates: if is_spec_buildable(cspec): @@ -85,7 +88,7 @@ def _valid_virtuals_and_externals(self, spec): def cmp_externals(a, b): if a.name != b.name and (not a.external or a.external_module and - not b.external and b.external_module): + not b.external and b.external_module): # We're choosing between different providers, so # maintain order from provider sort return candidates.index(a) - candidates.index(b) @@ -114,26 +117,26 @@ def choose_virtual_or_external(self, spec): # Find the nearest spec in the dag that has a compiler. We'll # use that spec to calibrate compiler compatibility. - abi_exemplar = find_spec(spec, lambda(x): x.compiler) + abi_exemplar = find_spec(spec, lambda x: x.compiler) if not abi_exemplar: abi_exemplar = spec.root # Make a list including ABI compatibility of specs with the exemplar. strict = [spack.abi.compatible(c, abi_exemplar) for c in candidates] - loose = [spack.abi.compatible(c, abi_exemplar, loose=True) for c in candidates] + loose = [spack.abi.compatible(c, abi_exemplar, loose=True) + for c in candidates] keys = zip(strict, loose, candidates) # Sort candidates from most to least compatibility. # Note: # 1. We reverse because True > False. # 2. Sort is stable, so c's keep their order. 
- keys.sort(key=lambda k:k[:2], reverse=True) + keys.sort(key=lambda k: k[:2], reverse=True) # Pull the candidates back out and return them in order - candidates = [c for s,l,c in keys] + candidates = [c for s, l, c in keys] return candidates - def concretize_version(self, spec): """If the spec is already concrete, return. Otherwise take the preferred version from spackconfig, and default to the package's @@ -167,7 +170,8 @@ def prefer_key(v): if valid_versions: # Disregard @develop and take the next valid version - if ver(valid_versions[0]) == ver('develop') and len(valid_versions) > 1: + if ver(valid_versions[0]) == ver('develop') and \ + len(valid_versions) > 1: spec.versions = ver([valid_versions[1]]) else: spec.versions = ver([valid_versions[0]]) @@ -193,40 +197,45 @@ def prefer_key(v): return True # Things changed def _concretize_operating_system(self, spec): - platform = spec.architecture.platform if spec.architecture.platform_os is not None and isinstance( - spec.architecture.platform_os,spack.architecture.OperatingSystem): + spec.architecture.platform_os, + spack.architecture.OperatingSystem): return False if spec.root.architecture and spec.root.architecture.platform_os: - if isinstance(spec.root.architecture.platform_os,spack.architecture.OperatingSystem): - spec.architecture.platform_os = spec.root.architecture.platform_os + if isinstance(spec.root.architecture.platform_os, + spack.architecture.OperatingSystem): + spec.architecture.platform_os = \ + spec.root.architecture.platform_os else: - spec.architecture.platform_os = spec.architecture.platform.operating_system('default_os') - return True #changed + spec.architecture.platform_os = \ + spec.architecture.platform.operating_system('default_os') + return True # changed def _concretize_target(self, spec): - platform = spec.architecture.platform if spec.architecture.target is not None and isinstance( spec.architecture.target, spack.architecture.Target): return False if spec.root.architecture and spec.root.architecture.target: - if isinstance(spec.root.architecture.target,spack.architecture.Target): + if isinstance(spec.root.architecture.target, + spack.architecture.Target): spec.architecture.target = spec.root.architecture.target else: - spec.architecture.target = spec.architecture.platform.target('default_target') - return True #changed + spec.architecture.target = spec.architecture.platform.target( + 'default_target') + return True # changed def _concretize_platform(self, spec): if spec.architecture.platform is not None and isinstance( spec.architecture.platform, spack.architecture.Platform): return False if spec.root.architecture and spec.root.architecture.platform: - if isinstance(spec.root.architecture.platform,spack.architecture.Platform): + if isinstance(spec.root.architecture.platform, + spack.architecture.Platform): spec.architecture.platform = spec.root.architecture.platform else: spec.architecture.platform = spack.architecture.platform() - return True #changed? + return True # changed? def concretize_architecture(self, spec): """If the spec is empty provide the defaults of the platform. 
If the @@ -245,30 +254,29 @@ def concretize_architecture(self, spec): return True # Concretize the operating_system and target based of the spec - ret = any((self._concretize_platform(spec), - self._concretize_operating_system(spec), - self._concretize_target(spec))) + ret = any((self._concretize_platform(spec), + self._concretize_operating_system(spec), + self._concretize_target(spec))) return ret - - def concretize_variants(self, spec): """If the spec already has variants filled in, return. Otherwise, add the user preferences from packages.yaml or the default variants from the package specification. """ changed = False - preferred_variants = spack.pkgsort.spec_preferred_variants(spec.package_class.name) + preferred_variants = spack.pkgsort.spec_preferred_variants( + spec.package_class.name) for name, variant in spec.package_class.variants.items(): if name not in spec.variants: changed = True if name in preferred_variants: spec.variants[name] = preferred_variants.get(name) else: - spec.variants[name] = spack.spec.VariantSpec(name, variant.default) + spec.variants[name] = \ + spack.spec.VariantSpec(name, variant.default) return changed - def concretize_compiler(self, spec): """If the spec already has a compiler, we're done. If not, then take the compiler used for the nearest ancestor with a compiler @@ -283,30 +291,32 @@ def concretize_compiler(self, spec): """ # Pass on concretizing the compiler if the target is not yet determined if not spec.architecture.platform_os: - #Although this usually means changed, this means awaiting other changes + # Although this usually means changed, this means awaiting other + # changes return True # Only use a matching compiler if it is of the proper style - # Takes advantage of the proper logic already existing in compiler_for_spec - # Should think whether this can be more efficient + # Takes advantage of the proper logic already existing in + # compiler_for_spec Should think whether this can be more + # efficient def _proper_compiler_style(cspec, arch): platform = arch.platform compilers = spack.compilers.compilers_for_spec(cspec, platform=platform) return filter(lambda c: c.operating_system == - arch.platform_os, compilers) - #return compilers - + arch.platform_os, compilers) + # return compilers all_compilers = spack.compilers.all_compilers() if (spec.compiler and spec.compiler.concrete and - spec.compiler in all_compilers): + spec.compiler in all_compilers): return False - #Find the another spec that has a compiler, or the root if none do - other_spec = spec if spec.compiler else find_spec(spec, lambda(x) : x.compiler) + # Find the another spec that has a compiler, or the root if none do + other_spec = spec if spec.compiler else find_spec( + spec, lambda x: x.compiler) if not other_spec: other_spec = spec.root @@ -318,9 +328,12 @@ def _proper_compiler_style(cspec, arch): spec.compiler = other_compiler.copy() return True - # Filter the compilers into a sorted list based on the compiler_order from spackconfig - compiler_list = all_compilers if not other_compiler else spack.compilers.find(other_compiler) - cmp_compilers = partial(spack.pkgsort.compiler_compare, other_spec.name) + # Filter the compilers into a sorted list based on the compiler_order + # from spackconfig + compiler_list = all_compilers if not other_compiler else \ + spack.compilers.find(other_compiler) + cmp_compilers = partial( + spack.pkgsort.compiler_compare, other_spec.name) matches = sorted(compiler_list, cmp=cmp_compilers) if not matches: raise 
UnavailableCompilerVersionError(other_compiler) @@ -335,7 +348,6 @@ def _proper_compiler_style(cspec, arch): assert(spec.compiler.concrete) return True # things changed. - def concretize_compiler_flags(self, spec): """ The compiler flags are updated to match those of the spec whose @@ -343,54 +355,66 @@ def concretize_compiler_flags(self, spec): Default specs set at the compiler level will still be added later. """ - if not spec.architecture.platform_os: - #Although this usually means changed, this means awaiting other changes + # Although this usually means changed, this means awaiting other + # changes return True ret = False for flag in spack.spec.FlagMap.valid_compiler_flags(): try: nearest = next(p for p in spec.traverse(direction='parents') - if ((p.compiler == spec.compiler and p is not spec) - and flag in p.compiler_flags)) - if not flag in spec.compiler_flags or \ - not (sorted(spec.compiler_flags[flag]) >= sorted(nearest.compiler_flags[flag])): + if ((p.compiler == spec.compiler and + p is not spec) and + flag in p.compiler_flags)) + if flag not in spec.compiler_flags or \ + not (sorted(spec.compiler_flags[flag]) >= + sorted(nearest.compiler_flags[flag])): if flag in spec.compiler_flags: - spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) | - set(nearest.compiler_flags[flag])) + spec.compiler_flags[flag] = list( + set(spec.compiler_flags[flag]) | + set(nearest.compiler_flags[flag])) else: - spec.compiler_flags[flag] = nearest.compiler_flags[flag] + spec.compiler_flags[ + flag] = nearest.compiler_flags[flag] ret = True except StopIteration: - if (flag in spec.root.compiler_flags and ((not flag in spec.compiler_flags) or - sorted(spec.compiler_flags[flag]) != sorted(spec.root.compiler_flags[flag]))): + if (flag in spec.root.compiler_flags and + ((flag not in spec.compiler_flags) or + sorted(spec.compiler_flags[flag]) != + sorted(spec.root.compiler_flags[flag]))): if flag in spec.compiler_flags: - spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) | - set(spec.root.compiler_flags[flag])) + spec.compiler_flags[flag] = list( + set(spec.compiler_flags[flag]) | + set(spec.root.compiler_flags[flag])) else: - spec.compiler_flags[flag] = spec.root.compiler_flags[flag] + spec.compiler_flags[ + flag] = spec.root.compiler_flags[flag] ret = True else: - if not flag in spec.compiler_flags: + if flag not in spec.compiler_flags: spec.compiler_flags[flag] = [] # Include the compiler flag defaults from the config files # This ensures that spack will detect conflicts that stem from a change # in default compiler flags. 
- compiler = spack.compilers.compiler_for_spec(spec.compiler, spec.architecture) + compiler = spack.compilers.compiler_for_spec( + spec.compiler, spec.architecture) for flag in compiler.flags: if flag not in spec.compiler_flags: spec.compiler_flags[flag] = compiler.flags[flag] if compiler.flags[flag] != []: ret = True else: - if ((sorted(spec.compiler_flags[flag]) != sorted(compiler.flags[flag])) and - (not set(spec.compiler_flags[flag]) >= set(compiler.flags[flag]))): + if ((sorted(spec.compiler_flags[flag]) != + sorted(compiler.flags[flag])) and + (not set(spec.compiler_flags[flag]) >= + set(compiler.flags[flag]))): ret = True - spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) | - set(compiler.flags[flag])) + spec.compiler_flags[flag] = list( + set(spec.compiler_flags[flag]) | + set(compiler.flags[flag])) return ret @@ -401,8 +425,8 @@ def find_spec(spec, condition): # First search parents, then search children deptype = ('build', 'link') dagiter = chain( - spec.traverse(direction='parents', deptype=deptype, root=False), - spec.traverse(direction='children', deptype=deptype, root=False)) + spec.traverse(direction='parents', deptype=deptype, root=False), + spec.traverse(direction='children', deptype=deptype, root=False)) visited = set() for relative in dagiter: if condition(relative): @@ -411,8 +435,10 @@ def find_spec(spec, condition): # Then search all other relatives in the DAG *except* spec for relative in spec.root.traverse(deptypes=spack.alldeps): - if relative is spec: continue - if id(relative) in visited: continue + if relative is spec: + continue + if id(relative) in visited: + continue if condition(relative): return relative @@ -459,8 +485,10 @@ def cmp_specs(lhs, rhs): class UnavailableCompilerVersionError(spack.error.SpackError): + """Raised when there is no available compiler that satisfies a compiler spec.""" + def __init__(self, compiler_spec): super(UnavailableCompilerVersionError, self).__init__( "No available compiler version matches '%s'" % compiler_spec, @@ -468,16 +496,20 @@ def __init__(self, compiler_spec): class NoValidVersionError(spack.error.SpackError): + """Raised when there is no way to have a concrete version for a particular spec.""" + def __init__(self, spec): super(NoValidVersionError, self).__init__( - "There are no valid versions for %s that match '%s'" % (spec.name, spec.versions)) + "There are no valid versions for %s that match '%s'" % (spec.name, spec.versions)) # NOQA: ignore=E501 class NoBuildError(spack.error.SpackError): + """Raised when a package is configured with the buildable option False, but no satisfactory external versions can be found""" + def __init__(self, spec): super(NoBuildError, self).__init__( - "The spec '%s' is configured as not buildable, and no matching external installs were found" % spec.name) + "The spec '%s' is configured as not buildable,and no matching external installs were found" % spec.name) # NOQA: ignore=E501 From cf8f14780f54aa3bd6bad982c173ea996c2ff5fe Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Thu, 21 Jul 2016 11:13:17 -0500 Subject: [PATCH 052/284] MVAPICH2 depends on libpciaccess --- var/spack/repos/builtin/packages/mvapich2/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/mvapich2/package.py b/var/spack/repos/builtin/packages/mvapich2/package.py index da386445bbd..0fa5821b081 100644 --- a/var/spack/repos/builtin/packages/mvapich2/package.py +++ b/var/spack/repos/builtin/packages/mvapich2/package.py @@ -76,6 +76,7 @@ class Mvapich2(Package): ########## # FIXME : CUDA support is missing + depends_on('libpciaccess') def url_for_version(self, version): base_url = "http://mvapich.cse.ohio-state.edu/download" From a27cb639d805b247c0e6d8348466cfb759f93191 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Fri, 15 Jul 2016 16:40:15 -0400 Subject: [PATCH 053/284] The lmod package should depend_on('tcl') The lmod package needs a tclsh. Up until now it just assumed that one was available on the system. This change adds a depends_on('tcl') to the lmod package. The tcl package installs a tclsh script with an embedded version number (e.g. tclsh8.6) but the lmod configuration looks for tclsh. This change extends the tcl package to symlink tclshX.Y to tclsh in the tcl package bin directory. --- var/spack/repos/builtin/packages/lmod/package.py | 1 + var/spack/repos/builtin/packages/tcl/package.py | 2 ++ 2 files changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/lmod/package.py b/var/spack/repos/builtin/packages/lmod/package.py index efa235f646a..caa9ea17b13 100644 --- a/var/spack/repos/builtin/packages/lmod/package.py +++ b/var/spack/repos/builtin/packages/lmod/package.py @@ -44,6 +44,7 @@ class Lmod(Package): depends_on('lua@5.2:') depends_on('lua-luaposix', type=nolink) depends_on('lua-luafilesystem', type=nolink) + depends_on('tcl') parallel = False diff --git a/var/spack/repos/builtin/packages/tcl/package.py b/var/spack/repos/builtin/packages/tcl/package.py index ef922314d8f..16d896acc6c 100644 --- a/var/spack/repos/builtin/packages/tcl/package.py +++ b/var/spack/repos/builtin/packages/tcl/package.py @@ -57,3 +57,5 @@ def install(self, spec, prefix): configure("--prefix={0}".format(prefix)) make() make("install") + with working_dir(prefix.bin): + symlink('tclsh{0}'.format(self.version.up_to(2)), 'tclsh') From 52f0249c61a0336efeb593a930af6ed61f07333b Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Fri, 15 Jul 2016 18:58:32 -0400 Subject: [PATCH 054/284] tcl is actually a runtime dependency. lmod uses tclsh at runtime, so adjust the depends_on accordingly (thanks @davydden!). --- var/spack/repos/builtin/packages/lmod/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/lmod/package.py b/var/spack/repos/builtin/packages/lmod/package.py index caa9ea17b13..14b6e02b3a4 100644 --- a/var/spack/repos/builtin/packages/lmod/package.py +++ b/var/spack/repos/builtin/packages/lmod/package.py @@ -44,7 +44,7 @@ class Lmod(Package): depends_on('lua@5.2:') depends_on('lua-luaposix', type=nolink) depends_on('lua-luafilesystem', type=nolink) - depends_on('tcl') + depends_on('tcl', type=nolink) parallel = False From 9523e50732ff651e1ae073791708c312217bedb1 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Thu, 21 Jul 2016 13:58:16 -0400 Subject: [PATCH 055/284] Fix shebang line in tcl scripts Lmod's configure script goes to the trouble of finding tclsh. This change uses that info to rewrite the #! 
lines in the tcl scripts so that they call the tclsh that the configure script discovered. It needs to massage the existing shebang lines into something that the sed statement in the makefile can manipulate and it needs to add the path_to_tclsh info into the set of sed statements. Checked with versions 6.4.1 and 6.3.7 (the checksum for 6.0.1 is incorrect, a fix for another time). --- .../repos/builtin/packages/lmod/fix_tclsh_paths.patch | 10 ++++++++++ var/spack/repos/builtin/packages/lmod/package.py | 11 ++++++++++- 2 files changed, 20 insertions(+), 1 deletion(-) create mode 100644 var/spack/repos/builtin/packages/lmod/fix_tclsh_paths.patch diff --git a/var/spack/repos/builtin/packages/lmod/fix_tclsh_paths.patch b/var/spack/repos/builtin/packages/lmod/fix_tclsh_paths.patch new file mode 100644 index 00000000000..70f0d479256 --- /dev/null +++ b/var/spack/repos/builtin/packages/lmod/fix_tclsh_paths.patch @@ -0,0 +1,10 @@ +--- a/Makefile.in 2016-07-21 13:03:27.861000000 -0400 ++++ b/Makefile.in 2016-07-21 13:03:58.416000000 -0400 +@@ -197,6 +197,7 @@ + -e 's|@colorize@|$(COLORIZE)|g' \ + -e 's|@duplicate_paths@|$(DUPLICATE_PATHS)|g' \ + -e 's|@allow_tcl_mfiles@|$(ALLOW_TCL_MFILES)|g' \ ++ -e 's|@path_to_tclsh@|$(PATH_TO_TCLSH)|g' \ + -e 's|@mpath_avail@|$(MPATH_AVAIL)|g' \ + -e 's|@short_time@|$(SHORT_TIME)|g' \ + -e 's|@cacheDirs@|$(SPIDER_CACHE_DIRS)|g' \ diff --git a/var/spack/repos/builtin/packages/lmod/package.py b/var/spack/repos/builtin/packages/lmod/package.py index 14b6e02b3a4..c8936d768b8 100644 --- a/var/spack/repos/builtin/packages/lmod/package.py +++ b/var/spack/repos/builtin/packages/lmod/package.py @@ -23,7 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * - +from glob import glob class Lmod(Package): """ @@ -54,6 +54,15 @@ def setup_environment(self, spack_env, run_env): spack_env.append_path('LUA_PATH', stage_lua_path.format( version=self.version), separator=';') + patch('fix_tclsh_paths.patch') + def patch(self): + """The tcl scripts should use the tclsh that was discovered + by the configure script. Touch up their #! lines so that the + sed in the Makefile's install step has something to work on. + Requires the change in the associated patch file.fg""" + for tclscript in glob('src/*.tcl'): + filter_file(r'^#!.*tclsh', '#!@path_to_tclsh@', tclscript) + def install(self, spec, prefix): configure('--prefix=%s' % prefix) make('install') From 4c105895abe249962e0651fdba8875c70752551a Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Thu, 21 Jul 2016 14:24:51 -0400 Subject: [PATCH 056/284] Fix flake8 violations --- var/spack/repos/builtin/packages/lmod/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/lmod/package.py b/var/spack/repos/builtin/packages/lmod/package.py index c8936d768b8..01911c1a304 100644 --- a/var/spack/repos/builtin/packages/lmod/package.py +++ b/var/spack/repos/builtin/packages/lmod/package.py @@ -25,6 +25,7 @@ from spack import * from glob import glob + class Lmod(Package): """ Lmod is a Lua based module system that easily handles the MODULEPATH @@ -55,6 +56,7 @@ def setup_environment(self, spack_env, run_env): version=self.version), separator=';') patch('fix_tclsh_paths.patch') + def patch(self): """The tcl scripts should use the tclsh that was discovered by the configure script. Touch up their #! 
lines so that the From 796308ed8525dcf982770b243e6a6cf7f2d3ecf7 Mon Sep 17 00:00:00 2001 From: Mitchell Devlin Date: Thu, 21 Jul 2016 13:39:27 -0500 Subject: [PATCH 057/284] edit --- .../repos/builtin/packages/libxsmm/package.py | 23 +++++++++++++++---- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/libxsmm/package.py b/var/spack/repos/builtin/packages/libxsmm/package.py index b8adeed406f..961e1717149 100644 --- a/var/spack/repos/builtin/packages/libxsmm/package.py +++ b/var/spack/repos/builtin/packages/libxsmm/package.py @@ -30,24 +30,37 @@ class Libxsmm(Package): multiplications targeting Intel Architecture (x86).''' homepage = 'https://github.com/hfp/libxsmm' - url = 'https://github.com/xianyi/libxsmm/archive/1.4.3.tar.gz' + url = 'https://github.com/hfp/libxsmm/archive/1.4.3.tar.gz' version('1.4.3', '9839bf0fb8be7badf1e97ce4c817149b') version('1.4.2', 'ea025761437f3b5c936821b9ca21ec31') version('1.4.1', '71648500ea4510529845d329091917df') version('1.4', 'b42f91bf5285e7ad0463446e55ebdc2b') + def patch(self): + kwargs = {'ignore_absent': False, 'backup': False, 'string': True} + makefile = FileFilter('Makefile.inc') + + # Spack sets CC, CXX, and FC to point to the compiler wrappers + # Don't let Makefile.inc overwrite these + makefile.filter('CC = icc', 'CC ?= icc', **kwargs) + makefile.filter('CC = gcc', 'CC ?= gcc', **kwargs) + makefile.filter('CXX = icpc', 'CXX ?= icpc', **kwargs) + makefile.filter('CXX = g.*', 'CXX ?= g++', **kwargs) + makefile.filter('FC = ifort', 'FC ?= ifort', **kwargs) + makefile.filter('FC = gfortran', 'FC ?= gfortran', **kwargs) + def manual_install(self, prefix): install_tree('include', prefix.include) install_tree('lib', prefix.lib) - install_tree('documentation', prefix.share + '/libxsmm') + install_tree('documentation', prefix.share + '/libxsmm/doc') def install(self, spec, prefix): make_args = [ 'ROW_MAJOR=0', - 'INDICES_M=$(echo $(seq 1 24))', - 'INDICES_N=$(echo $(seq 1 24))', - 'INDICES_K=$(echo $(seq 1 24))' + 'INDICES_M={0}'.format(' '.join(str(i) for i in range(1, 25))), + 'INDICES_N={0}'.format(' '.join(str(i) for i in range(1, 25))), + 'INDICES_K={0}'.format(' '.join(str(i) for i in range(1, 25))) ] make(*make_args) self.manual_install(prefix) From 4efe0221657f769dae6071f8cf49a54b184bcb09 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Thu, 21 Jul 2016 15:36:22 -0400 Subject: [PATCH 058/284] Update with info about gcc-5.4.0 --- var/spack/repos/builtin/packages/gcc/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/gcc/package.py b/var/spack/repos/builtin/packages/gcc/package.py index 224105ea0f0..72a5cb22f8a 100644 --- a/var/spack/repos/builtin/packages/gcc/package.py +++ b/var/spack/repos/builtin/packages/gcc/package.py @@ -15,6 +15,7 @@ class Gcc(Package): list_depth = 2 version('6.1.0', '8fb6cb98b8459f5863328380fbf06bd1') + version('5.4.0', '4c626ac2a83ef30dfb9260e6f59c2b30') version('5.3.0', 'c9616fd448f980259c31de613e575719') version('5.2.0', 'a51bcfeb3da7dd4c623e27207ed43467') version('4.9.3', '6f831b4d251872736e8e9cc09746f327') From 2ae7429b18363128aaa35bca8d6b8cd3cd2f267a Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Thu, 21 Jul 2016 14:04:22 -0700 Subject: [PATCH 059/284] Bug fix for cray_xc platform --- lib/spack/env/cray/CC | 1 + lib/spack/env/cray/cc | 1 + lib/spack/env/cray/ftn | 1 + lib/spack/spack/platforms/cray_xc.py | 2 +- 4 files changed, 4 insertions(+), 1 deletion(-) create mode 120000 lib/spack/env/cray/CC create mode 120000 
lib/spack/env/cray/cc create mode 120000 lib/spack/env/cray/ftn diff --git a/lib/spack/env/cray/CC b/lib/spack/env/cray/CC new file mode 120000 index 00000000000..82c2b8e90a3 --- /dev/null +++ b/lib/spack/env/cray/CC @@ -0,0 +1 @@ +../cc \ No newline at end of file diff --git a/lib/spack/env/cray/cc b/lib/spack/env/cray/cc new file mode 120000 index 00000000000..82c2b8e90a3 --- /dev/null +++ b/lib/spack/env/cray/cc @@ -0,0 +1 @@ +../cc \ No newline at end of file diff --git a/lib/spack/env/cray/ftn b/lib/spack/env/cray/ftn new file mode 120000 index 00000000000..82c2b8e90a3 --- /dev/null +++ b/lib/spack/env/cray/ftn @@ -0,0 +1 @@ +../cc \ No newline at end of file diff --git a/lib/spack/spack/platforms/cray_xc.py b/lib/spack/spack/platforms/cray_xc.py index 2b065d5bbd1..d3aab74faef 100644 --- a/lib/spack/spack/platforms/cray_xc.py +++ b/lib/spack/spack/platforms/cray_xc.py @@ -3,7 +3,7 @@ from spack.operating_systems.linux_distro import LinuxDistro from spack.operating_systems.cnl import Cnl from spack.util.executable import which - +from spack.util.file_system import join_path class CrayXc(Platform): priority = 20 From f0d5317913d0c47806e102126ea67aacbcf4db80 Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Thu, 21 Jul 2016 14:11:34 -0700 Subject: [PATCH 060/284] fixed --- lib/spack/spack/platforms/cray_xc.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/platforms/cray_xc.py b/lib/spack/spack/platforms/cray_xc.py index d3aab74faef..a8e7aaa3162 100644 --- a/lib/spack/spack/platforms/cray_xc.py +++ b/lib/spack/spack/platforms/cray_xc.py @@ -1,9 +1,10 @@ import os +import spack from spack.architecture import Platform, Target from spack.operating_systems.linux_distro import LinuxDistro from spack.operating_systems.cnl import Cnl from spack.util.executable import which -from spack.util.file_system import join_path +from llnl.util.filesystem import join_path class CrayXc(Platform): priority = 20 @@ -50,9 +51,9 @@ def setup_platform_environment(self, pkg, env): similar to linux/standard linker behavior """ env.set('CRAYPE_LINK_TYPE', 'dynamic') - cray_wrapper_names = join_path(spack.build_env_path, 'cray') - if os.path.isdir(cray_wrapper_names): - env.prepend_path('PATH', cray_wrapper_names) +# cray_wrapper_names = join_path(spack.build_env_path, 'cray') +# if os.path.isdir(cray_wrapper_names): +# env.prepend_path('PATH', cray_wrapper_names) @classmethod def detect(self): From 1544f98ee013040f6d92cb38df9a216e64059292 Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Thu, 21 Jul 2016 14:36:10 -0700 Subject: [PATCH 061/284] fixed flake8 errors --- lib/spack/spack/platforms/cray_xc.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/platforms/cray_xc.py b/lib/spack/spack/platforms/cray_xc.py index a8e7aaa3162..92db78c43ca 100644 --- a/lib/spack/spack/platforms/cray_xc.py +++ b/lib/spack/spack/platforms/cray_xc.py @@ -1,10 +1,11 @@ import os -import spack +#import spack from spack.architecture import Platform, Target from spack.operating_systems.linux_distro import LinuxDistro from spack.operating_systems.cnl import Cnl from spack.util.executable import which -from llnl.util.filesystem import join_path +#from llnl.util.filesystem import join_path + class CrayXc(Platform): priority = 20 From bcbe9c02dab6d4bc2690896ba259ed1b211f4346 Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Thu, 21 Jul 2016 14:50:23 -0700 Subject: [PATCH 062/284] Added cray wrapper names directory to spack_env_paths so the spack compiler 
wrapper will remove them from the environment (prevents hang) --- lib/spack/spack/platforms/cray_xc.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/lib/spack/spack/platforms/cray_xc.py b/lib/spack/spack/platforms/cray_xc.py index 92db78c43ca..03d0383cc51 100644 --- a/lib/spack/spack/platforms/cray_xc.py +++ b/lib/spack/spack/platforms/cray_xc.py @@ -1,10 +1,10 @@ import os -#import spack +import spack from spack.architecture import Platform, Target from spack.operating_systems.linux_distro import LinuxDistro from spack.operating_systems.cnl import Cnl from spack.util.executable import which -#from llnl.util.filesystem import join_path +from llnl.util.filesystem import join_path class CrayXc(Platform): @@ -52,9 +52,10 @@ def setup_platform_environment(self, pkg, env): similar to linux/standard linker behavior """ env.set('CRAYPE_LINK_TYPE', 'dynamic') -# cray_wrapper_names = join_path(spack.build_env_path, 'cray') -# if os.path.isdir(cray_wrapper_names): -# env.prepend_path('PATH', cray_wrapper_names) + cray_wrapper_names = join_path(spack.build_env_path, 'cray') + if os.path.isdir(cray_wrapper_names): + env.prepend_path('PATH', cray_wrapper_names) + env.prepend_path('SPACK_ENV_PATHS', cray_wrapper_names) @classmethod def detect(self): From 8523f75e6c8d6194dcc7884ab7eca37cfa37265b Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 21 Jul 2016 11:07:43 -0700 Subject: [PATCH 063/284] Remove remote website pinging from OpenSSL - OpenSSL no longer checks remote versions on the openssl site. - Spack is used on systems that aren't connected to the internet, and this check is probably in the wrong place and affects too many commands. We can work on figuring out a better, more configurable place to put a check like this. --- .../repos/builtin/packages/openssl/package.py | 63 +++---------------- 1 file changed, 8 insertions(+), 55 deletions(-) diff --git a/var/spack/repos/builtin/packages/openssl/package.py b/var/spack/repos/builtin/packages/openssl/package.py index e7c105d5f5b..b9eada9e83a 100644 --- a/var/spack/repos/builtin/packages/openssl/package.py +++ b/var/spack/repos/builtin/packages/openssl/package.py @@ -22,7 +22,6 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import urllib import llnl.util.tty as tty from spack import * @@ -50,61 +49,15 @@ class Openssl(Package): parallel = False def url_for_version(self, version): - # This URL is computed pinging the place where the latest version is - # stored. To avoid slowdown due to repeated pinging, we store the URL - # in a private class attribute to do the job only once per version - openssl_urls = getattr(Openssl, '_openssl_url', {}) - openssl_url = openssl_urls.get(version, None) - # Same idea, but just to avoid issuing the same message multiple times - warnings_given_to_user = getattr(Openssl, '_warnings_given', {}) - if openssl_url is None: - if self.spec.satisfies('@system'): - # The version @system is reserved to system openssl. 
In that - # case return a fake url and exit - openssl_url = '@system (reserved version for system openssl)' - if not warnings_given_to_user.get(version, False): - tty.msg('Using openssl@system: ' - 'the version @system is reserved for system openssl') - warnings_given_to_user[version] = True - else: - openssl_url = self.check_for_outdated_release( - version, warnings_given_to_user) # Store the computed URL - openssl_urls[version] = openssl_url - # Store the updated dictionary of URLS - Openssl._openssl_url = openssl_urls - # Store the updated dictionary of warnings - Openssl._warnings_given = warnings_given_to_user - - return openssl_url - - def check_for_outdated_release(self, version, warnings_given_to_user): - latest = 'ftp://ftp.openssl.org/source/openssl-{version}.tar.gz' - older = 'http://www.openssl.org/source/old/{version_number}/openssl-{version_full}.tar.gz' # NOQA: ignore=E501 - # Try to use the url where the latest tarballs are stored. - # If the url does not exist (404), then return the url for - # older format - version_number = '.'.join([str(x) for x in version[:-1]]) - try: - openssl_url = latest.format(version=version) - urllib.urlopen(openssl_url) - except IOError: - openssl_url = older.format( - version_number=version_number, version_full=version) - # Checks if we already warned the user for this particular - # version of OpenSSL. If not we display a warning message - # and mark this version - if not warnings_given_to_user.get(version, False): - tty.warn( - 'This installation depends on an old version of OpenSSL, ' - 'which may have known security issues. ') - tty.warn( - 'Consider updating to the latest version of this package.') - tty.warn('More details at {homepage}'.format( - homepage=Openssl.homepage)) - warnings_given_to_user[version] = True - - return openssl_url + if '@system' in self.spec: + return '@system (reserved version for system openssl)' + else: + return super(Openssl, self).url_for_version(self.spec) + def handle_fetch_error(self, error): + tty.warn("Fetching OpenSSL failed. This may indicate that OpenSSL has " + "been updated, and the version in your instance of Spack is " + "insecure. Consider updating to the latest OpenSSL version.") def install(self, spec, prefix): # OpenSSL uses a variable APPS in its Makefile. If it happens to be set From e083c91d5e0edb08a255da120e9133ea53da1ad5 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Thu, 21 Jul 2016 19:58:04 -0500 Subject: [PATCH 064/284] Fix alpha case of dependencies. A while ago I was asked to convert packages to all lowercase. That was done but some dependencies did not get converted in the specification. This commit fixes that as well as a couple of urls that need to be made explicit and a missing dependency on jdk. 
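For reference, here is a minimal sketch of the convention the diff below enforces; the package name, URLs, and checksum are illustrative only and are not taken from this change. `depends_on()` and `extends()` always use the lowercased package name, even when the package class itself is mixed case:

    from spack import *

    class RFoo(Package):
        """Hypothetical R extension following the lowercase naming rule."""
        homepage = "https://cran.r-project.org/package=foo"
        url      = "https://cran.r-project.org/src/contrib/foo_1.0.tar.gz"

        version('1.0', '00000000000000000000000000000000')

        extends('R')
        # Spec names are lowercase ('r-rjava', 'jdk'), not the class
        # names ('RRjava', 'Jdk').
        depends_on('r-rjava')
        depends_on('jdk')

        def install(self, spec, prefix):
            R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir),
              self.stage.source_path)
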
--- var/spack/repos/builtin/packages/r-rjava/package.py | 2 ++ var/spack/repos/builtin/packages/r-rmysql/package.py | 2 +- var/spack/repos/builtin/packages/r-rodbc/package.py | 2 +- var/spack/repos/builtin/packages/r-rpostgresql/package.py | 2 +- var/spack/repos/builtin/packages/r-rsqlite/package.py | 2 +- var/spack/repos/builtin/packages/r-xlconnect/package.py | 7 ++++--- .../repos/builtin/packages/r-xlconnectjars/package.py | 5 +++-- var/spack/repos/builtin/packages/r-xlsx/package.py | 2 +- var/spack/repos/builtin/packages/r-xlsxjars/package.py | 2 +- 9 files changed, 15 insertions(+), 11 deletions(-) diff --git a/var/spack/repos/builtin/packages/r-rjava/package.py b/var/spack/repos/builtin/packages/r-rjava/package.py index 0ce0942602e..47804bd7fc1 100644 --- a/var/spack/repos/builtin/packages/r-rjava/package.py +++ b/var/spack/repos/builtin/packages/r-rjava/package.py @@ -37,6 +37,8 @@ class RRjava(Package): extends('R') + depends_on('jdk') + def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-rmysql/package.py b/var/spack/repos/builtin/packages/r-rmysql/package.py index 774c166f8b3..086374df077 100644 --- a/var/spack/repos/builtin/packages/r-rmysql/package.py +++ b/var/spack/repos/builtin/packages/r-rmysql/package.py @@ -36,7 +36,7 @@ class RRmysql(Package): extends('R') - depends_on('r-DBI') + depends_on('r-dbi') depends_on('mariadb') def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/r-rodbc/package.py b/var/spack/repos/builtin/packages/r-rodbc/package.py index 9cc9aebd9a1..11fe0ace56e 100644 --- a/var/spack/repos/builtin/packages/r-rodbc/package.py +++ b/var/spack/repos/builtin/packages/r-rodbc/package.py @@ -36,7 +36,7 @@ class RRodbc(Package): extends('R') - depends_on('unixODBC') + depends_on('unixodbc') def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-rpostgresql/package.py b/var/spack/repos/builtin/packages/r-rpostgresql/package.py index 83c9b855257..53e1978ffe0 100644 --- a/var/spack/repos/builtin/packages/r-rpostgresql/package.py +++ b/var/spack/repos/builtin/packages/r-rpostgresql/package.py @@ -44,7 +44,7 @@ class RRpostgresql(Package): extends('R') - depends_on('r-DBI') + depends_on('r-dbi') depends_on('postgresql') def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/r-rsqlite/package.py b/var/spack/repos/builtin/packages/r-rsqlite/package.py index 9602f6d927c..7c03ab89505 100644 --- a/var/spack/repos/builtin/packages/r-rsqlite/package.py +++ b/var/spack/repos/builtin/packages/r-rsqlite/package.py @@ -38,7 +38,7 @@ class RRsqlite(Package): extends('R') - depends_on('r-DBI') + depends_on('r-dbi') def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-xlconnect/package.py b/var/spack/repos/builtin/packages/r-xlconnect/package.py index 919291a0233..4ee7163be15 100644 --- a/var/spack/repos/builtin/packages/r-xlconnect/package.py +++ b/var/spack/repos/builtin/packages/r-xlconnect/package.py @@ -33,12 +33,13 @@ class RXlconnect(Package): url = "https://cran.r-project.org/src/contrib/XLConnect_0.2-11.tar.gz" list_url = "https://cran.r-project.org/src/contrib/Archive/XLConnect" - version('0.2-11', '9d1769a103cda05665df399cc335017d') + version('0.2-11', '9d1769a103cda05665df399cc335017d', + 
url='https://cran.r-project.org/src/contrib/Archive/XLConnect/XLConnect_0.2-11.tar.gz') extends('R') - depends_on('r-XLConnectJars') - depends_on('r-rJava') + depends_on('r-xlconnectjars') + depends_on('r-rjava') def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-xlconnectjars/package.py b/var/spack/repos/builtin/packages/r-xlconnectjars/package.py index 15d08cc5afa..c1fb3658838 100644 --- a/var/spack/repos/builtin/packages/r-xlconnectjars/package.py +++ b/var/spack/repos/builtin/packages/r-xlconnectjars/package.py @@ -32,11 +32,12 @@ class RXlconnectjars(Package): url = "https://cran.r-project.org/src/contrib/XLConnectJars_0.2-9.tar.gz" list_url = "https://cran.r-project.org/src/contrib/Archive/XLConnectJars" - version('0.2-9', 'e6d6b1acfede26acaa616ee421bd30fb') + version('0.2-9', 'e6d6b1acfede26acaa616ee421bd30fb', + url='https://cran.r-project.org/src/contrib/Archive/XLConnectJars/XLConnectJars_0.2-9.tar.gz') extends('R') - depends_on('r-rJava') + depends_on('r-rjava') def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-xlsx/package.py b/var/spack/repos/builtin/packages/r-xlsx/package.py index 0aac6cdd1f1..99d41dbb947 100644 --- a/var/spack/repos/builtin/packages/r-xlsx/package.py +++ b/var/spack/repos/builtin/packages/r-xlsx/package.py @@ -37,7 +37,7 @@ class RXlsx(Package): extends('R') - depends_on('r-rJava') + depends_on('r-rjava') depends_on('r-xlsxjars') def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/r-xlsxjars/package.py b/var/spack/repos/builtin/packages/r-xlsxjars/package.py index cff6e7427e3..80e86d0c733 100644 --- a/var/spack/repos/builtin/packages/r-xlsxjars/package.py +++ b/var/spack/repos/builtin/packages/r-xlsxjars/package.py @@ -37,7 +37,7 @@ class RXlsxjars(Package): extends('R') - depends_on('r-rJava') + depends_on('r-rjava') def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), From 00d7fd8e215470a9161c6e7512f290d439598e6d Mon Sep 17 00:00:00 2001 From: alalazo Date: Fri, 22 Jul 2016 18:14:36 +0200 Subject: [PATCH 065/284] test_install : removed commented code --- lib/spack/spack/test/cmd/test_install.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/lib/spack/spack/test/cmd/test_install.py b/lib/spack/spack/test/cmd/test_install.py index a94d3c8bba0..5e80776279a 100644 --- a/lib/spack/spack/test/cmd/test_install.py +++ b/lib/spack/spack/test/cmd/test_install.py @@ -22,11 +22,10 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## +import StringIO import collections from contextlib import contextmanager -import StringIO - FILE_REGISTRY = collections.defaultdict(StringIO.StringIO) # Monkey-patch open to write module files to a StringIO instance @@ -45,7 +44,6 @@ def mock_open(filename, mode): handle.close() import os -import itertools import unittest import spack @@ -88,10 +86,6 @@ def traverse(self, order=None): for _, spec in self._dependencies.items(): yield spec.spec yield self - #from_iterable = itertools.chain.from_iterable - #allDeps = from_iterable(i.traverse() - # for i in self.dependencies()) - #return set(itertools.chain([self], allDeps)) def dag_hash(self): return self.hash From 
fb2d2303d667253c7dfa166607dced4b4b43e26e Mon Sep 17 00:00:00 2001 From: Joseph Ciurej Date: Fri, 22 Jul 2016 11:08:08 -0700 Subject: [PATCH 066/284] Fixed a bug that was causing Python installs to be affected by user config. --- .../repos/builtin/packages/python/package.py | 40 ++++++++++++++----- 1 file changed, 31 insertions(+), 9 deletions(-) diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index bbb1e9c13ad..7903f430fcb 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -27,6 +27,7 @@ from contextlib import closing import spack +import llnl.util.tty as tty from llnl.util.lang import match_predicate from spack import * from spack.util.environment import * @@ -72,9 +73,29 @@ class Python(Package): depends_on("tk", when="+tk") depends_on("tcl", when="+tk") + @when('@2.7,3.4:') + def patch(self): + # NOTE: Python's default installation procedure makes it possible for a + # user's local configurations to change the Spack installation. In + # order to prevent this behavior for a full installation, we must + # modify the installation script so that it ignores user files. + ff = FileFilter('Makefile.pre.in') + ff.filter( + r'^(.*)setup\.py(.*)((build)|(install))(.*)$', + r'\1setup.py\2 --no-user-cfg \3\6' + ) + def install(self, spec, prefix): + # TODO: The '--no-user-cfg' option for Python installation is only in + # Python v2.7 and v3.4+ (see https://bugs.python.org/issue1180) and + # adding support for ignoring user configuration will require + # significant changes to this package for other Python versions. + if not spec.satisfies('@2.7,3.4:'): + tty.warn(('Python v{0} may not install properly if Python ' + 'user configurations are present.').format(self.version)) + # Need this to allow python build to find the Python installation. - env['PYTHONHOME'] = prefix + env['PYTHONHOME'], env['PYTHONPATH'] = prefix, prefix env['MACOSX_DEPLOYMENT_TARGET'] = '10.6' # Rest of install is pretty standard except setup.py needs to @@ -193,6 +214,8 @@ def site_packages_dir(self): def setup_dependent_environment(self, spack_env, run_env, extension_spec): """Set PYTHONPATH to include site-packages dir for the extension and any other python extensions it depends on.""" + pythonhome = self.prefix + spack_env.set('PYTHONHOME', pythonhome) python_paths = [] for d in extension_spec.traverse(deptype=nolink, deptype_query='run'): @@ -214,15 +237,14 @@ def setup_dependent_package(self, module, ext_spec): In most cases, extensions will only need to have one line:: - python('setup.py', 'install', '--prefix={0}'.format(prefix))""" + setup_py('install', '--prefix={0}'.format(prefix))""" + python_path = join_path( + self.spec.prefix.bin, + 'python{0}'.format('3' if self.spec.satisfies('@3') else '') + ) - # Python extension builds can have a global python executable function - if Version("3.0.0") <= self.version < Version("4.0.0"): - module.python = Executable(join_path(self.spec.prefix.bin, - 'python3')) - else: - module.python = Executable(join_path(self.spec.prefix.bin, - 'python')) + module.python = Executable(python_path) + module.setup_py = Executable(python_path + ' setup.py --no-user-cfg') # Add variables for lib/pythonX.Y and lib/pythonX.Y/site-packages dirs. 
module.python_lib_dir = join_path(ext_spec.prefix, From ac2e0962ce64a5b0824e79ad4266cbdb6207b336 Mon Sep 17 00:00:00 2001 From: Joseph Ciurej Date: Fri, 22 Jul 2016 11:12:02 -0700 Subject: [PATCH 067/284] Improved the build flag construction code in the python install method. --- .../repos/builtin/packages/python/package.py | 34 ++++--------------- 1 file changed, 7 insertions(+), 27 deletions(-) diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index 7903f430fcb..f755527607c 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -101,32 +101,13 @@ def install(self, spec, prefix): # Rest of install is pretty standard except setup.py needs to # be able to read the CPPFLAGS and LDFLAGS as it scans for the # library and headers to build - include_dirs = [ - spec['openssl'].prefix.include, spec['bzip2'].prefix.include, - spec['readline'].prefix.include, spec['ncurses'].prefix.include, - spec['sqlite'].prefix.include, spec['zlib'].prefix.include - ] - - library_dirs = [ - spec['openssl'].prefix.lib, spec['bzip2'].prefix.lib, - spec['readline'].prefix.lib, spec['ncurses'].prefix.lib, - spec['sqlite'].prefix.lib, spec['zlib'].prefix.lib - ] - - if '+tk' in spec: - include_dirs.extend([ - spec['tk'].prefix.include, spec['tcl'].prefix.include - ]) - library_dirs.extend([ - spec['tk'].prefix.lib, spec['tcl'].prefix.lib - ]) - + dep_pfxs = [dspec.prefix for dspec in spec.dependencies('link')] config_args = [ - "--prefix={0}".format(prefix), - "--with-threads", - "--enable-shared", - "CPPFLAGS=-I{0}".format(" -I".join(include_dirs)), - "LDFLAGS=-L{0}".format(" -L".join(library_dirs)) + '--prefix={0}'.format(prefix), + '--with-threads', + '--enable-shared', + 'CPPFLAGS=-I{0}'.format(' -I'.join(dp.include for dp in dep_pfxs)), + 'LDFLAGS=-L{0}'.format(' -L'.join(dp.lib for dp in dep_pfxs)), ] if '+ucs4' in spec: @@ -142,9 +123,8 @@ def install(self, spec, prefix): config_args.append('--without-ensurepip') configure(*config_args) - make() - make("install") + make('install') self.filter_compilers(spec, prefix) From fa92f58167352efa21c787bf38e878ad471484f8 Mon Sep 17 00:00:00 2001 From: Joseph Ciurej Date: Fri, 22 Jul 2016 11:17:19 -0700 Subject: [PATCH 068/284] Adding temporary script that fixes all Python extensions w/ 'setup_py'. 
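For context, a sketch of what a converted extension looks like once the script has run; the package name, URL, and checksum are placeholders, only the call style matters. The `setup_py` helper is the wrapper added to the python package's `setup_dependent_package` in the previous commit and already passes `--no-user-cfg`:

    from spack import *

    class PyExample(Package):
        """Hypothetical Python extension using the setup_py helper."""
        homepage = "https://pypi.python.org/pypi/example"
        url      = "https://pypi.python.org/packages/source/e/example/example-1.0.tar.gz"

        version('1.0', '00000000000000000000000000000000')

        extends('python')
        depends_on('py-setuptools', type='build')

        def install(self, spec, prefix):
            # Was: python('setup.py', 'install', '--prefix=%s' % prefix)
            setup_py('install', '--prefix=%s' % prefix)
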
--- share/spack/csh/convert-pyext.sh | 5 +++++ var/spack/repos/builtin/packages/py-setuptools/package.py | 7 +++++-- 2 files changed, 10 insertions(+), 2 deletions(-) create mode 100644 share/spack/csh/convert-pyext.sh diff --git a/share/spack/csh/convert-pyext.sh b/share/spack/csh/convert-pyext.sh new file mode 100644 index 00000000000..a48bcdbccae --- /dev/null +++ b/share/spack/csh/convert-pyext.sh @@ -0,0 +1,5 @@ +#!/bin/bash --noprofile +PYEXT_REGEX=".*/.*/package.py" + +find var/spack/repos/builtin/packages/ -type f -regextype sed -regex ${PYEXT_REGEX} -exec \ + sed -i 's/python('\''setup.py'\'', /setup_py(/' {} \; diff --git a/var/spack/repos/builtin/packages/py-setuptools/package.py b/var/spack/repos/builtin/packages/py-setuptools/package.py index 68032cb68dc..08d5e5d5526 100644 --- a/var/spack/repos/builtin/packages/py-setuptools/package.py +++ b/var/spack/repos/builtin/packages/py-setuptools/package.py @@ -24,8 +24,11 @@ ############################################################################## from spack import * + class PySetuptools(Package): - """Easily download, build, install, upgrade, and uninstall Python packages.""" + """A Python utility that aids in the process of downloading, building, + upgrading, installing, and uninstalling Python packages.""" + homepage = "https://pypi.python.org/pypi/setuptools" url = "https://pypi.python.org/packages/source/s/setuptools/setuptools-11.3.tar.gz" @@ -40,4 +43,4 @@ class PySetuptools(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) From ebbcebac627e7385dbdddc273062f8a8e576f9ee Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Fri, 22 Jul 2016 13:55:00 -0500 Subject: [PATCH 069/284] Use example deptypes in newly created packages --- lib/spack/spack/cmd/create.py | 43 +++++++++++++++++++++++++++-------- 1 file changed, 34 insertions(+), 9 deletions(-) diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index 2c440096d11..da74ceb2f6e 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -96,8 +96,7 @@ class ${class_name}(Package): ${versions} - # FIXME: Add additional dependencies if required. - ${dependencies} +${dependencies} def install(self, spec, prefix): ${install} @@ -105,13 +104,39 @@ def install(self, spec, prefix): # Build dependencies and extensions dependencies_dict = { - 'autotools': "# depends_on('foo')", - 'cmake': "depends_on('cmake')", - 'scons': "depends_on('scons')", - 'python': "extends('python')", - 'R': "extends('R')", - 'octave': "extends('octave')", - 'unknown': "# depends_on('foo')" + 'autotools': """\ + # FIXME: Add dependencies if required. + # depends_on('foo')""", + + 'cmake': """\ + # FIXME: Add additional dependencies if required. + depends_on('cmake', type='build')""", + + 'scons': """\ + # FIXME: Add additional dependencies if required. + depends_on('scons', type='build')""", + + 'python': """\ + extends('python') + + # FIXME: Add additional dependencies if required. + # depends_on('py-foo', type=nolink)""", + + 'R': """\ + extends('R') + + # FIXME: Add additional dependencies if required. + # depends_on('r-foo', type=nolink)""", + + 'octave': """\ + extends('octave') + + # FIXME: Add additional dependencies if required. + # depends_on('octave-foo', type=nolink)""", + + 'unknown': """\ + # FIXME: Add dependencies if required. 
+ # depends_on('foo')""" } # Default installation instructions From ec9959b152ca0b5d11621c1d1e5acc4bba2a8db1 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Fri, 22 Jul 2016 17:05:01 -0500 Subject: [PATCH 070/284] R extension dependencies with compiler wrapper This commit introduces a mechanism to insure that R package dependencies are built with the Spack compiler wrapper. A copy of Makeconf is made before `filter_compilers` is called. This is then pointed to by the R_MAKEVARS_SITE environment variable set up in `setup_dependent_environment`. With this the normal compilers are used outside of spack and the spack wrapper compilers are used inside of spack. This commit also standardizes on the `join_path` call. It also sets the commented build command to reflect what is actually used with the newer string formatting. --- var/spack/repos/builtin/packages/R/package.py | 40 ++++++++++++------- 1 file changed, 25 insertions(+), 15 deletions(-) diff --git a/var/spack/repos/builtin/packages/R/package.py b/var/spack/repos/builtin/packages/R/package.py index ad06c2ca48f..554adc77935 100644 --- a/var/spack/repos/builtin/packages/R/package.py +++ b/var/spack/repos/builtin/packages/R/package.py @@ -22,10 +22,9 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os - from spack import * from spack.util.environment import * +import shutil class R(Package): @@ -74,6 +73,10 @@ class R(Package): depends_on('pcre') depends_on('jdk') + @property + def etcdir(self): + return join_path(prefix, 'rlib', 'R', 'etc') + def install(self, spec, prefix): rlibdir = join_path(prefix, 'rlib') configure_args = ['--prefix=%s' % prefix, @@ -88,6 +91,12 @@ def install(self, spec, prefix): make() make('install') + # Make a copy of Makeconf because it will be needed to properly build R + # dependencies in Spack. + src_makeconf = join_path(self.etcdir, 'Makeconf') + dst_makeconf = join_path(self.etcdir, 'Makeconf.spack') + shutil.copy(src_makeconf, dst_makeconf) + self.filter_compilers(spec, prefix) def filter_compilers(self, spec, prefix): @@ -98,18 +107,16 @@ def filter_compilers(self, spec, prefix): cc and c++. We want them to be bound to whatever compiler they were built with.""" - etcdir = join_path(prefix, 'rlib', 'R', 'etc') - kwargs = {'ignore_absent': True, 'backup': False, 'string': True} - filter_file(env['CC'], self.compiler.cc, - join_path(etcdir, 'Makeconf'), **kwargs) + filter_file(env['CC'], self.compiler.cc, + join_path(self.etcdir, 'Makeconf'), **kwargs) filter_file(env['CXX'], self.compiler.cxx, - join_path(etcdir, 'Makeconf'), **kwargs) + join_path(self.etcdir, 'Makeconf'), **kwargs) filter_file(env['F77'], self.compiler.f77, - join_path(etcdir, 'Makeconf'), **kwargs) + join_path(self.etcdir, 'Makeconf'), **kwargs) filter_file(env['FC'], self.compiler.fc, - join_path(etcdir, 'Makeconf'), **kwargs) + join_path(self.etcdir, 'Makeconf'), **kwargs) # ======================================================================== # Set up environment to make install easy for R extensions. 
@@ -117,7 +124,7 @@ def filter_compilers(self, spec, prefix): @property def r_lib_dir(self): - return os.path.join('rlib', 'R', 'library') + return join_path('rlib', 'R', 'library') def setup_dependent_environment(self, spack_env, run_env, extension_spec): # Set R_LIBS to include the library dir for the @@ -125,15 +132,17 @@ def setup_dependent_environment(self, spack_env, run_env, extension_spec): r_libs_path = [] for d in extension_spec.traverse(deptype=nolink, deptype_query='run'): if d.package.extends(self.spec): - r_libs_path.append(os.path.join(d.prefix, self.r_lib_dir)) + r_libs_path.append(join_path(d.prefix, self.r_lib_dir)) r_libs_path = ':'.join(r_libs_path) spack_env.set('R_LIBS', r_libs_path) + spack_env.set('R_MAKEVARS_SITE', + join_path(self.etcdir, 'Makeconf.spack')) # For run time environment set only the path for extension_spec and # prepend it to R_LIBS if extension_spec.package.extends(self.spec): - run_env.prepend_path('R_LIBS', os.path.join( + run_env.prepend_path('R_LIBS', join_path( extension_spec.prefix, self.r_lib_dir)) def setup_environment(self, spack_env, run_env): @@ -147,13 +156,14 @@ def setup_environment(self, spack_env, run_env): def setup_dependent_package(self, module, ext_spec): """Called before R modules' install() methods. In most cases, extensions will only need to have one line: - R('CMD', 'INSTALL', '--library=%s' % self.module.r_lib_dir, '%s' % - self.stage.source_path)""" + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path)""" + # R extension builds can have a global R executable function module.R = Executable(join_path(self.spec.prefix.bin, 'R')) # Add variable for library directry - module.r_lib_dir = os.path.join(ext_spec.prefix, self.r_lib_dir) + module.r_lib_dir = join_path(ext_spec.prefix, self.r_lib_dir) # Make the site packages directory for extensions, if it does not exist # already. From 773bca159aef2024c1d7421a080097f52f4f509a Mon Sep 17 00:00:00 2001 From: Greg Lee Date: Fri, 22 Jul 2016 15:49:36 -0700 Subject: [PATCH 071/284] do not ignore nosetests script in py-nose --- var/spack/repos/builtin/packages/py-nose/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-nose/package.py b/var/spack/repos/builtin/packages/py-nose/package.py index d15a1784d1e..eb3dd772196 100644 --- a/var/spack/repos/builtin/packages/py-nose/package.py +++ b/var/spack/repos/builtin/packages/py-nose/package.py @@ -36,7 +36,7 @@ class PyNose(Package): version('1.3.6', '0ca546d81ca8309080fc80cb389e7a16') version('1.3.7', '4d3ad0ff07b61373d2cefc89c5d0b20b') - extends('python', ignore=r'bin/nosetests.*$') + extends('python') depends_on('py-setuptools', type='build') def install(self, spec, prefix): From b51be2bb1bc9dd3c422af2f159c94bc0277fcecc Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sat, 23 Jul 2016 18:11:09 -0500 Subject: [PATCH 072/284] Have fetch use list_url This PR allows archive file retrieval from urls derived from the `list_url` setting in a package file. This allows for continued retrieval of checksummed archive files even when they are moved to a new remote location when a package is updated upstream. 
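To illustrate the package-side metadata this fallback relies on, here is a sketch; the project name, URLs, and checksum are placeholders and are not part of this change. A package that sets `list_url` (and, for nested listings, `list_depth`) gives the stage an index page to search when the primary `url` for an old version has moved:

    from spack import *

    class Foo(Package):
        """Hypothetical package whose old tarballs move after each release."""
        homepage   = "http://www.example.org/foo"
        url        = "http://www.example.org/foo/downloads/foo-1.2.0.tar.gz"
        # Index page that still lists every released tarball.
        list_url   = "http://www.example.org/foo/downloads"
        list_depth = 1

        version('1.2.0', '00000000000000000000000000000000')

        def install(self, spec, prefix):
            configure('--prefix={0}'.format(prefix))
            make()
            make('install')

With metadata like this in place, the fetcher derived from `list_url` is tried after the mirror and the original `url` have both failed.
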
--- lib/spack/spack/stage.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index b08cce43b8b..0914afe3a70 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -37,6 +37,7 @@ import spack.config import spack.fetch_strategy as fs import spack.error +from spack.version import Version STAGE_PREFIX = 'spack-stage-' @@ -306,6 +307,18 @@ def fetch(self, mirror_only=False): fetchers.insert(0, fs.URLFetchStrategy(url, digest)) fetchers.insert(0, spack.cache.fetcher(self.mirror_path, digest)) + # Look for the archive in list_url + archive_version = spack.url.parse_version(self.default_fetcher.url) + package_name = os.path.dirname(self.mirror_path) + pkg = spack.repo.get(package_name) + versions = pkg.fetch_remote_versions() + try: + url_from_list = versions[Version(archive_version)] + fetchers.append(fs.URLFetchStrategy(url_from_list, digest)) + except KeyError: + tty.msg("Can not find version %s in url_list" % + archive_version) + for fetcher in fetchers: try: fetcher.set_stage(self) From 4181fd79cd6993ccb5cb95c4d3dd8d2cdf0b17c3 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sat, 23 Jul 2016 19:00:57 -0500 Subject: [PATCH 073/284] Fix flake8 errors related to lines Fixed the flake 8 errors that involved too many blank lines or not enough blank lines. Basically, all of the flake8 errors except line length errors. --- lib/spack/spack/stage.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 0914afe3a70..cf1c9d7b506 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -144,7 +144,6 @@ def __init__(self, url_or_fetch_strategy, # Flag to decide whether to delete the stage folder on exit or not self.keep = keep - def __enter__(self): """ Entering a stage context will create the stage directory @@ -155,7 +154,6 @@ def __enter__(self): self.create() return self - def __exit__(self, exc_type, exc_val, exc_tb): """ Exiting from a stage context will delete the stage directory unless: @@ -174,7 +172,6 @@ def __exit__(self, exc_type, exc_val, exc_tb): if exc_type is None and not self.keep: self.destroy() - def _need_to_create_path(self): """Makes sure nothing weird has happened since the last time we looked at path. Returns True if path already exists and is ok. @@ -334,7 +331,6 @@ def fetch(self, mirror_only=False): self.fetcher = self.default_fetcher raise fs.FetchError(errMessage, None) - def check(self): """Check the downloaded archive against a checksum digest. No-op if this stage checks code out of a repository.""" @@ -348,11 +344,9 @@ def check(self): else: self.fetcher.check() - def cache_local(self): spack.cache.store(self.fetcher, self.mirror_path) - def expand_archive(self): """Changes to the stage directory and attempt to expand the downloaded archive. Fail if the stage is not set up or if the archive is not yet @@ -509,8 +503,11 @@ def chdir(self): raise ChdirError("Setup failed: no such directory: " + self.path) # DIY stages do nothing as context managers. 
-    def __enter__(self): pass
-    def __exit__(self, exc_type, exc_val, exc_tb): pass
+    def __enter__(self):
+        pass
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        pass
 
     def chdir_to_source(self):
         self.chdir()

From a8855f48aab61e174b4c9a1532e255b0a8b910c7 Mon Sep 17 00:00:00 2001
From: Glenn Johnson
Date: Sat, 23 Jul 2016 19:43:55 -0500
Subject: [PATCH 074/284] Add dependency type to r- packages

This PR adds the `nolink` dependency type to r- package dependencies.
This is needed due to the new dependency types in Spack.

A couple of packages were updated with new versions as well.
---
 .../builtin/packages/r-devtools/package.py      | 16 ++++++++--------
 .../repos/builtin/packages/r-dplyr/package.py   | 17 +++++++++--------
 .../repos/builtin/packages/r-ggplot2/package.py | 12 ++++++------
 .../repos/builtin/packages/r-ggvis/package.py   | 14 +++++++-------
 .../builtin/packages/r-gridextra/package.py     |  2 +-
 .../builtin/packages/r-htmltools/package.py     |  4 ++--
 .../repos/builtin/packages/r-httpuv/package.py  |  2 +-
 .../repos/builtin/packages/r-httr/package.py    | 10 +++++-----
 .../builtin/packages/r-lubridate/package.py     |  3 ++-
 .../repos/builtin/packages/r-magic/package.py   |  2 +-
 .../repos/builtin/packages/r-matrix/package.py  |  2 +-
 .../repos/builtin/packages/r-memoise/package.py |  2 +-
 .../repos/builtin/packages/r-munsell/package.py |  2 +-
 .../repos/builtin/packages/r-plyr/package.py    |  2 +-
 .../builtin/packages/r-rcppeigen/package.py     |  4 ++--
 .../builtin/packages/r-reshape2/package.py      |  6 +++---
 .../repos/builtin/packages/r-rmysql/package.py  |  2 +-
 .../builtin/packages/r-rpostgresql/package.py   |  2 +-
 .../repos/builtin/packages/r-rsqlite/package.py |  2 +-
 .../repos/builtin/packages/r-rstan/package.py   | 14 +++++++-------
 .../repos/builtin/packages/r-scales/package.py  | 12 ++++++------
 .../repos/builtin/packages/r-shiny/package.py   | 14 +++++++-------
 .../repos/builtin/packages/r-stringr/package.py |  4 ++--
 .../repos/builtin/packages/r-tibble/package.py  |  6 +++---
 .../repos/builtin/packages/r-tidyr/package.py   | 12 ++++++------
 .../builtin/packages/r-xlconnect/package.py     |  8 ++++----
 .../builtin/packages/r-xlconnectjars/package.py |  6 +++---
 .../repos/builtin/packages/r-xlsx/package.py    |  4 ++--
 .../builtin/packages/r-xlsxjars/package.py      |  2 +-
 29 files changed, 95 insertions(+), 93 deletions(-)

diff --git a/var/spack/repos/builtin/packages/r-devtools/package.py b/var/spack/repos/builtin/packages/r-devtools/package.py
index 5f0b7b87792..75506dd257d 100644
--- a/var/spack/repos/builtin/packages/r-devtools/package.py
+++ b/var/spack/repos/builtin/packages/r-devtools/package.py
@@ -36,14 +36,14 @@ class RDevtools(Package):
 
     extends('R')
 
-    depends_on('r-httr')
-    depends_on('r-memoise')
-    depends_on('r-whisker')
-    depends_on('r-digest')
-    depends_on('r-rstudioapi')
-    depends_on('r-jsonlite')
-    depends_on('r-git2r')
-    depends_on('r-withr')
+    depends_on('r-httr', type=nolink)
+    depends_on('r-memoise', type=nolink)
+    depends_on('r-whisker', type=nolink)
+    depends_on('r-digest', type=nolink)
+    depends_on('r-rstudioapi', type=nolink)
+    depends_on('r-jsonlite', type=nolink)
+    depends_on('r-git2r', type=nolink)
+    depends_on('r-withr', type=nolink)
 
     def install(self, spec, prefix):
         R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir),
diff --git a/var/spack/repos/builtin/packages/r-dplyr/package.py b/var/spack/repos/builtin/packages/r-dplyr/package.py
index b065e0b8173..ded76512783 100644
--- a/var/spack/repos/builtin/packages/r-dplyr/package.py
+++ b/var/spack/repos/builtin/packages/r-dplyr/package.py
@@ -36,14
+36,15 @@ class RDplyr(Package): version('0.5.0', '1fcafcacca70806eea2e6d465cdb94ef') extends('R') - depends_on('r-assertthat') - depends_on('r-R6') - depends_on('r-rcpp') - depends_on('r-tibble') - depends_on('r-magrittr') - depends_on('r-lazyeval') - depends_on('r-dbi') - depends_on('r-bh') + + depends_on('r-assertthat', type=nolink) + depends_on('r-R6', type=nolink) + depends_on('r-rcpp', type=nolink) + depends_on('r-tibble', type=nolink) + depends_on('r-magrittr', type=nolink) + depends_on('r-lazyeval', type=nolink) + depends_on('r-dbi', type=nolink) + depends_on('r-bh', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-ggplot2/package.py b/var/spack/repos/builtin/packages/r-ggplot2/package.py index 2d1f53af265..a8cf21930fe 100644 --- a/var/spack/repos/builtin/packages/r-ggplot2/package.py +++ b/var/spack/repos/builtin/packages/r-ggplot2/package.py @@ -42,12 +42,12 @@ class RGgplot2(Package): extends('R') - depends_on('r-digest') - depends_on('r-gtable') - depends_on('r-mass') - depends_on('r-plyr') - depends_on('r-reshape2') - depends_on('r-scales') + depends_on('r-digest', type=nolink) + depends_on('r-gtable', type=nolink) + depends_on('r-mass', type=nolink) + depends_on('r-plyr', type=nolink) + depends_on('r-reshape2', type=nolink) + depends_on('r-scales', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-ggvis/package.py b/var/spack/repos/builtin/packages/r-ggvis/package.py index 8fc1f397c80..29b2069ae9c 100644 --- a/var/spack/repos/builtin/packages/r-ggvis/package.py +++ b/var/spack/repos/builtin/packages/r-ggvis/package.py @@ -38,13 +38,13 @@ class RGgvis(Package): extends('R') - depends_on('r-assertthat') - depends_on('r-jsonlite') - depends_on('r-shiny') - depends_on('r-magrittr') - depends_on('r-dplyr') - depends_on('r-lazyeval') - depends_on('r-htmltools') + depends_on('r-assertthat', type=nolink) + depends_on('r-jsonlite', type=nolink) + depends_on('r-shiny', type=nolink) + depends_on('r-magrittr', type=nolink) + depends_on('r-dplyr', type=nolink) + depends_on('r-lazyeval', type=nolink) + depends_on('r-htmltools', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-gridextra/package.py b/var/spack/repos/builtin/packages/r-gridextra/package.py index d215d106782..537426d42f5 100644 --- a/var/spack/repos/builtin/packages/r-gridextra/package.py +++ b/var/spack/repos/builtin/packages/r-gridextra/package.py @@ -37,7 +37,7 @@ class RGridextra(Package): extends('R') - depends_on('r-gtable') + depends_on('r-gtable', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-htmltools/package.py b/var/spack/repos/builtin/packages/r-htmltools/package.py index 0aea5643724..21cdbe9184d 100644 --- a/var/spack/repos/builtin/packages/r-htmltools/package.py +++ b/var/spack/repos/builtin/packages/r-htmltools/package.py @@ -36,8 +36,8 @@ class RHtmltools(Package): extends('R') - depends_on('r-digest') - depends_on('r-rcpp') + depends_on('r-digest', type=nolink) + depends_on('r-rcpp', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-httpuv/package.py 
b/var/spack/repos/builtin/packages/r-httpuv/package.py index 6ab12bcf9d0..57025d3a3cc 100644 --- a/var/spack/repos/builtin/packages/r-httpuv/package.py +++ b/var/spack/repos/builtin/packages/r-httpuv/package.py @@ -42,7 +42,7 @@ class RHttpuv(Package): extends('R') - depends_on('r-rcpp') + depends_on('r-rcpp', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-httr/package.py b/var/spack/repos/builtin/packages/r-httr/package.py index 77ec34ab03d..227594fc0d5 100644 --- a/var/spack/repos/builtin/packages/r-httr/package.py +++ b/var/spack/repos/builtin/packages/r-httr/package.py @@ -38,11 +38,11 @@ class RHttr(Package): extends('R') - depends_on('r-jsonlite') - depends_on('r-mime') - depends_on('r-curl') - depends_on('r-openssl') - depends_on('r-R6') + depends_on('r-jsonlite', type=nolink) + depends_on('r-mime', type=nolink) + depends_on('r-curl', type=nolink) + depends_on('r-openssl', type=nolink) + depends_on('r-R6', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-lubridate/package.py b/var/spack/repos/builtin/packages/r-lubridate/package.py index 88576911f04..2e5661fa4d3 100644 --- a/var/spack/repos/builtin/packages/r-lubridate/package.py +++ b/var/spack/repos/builtin/packages/r-lubridate/package.py @@ -40,7 +40,8 @@ class RLubridate(Package): version('1.5.6', 'a5dc44817548ee219d26a10bae92e611') extends('R') - depends_on('r-stringr') + + depends_on('r-stringr', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-magic/package.py b/var/spack/repos/builtin/packages/r-magic/package.py index f6ba97740bd..2b8f95351a1 100644 --- a/var/spack/repos/builtin/packages/r-magic/package.py +++ b/var/spack/repos/builtin/packages/r-magic/package.py @@ -39,7 +39,7 @@ class RMagic(Package): extends('R') - depends_on('r-abind') + depends_on('r-abind', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-matrix/package.py b/var/spack/repos/builtin/packages/r-matrix/package.py index fbd8a7b4dfd..1b45dc55caa 100644 --- a/var/spack/repos/builtin/packages/r-matrix/package.py +++ b/var/spack/repos/builtin/packages/r-matrix/package.py @@ -37,7 +37,7 @@ class RMatrix(Package): extends('R') - depends_on('r-lattice') + depends_on('r-lattice', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-memoise/package.py b/var/spack/repos/builtin/packages/r-memoise/package.py index 6a0fb786508..81f3ff1dab2 100644 --- a/var/spack/repos/builtin/packages/r-memoise/package.py +++ b/var/spack/repos/builtin/packages/r-memoise/package.py @@ -37,7 +37,7 @@ class RMemoise(Package): extends('R') - depends_on('r-digest') + depends_on('r-digest', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-munsell/package.py b/var/spack/repos/builtin/packages/r-munsell/package.py index 3216c95e001..b96b90f9c7e 100644 --- a/var/spack/repos/builtin/packages/r-munsell/package.py +++ b/var/spack/repos/builtin/packages/r-munsell/package.py @@ -40,7 +40,7 @@ class RMunsell(Package): extends('R') - 
depends_on('r-colorspace') + depends_on('r-colorspace', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-plyr/package.py b/var/spack/repos/builtin/packages/r-plyr/package.py index 192e7e8b18e..fe4512347ab 100644 --- a/var/spack/repos/builtin/packages/r-plyr/package.py +++ b/var/spack/repos/builtin/packages/r-plyr/package.py @@ -42,7 +42,7 @@ class RPlyr(Package): extends('R') - depends_on('r-rcpp') + depends_on('r-rcpp', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-rcppeigen/package.py b/var/spack/repos/builtin/packages/r-rcppeigen/package.py index 3175628a735..ecf9256ab3a 100644 --- a/var/spack/repos/builtin/packages/r-rcppeigen/package.py +++ b/var/spack/repos/builtin/packages/r-rcppeigen/package.py @@ -48,8 +48,8 @@ class RRcppeigen(Package): extends('R') - depends_on('r-matrix') - depends_on('r-rcpp') + depends_on('r-matrix', type=nolink) + depends_on('r-rcpp', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-reshape2/package.py b/var/spack/repos/builtin/packages/r-reshape2/package.py index a96a3140086..769e82aff9d 100644 --- a/var/spack/repos/builtin/packages/r-reshape2/package.py +++ b/var/spack/repos/builtin/packages/r-reshape2/package.py @@ -37,9 +37,9 @@ class RReshape2(Package): extends('R') - depends_on('r-plyr') - depends_on('r-stringr') - depends_on('r-rcpp') + depends_on('r-plyr', type=nolink) + depends_on('r-stringr', type=nolink) + depends_on('r-rcpp', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-rmysql/package.py b/var/spack/repos/builtin/packages/r-rmysql/package.py index 086374df077..bc42c51ca9b 100644 --- a/var/spack/repos/builtin/packages/r-rmysql/package.py +++ b/var/spack/repos/builtin/packages/r-rmysql/package.py @@ -36,7 +36,7 @@ class RRmysql(Package): extends('R') - depends_on('r-dbi') + depends_on('r-dbi', type=nolink) depends_on('mariadb') def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/r-rpostgresql/package.py b/var/spack/repos/builtin/packages/r-rpostgresql/package.py index 53e1978ffe0..999fb4a1b9b 100644 --- a/var/spack/repos/builtin/packages/r-rpostgresql/package.py +++ b/var/spack/repos/builtin/packages/r-rpostgresql/package.py @@ -44,7 +44,7 @@ class RRpostgresql(Package): extends('R') - depends_on('r-dbi') + depends_on('r-dbi', type=nolink) depends_on('postgresql') def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/r-rsqlite/package.py b/var/spack/repos/builtin/packages/r-rsqlite/package.py index 7c03ab89505..c7d41f0fb37 100644 --- a/var/spack/repos/builtin/packages/r-rsqlite/package.py +++ b/var/spack/repos/builtin/packages/r-rsqlite/package.py @@ -38,7 +38,7 @@ class RRsqlite(Package): extends('R') - depends_on('r-dbi') + depends_on('r-dbi', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-rstan/package.py b/var/spack/repos/builtin/packages/r-rstan/package.py index 00fd5116d9c..1d0052563b7 100644 --- a/var/spack/repos/builtin/packages/r-rstan/package.py +++ b/var/spack/repos/builtin/packages/r-rstan/package.py @@ -44,13 +44,13 @@ class 
RRstan(Package): extends('R') - depends_on('r-ggplot2') - depends_on('r-stanheaders') - depends_on('r-inline') - depends_on('r-gridextra') - depends_on('r-rcpp') - depends_on('r-rcppeigen') - depends_on('r-bh') + depends_on('r-ggplot2', type=nolink) + depends_on('r-stanheaders', type=nolink) + depends_on('r-inline', type=nolink) + depends_on('r-gridextra', type=nolink) + depends_on('r-rcpp', type=nolink) + depends_on('r-rcppeigen', type=nolink) + depends_on('r-bh', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-scales/package.py b/var/spack/repos/builtin/packages/r-scales/package.py index 046a05d48ec..44bd5a5a2a7 100644 --- a/var/spack/repos/builtin/packages/r-scales/package.py +++ b/var/spack/repos/builtin/packages/r-scales/package.py @@ -37,12 +37,12 @@ class RScales(Package): extends('R') - depends_on('r-rcolorbrewer') - depends_on('r-dichromat') - depends_on('r-plyr') - depends_on('r-munsell') - depends_on('r-labeling') - depends_on('r-rcpp') + depends_on('r-rcolorbrewer', type=nolink) + depends_on('r-dichromat', type=nolink) + depends_on('r-plyr', type=nolink) + depends_on('r-munsell', type=nolink) + depends_on('r-labeling', type=nolink) + depends_on('r-rcpp', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-shiny/package.py b/var/spack/repos/builtin/packages/r-shiny/package.py index a9a95329107..a80860f4a53 100644 --- a/var/spack/repos/builtin/packages/r-shiny/package.py +++ b/var/spack/repos/builtin/packages/r-shiny/package.py @@ -39,13 +39,13 @@ class RShiny(Package): extends('R') - depends_on('r-httpuv') - depends_on('r-mime') - depends_on('r-jsonlite') - depends_on('r-xtable') - depends_on('r-digest') - depends_on('r-htmltools') - depends_on('r-R6') + depends_on('r-httpuv', type=nolink) + depends_on('r-mime', type=nolink) + depends_on('r-jsonlite', type=nolink) + depends_on('r-xtable', type=nolink) + depends_on('r-digest', type=nolink) + depends_on('r-htmltools', type=nolink) + depends_on('r-R6', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-stringr/package.py b/var/spack/repos/builtin/packages/r-stringr/package.py index 01fd9695226..08ded958339 100644 --- a/var/spack/repos/builtin/packages/r-stringr/package.py +++ b/var/spack/repos/builtin/packages/r-stringr/package.py @@ -40,8 +40,8 @@ class RStringr(Package): extends('R') - depends_on('r-stringi') - depends_on('r-magrittr') + depends_on('r-stringi', type=nolink) + depends_on('r-magrittr', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-tibble/package.py b/var/spack/repos/builtin/packages/r-tibble/package.py index 837c3df6030..c601de3c880 100644 --- a/var/spack/repos/builtin/packages/r-tibble/package.py +++ b/var/spack/repos/builtin/packages/r-tibble/package.py @@ -37,9 +37,9 @@ class RTibble(Package): extends('R') - depends_on('r-assertthat') - depends_on('r-lazyeval') - depends_on('r-rcpp') + depends_on('r-assertthat', type=nolink) + depends_on('r-lazyeval', type=nolink) + depends_on('r-rcpp', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-tidyr/package.py 
b/var/spack/repos/builtin/packages/r-tidyr/package.py index 2db7b174c2c..d82ae278fe0 100644 --- a/var/spack/repos/builtin/packages/r-tidyr/package.py +++ b/var/spack/repos/builtin/packages/r-tidyr/package.py @@ -37,12 +37,12 @@ class RTidyr(Package): version('0.5.1', '3cadc869510c054ed93d374ab44120bd') extends('R') - depends_on('r-tibble') - depends_on('r-dplyr') - depends_on('r-stringi') - depends_on('r-lazyeval') - depends_on('r-magrittr') - depends_on('r-rcpp') + depends_on('r-tibble', type=nolink) + depends_on('r-dplyr', type=nolink) + depends_on('r-stringi', type=nolink) + depends_on('r-lazyeval', type=nolink) + depends_on('r-magrittr', type=nolink) + depends_on('r-rcpp', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-xlconnect/package.py b/var/spack/repos/builtin/packages/r-xlconnect/package.py index 4ee7163be15..332c80fb92d 100644 --- a/var/spack/repos/builtin/packages/r-xlconnect/package.py +++ b/var/spack/repos/builtin/packages/r-xlconnect/package.py @@ -33,13 +33,13 @@ class RXlconnect(Package): url = "https://cran.r-project.org/src/contrib/XLConnect_0.2-11.tar.gz" list_url = "https://cran.r-project.org/src/contrib/Archive/XLConnect" - version('0.2-11', '9d1769a103cda05665df399cc335017d', - url='https://cran.r-project.org/src/contrib/Archive/XLConnect/XLConnect_0.2-11.tar.gz') + version('0.2-12', '3340d05d259f0a41262eab4ed32617ad') + version('0.2-11', '9d1769a103cda05665df399cc335017d') extends('R') - depends_on('r-xlconnectjars') - depends_on('r-rjava') + depends_on('r-xlconnectjars', type=nolink) + depends_on('r-rjava', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-xlconnectjars/package.py b/var/spack/repos/builtin/packages/r-xlconnectjars/package.py index c1fb3658838..26e822d2b77 100644 --- a/var/spack/repos/builtin/packages/r-xlconnectjars/package.py +++ b/var/spack/repos/builtin/packages/r-xlconnectjars/package.py @@ -32,12 +32,12 @@ class RXlconnectjars(Package): url = "https://cran.r-project.org/src/contrib/XLConnectJars_0.2-9.tar.gz" list_url = "https://cran.r-project.org/src/contrib/Archive/XLConnectJars" - version('0.2-9', 'e6d6b1acfede26acaa616ee421bd30fb', - url='https://cran.r-project.org/src/contrib/Archive/XLConnectJars/XLConnectJars_0.2-9.tar.gz') + version('0.2-12', '6984e5140cd1c887c017ef6f88cbba81') + version('0.2-9', 'e6d6b1acfede26acaa616ee421bd30fb') extends('R') - depends_on('r-rjava') + depends_on('r-rjava', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-xlsx/package.py b/var/spack/repos/builtin/packages/r-xlsx/package.py index 99d41dbb947..6fed09296d6 100644 --- a/var/spack/repos/builtin/packages/r-xlsx/package.py +++ b/var/spack/repos/builtin/packages/r-xlsx/package.py @@ -37,8 +37,8 @@ class RXlsx(Package): extends('R') - depends_on('r-rjava') - depends_on('r-xlsxjars') + depends_on('r-rjava', type=nolink) + depends_on('r-xlsxjars', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), diff --git a/var/spack/repos/builtin/packages/r-xlsxjars/package.py b/var/spack/repos/builtin/packages/r-xlsxjars/package.py index 80e86d0c733..19f0006ce98 100644 --- a/var/spack/repos/builtin/packages/r-xlsxjars/package.py +++ 
b/var/spack/repos/builtin/packages/r-xlsxjars/package.py @@ -37,7 +37,7 @@ class RXlsxjars(Package): extends('R') - depends_on('r-rjava') + depends_on('r-rjava', type=nolink) def install(self, spec, prefix): R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), From 4f09e8c9759473257ca8857bfa09eeeeca08cafd Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sat, 23 Jul 2016 22:56:08 -0500 Subject: [PATCH 075/284] Only use list if list_url set This commit will make urls from list_url only checked if `list_url` is set in the package file. This makes more sense as there is no need to check for those if the attribute is not present. If `url` is present and `list_url` is not then it would result in the same url. If `url_for_version` is used then that will not work anyway. --- lib/spack/spack/stage.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index cf1c9d7b506..6cf736b3f0f 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -37,7 +37,7 @@ import spack.config import spack.fetch_strategy as fs import spack.error -from spack.version import Version +from spack.version import * STAGE_PREFIX = 'spack-stage-' @@ -308,13 +308,14 @@ def fetch(self, mirror_only=False): archive_version = spack.url.parse_version(self.default_fetcher.url) package_name = os.path.dirname(self.mirror_path) pkg = spack.repo.get(package_name) - versions = pkg.fetch_remote_versions() - try: - url_from_list = versions[Version(archive_version)] - fetchers.append(fs.URLFetchStrategy(url_from_list, digest)) - except KeyError: - tty.msg("Can not find version %s in url_list" % - archive_version) + if pkg.list_url is not None: + versions = pkg.fetch_remote_versions() + try: + url_from_list = versions[Version(archive_version)] + fetchers.append(fs.URLFetchStrategy(url_from_list, digest)) + except KeyError: + tty.msg("Can not find version %s in url_list" % + archive_version) for fetcher in fetchers: try: From b1e5ec05739b05ed3781473b0a27b04f7b053ce6 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sun, 24 Jul 2016 09:21:11 -0500 Subject: [PATCH 076/284] Make sure package has the `url` attribute. In addition to `list_url` make sure the package has the `url` attribute set before attempting to add urls from a list. This is to cover the case where there may be a `list_url` specified in tandem with a `url_for_version`. --- lib/spack/spack/stage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 6cf736b3f0f..8f3f0e163a0 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -308,7 +308,7 @@ def fetch(self, mirror_only=False): archive_version = spack.url.parse_version(self.default_fetcher.url) package_name = os.path.dirname(self.mirror_path) pkg = spack.repo.get(package_name) - if pkg.list_url is not None: + if pkg.list_url is not None and pkg.url is not None: versions = pkg.fetch_remote_versions() try: url_from_list = versions[Version(archive_version)] From 29278090ebeb6cfbb0644206c68531c7109f2bf8 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Sun, 24 Jul 2016 15:19:27 -0500 Subject: [PATCH 077/284] Fix OpenSSL url_for_version --- var/spack/repos/builtin/packages/openssl/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/openssl/package.py b/var/spack/repos/builtin/packages/openssl/package.py index b9eada9e83a..78bdd88d9c3 100644 --- a/var/spack/repos/builtin/packages/openssl/package.py +++ b/var/spack/repos/builtin/packages/openssl/package.py @@ -52,7 +52,7 @@ def url_for_version(self, version): if '@system' in self.spec: return '@system (reserved version for system openssl)' else: - return super(Openssl, self).url_for_version(self.spec) + return super(Openssl, self).url_for_version(self.version) def handle_fetch_error(self, error): tty.warn("Fetching OpenSSL failed. This may indicate that OpenSSL has " From b7bf88c761dbbe28810e16932fc2917c90a3e386 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sun, 24 Jul 2016 16:38:36 -0500 Subject: [PATCH 078/284] New package - r-googlevis --- .../builtin/packages/r-googlevis/package.py | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-googlevis/package.py diff --git a/var/spack/repos/builtin/packages/r-googlevis/package.py b/var/spack/repos/builtin/packages/r-googlevis/package.py new file mode 100644 index 00000000000..bcb7caa0fbd --- /dev/null +++ b/var/spack/repos/builtin/packages/r-googlevis/package.py @@ -0,0 +1,47 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RGooglevis(Package): + """R interface to Google Charts API, allowing users to create interactive + charts based on data frames. Charts are displayed locally via the R HTTP + help server. A modern browser with an Internet connection is required and + for some charts a Flash player. 
The data remains local and is not uploaded + to Google.""" + + homepage = "https://github.com/mages/googleVis#googlevis" + url = "https://cran.r-project.org/src/contrib/googleVis_0.6.0.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/googleVis" + + version('0.6.0', 'ec36fd2a6884ddc7baa894007d0d0468') + + extends('R') + + depends_on('r-jsonlite', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) From 27986c9edf2d721aa498833f2dccb48cc7548aeb Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sun, 24 Jul 2016 16:02:36 -0500 Subject: [PATCH 079/284] New packages - R htmlwidgets --- .../builtin/packages/r-base64enc/package.py | 42 +++++++++++++ .../builtin/packages/r-cluster/package.py | 43 +++++++++++++ .../builtin/packages/r-codetools/package.py | 41 +++++++++++++ .../builtin/packages/r-diagrammer/package.py | 49 +++++++++++++++ .../builtin/packages/r-doparallel/package.py | 45 ++++++++++++++ .../repos/builtin/packages/r-dt/package.py | 47 +++++++++++++++ .../builtin/packages/r-dygraphs/package.py | 50 ++++++++++++++++ .../builtin/packages/r-foreach/package.py | 50 ++++++++++++++++ .../builtin/packages/r-gridbase/package.py | 41 +++++++++++++ .../builtin/packages/r-htmlwidgets/package.py | 47 +++++++++++++++ .../builtin/packages/r-igraph/package.py | 50 ++++++++++++++++ .../builtin/packages/r-influencer/package.py | 50 ++++++++++++++++ .../repos/builtin/packages/r-irlba/package.py | 45 ++++++++++++++ .../builtin/packages/r-iterators/package.py | 42 +++++++++++++ .../builtin/packages/r-leaflet/package.py | 54 +++++++++++++++++ .../builtin/packages/r-markdown/package.py | 47 +++++++++++++++ .../builtin/packages/r-networkd3/package.py | 46 ++++++++++++++ .../repos/builtin/packages/r-nmf/package.py | 60 +++++++++++++++++++ .../builtin/packages/r-pkgmaker/package.py | 53 ++++++++++++++++ .../repos/builtin/packages/r-png/package.py | 45 ++++++++++++++ .../builtin/packages/r-raster/package.py | 46 ++++++++++++++ .../builtin/packages/r-registry/package.py | 41 +++++++++++++ .../builtin/packages/r-rngtools/package.py | 49 +++++++++++++++ .../builtin/packages/r-rstudioapi/package.py | 1 + .../repos/builtin/packages/r-sp/package.py | 46 ++++++++++++++ .../builtin/packages/r-threejs/package.py | 47 +++++++++++++++ .../builtin/packages/r-visnetwork/package.py | 47 +++++++++++++++ .../repos/builtin/packages/r-xts/package.py | 46 ++++++++++++++ .../repos/builtin/packages/r-yaml/package.py | 42 +++++++++++++ .../repos/builtin/packages/r-zoo/package.py | 47 +++++++++++++++ 30 files changed, 1359 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-base64enc/package.py create mode 100644 var/spack/repos/builtin/packages/r-cluster/package.py create mode 100644 var/spack/repos/builtin/packages/r-codetools/package.py create mode 100644 var/spack/repos/builtin/packages/r-diagrammer/package.py create mode 100644 var/spack/repos/builtin/packages/r-doparallel/package.py create mode 100644 var/spack/repos/builtin/packages/r-dt/package.py create mode 100644 var/spack/repos/builtin/packages/r-dygraphs/package.py create mode 100644 var/spack/repos/builtin/packages/r-foreach/package.py create mode 100644 var/spack/repos/builtin/packages/r-gridbase/package.py create mode 100644 var/spack/repos/builtin/packages/r-htmlwidgets/package.py create mode 100644 var/spack/repos/builtin/packages/r-igraph/package.py create mode 100644 var/spack/repos/builtin/packages/r-influencer/package.py create mode 
100644 var/spack/repos/builtin/packages/r-irlba/package.py create mode 100644 var/spack/repos/builtin/packages/r-iterators/package.py create mode 100644 var/spack/repos/builtin/packages/r-leaflet/package.py create mode 100644 var/spack/repos/builtin/packages/r-markdown/package.py create mode 100644 var/spack/repos/builtin/packages/r-networkd3/package.py create mode 100644 var/spack/repos/builtin/packages/r-nmf/package.py create mode 100644 var/spack/repos/builtin/packages/r-pkgmaker/package.py create mode 100644 var/spack/repos/builtin/packages/r-png/package.py create mode 100644 var/spack/repos/builtin/packages/r-raster/package.py create mode 100644 var/spack/repos/builtin/packages/r-registry/package.py create mode 100644 var/spack/repos/builtin/packages/r-rngtools/package.py create mode 100644 var/spack/repos/builtin/packages/r-sp/package.py create mode 100644 var/spack/repos/builtin/packages/r-threejs/package.py create mode 100644 var/spack/repos/builtin/packages/r-visnetwork/package.py create mode 100644 var/spack/repos/builtin/packages/r-xts/package.py create mode 100644 var/spack/repos/builtin/packages/r-yaml/package.py create mode 100644 var/spack/repos/builtin/packages/r-zoo/package.py diff --git a/var/spack/repos/builtin/packages/r-base64enc/package.py b/var/spack/repos/builtin/packages/r-base64enc/package.py new file mode 100644 index 00000000000..4c5d14a90f4 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-base64enc/package.py @@ -0,0 +1,42 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RBase64enc(Package): + """This package provides tools for handling base64 encoding. 
It is more + flexible than the orphaned base64 package.""" + + homepage = "http://www.rforge.net/base64enc" + url = "https://cran.r-project.org/src/contrib/base64enc_0.1-3.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/base64enc" + + version('0.1-3', '0f476dacdd11a3e0ad56d13f5bc2f190') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-cluster/package.py b/var/spack/repos/builtin/packages/r-cluster/package.py new file mode 100644 index 00000000000..0fdf2ce5de1 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-cluster/package.py @@ -0,0 +1,43 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RCluster(Package): + """Methods for Cluster analysis. Much extended the original from Peter + Rousseeuw, Anja Struyf and Mia Hubert, based on Kaufman and Rousseeuw + (1990) "Finding Groups in Data".""" + + homepage = "https://cran.r-project.org/web/packages/cluster/index.html" + url = "https://cran.r-project.org/src/contrib/cluster_2.0.4.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/cluster" + + version('2.0.4', 'bb4deceaafb1c42bb1278d5d0dc11e59') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-codetools/package.py b/var/spack/repos/builtin/packages/r-codetools/package.py new file mode 100644 index 00000000000..bea2f78c829 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-codetools/package.py @@ -0,0 +1,41 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RCodetools(Package): + """Code analysis tools for R.""" + + homepage = "https://cran.r-project.org/web/packages/codetools/index.html" + url = "https://cran.r-project.org/src/contrib/codetools_0.2-14.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/codetools" + + version('0.2-14', '7ec41d4f8bd6ba85facc8c5e6adc1f4d') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-diagrammer/package.py b/var/spack/repos/builtin/packages/r-diagrammer/package.py new file mode 100644 index 00000000000..7ff3b8548b6 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-diagrammer/package.py @@ -0,0 +1,49 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RDiagrammer(Package): + """Create graph diagrams and flowcharts using R.""" + + homepage = "https://github.com/rich-iannone/DiagrammeR" + url = "https://cran.r-project.org/src/contrib/DiagrammeR_0.8.4.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/DiagrammeR" + + version('0.8.4', '9ee295c744f5d4ba9a84289ca7bdaf1a') + + extends('R') + + depends_on('r-htmlwidgets', type=nolink) + depends_on('r-igraph', type=nolink) + depends_on('r-influencer', type=nolink) + depends_on('r-rstudioapi@0.6:', type=nolink) + depends_on('r-stringr', type=nolink) + depends_on('r-visnetwork', type=nolink) + depends_on('r-scales', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-doparallel/package.py b/var/spack/repos/builtin/packages/r-doparallel/package.py new file mode 100644 index 00000000000..7cebfd9e58e --- /dev/null +++ b/var/spack/repos/builtin/packages/r-doparallel/package.py @@ -0,0 +1,45 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RDoparallel(Package): + """Provides a parallel backend for the %dopar% function using the parallel + package.""" + + homepage = "https://cran.r-project.org/web/packages/doParallel/index.html" + url = "https://cran.r-project.org/src/contrib/doParallel_1.0.10.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/doParallel" + + version('1.0.10', 'd9fbde8f315d98d055483ee3493c9b43') + + extends('R') + + depends_on('r-foreach', type=nolink) + depends_on('r-iterators', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-dt/package.py b/var/spack/repos/builtin/packages/r-dt/package.py new file mode 100644 index 00000000000..f134ee8913f --- /dev/null +++ b/var/spack/repos/builtin/packages/r-dt/package.py @@ -0,0 +1,47 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RDt(Package): + """Data objects in R can be rendered as HTML tables using the JavaScript + library 'DataTables' (typically via R Markdown or Shiny). The 'DataTables' + library has been included in this R package. 
The package name 'DT' is an + abbreviation of 'DataTables'.""" + + homepage = "http://rstudio.github.io/DT" + url = "https://cran.r-project.org/src/contrib/DT_0.1.tar.gz" + + version('0.1', '5c8df984921fa484784ec4b8a4fb6f3c') + + extends('R') + + depends_on('r-htmltools', type=nolink) + depends_on('r-htmlwidgets', type=nolink) + depends_on('r-magrittr', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-dygraphs/package.py b/var/spack/repos/builtin/packages/r-dygraphs/package.py new file mode 100644 index 00000000000..9b01d7aa180 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-dygraphs/package.py @@ -0,0 +1,50 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RDygraphs(Package): + """An R interface to the 'dygraphs' JavaScript charting library (a copy of + which is included in the package). Provides rich facilities for charting + time-series data in R, including highly configurable series- and + axis-display and interactive features like zoom/pan and series/point + highlighting.""" + + homepage = "https://cran.r-project.org/web/packages/dygraphs/index.html" + url = "https://cran.r-project.org/src/contrib/dygraphs_0.9.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/dygraphs" + + version('0.9', '7f0ce4312bcd3f0a58b8c03b2772f833') + + extends('R') + + depends_on('r-magrittr', type=nolink) + depends_on('r-htmlwidgets', type=nolink) + depends_on('r-zoo', type=nolink) + depends_on('r-xts', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-foreach/package.py b/var/spack/repos/builtin/packages/r-foreach/package.py new file mode 100644 index 00000000000..2a7324ae993 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-foreach/package.py @@ -0,0 +1,50 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. 
+# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RForeach(Package): + """Support for the foreach looping construct. Foreach is an idiom that + allows for iterating over elements in a collection, without the use of an + explicit loop counter. This package in particular is intended to be used + for its return value, rather than for its side effects. In that sense, it + is similar to the standard lapply function, but doesn't require the + evaluation of a function. Using foreach without side effects also + facilitates executing the loop in parallel.""" + + homepage = "https://cran.r-project.org/web/packages/foreach/index.html" + url = "https://cran.r-project.org/src/contrib/foreach_1.4.3.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/foreach" + + version('1.4.3', 'ef45768126661b259f9b8994462c49a0') + + extends('R') + + depends_on('r-codetools', type=nolink) + depends_on('r-iterators', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-gridbase/package.py b/var/spack/repos/builtin/packages/r-gridbase/package.py new file mode 100644 index 00000000000..a2f55b44703 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-gridbase/package.py @@ -0,0 +1,41 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RGridbase(Package): + """Integration of base and grid graphics.""" + + homepage = "https://cran.r-project.org/web/packages/gridBase/index.html" + url = "https://cran.r-project.org/src/contrib/gridBase_0.4-7.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/gridBase" + + version('0.4-7', '6d5064a85f5c966a92ee468ae44c5f1f') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-htmlwidgets/package.py b/var/spack/repos/builtin/packages/r-htmlwidgets/package.py new file mode 100644 index 00000000000..8090776f06f --- /dev/null +++ b/var/spack/repos/builtin/packages/r-htmlwidgets/package.py @@ -0,0 +1,47 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RHtmlwidgets(Package): + """A framework for creating HTML widgets that render in various contexts + including the R console, 'R Markdown' documents, and 'Shiny' web + applications.""" + + homepage = "https://github.com/ramnathv/htmlwidgets" + url = "https://cran.r-project.org/src/contrib/htmlwidgets_0.6.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/htmlwidgets" + + version('0.6', '7fa522d2eda97593978021bda9670c0e') + + extends('R') + + depends_on('r-htmltools', type=nolink) + depends_on('r-jsonlite', type=nolink) + depends_on('r-yaml', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-igraph/package.py b/var/spack/repos/builtin/packages/r-igraph/package.py new file mode 100644 index 00000000000..37925d4e028 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-igraph/package.py @@ -0,0 +1,50 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. 
+# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RIgraph(Package): + """Routines for simple graphs and network analysis. It can handle large + graphs very well and provides functions for generating random and regular + graphs, graph visualization, centrality methods and much more.""" + + homepage = "http://igraph.org/" + url = "https://cran.r-project.org/src/contrib/igraph_1.0.1.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/igraph" + + version('1.0.1', 'ea33495e49adf4a331e4ba60ba559065') + + extends('R') + + depends_on('r-matrix', type=nolink) + depends_on('r-magrittr', type=nolink) + depends_on('r-nmf', type=nolink) + depends_on('r-irlba', type=nolink) + depends_on('gmp') + depends_on('libxml2') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-influencer/package.py b/var/spack/repos/builtin/packages/r-influencer/package.py new file mode 100644 index 00000000000..dd525daf2b9 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-influencer/package.py @@ -0,0 +1,50 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RInfluencer(Package): + """Provides functionality to compute various node centrality measures on + networks. Included are functions to compute betweenness centrality (by + utilizing Madduri and Bader's SNAP library), implementations of Burt's + constraint and effective network size (ENS) metrics, Borgatti's algorithm + to identify key players, and Valente's bridging metric. On Unix systems, + the betweenness, Key Players, and bridging implementations are parallelized + with OpenMP, which may run faster on systems which have OpenMP + configured.""" + + homepage = "https://github.com/rcc-uchicago/influenceR" + url = "https://cran.r-project.org/src/contrib/influenceR_0.1.0.tar.gz" + + version('0.1.0', '6c8b6decd78c341364b5811fb3050ba5') + + extends('R') + + depends_on('r-igraph', type=nolink) + depends_on('r-matrix', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-irlba/package.py b/var/spack/repos/builtin/packages/r-irlba/package.py new file mode 100644 index 00000000000..42cc142e8ab --- /dev/null +++ b/var/spack/repos/builtin/packages/r-irlba/package.py @@ -0,0 +1,45 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RIrlba(Package): + """Fast and memory efficient methods for truncated singular and eigenvalue + decompositions and principal component analysis of large sparse or dense + matrices.""" + + homepage = "https://cran.r-project.org/web/packages/irlba/index.html" + url = "https://cran.r-project.org/src/contrib/irlba_2.0.0.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/irlba" + + version('2.0.0', '557674cf8b68fea5b9f231058c324d26') + + extends('R') + + depends_on('r-matrix', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-iterators/package.py b/var/spack/repos/builtin/packages/r-iterators/package.py new file mode 100644 index 00000000000..f5a3489e7d2 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-iterators/package.py @@ -0,0 +1,42 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RIterators(Package): + """Support for iterators, which allow a programmer to traverse through all + the elements of a vector, list, or other collection of data.""" + + homepage = "https://cran.r-project.org/web/packages/iterators/index.html" + url = "https://cran.r-project.org/src/contrib/iterators_1.0.8.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/iterators" + + version('1.0.8', '2ded7f82cddd8174f1ec98607946c6ee') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-leaflet/package.py b/var/spack/repos/builtin/packages/r-leaflet/package.py new file mode 100644 index 00000000000..2a91100c348 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-leaflet/package.py @@ -0,0 +1,54 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. 
+# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RLeaflet(Package): + """Create and customize interactive maps using the 'Leaflet' JavaScript + library and the 'htmlwidgets' package. These maps can be used directly from + the R console, from 'RStudio', in Shiny apps and R Markdown documents.""" + + homepage = "http://rstudio.github.io/leaflet/" + url = "https://cran.r-project.org/src/contrib/leaflet_1.0.1.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/leaflet" + + version('1.0.1', '7f3d8b17092604d87d4eeb579f73d5df') + + extends('R') + + depends_on('r-base64enc', type=nolink) + depends_on('r-htmlwidgets', type=nolink) + depends_on('r-htmltools', type=nolink) + depends_on('r-magrittr', type=nolink) + depends_on('r-markdown', type=nolink) + depends_on('r-png', type=nolink) + depends_on('r-rcolorbrewer', type=nolink) + depends_on('r-raster', type=nolink) + depends_on('r-scales', type=nolink) + depends_on('r-sp', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-markdown/package.py b/var/spack/repos/builtin/packages/r-markdown/package.py new file mode 100644 index 00000000000..c34727d2b05 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-markdown/package.py @@ -0,0 +1,47 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RMarkdown(Package): + """Provides R bindings to the 'Sundown' 'Markdown' rendering library + (https://github.com/vmg/sundown). 'Markdown' is a plain-text formatting + syntax that can be converted to 'XHTML' or other formats. See + http://en.wikipedia.org/wiki/Markdown for more information about + 'Markdown'.""" + + homepage = "https://github.com/rstudio/markdown" + url = "https://cran.r-project.org/src/contrib/markdown_0.7.7.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/markdown" + + version('0.7.7', '72deca9c675c7cc9343048edbc29f7ff') + + extends('R') + + depends_on('r-mime', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-networkd3/package.py b/var/spack/repos/builtin/packages/r-networkd3/package.py new file mode 100644 index 00000000000..8884f7cf1b1 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-networkd3/package.py @@ -0,0 +1,46 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RNetworkd3(Package): + """Creates 'D3' 'JavaScript' network, tree, dendrogram, and Sankey graphs + from 'R'.""" + + homepage = "http://cran.r-project.org/package=networkD3" + url = "https://cran.r-project.org/src/contrib/networkD3_0.2.12.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/networkD3" + + version('0.2.12', '356fe4be59698e6fb052644bd9659d84') + + extends('R') + + depends_on('r-htmlwidgets', type=nolink) + depends_on('r-igraph', type=nolink) + depends_on('r-magrittr', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-nmf/package.py b/var/spack/repos/builtin/packages/r-nmf/package.py new file mode 100644 index 00000000000..c30a1ac1012 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-nmf/package.py @@ -0,0 +1,60 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RNmf(Package): + """Provides a framework to perform Non-negative Matrix Factorization (NMF). + The package implements a set of already published algorithms and seeding + methods, and provides a framework to test, develop and plug new/custom + algorithms. 
Most of the built-in algorithms have been optimized in C++, and + the main interface function provides an easy way of performing parallel + computations on multicore machines..""" + + homepage = "http://renozao.github.io/NMF" + url = "https://cran.r-project.org/src/contrib/NMF_0.20.6.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/NMF" + + version('0.20.6', '81df07b3bf710a611db5af24730ff3d0') + + extends('R') + + depends_on('r-pkgmaker', type=nolink) + depends_on('r-registry', type=nolink) + depends_on('r-rngtools', type=nolink) + depends_on('r-cluster', type=nolink) + depends_on('r-stringr', type=nolink) + depends_on('r-digest', type=nolink) + depends_on('r-gridbase', type=nolink) + depends_on('r-colorspace', type=nolink) + depends_on('r-rcolorbrewer', type=nolink) + depends_on('r-foreach', type=nolink) + depends_on('r-doparallel', type=nolink) + depends_on('r-ggplot2', type=nolink) + depends_on('r-reshape2', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-pkgmaker/package.py b/var/spack/repos/builtin/packages/r-pkgmaker/package.py new file mode 100644 index 00000000000..87daec5ad00 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-pkgmaker/package.py @@ -0,0 +1,53 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RPkgmaker(Package): + """This package provides some low-level utilities to use for package + development. It currently provides managers for multiple package specific + options and registries, vignette, unit test and bibtex related utilities. + It serves as a base package for packages like NMF, RcppOctave, doRNG, and + as an incubator package for other general purposes utilities, that will + eventually be packaged separately. 
It is still under heavy development and + changes in the interface(s) are more than likely to happen.""" + + homepage = "https://renozao.github.io/pkgmaker" + url = "https://cran.r-project.org/src/contrib/pkgmaker_0.22.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/pkgmaker" + + version('0.22', '73a0c6d3e84c6dadf3de7582ef7e88a4') + + extends('R') + + depends_on('r-registry', type=nolink) + depends_on('r-codetools', type=nolink) + depends_on('r-digest', type=nolink) + depends_on('r-stringr', type=nolink) + depends_on('r-xtable', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-png/package.py b/var/spack/repos/builtin/packages/r-png/package.py new file mode 100644 index 00000000000..d3a80a4e16b --- /dev/null +++ b/var/spack/repos/builtin/packages/r-png/package.py @@ -0,0 +1,45 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RPng(Package): + """This package provides an easy and simple way to read, write and display + bitmap images stored in the PNG format. It can read and write both files + and in-memory raw vectors.""" + + homepage = "http://www.rforge.net/png/" + url = "https://cran.r-project.org/src/contrib/png_0.1-7.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/png" + + version('0.1-7', '1ebc8b8aa5979b12c5ec2384b30d649f') + + extends('R') + + depends_on('libpng') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-raster/package.py b/var/spack/repos/builtin/packages/r-raster/package.py new file mode 100644 index 00000000000..39f4256703c --- /dev/null +++ b/var/spack/repos/builtin/packages/r-raster/package.py @@ -0,0 +1,46 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RRaster(Package): + """Reading, writing, manipulating, analyzing and modeling of gridded + spatial data. The package implements basic and high-level functions. + Processing of very large files is supported.""" + + homepage = "http://cran.r-project.org/package=raster" + url = "https://cran.r-project.org/src/contrib/raster_2.5-8.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/raster" + + version('2.5-8', '2a7db931c74d50516e82d04687c0a577') + + extends('R') + + depends_on('r-sp', type=nolink) + depends_on('r-rcpp', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-registry/package.py b/var/spack/repos/builtin/packages/r-registry/package.py new file mode 100644 index 00000000000..119a84718ad --- /dev/null +++ b/var/spack/repos/builtin/packages/r-registry/package.py @@ -0,0 +1,41 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RRegistry(Package): + """Provides a generic infrastructure for creating and using registries.""" + + homepage = "https://cran.r-project.org/web/packages/registry/index.html" + url = "https://cran.r-project.org/src/contrib/registry_0.3.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/registry" + + version('0.3', '85345b334ec81eb3da6edcbb27c5f421') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-rngtools/package.py b/var/spack/repos/builtin/packages/r-rngtools/package.py new file mode 100644 index 00000000000..2f33cd9f88c --- /dev/null +++ b/var/spack/repos/builtin/packages/r-rngtools/package.py @@ -0,0 +1,49 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RRngtools(Package): + """This package contains a set of functions for working with Random Number + Generators (RNGs). In particular, it defines a generic S4 framework for + getting/setting the current RNG, or RNG data that are embedded into objects + for reproducibility. 
Notably, convenient default methods greatly facilitate + the way current RNG settings can be changed.""" + + homepage = "https://renozao.github.io/rngtools" + url = "https://cran.r-project.org/src/contrib/rngtools_1.2.4.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/rngtools" + + version('1.2.4', '715967f8b3af2848a76593a7c718c1cd') + + extends('R') + + depends_on('r-pkgmaker', type=nolink) + depends_on('r-stringr', type=nolink) + depends_on('r-digest', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-rstudioapi/package.py b/var/spack/repos/builtin/packages/r-rstudioapi/package.py index 0ef2d9b9872..bf9b35fe5b9 100644 --- a/var/spack/repos/builtin/packages/r-rstudioapi/package.py +++ b/var/spack/repos/builtin/packages/r-rstudioapi/package.py @@ -33,6 +33,7 @@ class RRstudioapi(Package): url = "https://cran.r-project.org/src/contrib/rstudioapi_0.5.tar.gz" list_url = "https://cran.r-project.org/src/contrib/Archive/rstudioapi" + version('0.6', 'fdb13bf46aab02421557e713fceab66b') version('0.5', '6ce1191da74e7bcbf06b61339486b3ba') extends('R') diff --git a/var/spack/repos/builtin/packages/r-sp/package.py b/var/spack/repos/builtin/packages/r-sp/package.py new file mode 100644 index 00000000000..bb7589d4906 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-sp/package.py @@ -0,0 +1,46 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RSp(Package): + """Classes and methods for spatial data; the classes document where the + spatial location information resides, for 2D or 3D data. Utility functions + are provided, e.g. 
for plotting data as maps, spatial selection, as well as + methods for retrieving coordinates, for subsetting, print, summary, etc.""" + + homepage = "https://github.com/edzer/sp/" + url = "https://cran.r-project.org/src/contrib/sp_1.2-3.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/sp" + + version('1.2-3', 'f0e24d993dec128642ee66b6b47b10c1') + + extends('R') + + depends_on('r-lattice', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-threejs/package.py b/var/spack/repos/builtin/packages/r-threejs/package.py new file mode 100644 index 00000000000..89ecce8bfc7 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-threejs/package.py @@ -0,0 +1,47 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RThreejs(Package): + """Create interactive 3D scatter plots, network plots, and globes using the + 'three.js' visualization library ("http://threejs.org").""" + + homepage = "http://bwlewis.github.io/rthreejs" + url = "https://cran.r-project.org/src/contrib/threejs_0.2.2.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/threejs" + + version('0.2.2', '35c179b10813c5e4bd3e7827fae6627b') + + extends('R') + + depends_on('r-htmlwidgets', type=nolink) + depends_on('r-base64enc', type=nolink) + depends_on('r-matrix', type=nolink) + depends_on('r-jsonlite', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-visnetwork/package.py b/var/spack/repos/builtin/packages/r-visnetwork/package.py new file mode 100644 index 00000000000..90d55bbb2fb --- /dev/null +++ b/var/spack/repos/builtin/packages/r-visnetwork/package.py @@ -0,0 +1,47 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RVisnetwork(Package): + """Provides an R interface to the 'vis.js' JavaScript charting library. It + allows an interactive visualization of networks.""" + + homepage = "https://github.com/datastorm-open/visNetwork" + url = "https://cran.r-project.org/src/contrib/visNetwork_1.0.1.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/visNetwork" + + version('1.0.1', 'dfc9664a5165134d8dbdcd949ad73cf7') + + extends('R') + + depends_on('r-htmlwidgets', type=nolink) + depends_on('r-htmltools', type=nolink) + depends_on('r-jsonlite', type=nolink) + depends_on('r-magrittr', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-xts/package.py b/var/spack/repos/builtin/packages/r-xts/package.py new file mode 100644 index 00000000000..d8a4b62d27e --- /dev/null +++ b/var/spack/repos/builtin/packages/r-xts/package.py @@ -0,0 +1,46 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RXts(Package): + """Provide for uniform handling of R's different time-based data classes by + extending zoo, maximizing native format information preservation and + allowing for user level customization and extension, while simplifying + cross-class interoperability.""" + + homepage = "http://r-forge.r-project.org/projects/xts/" + url = "https://cran.r-project.org/src/contrib/xts_0.9-7.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/xts" + + version('0.9-7', 'a232e94aebfa654653a7d88a0503537b') + + extends('R') + + depends_on('r-zoo', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-yaml/package.py b/var/spack/repos/builtin/packages/r-yaml/package.py new file mode 100644 index 00000000000..b68f26e9b87 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-yaml/package.py @@ -0,0 +1,42 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RYaml(Package): + """This package implements the libyaml YAML 1.1 parser and emitter + (http://pyyaml.org/wiki/LibYAML) for R.""" + + homepage = "https://cran.r-project.org/web/packages/yaml/index.html" + url = "https://cran.r-project.org/src/contrib/yaml_2.1.13.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/yaml" + + version('2.1.13', 'f2203ea395adaff6bd09134666191d9a') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-zoo/package.py b/var/spack/repos/builtin/packages/r-zoo/package.py new file mode 100644 index 00000000000..d889cacbeee --- /dev/null +++ b/var/spack/repos/builtin/packages/r-zoo/package.py @@ -0,0 +1,47 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. 
+# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RZoo(Package): + """An S3 class with methods for totally ordered indexed observations. It is + particularly aimed at irregular time series of numeric vectors/matrices and + factors. zoo's key design goals are independence of a particular + index/date/time class and consistency with ts and base R by providing + methods to extend standard generics.""" + + homepage = "http://zoo.r-forge.r-project.org/" + url = "https://cran.r-project.org/src/contrib/zoo_1.7-13.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/zoo" + + version('1.7-13', '99521dfa4c668e692720cefcc5a1bf30') + + extends('R') + + depends_on('r-lattice', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) From 81cd458c265a0ce6266604d21817d44b386c676b Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sun, 24 Jul 2016 10:24:21 -0500 Subject: [PATCH 080/284] r-curl update and chaged remote archive This updates to the 1.0 version of r-curl. The remote archive directory name also changed so change that here as well. --- var/spack/repos/builtin/packages/r-curl/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/r-curl/package.py b/var/spack/repos/builtin/packages/r-curl/package.py index c6e8f22a944..24c0eadb2d5 100644 --- a/var/spack/repos/builtin/packages/r-curl/package.py +++ b/var/spack/repos/builtin/packages/r-curl/package.py @@ -38,8 +38,9 @@ class RCurl(Package): homepage = "https://github.com/jeroenooms/curl" url = "https://cran.r-project.org/src/contrib/curl_0.9.7.tar.gz" - list_url = "https://cran.r-project.org/src/contrib/Archive/RCurl" + list_url = "https://cran.r-project.org/src/contrib/Archive/curl" + version('1.0', '93d34926d6071e1fba7e728b482f0dd9') version('0.9.7', 'a101f7de948cb828fef571c730f39217') extends('R') From 23006d11951fb5c3b7e3b5631f5fcd075323fc3c Mon Sep 17 00:00:00 2001 From: alalazo Date: Mon, 25 Jul 2016 09:05:50 +0200 Subject: [PATCH 081/284] spec : removed dead code --- lib/spack/spack/spec.py | 44 +++++++++++++---------------------------- 1 file changed, 14 insertions(+), 30 deletions(-) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 8bdae0445e6..b554cb0d33e 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -95,32 +95,30 @@ specs to avoid ambiguity. 
Both are provided because ~ can cause shell expansion when it is the first character in an id typed on the command line. """ -import sys -import hashlib import base64 +import hashlib import imp +import sys from StringIO import StringIO from operator import attrgetter -import yaml -from yaml.error import MarkedYAMLError import llnl.util.tty as tty -from llnl.util.lang import * -from llnl.util.tty.color import * -from llnl.util.filesystem import join_path - import spack import spack.architecture -import spack.parse -import spack.error import spack.compilers as compilers - -from spack.version import * -from spack.util.string import * -from spack.util.prefix import Prefix -from spack.util.naming import mod_to_class -from spack.virtual import ProviderIndex +import spack.error +import spack.parse +import yaml +from llnl.util.filesystem import join_path +from llnl.util.lang import * +from llnl.util.tty.color import * from spack.build_environment import get_path_from_module, load_module +from spack.util.naming import mod_to_class +from spack.util.prefix import Prefix +from spack.util.string import * +from spack.version import * +from spack.virtual import ProviderIndex +from yaml.error import MarkedYAMLError # Valid pattern for an identifier in Spack identifier_re = r'\w[\w-]*' @@ -160,20 +158,6 @@ 'nolink': nolink, } - -def index_specs(specs): - """Take a list of specs and return a dict of lists. Dict is - keyed by spec name and lists include all specs with the - same name. - """ - spec_dict = {} - for spec in specs: - if spec.name not in spec_dict: - spec_dict[spec.name] = [] - spec_dict[spec.name].append(spec) - return spec_dict - - def colorize_spec(spec): """Returns a spec colorized according to the colors specified in color_formats.""" From 5cfaa557d38dfe65ec9d129651fdaeb4087c8fc3 Mon Sep 17 00:00:00 2001 From: alalazo Date: Mon, 25 Jul 2016 10:19:58 +0200 Subject: [PATCH 082/284] qa : flake8 issues --- lib/spack/spack/spec.py | 13 +++++----- lib/spack/spack/test/cmd/test_install.py | 33 ++++++++++++++---------- 2 files changed, 27 insertions(+), 19 deletions(-) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index b554cb0d33e..c6277fc8d2b 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -158,6 +158,7 @@ 'nolink': nolink, } + def colorize_spec(spec): """Returns a spec colorized according to the colors specified in color_formats.""" @@ -1020,7 +1021,7 @@ def from_yaml(stream): """ try: yfile = yaml.load(stream) - except MarkedYAMLError, e: + except MarkedYAMLError as e: raise SpackYAMLError("error parsing YAML spec:", str(e)) nodes = yfile['spec'] @@ -1334,7 +1335,7 @@ def flat_dependencies_with_deptype(self, **kwargs): return flat_deps - except UnsatisfiableSpecError, e: + except UnsatisfiableSpecError as e: # Here, the DAG contains two instances of the same package # with inconsistent constraints. Users cannot produce # inconsistent specs like this on the command line: the @@ -1369,7 +1370,7 @@ def _evaluate_dependency_conditions(self, name): dep = Spec(name) try: dep.constrain(dep_spec) - except UnsatisfiableSpecError, e: + except UnsatisfiableSpecError as e: e.message = ("Conflicting conditional dependencies on" "package %s for spec %s" % (self.name, self)) raise e @@ -1455,7 +1456,7 @@ def _merge_dependency(self, dep, deptypes, visited, spec_deps, try: changed |= spec_deps[dep.name].spec.constrain(dep) - except UnsatisfiableSpecError, e: + except UnsatisfiableSpecError as e: e.message = "Invalid spec: '%s'. 
" e.message += "Package %s requires %s %s, but spec asked for %s" e.message %= (spec_deps[dep.name].spec, dep.name, @@ -2389,7 +2390,7 @@ def do_parse(self): # errors now? specs.append(self.spec(None, True)) - except spack.parse.ParseError, e: + except spack.parse.ParseError as e: raise SpecParseError(e) # If the spec has an os or a target and no platform, give it @@ -2834,4 +2835,4 @@ class AmbiguousHashError(SpecError): def __init__(self, msg, *specs): super(AmbiguousHashError, self).__init__(msg) for spec in specs: - print ' ', spec.format('$.$@$%@+$+$=$#') + print(' ', spec.format('$.$@$%@+$+$=$#')) diff --git a/lib/spack/spack/test/cmd/test_install.py b/lib/spack/spack/test/cmd/test_install.py index 5e80776279a..39287d5d6df 100644 --- a/lib/spack/spack/test/cmd/test_install.py +++ b/lib/spack/spack/test/cmd/test_install.py @@ -24,15 +24,22 @@ ############################################################################## import StringIO import collections -from contextlib import contextmanager +import os +import unittest +import contextlib + +import spack +import spack.cmd FILE_REGISTRY = collections.defaultdict(StringIO.StringIO) + # Monkey-patch open to write module files to a StringIO instance -@contextmanager +@contextlib.contextmanager def mock_open(filename, mode): if not mode == 'wb': - raise RuntimeError('test.test_install : unexpected opening mode for monkey-patched open') + message = 'test.test_install : unexpected opening mode for mock_open' + raise RuntimeError(message) FILE_REGISTRY[filename] = StringIO.StringIO() @@ -43,18 +50,14 @@ def mock_open(filename, mode): FILE_REGISTRY[filename] = handle.getvalue() handle.close() -import os -import unittest -import spack -import spack.cmd - - -# The use of __import__ is necessary to maintain a name with hyphen (which cannot be an identifier in python) +# The use of __import__ is necessary to maintain a name with hyphen (which +# cannot be an identifier in python) test_install = __import__("spack.cmd.test-install", fromlist=['test_install']) class MockSpec(object): + def __init__(self, name, version, hashStr=None): self._dependencies = {} self.name = name @@ -96,6 +99,7 @@ def short_spec(self): class MockPackage(object): + def __init__(self, spec, buildLogPath): self.name = spec.name self.spec = spec @@ -107,6 +111,7 @@ def do_install(self, *args, **kwargs): class MockPackageDb(object): + def __init__(self, init=None): self.specToPkg = {} if init: @@ -127,6 +132,7 @@ def mock_fetch_log(path): class MockArgs(object): + def __init__(self, package): self.package = package self.jobs = None @@ -162,7 +168,7 @@ def monkey_parse_specs(x, concretize): test_install.open = mock_open # Clean FILE_REGISTRY - FILE_REGISTRY = collections.defaultdict(StringIO.StringIO) + FILE_REGISTRY.clear() pkgX.installed = False pkgY.installed = False @@ -188,7 +194,7 @@ def tearDown(self): spack.repo = self.saved_db def test_installing_both(self): - test_install.test_install(None, MockArgs('X') ) + test_install.test_install(None, MockArgs('X')) self.assertEqual(len(FILE_REGISTRY), 1) for _, content in FILE_REGISTRY.items(): self.assertTrue('tests="2"' in content) @@ -204,4 +210,5 @@ def test_dependency_already_installed(self): self.assertTrue('tests="2"' in content) self.assertTrue('failures="0"' in content) self.assertTrue('errors="0"' in content) - self.assertEqual(sum('skipped' in line for line in content.split('\n')), 2) + self.assertEqual( + sum('skipped' in line for line in content.split('\n')), 2) From d684b17c067c7e2024e240da9e1889078a7b1a4d Mon Sep 
17 00:00:00 2001 From: George Hartzell Date: Mon, 25 Jul 2016 08:45:44 -0700 Subject: [PATCH 083/284] Fix doc for install_tree (cut/paste error) It looks like the docs for copy_tree were cut/paste from copy and still referred to installing a "file". This fixes that. --- lib/spack/llnl/util/filesystem.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 6e4cd338fe5..553ec1e4b5a 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -189,7 +189,7 @@ def install(src, dest): def install_tree(src, dest, **kwargs): - """Manually install a file to a particular location.""" + """Manually install a directory tree to a particular location.""" tty.debug("Installing %s to %s" % (src, dest)) shutil.copytree(src, dest, **kwargs) From 3126ed5f212e12464d349118a56b2a0ac0bf9308 Mon Sep 17 00:00:00 2001 From: alalazo Date: Mon, 25 Jul 2016 18:10:27 +0200 Subject: [PATCH 084/284] modules : permit token expansion in configuration files --- lib/spack/spack/environment.py | 9 ++++++++- lib/spack/spack/modules.py | 25 +++++++++++++++++++++---- lib/spack/spack/test/modules.py | 7 ++++++- 3 files changed, 35 insertions(+), 6 deletions(-) diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py index 30c6228ca4b..9cb3f2575de 100644 --- a/lib/spack/spack/environment.py +++ b/lib/spack/spack/environment.py @@ -37,6 +37,9 @@ def __init__(self, name, **kwargs): self.args = {'name': name} self.args.update(kwargs) + def update_args(self, **kwargs): + self.__dict__.update(kwargs) + self.args.update(kwargs) class NameValueModifier(object): @@ -44,7 +47,11 @@ def __init__(self, name, value, **kwargs): self.name = name self.value = value self.separator = kwargs.get('separator', ':') - self.args = {'name': name, 'value': value, 'delim': self.separator} + self.args = {'name': name, 'value': value, 'separator': self.separator} + self.args.update(kwargs) + + def update_args(self, **kwargs): + self.__dict__.update(kwargs) self.args.update(kwargs) diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py index db8b20ae42e..7d35839570c 100644 --- a/lib/spack/spack/modules.py +++ b/lib/spack/spack/modules.py @@ -272,13 +272,25 @@ def naming_scheme(self): @property def tokens(self): + """Tokens that can be substituted in environment variable values + and naming schemes + """ tokens = { 'name': self.spec.name, 'version': self.spec.version, - 'compiler': self.spec.compiler + 'compiler': self.spec.compiler, + 'prefix': self.spec.package.prefix } return tokens + @property + def upper_tokens(self): + """Tokens that can be substituted in environment variable names""" + upper_tokens = { + 'name': self.spec.name.upper() + } + return upper_tokens + @property def use_name(self): """ @@ -438,6 +450,11 @@ def prerequisite(self, spec): def process_environment_command(self, env): for command in env: + # Token expansion from configuration file + name = command.args.get('name', '').format(**self.upper_tokens) + value = str(command.args.get('value', '')).format(**self.tokens) + command.update_args(name=name, value=value) + # Format the line int the module file try: yield self.environment_modifications_formats[type( command)].format(**command.args) @@ -511,9 +528,9 @@ class TclModule(EnvModule): name = 'tcl' path = join_path(spack.share_path, "modules") environment_modifications_formats = { - PrependPath: 'prepend-path --delim "{delim}" {name} \"{value}\"\n', - AppendPath: 'append-path --delim 
"{delim}" {name} \"{value}\"\n', - RemovePath: 'remove-path --delim "{delim}" {name} \"{value}\"\n', + PrependPath: 'prepend-path --delim "{separator}" {name} \"{value}\"\n', + AppendPath: 'append-path --delim "{separator}" {name} \"{value}\"\n', + RemovePath: 'remove-path --delim "{separator}" {name} \"{value}\"\n', SetEnv: 'setenv {name} \"{value}\"\n', UnsetEnv: 'unsetenv {name}\n' } diff --git a/lib/spack/spack/test/modules.py b/lib/spack/spack/test/modules.py index 135cd028e3d..0d33627be34 100644 --- a/lib/spack/spack/test/modules.py +++ b/lib/spack/spack/test/modules.py @@ -89,7 +89,10 @@ def mock_open(filename, mode): 'enable': ['tcl'], 'tcl': { 'all': { - 'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']} + 'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']}, + 'environment': { + 'set': {'{name}_ROOT': '{prefix}'} + } }, 'platform=test target=x86_64': { 'environment': { @@ -248,6 +251,7 @@ def test_alter_environment(self): self.assertEqual( len([x for x in content if 'setenv FOO "foo"' in x]), 1) self.assertEqual(len([x for x in content if 'unsetenv BAR' in x]), 1) + self.assertEqual(len([x for x in content if 'setenv MPILEAKS_ROOT' in x]), 1) spec = spack.spec.Spec('libdwarf %clang platform=test target=x86_32') content = self.get_modulefile_content(spec) @@ -262,6 +266,7 @@ def test_alter_environment(self): len([x for x in content if 'is-loaded foo/bar' in x]), 1) self.assertEqual( len([x for x in content if 'module load foo/bar' in x]), 1) + self.assertEqual(len([x for x in content if 'setenv LIBDWARF_ROOT' in x]), 1) def test_blacklist(self): spack.modules.CONFIGURATION = configuration_blacklist From c6a05f4a7d316a1b4590654fd4e64475cb54f194 Mon Sep 17 00:00:00 2001 From: alalazo Date: Mon, 25 Jul 2016 18:22:09 +0200 Subject: [PATCH 085/284] qa : flake8 issues fixes #1109 --- lib/spack/spack/environment.py | 1 + lib/spack/spack/test/modules.py | 6 ++++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py index 9cb3f2575de..41136ab2eb5 100644 --- a/lib/spack/spack/environment.py +++ b/lib/spack/spack/environment.py @@ -41,6 +41,7 @@ def update_args(self, **kwargs): self.__dict__.update(kwargs) self.args.update(kwargs) + class NameValueModifier(object): def __init__(self, name, value, **kwargs): diff --git a/lib/spack/spack/test/modules.py b/lib/spack/spack/test/modules.py index 0d33627be34..5e280d8e438 100644 --- a/lib/spack/spack/test/modules.py +++ b/lib/spack/spack/test/modules.py @@ -251,7 +251,8 @@ def test_alter_environment(self): self.assertEqual( len([x for x in content if 'setenv FOO "foo"' in x]), 1) self.assertEqual(len([x for x in content if 'unsetenv BAR' in x]), 1) - self.assertEqual(len([x for x in content if 'setenv MPILEAKS_ROOT' in x]), 1) + self.assertEqual( + len([x for x in content if 'setenv MPILEAKS_ROOT' in x]), 1) spec = spack.spec.Spec('libdwarf %clang platform=test target=x86_32') content = self.get_modulefile_content(spec) @@ -266,7 +267,8 @@ def test_alter_environment(self): len([x for x in content if 'is-loaded foo/bar' in x]), 1) self.assertEqual( len([x for x in content if 'module load foo/bar' in x]), 1) - self.assertEqual(len([x for x in content if 'setenv LIBDWARF_ROOT' in x]), 1) + self.assertEqual( + len([x for x in content if 'setenv LIBDWARF_ROOT' in x]), 1) def test_blacklist(self): spack.modules.CONFIGURATION = configuration_blacklist From eb865b0df20e47c365db5d7ba26a63cfd39115c6 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 25 Jul 2016 
11:23:09 -0500 Subject: [PATCH 086/284] Add py-setuptools as dependency With the addition of dependency types and with `py-setuptools` set as type='build' there are more packages that need to have `py-setuptools` added as a dependency. This PR adds that dependency for the following packages: - py-h5py - py-networkx - py-pytables - py-scikit-image --- var/spack/repos/builtin/packages/py-h5py/package.py | 1 + var/spack/repos/builtin/packages/py-networkx/package.py | 1 + var/spack/repos/builtin/packages/py-pytables/package.py | 1 + var/spack/repos/builtin/packages/py-scikit-image/package.py | 1 + 4 files changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-h5py/package.py b/var/spack/repos/builtin/packages/py-h5py/package.py index f96cb9b4cd1..90a67c51bd2 100644 --- a/var/spack/repos/builtin/packages/py-h5py/package.py +++ b/var/spack/repos/builtin/packages/py-h5py/package.py @@ -43,6 +43,7 @@ class PyH5py(Package): # Build dependencies depends_on('py-cython@0.19:', type='build') depends_on('pkg-config', type='build') + depends_on('py-setuptools', type='build') depends_on('hdf5@1.8.4:') depends_on('hdf5+mpi', when='+mpi') depends_on('mpi', when='+mpi') diff --git a/var/spack/repos/builtin/packages/py-networkx/package.py b/var/spack/repos/builtin/packages/py-networkx/package.py index d5457176284..c7239486107 100644 --- a/var/spack/repos/builtin/packages/py-networkx/package.py +++ b/var/spack/repos/builtin/packages/py-networkx/package.py @@ -34,6 +34,7 @@ class PyNetworkx(Package): extends('python') depends_on('py-decorator', type=nolink) + depends_on('py-setuptools', type='build') def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pytables/package.py b/var/spack/repos/builtin/packages/py-pytables/package.py index 58ed067b218..b05af01c94e 100644 --- a/var/spack/repos/builtin/packages/py-pytables/package.py +++ b/var/spack/repos/builtin/packages/py-pytables/package.py @@ -37,6 +37,7 @@ class PyPytables(Package): depends_on('py-numpy', type=nolink) depends_on('py-numexpr', type=nolink) depends_on('py-cython', type=nolink) + depends_on('py-setuptools', type='build') def install(self, spec, prefix): env["HDF5_DIR"] = spec['hdf5'].prefix diff --git a/var/spack/repos/builtin/packages/py-scikit-image/package.py b/var/spack/repos/builtin/packages/py-scikit-image/package.py index 26c286e4be0..fbeb5c95ca8 100644 --- a/var/spack/repos/builtin/packages/py-scikit-image/package.py +++ b/var/spack/repos/builtin/packages/py-scikit-image/package.py @@ -42,6 +42,7 @@ class PyScikitImage(Package): depends_on('py-six', type=nolink) depends_on('py-scipy', type=nolink) depends_on('py-matplotlib', type=nolink) + depends_on('py-setuptools', type='build') def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) From a2d4dcc6366f7330a50d144a62646c5d6b016b56 Mon Sep 17 00:00:00 2001 From: Samuel Knight Date: Thu, 21 Jul 2016 17:16:07 +0000 Subject: [PATCH 087/284] Added libhio package --- .../repos/builtin/packages/libhio/package.py | 45 +++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 var/spack/repos/builtin/packages/libhio/package.py diff --git a/var/spack/repos/builtin/packages/libhio/package.py b/var/spack/repos/builtin/packages/libhio/package.py new file mode 100644 index 00000000000..17bd86d310f --- /dev/null +++ b/var/spack/repos/builtin/packages/libhio/package.py @@ -0,0 +1,45 @@ 
+############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Libhio(Package): + """ + A library for writing to hierarchical data store systems. + """ + + homepage = "https://github.com/hpc/libhio/" + url = "https://github.com/hpc/libhio/releases/download/hio.1.3.0.1/libhio-1.3.0.1.tar.gz" + + version('1.3.0.1', 'c073541de8dd70aeb8878bd00d6d877f') + + depends_on("libjson-c") + depends_on("bzip2") + depends_on("pkg-config", type="build") + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix) + make() + make("install") From dba384108b47eaf67c20e7b1110e0b29c1fe36c6 Mon Sep 17 00:00:00 2001 From: Greg Lee Date: Mon, 25 Jul 2016 14:01:49 -0700 Subject: [PATCH 088/284] intel parallel studio setup mkl environment --- .../packages/intel-parallel-studio/package.py | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py index fb98b2473c5..00a30cafaf8 100644 --- a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py +++ b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py @@ -50,6 +50,11 @@ class IntelParallelStudio(IntelInstaller): provides('daal', when='+daal') provides('ipp', when='+ipp') + # virtual dependency + provides('blas', when='+mkl') + provides('lapack', when='+mkl') + # TODO: MKL also provides implementation of Scalapack. + def check_variants(self, spec): error_message = '\t{variant} can not be turned off if "+all" is set' @@ -159,6 +164,24 @@ def install(self, spec, prefix): os.symlink(os.path.join(self.prefix.man, "common", "man1"), os.path.join(self.prefix.man, "man1")) + def setup_dependent_package(self, module, dspec): + # For now use Single Dynamic Library: + # To set the threading layer at run time, use the + # mkl_set_threading_layer function or set MKL_THREADING_LAYER + # variable to one of the following values: INTEL, SEQUENTIAL, PGI. + # To set interface layer at run time, use the mkl_set_interface_layer + # function or set the MKL_INTERFACE_LAYER variable to LP64 or ILP64. + + # Otherwise one would need to specify several libraries + # (e.g. mkl_intel_lp64;mkl_sequential;mkl_core), which reflect + # different interface and threading layers. 
+ + name = 'libmkl_rt.%s' % dso_suffix + libdir = find_library_path(name, self.prefix.lib64, self.prefix.lib) + + self.spec.blas_shared_lib = join_path(libdir, name) + self.spec.lapack_shared_lib = self.spec.blas_shared_lib + def setup_environment(self, spack_env, run_env): # TODO: Determine variables needed for the professional edition. @@ -227,6 +250,8 @@ def setup_environment(self, spack_env, run_env): run_env.set('I_MPI_ROOT', join_path(self.prefix, 'impi')) if self.spec.satisfies('+all') or self.spec.satisfies('+mkl'): + spack_env.set('MKLROOT', self.prefix) + run_env.prepend_path('LD_LIBRARY_PATH', join_path(self.prefix, 'mkl', 'lib', 'intel64')) From e4525e57bb770997ec58f90b216749528b3685dd Mon Sep 17 00:00:00 2001 From: Greg Lee Date: Mon, 25 Jul 2016 14:02:15 -0700 Subject: [PATCH 089/284] intel parallel studio itac fixes --- .../builtin/packages/intel-parallel-studio/package.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py index 00a30cafaf8..9ef1724ae00 100644 --- a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py +++ b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py @@ -80,7 +80,7 @@ def install(self, spec, prefix): regex = '(comp|openmp|intel-tbb|icc|ifort|psxe|icsxe-pset)' base_components = \ filter_pick(all_components, re.compile(regex).search) - regex = '(icsxe|imb|mpi|itac|intel-tc|clck)' + regex = '(icsxe|imb|mpi|itac|intel-ta|intel-tc|clck)' mpi_components = \ filter_pick(all_components, re.compile(regex).search) mkl_components = \ @@ -139,6 +139,12 @@ def install(self, spec, prefix): os.symlink(self.global_license_file, os.path.join(self.prefix, "itac", ifile, "license.lic")) + if os.path.isdir(os.path.join(self.prefix, "itac", + ifile, "intel64")): + os.symlink(self.global_license_file, + os.path.join(self.prefix, "itac", + ifile, "intel64", + "license.lic")) if spec.satisfies('~newdtags'): wrappers = ["mpif77", "mpif77", "mpif90", "mpif90", "mpigcc", "mpigcc", "mpigxx", "mpigxx", From 9c7b98dcc80d3bb3b5a09d06ec81da12b8761fb2 Mon Sep 17 00:00:00 2001 From: alalazo Date: Tue, 26 Jul 2016 09:03:17 +0200 Subject: [PATCH 090/284] modules : handle dashes in environment variables --- lib/spack/spack/modules.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py index 7d35839570c..8701a31c496 100644 --- a/lib/spack/spack/modules.py +++ b/lib/spack/spack/modules.py @@ -287,7 +287,7 @@ def tokens(self): def upper_tokens(self): """Tokens that can be substituted in environment variable names""" upper_tokens = { - 'name': self.spec.name.upper() + 'name': self.spec.name.replace('-', '_').upper() } return upper_tokens From 4e6fdd12e26273b15c081400b60791e9d1bd6472 Mon Sep 17 00:00:00 2001 From: Matt Belhorn Date: Tue, 26 Jul 2016 17:09:25 -0400 Subject: [PATCH 091/284] Adds targets config file --- lib/spack/spack/config.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index e2e7dbc0ee4..31f0eb3a560 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -266,6 +266,19 @@ ], }, },},},},},}, + 'targets': { + '$schema': 'http://json-schema.org/schema#', + 'title': 'Spack target configuration file schema', + 'type': 'object', + 'additionalProperties': False, + 'patternProperties': { + r'targets:?': { + 'type': 'object', + 'default': {}, + 
'additionalProperties': False, + 'patternProperties': { + r'\w[\w-]*': { # target name + 'type': 'string' ,},},},},}, 'modules': { '$schema': 'http://json-schema.org/schema#', 'title': 'Spack module file configuration file schema', From b1e6c58ff2de8ff5d729dd9a10e28bec41aebeeb Mon Sep 17 00:00:00 2001 From: Matt Belhorn Date: Tue, 26 Jul 2016 17:11:01 -0400 Subject: [PATCH 092/284] Adds __str__ method to CNL operating system class. --- lib/spack/spack/operating_systems/cnl.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/spack/spack/operating_systems/cnl.py b/lib/spack/spack/operating_systems/cnl.py index c160a60be82..5ee8599e1de 100644 --- a/lib/spack/spack/operating_systems/cnl.py +++ b/lib/spack/spack/operating_systems/cnl.py @@ -19,6 +19,8 @@ def __init__(self): version = '10' super(Cnl, self).__init__(name, version) + def __str__(self): + return self.name def find_compilers(self, *paths): types = spack.compilers.all_compiler_types() From f855cf6babf7a1a02e9b07a6935059a79c30eddf Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Mon, 25 Jul 2016 14:41:52 -0400 Subject: [PATCH 093/284] Add package for tree Add a package for [tree](http://mama.indstate.edu/users/ice/tree/). It has a Makefile that hardcodes a prefix and some CFLAGS. Used filter_file to: - set the make variable *prefix* to `prefix`; and - comment out their CFLAGS, just use ours.... It installs, runs on CentOS7, and uninstalls cleanly. --- .../repos/builtin/packages/tree/package.py | 46 +++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100644 var/spack/repos/builtin/packages/tree/package.py diff --git a/var/spack/repos/builtin/packages/tree/package.py b/var/spack/repos/builtin/packages/tree/package.py new file mode 100644 index 00000000000..8e0e176c4c8 --- /dev/null +++ b/var/spack/repos/builtin/packages/tree/package.py @@ -0,0 +1,46 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Tree(Package): + """Tree is a recursive directory listing command that produces a depth + indented listing of files, which is colorized ala dircolors if + the LS_COLORS environment variable is set and output is to + tty. 
Tree has been ported and reported to work under the + following operating systems: Linux, FreeBSD, OS X, Solaris, + HP/UX, Cygwin, HP Nonstop and OS/2.""" + + homepage = "http://mama.indstate.edu/users/ice/tree/" + url = "http://mama.indstate.edu/users/ice/tree/src/tree-1.7.0.tgz" + + version('1.7.0', 'abe3e03e469c542d8e157cdd93f4d8a6') + + def install(self, spec, prefix): + filter_file(r'^prefix =.*', 'prefix = %s' % prefix, 'Makefile') + filter_file(r'^CFLAGS', '# use spack settings instead... CFLAGS', + 'Makefile') + make() + make('install') From 3040381f03d962513d49406a6c45e12a952f3bc6 Mon Sep 17 00:00:00 2001 From: Matt Belhorn Date: Tue, 26 Jul 2016 17:11:32 -0400 Subject: [PATCH 094/284] Front-end unification for Cray systems. A platform to generically cover all Cray systems is introduced to avoid having specific platforms for each of XK (XE, XT), XC, and future systems using CrayPE and CNL. The platform searches for 'front_end' and 'back_end' targets, in order, from: * Environment variables 'SPACK_FRONT_END' and 'SPACK_BACK_END' * A spack configuration file 'targets.yaml' * Parsing `/etc/bash.bashrc.local` for first loaded CrayPE CPU target. If a back_end target is not found through one of these methods, an exception is raised. Otherwise, the back_end is set as the default target. The shell init script search is based on recommendations in Crayports case #144359. No automagic way of determining front_end targets has been found (to date) so if a front_end is not specified through configuration, it is ignored which should leave a spack instance in a consistant state. --- lib/spack/spack/platforms/cray.py | 87 +++++++++++++++++++++++++++++++ 1 file changed, 87 insertions(+) create mode 100644 lib/spack/spack/platforms/cray.py diff --git a/lib/spack/spack/platforms/cray.py b/lib/spack/spack/platforms/cray.py new file mode 100644 index 00000000000..279009be5f7 --- /dev/null +++ b/lib/spack/spack/platforms/cray.py @@ -0,0 +1,87 @@ +import os +import re +import platform as py_platform +#from subprocess import check_output +import spack.config +from spack.util.executable import which +from spack.architecture import Platform, Target, NoPlatformError +from spack.operating_systems.linux_distro import LinuxDistro +from spack.operating_systems.cnl import Cnl + + +# Craype- module prefixes that are not valid CPU targets. +NON_TARGETS = ('hugepages', 'network', 'target', 'accel', 'xtpe') + + +def _target_from_init(name): + matches = [] + if name != 'front_end': + pattern = 'craype-(?!{0})(\S*)'.format('|'.join(NON_TARGETS)) + with open('/etc/bash.bashrc.local', 'r') as conf: + for line in conf: + if re.search('^[^\#]*module[\s]*(?:add|load)', line): + matches.extend(re.findall(pattern, line)) + return matches[0] if matches else None + + +class Cray(Platform): + priority = 10 + + def __init__(self): + ''' Create a Cray system platform. + + Target names should use craype target names but not include the + 'craype-' prefix. 
Uses first viable target from: + self + envars [SPACK_FRONT_END, SPACK_BACK_END] + configuration file "targets.yaml" with keys 'front_end', 'back_end' + scanning /etc/bash/bashrc.local for back_end only + ''' + super(Cray, self).__init__('cray') + + # Get targets from config or make best guess from environment: + conf = spack.config.get_config('targets') + for name in ('front_end', 'back_end'): + _target = getattr(self, name, None) + if _target is None: + _target = os.environ.get('SPACK_' + name.upper()) + if _target is None: + _target = conf.get(name) + if _target is None: + _target = _target_from_init(name) + setattr(self, name, _target) + + if _target is not None: + self.add_target(name, Target(_target, 'craype-' + _target)) + self.add_target(_target, Target(_target, 'craype-' + _target)) + + if self.back_end is not None: + self.default = self.back_end + self.add_target('default', Target(self.default, 'craype-' + self.default)) + else: + raise NoPlatformError() + + front_distro = LinuxDistro() + back_distro = Cnl() + + self.default_os = str(back_distro) + self.back_os = self.default_os + self.front_os = str(front_distro) + + self.add_operating_system(self.back_os, back_distro) + self.add_operating_system(self.front_os, front_distro) + + @classmethod + def setup_platform_environment(self, pkg, env): + """ Change the linker to default dynamic to be more + similar to linux/standard linker behavior + """ + env.set('CRAYPE_LINK_TYPE', 'dynamic') + cray_wrapper_names = join_path(spack.build_env_path, 'cray') + if os.path.isdir(cray_wrapper_names): + env.prepend_path('PATH', cray_wrapper_names) + env.prepend_path('SPACK_ENV_PATHS', cray_wrapper_names) + + @classmethod + def detect(self): + return os.environ.get('CRAYPE_VERSION') is not None From eab56b71bee2cc4ef75fdd68a2398deb41203beb Mon Sep 17 00:00:00 2001 From: robertdfrench Date: Wed, 27 Jul 2016 13:11:24 -0400 Subject: [PATCH 095/284] PEP8 Goodness --- lib/spack/spack/operating_systems/cnl.py | 22 +++++++++++++--------- lib/spack/spack/platforms/cray.py | 12 +++++------- 2 files changed, 18 insertions(+), 16 deletions(-) diff --git a/lib/spack/spack/operating_systems/cnl.py b/lib/spack/spack/operating_systems/cnl.py index 5ee8599e1de..dbd27758617 100644 --- a/lib/spack/spack/operating_systems/cnl.py +++ b/lib/spack/spack/operating_systems/cnl.py @@ -7,11 +7,12 @@ from spack.util.multiproc import parmap import spack.compilers + class Cnl(OperatingSystem): """ Compute Node Linux (CNL) is the operating system used for the Cray XC series super computers. It is a very stripped down version of GNU/Linux. Any compilers found through this operating system will be used with - modules. If updated, user must make sure that version and name are + modules. If updated, user must make sure that version and name are updated to indicate that OS has been upgraded (or downgraded) """ def __init__(self): @@ -24,14 +25,14 @@ def __str__(self): def find_compilers(self, *paths): types = spack.compilers.all_compiler_types() - compiler_lists = parmap(lambda cmp_cls: self.find_compiler(cmp_cls, *paths), types) + compiler_lists = parmap( + lambda cmp_cls: self.find_compiler(cmp_cls, *paths), types) # ensure all the version calls we made are cached in the parent # process, as well. This speeds up Spack a lot. 
- clist = reduce(lambda x,y: x+y, compiler_lists) + clist = reduce(lambda x, y: x + y, compiler_lists) return clist - def find_compiler(self, cmp_cls, *paths): compilers = [] if cmp_cls.PrgEnv: @@ -47,13 +48,16 @@ def find_compiler(self, cmp_cls, *paths): if paths: module_paths = ':' + ':'.join(p for p in paths) os.environ['MODULEPATH'] = module_paths - - output = modulecmd('avail', cmp_cls.PrgEnv_compiler, output=str, error=str) - matches = re.findall(r'(%s)/([\d\.]+[\d])' % cmp_cls.PrgEnv_compiler, output) + + output = modulecmd( + 'avail', cmp_cls.PrgEnv_compiler, output=str, error=str) + matches = re.findall( + r'(%s)/([\d\.]+[\d])' % cmp_cls.PrgEnv_compiler, output) for name, version in matches: v = version - comp = cmp_cls(spack.spec.CompilerSpec(name + '@' + v), self, - ['cc', 'CC', 'ftn'], [cmp_cls.PrgEnv, name +'/' + v]) + comp = cmp_cls( + spack.spec.CompilerSpec(name + '@' + v), self, + ['cc', 'CC', 'ftn'], [cmp_cls.PrgEnv, name + '/' + v]) compilers.append(comp) diff --git a/lib/spack/spack/platforms/cray.py b/lib/spack/spack/platforms/cray.py index 279009be5f7..79a53d887cc 100644 --- a/lib/spack/spack/platforms/cray.py +++ b/lib/spack/spack/platforms/cray.py @@ -1,9 +1,6 @@ import os import re -import platform as py_platform -#from subprocess import check_output import spack.config -from spack.util.executable import which from spack.architecture import Platform, Target, NoPlatformError from spack.operating_systems.linux_distro import LinuxDistro from spack.operating_systems.cnl import Cnl @@ -20,7 +17,7 @@ def _target_from_init(name): with open('/etc/bash.bashrc.local', 'r') as conf: for line in conf: if re.search('^[^\#]*module[\s]*(?:add|load)', line): - matches.extend(re.findall(pattern, line)) + matches.extend(re.findall(pattern, line)) return matches[0] if matches else None @@ -29,7 +26,7 @@ class Cray(Platform): def __init__(self): ''' Create a Cray system platform. - + Target names should use craype target names but not include the 'craype-' prefix. Uses first viable target from: self @@ -50,14 +47,15 @@ def __init__(self): if _target is None: _target = _target_from_init(name) setattr(self, name, _target) - + if _target is not None: self.add_target(name, Target(_target, 'craype-' + _target)) self.add_target(_target, Target(_target, 'craype-' + _target)) if self.back_end is not None: self.default = self.back_end - self.add_target('default', Target(self.default, 'craype-' + self.default)) + self.add_target( + 'default', Target(self.default, 'craype-' + self.default)) else: raise NoPlatformError() From 572f1cd42710fea4b176619ca69d66c731d64f34 Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Fri, 29 Jul 2016 11:36:37 +0200 Subject: [PATCH 096/284] mkl: symlink libs to prefix.lib --- var/spack/repos/builtin/packages/mkl/package.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/var/spack/repos/builtin/packages/mkl/package.py b/var/spack/repos/builtin/packages/mkl/package.py index 6ea64f53133..b06e2fb3840 100644 --- a/var/spack/repos/builtin/packages/mkl/package.py +++ b/var/spack/repos/builtin/packages/mkl/package.py @@ -38,6 +38,13 @@ def install(self, spec, prefix): for f in os.listdir(mkl_dir): os.symlink(os.path.join(mkl_dir, f), os.path.join(self.prefix, f)) + # Unfortunately MKL libs are natively distrubted in prefix/lib/intel64. 
+ # To make MKL play nice with Spack, symlink all files to prefix/lib: + mkl_lib_dir = os.path.join(prefix, "lib","intel64") + for f in os.listdir(mkl_lib_dir): + os.symlink(os.path.join(mkl_lib_dir, f), os.path.join(self.prefix, "lib", f)) + + def setup_dependent_package(self, module, dspec): # For now use Single Dynamic Library: # To set the threading layer at run time, use the @@ -53,6 +60,7 @@ def setup_dependent_package(self, module, dspec): name = 'libmkl_rt.%s' % dso_suffix libdir = find_library_path(name, self.prefix.lib64, self.prefix.lib) + # Now set blas/lapack libs: self.spec.blas_shared_lib = join_path(libdir, name) self.spec.lapack_shared_lib = self.spec.blas_shared_lib From 8ee4df8101e1b068f2789d9b6818fe9ead99d658 Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Fri, 29 Jul 2016 11:37:19 +0200 Subject: [PATCH 097/284] hypre: minor cleanup related to blas/lapack --- var/spack/repos/builtin/packages/hypre/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/hypre/package.py b/var/spack/repos/builtin/packages/hypre/package.py index 65fef575599..b339e068bfe 100644 --- a/var/spack/repos/builtin/packages/hypre/package.py +++ b/var/spack/repos/builtin/packages/hypre/package.py @@ -62,10 +62,10 @@ def install(self, spec, prefix): '--prefix=%s' % prefix, '--with-lapack-libs=%s' % to_lib_name( spec['lapack'].lapack_shared_lib), - '--with-lapack-lib-dirs=%s/lib' % spec['lapack'].prefix, + '--with-lapack-lib-dirs=%s' % spec['lapack'].prefix.lib, '--with-blas-libs=%s' % to_lib_name( spec['blas'].blas_shared_lib), - '--with-blas-lib-dirs=%s/lib' % spec['blas'].prefix + '--with-blas-lib-dirs=%s' % spec['blas'].prefix.lib ] if '+shared' in self.spec: From 943896e237bc69a08ed5275542890e9284278f97 Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Fri, 29 Jul 2016 13:09:34 +0200 Subject: [PATCH 098/284] trilinos: fix lapack lib dir --- var/spack/repos/builtin/packages/trilinos/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py index 77589bb8f90..469fd1091a8 100644 --- a/var/spack/repos/builtin/packages/trilinos/package.py +++ b/var/spack/repos/builtin/packages/trilinos/package.py @@ -138,7 +138,7 @@ def install(self, spec, prefix): '-DTPL_ENABLE_LAPACK=ON', '-DLAPACK_LIBRARY_NAMES=%s' % to_lib_name( spec['lapack'].lapack_shared_lib), - '-DLAPACK_LIBRARY_DIRS=%s' % spec['lapack'].prefix, + '-DLAPACK_LIBRARY_DIRS=%s' % spec['lapack'].prefix.lib, '-DTrilinos_ENABLE_EXPLICIT_INSTANTIATION:BOOL=ON', '-DTrilinos_ENABLE_CXX11:BOOL=ON', '-DTPL_ENABLE_Netcdf:BOOL=ON', From b6ce0e6f0e750add5fdc9751f0ae581278a9f562 Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Fri, 29 Jul 2016 13:11:12 +0200 Subject: [PATCH 099/284] trilinos: make sure hdf5 is picked up from Spack --- .../repos/builtin/packages/trilinos/package.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py index 469fd1091a8..4d1d27e74ac 100644 --- a/var/spack/repos/builtin/packages/trilinos/package.py +++ b/var/spack/repos/builtin/packages/trilinos/package.py @@ -143,11 +143,18 @@ def install(self, spec, prefix): '-DTrilinos_ENABLE_CXX11:BOOL=ON', '-DTPL_ENABLE_Netcdf:BOOL=ON', '-DTPL_ENABLE_HYPRE:BOOL=%s' % ( - 'ON' if '+hypre' in spec else 'OFF'), - '-DTPL_ENABLE_HDF5:BOOL=%s' % ( - 'ON' if '+hdf5' in spec 
else 'OFF'), + 'ON' if '+hypre' in spec else 'OFF') ]) + if '+hdf5' in spec: + options.extend([ + '-DTPL_ENABLE_HDF5:BOOL=ON', + '-DHDF5_INCLUDE_DIRS:PATH=%s' % spec['hdf5'].prefix.include, + '-DHDF5_LIBRARY_DIRS:PATH=%s' % spec['hdf5'].prefix.lib + ]) + else: + options.extend(['-DTPL_ENABLE_HDF5:BOOL=OFF']) + if '+boost' in spec: options.extend([ '-DTPL_ENABLE_Boost:BOOL=ON', From 5eebb2defa08cd897b236545c0fdcca2eca95688 Mon Sep 17 00:00:00 2001 From: Paul Hopkins Date: Fri, 22 Jul 2016 09:11:26 +0100 Subject: [PATCH 100/284] Use space characters to separate preferred variants from package name and each other --- lib/spack/spack/preferred_packages.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/preferred_packages.py b/lib/spack/spack/preferred_packages.py index 5f18e212b6c..f079c1ef8b0 100644 --- a/lib/spack/spack/preferred_packages.py +++ b/lib/spack/spack/preferred_packages.py @@ -162,8 +162,8 @@ def spec_preferred_variants(self, pkgname): """Return a VariantMap of preferred variants and their values""" variants = self.preferred.get(pkgname, {}).get('variants', '') if not isinstance(variants, basestring): - variants = "".join(variants) - return spack.spec.Spec(pkgname + variants).variants + variants = " ".join(variants) + return spack.spec.Spec("%s %s" % (pkgname, variants)).variants def version_compare(self, pkgname, a, b): """Return less-than-0, 0, or greater than 0 if version a of pkgname is From aad5a4c4b39e1ff4d478ae8e0402f6b533741d82 Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Fri, 29 Jul 2016 14:12:07 +0200 Subject: [PATCH 101/284] mkl: flake8 fixes; minor docu update --- var/spack/repos/builtin/packages/mkl/package.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/mkl/package.py b/var/spack/repos/builtin/packages/mkl/package.py index b06e2fb3840..71a233ff3e1 100644 --- a/var/spack/repos/builtin/packages/mkl/package.py +++ b/var/spack/repos/builtin/packages/mkl/package.py @@ -12,9 +12,9 @@ class Mkl(IntelInstaller): mirror, see http://software.llnl.gov/spack/mirrors.html. To set the threading layer at run time set MKL_THREADING_LAYER - variable to one of the following values: INTEL, SEQUENTIAL, PGI. + variable to one of the following values: INTEL (default), SEQUENTIAL, PGI. To set interface layer at run time, use set the MKL_INTERFACE_LAYER - variable to LP64 or ILP64. + variable to LP64 (default) or ILP64. """ homepage = "https://software.intel.com/en-us/intel-mkl" @@ -40,10 +40,10 @@ def install(self, spec, prefix): # Unfortunately MKL libs are natively distrubted in prefix/lib/intel64. # To make MKL play nice with Spack, symlink all files to prefix/lib: - mkl_lib_dir = os.path.join(prefix, "lib","intel64") + mkl_lib_dir = os.path.join(prefix, "lib", "intel64") for f in os.listdir(mkl_lib_dir): - os.symlink(os.path.join(mkl_lib_dir, f), os.path.join(self.prefix, "lib", f)) - + os.symlink(os.path.join(mkl_lib_dir, f), + os.path.join(self.prefix, "lib", f)) def setup_dependent_package(self, module, dspec): # For now use Single Dynamic Library: From 6c80b152208e8f2154db6c658475fe18a9c9f8bc Mon Sep 17 00:00:00 2001 From: "Kelly (KT) Thompson" Date: Fri, 29 Jul 2016 08:46:31 -0600 Subject: [PATCH 102/284] GTK+3: Provide updates to package dependencies. WIP: I started trying to build gtkplus@3.20, but this package has many more dependencies than v2 and it requires newer versions of existing packages. This commit provides updates for 5 packages that are required by GTK+3. 
This is not the complete set of changes required for GTK+3. atk - move default version from 2.14 -> 2.20. glib - move default version from 2.42 -> 2.49 - v2.49 requires pcre+utf as a new dependency. pcre - if variant +utf is selected, add '--enable-unicode-properties' to the configure options. libepoxy - new package to spack - manages OpenGL function pointers. pango - move default version from 1.36 -> 1.40 --- .../repos/builtin/packages/atk/package.py | 1 + .../repos/builtin/packages/glib/package.py | 10 +++-- .../builtin/packages/libepoxy/package.py | 39 +++++++++++++++++++ .../repos/builtin/packages/pango/package.py | 2 + .../repos/builtin/packages/pcre/package.py | 2 +- 5 files changed, 49 insertions(+), 5 deletions(-) create mode 100644 var/spack/repos/builtin/packages/libepoxy/package.py diff --git a/var/spack/repos/builtin/packages/atk/package.py b/var/spack/repos/builtin/packages/atk/package.py index 1d26145fd11..8da562f1225 100644 --- a/var/spack/repos/builtin/packages/atk/package.py +++ b/var/spack/repos/builtin/packages/atk/package.py @@ -32,6 +32,7 @@ class Atk(Package): homepage = "https://developer.gnome.org/atk/" url = "http://ftp.gnome.org/pub/gnome/sources/atk/2.14/atk-2.14.0.tar.xz" + version('2.20.0', '5187b0972f4d3905f285540b31395e20') version('2.14.0', 'ecb7ca8469a5650581b1227d78051b8b') depends_on("glib") diff --git a/var/spack/repos/builtin/packages/glib/package.py b/var/spack/repos/builtin/packages/glib/package.py index 0a0a02f464e..512f4466172 100644 --- a/var/spack/repos/builtin/packages/glib/package.py +++ b/var/spack/repos/builtin/packages/glib/package.py @@ -32,14 +32,16 @@ class Glib(Package): and interfaces for such runtime functionality as an event loop, threads, dynamic loading and an object system.""" homepage = "https://developer.gnome.org/glib/" - url = "http://ftp.gnome.org/pub/gnome/sources/glib/2.42/glib-2.42.1.tar.xz" + url = "http://ftp.gnome.org/pub/gnome/sources/glib/2.42/glib-2.42.1.tar.xz" + version('2.49.4', 'e2c87c03017b0cd02c4c73274b92b148') version('2.42.1', '89c4119e50e767d3532158605ee9121a') - depends_on("libffi") - depends_on("zlib") - depends_on("pkg-config", type='build') + depends_on('libffi') + depends_on('zlib') + depends_on('pkg-config', type='build') depends_on('gettext', when=sys.platform == 'darwin') + depends_on('pcre+utf', when='@2.49:') # The following patch is needed for gcc-6.1 patch('g_date_strftime.patch') diff --git a/var/spack/repos/builtin/packages/libepoxy/package.py b/var/spack/repos/builtin/packages/libepoxy/package.py new file mode 100644 index 00000000000..364ea1e30c5 --- /dev/null +++ b/var/spack/repos/builtin/packages/libepoxy/package.py @@ -0,0 +1,39 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Libepoxy(Package): + """Epoxy is a library for handling OpenGL function pointer management for + you.""" + homepage = "https://github.com/anholt/libepoxy" + url = "https://github.com/anholt/libepoxy/releases/download/v1.3.1/libepoxy-1.3.1.tar.bz2" + + version('1.3.1', '96f6620a9b005a503e7b44b0b528287d') + + def install(self, spec, prefix): + configure('--prefix={0}'.format(prefix)) + make() + make('install') diff --git a/var/spack/repos/builtin/packages/pango/package.py b/var/spack/repos/builtin/packages/pango/package.py index a04f6d64e0a..7ae90f17955 100644 --- a/var/spack/repos/builtin/packages/pango/package.py +++ b/var/spack/repos/builtin/packages/pango/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Pango(Package): """Pango is a library for laying out and rendering of text, with an emphasis on internationalization. It can be used anywhere @@ -33,6 +34,7 @@ class Pango(Package): url = "http://ftp.gnome.org/pub/gnome/sources/pango/1.36/pango-1.36.8.tar.xz" version('1.36.8', '217a9a753006275215fa9fa127760ece') + version('1.40.1', '6fc88c6529890d6c8e03074d57a3eceb') depends_on("harfbuzz") depends_on("cairo") diff --git a/var/spack/repos/builtin/packages/pcre/package.py b/var/spack/repos/builtin/packages/pcre/package.py index a43729a1ac6..6f306ab0f9c 100644 --- a/var/spack/repos/builtin/packages/pcre/package.py +++ b/var/spack/repos/builtin/packages/pcre/package.py @@ -29,7 +29,6 @@ class Pcre(Package): """The PCRE package contains Perl Compatible Regular Expression libraries. These are useful for implementing regular expression pattern matching using the same syntax and semantics as Perl 5.""" - homepage = "http://www.pcre.org""" url = "ftp://ftp.csx.cam.ac.uk/pub/software/programming/pcre/pcre-8.36.tar.bz2" @@ -46,6 +45,7 @@ def install(self, spec, prefix): configure_args = ['--prefix=%s' % prefix] if '+utf' in spec: configure_args.append('--enable-utf') + configure_args.append('--enable-unicode-properties') configure(*configure_args) make() From 5e97eb5ec4c49f5199b47d4b43fb1584046785e4 Mon Sep 17 00:00:00 2001 From: Matt Belhorn Date: Fri, 29 Jul 2016 12:36:50 -0400 Subject: [PATCH 103/284] Obtains default modules from a clean subshell. The list of default environment modules is obtained by calling `module list -lt` from a subshell with a wiped environment. This allows `/etc/profile` and other init scripts to be fully sourced which should generally include loading the default modules. The list of default modules is then parsed for the first acceptable CPU target, assumed to be the back_end target. 
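Combined with the earlier Cray platform patch, the overall lookup order for each
target can be summarized by the sketch below. This is a simplified, illustrative
rendering of what Cray.__init__ and _target_from_clean_env do, not a literal copy,
and the helper argument name is made up for the example:

    import os
    import spack.config

    def chosen_target(name, target_from_clean_env):
        # 1. explicit override via SPACK_FRONT_END / SPACK_BACK_END
        target = os.environ.get('SPACK_' + name.upper())
        # 2. 'front_end' / 'back_end' entries in the targets.yaml config section
        if target is None:
            target = spack.config.get_config('targets').get(name)
        # 3. first craype-* CPU module loaded in a clean login shell; the helper
        #    bails out for 'front_end', so this only ever fills in 'back_end'
        if target is None:
            target = target_from_clean_env(name)
        return target

If none of these sources yields a back_end target, Cray.__init__ raises
NoPlatformError, so at least one of them must be available on a given system.
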
--- lib/spack/spack/platforms/cray.py | 37 ++++++++++++++++++++++++------- 1 file changed, 29 insertions(+), 8 deletions(-) diff --git a/lib/spack/spack/platforms/cray.py b/lib/spack/spack/platforms/cray.py index 79a53d887cc..dc959c7eafe 100644 --- a/lib/spack/spack/platforms/cray.py +++ b/lib/spack/spack/platforms/cray.py @@ -1,6 +1,7 @@ import os import re import spack.config +from spack.util.executable import which from spack.architecture import Platform, Target, NoPlatformError from spack.operating_systems.linux_distro import LinuxDistro from spack.operating_systems.cnl import Cnl @@ -10,15 +11,35 @@ NON_TARGETS = ('hugepages', 'network', 'target', 'accel', 'xtpe') -def _target_from_init(name): - matches = [] +def _target_from_clean_env(name): + '''Return the default back_end target as loaded in a clean login session. + + A bash subshell is launched with a wiped environment and the list of loaded + modules is parsed for the first acceptable CrayPE target. + ''' + # Based on the incantation: + # echo "$(env - USER=$USER /bin/bash -l -c 'module list -lt')" + default_modules = [] + targets = [] if name != 'front_end': + env = which('env') + env.add_default_arg('-') + # CAUTION - $USER is generally needed to initialize the environment. + # There may be other variables needed for general success. + output = env('USER=%s' % os.environ['USER'], + '/bin/bash', '-l', '-c', 'module list -lt', + output=str, error=str) pattern = 'craype-(?!{0})(\S*)'.format('|'.join(NON_TARGETS)) - with open('/etc/bash.bashrc.local', 'r') as conf: - for line in conf: - if re.search('^[^\#]*module[\s]*(?:add|load)', line): - matches.extend(re.findall(pattern, line)) - return matches[0] if matches else None + for line in output.splitlines(): + if 'craype-' in line: + targets.extend(re.findall(pattern, line)) + if len(line.split()) == 1: + default_modules.append(line) + # if default_modules: + # print 'Found default modules:' + # for defmod in default_modules: + # print ' ', defmod + return targets[0] if targets else None class Cray(Platform): @@ -45,7 +66,7 @@ def __init__(self): if _target is None: _target = conf.get(name) if _target is None: - _target = _target_from_init(name) + _target = _target_from_clean_env(name) setattr(self, name, _target) if _target is not None: From dc7e0899a05cf4464552f4b3d41005e79571a15a Mon Sep 17 00:00:00 2001 From: Matt Belhorn Date: Sat, 30 Jul 2016 11:38:34 -0400 Subject: [PATCH 104/284] Invokes subshell without user init scripts. --- lib/spack/spack/platforms/cray.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/platforms/cray.py b/lib/spack/spack/platforms/cray.py index dc959c7eafe..fd58915c575 100644 --- a/lib/spack/spack/platforms/cray.py +++ b/lib/spack/spack/platforms/cray.py @@ -27,7 +27,8 @@ def _target_from_clean_env(name): # CAUTION - $USER is generally needed to initialize the environment. # There may be other variables needed for general success. 
output = env('USER=%s' % os.environ['USER'], - '/bin/bash', '-l', '-c', 'module list -lt', + '/bin/sh', '--noprofile', '-c', + 'source /etc/profile; module list -lt', output=str, error=str) pattern = 'craype-(?!{0})(\S*)'.format('|'.join(NON_TARGETS)) for line in output.splitlines(): From f4422dc165b37b8dbad0af31dd0a3d24671a1e49 Mon Sep 17 00:00:00 2001 From: alalazo Date: Sat, 30 Jul 2016 18:10:33 +0200 Subject: [PATCH 105/284] qa : fixes #1370 (no flake8 on python 2.6, coveralls only python 2.7) --- .travis.yml | 25 ++++++++++++++----------- share/spack/qa/run-unit-tests | 20 ++++++++++++++++++++ 2 files changed, 34 insertions(+), 11 deletions(-) create mode 100755 share/spack/qa/run-unit-tests diff --git a/.travis.yml b/.travis.yml index 904143a00f7..b376a33490c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,17 @@ language: python + python: - "2.6" - "2.7" +env: + - TEST_TYPE=unit + - TEST_TYPE=flake8 + +# Exclude flake8 from python 2.6 +matrix: + exclude: + - python: "2.6" + env: TEST_TYPE=flake8 # Use new Travis infrastructure (Docker can't sudo yet) sudo: false @@ -20,20 +30,13 @@ before_install: - git fetch origin develop:develop script: - # Regular spack setup and tests - - . share/spack/setup-env.sh - - spack compilers - - spack config get compilers - - spack install -v libdwarf - - # Run unit tests with code coverage - - coverage run bin/spack test - + # Run unit tests with code coverage plus install libdwarf + - 'if [ "$TEST_TYPE" = "unit" ]; then share/spack/qa/run-unit-tests; fi' # Run flake8 code style checks. - - share/spack/qa/run-flake8 + - 'if [ "$TEST_TYPE" = "flake8" ]; then share/spack/qa/run-flake8; fi' after_success: - - coveralls + - 'if [ "$TEST_TYPE" = "unit" ] && [ "$TRAVIS_PYTHON_VERSION" = "2.7" ]; then coveralls; fi' notifications: email: diff --git a/share/spack/qa/run-unit-tests b/share/spack/qa/run-unit-tests new file mode 100755 index 00000000000..33fb1bfae2a --- /dev/null +++ b/share/spack/qa/run-unit-tests @@ -0,0 +1,20 @@ +#!/usr/bin/env bash +# +# This script runs Spack unit tests. +# +# It should be executed from the top-level directory of the repo, +# e.g.: +# +# share/spack/qa/run-unit-tests +# +# To run it, you'll need to have the Python coverage installed locally. +# + +# Regular spack setup and tests +. ./share/spack/setup-env.sh +spack compilers +spack config get compilers +spack install -v libdwarf + +# Run unit tests with code coverage +coverage run bin/spack test From ee7acc6b1337366c91261c3d76ad3750dd6409f0 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sat, 30 Jul 2016 15:10:25 -0500 Subject: [PATCH 106/284] New package r-car Companion to Applied Regression. 
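Apart from their dependency lists, the new R packages in this change (r-car and
the support packages it pulls in, such as r-lme4, r-mgcv, r-nnet, r-pbkrtest,
r-quantreg and r-sparsem) all share the same minimal install recipe:

    def install(self, spec, prefix):
        R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir),
          self.stage.source_path)

so the per-package files below differ essentially only in their metadata and
their depends_on(..., type=nolink) declarations.
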
--- .../repos/builtin/packages/r-car/package.py | 48 +++++++++++++++++ .../repos/builtin/packages/r-lme4/package.py | 53 +++++++++++++++++++ .../packages/r-matrixmodels/package.py | 44 +++++++++++++++ .../repos/builtin/packages/r-mgcv/package.py | 47 ++++++++++++++++ .../repos/builtin/packages/r-minqa/package.py | 44 +++++++++++++++ .../repos/builtin/packages/r-nlme/package.py | 43 +++++++++++++++ .../builtin/packages/r-nloptr/package.py | 47 ++++++++++++++++ .../repos/builtin/packages/r-nnet/package.py | 42 +++++++++++++++ .../builtin/packages/r-pbkrtest/package.py | 49 +++++++++++++++++ .../builtin/packages/r-quantreg/package.py | 50 +++++++++++++++++ .../builtin/packages/r-sparsem/package.py | 43 +++++++++++++++ 11 files changed, 510 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-car/package.py create mode 100644 var/spack/repos/builtin/packages/r-lme4/package.py create mode 100644 var/spack/repos/builtin/packages/r-matrixmodels/package.py create mode 100644 var/spack/repos/builtin/packages/r-mgcv/package.py create mode 100644 var/spack/repos/builtin/packages/r-minqa/package.py create mode 100644 var/spack/repos/builtin/packages/r-nlme/package.py create mode 100644 var/spack/repos/builtin/packages/r-nloptr/package.py create mode 100644 var/spack/repos/builtin/packages/r-nnet/package.py create mode 100644 var/spack/repos/builtin/packages/r-pbkrtest/package.py create mode 100644 var/spack/repos/builtin/packages/r-quantreg/package.py create mode 100644 var/spack/repos/builtin/packages/r-sparsem/package.py diff --git a/var/spack/repos/builtin/packages/r-car/package.py b/var/spack/repos/builtin/packages/r-car/package.py new file mode 100644 index 00000000000..f4a3824983f --- /dev/null +++ b/var/spack/repos/builtin/packages/r-car/package.py @@ -0,0 +1,48 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RCar(Package): + """Functions and Datasets to Accompany J. Fox and S. 
Weisberg, An R + Companion to Applied Regression, Second Edition, Sage, 2011.""" + + homepage = "https://r-forge.r-project.org/projects/car/" + url = "https://cran.r-project.org/src/contrib/car_2.1-2.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/car" + + version('2.1-2', '0f78ad74ef7130126d319acec23951a0') + + extends('R') + + depends_on('r-mass', type=nolink) + depends_on('r-mgcv', type=nolink) + depends_on('r-nnet', type=nolink) + depends_on('r-pbkrtest', type=nolink) + depends_on('r-quantreg', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-lme4/package.py b/var/spack/repos/builtin/packages/r-lme4/package.py new file mode 100644 index 00000000000..c52d9d0e27c --- /dev/null +++ b/var/spack/repos/builtin/packages/r-lme4/package.py @@ -0,0 +1,53 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RLme4(Package): + """Fit linear and generalized linear mixed-effects models. The models and + their components are represented using S4 classes and methods. 
The core + computational algorithms are implemented using the 'Eigen' C++ library for + numerical linear algebra and 'RcppEigen' "glue".""" + + homepage = "https://github.com/lme4/lme4/" + url = "https://cran.r-project.org/src/contrib/lme4_1.1-12.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/lme4" + + version('1.1-12', 'da8aaebb67477ecb5631851c46207804') + + extends('R') + + depends_on('r-matrix', type=nolink) + depends_on('r-mass', type=nolink) + depends_on('r-lattice', type=nolink) + depends_on('r-nlme', type=nolink) + depends_on('r-minqa', type=nolink) + depends_on('r-nloptr', type=nolink) + depends_on('r-rcpp', type=nolink) + depends_on('r-rcppeigen', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-matrixmodels/package.py b/var/spack/repos/builtin/packages/r-matrixmodels/package.py new file mode 100644 index 00000000000..3cdce6fea63 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-matrixmodels/package.py @@ -0,0 +1,44 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RMatrixmodels(Package): + """Modelling with sparse and dense 'Matrix' matrices, using modular + prediction and response module classes.""" + + homepage = "http://matrix.r-forge.r-project.org/" + url = "https://cran.r-project.org/src/contrib/MatrixModels_0.4-1.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/MatrixModels" + + version('0.4-1', '65b3ab56650c62bf1046a3eb1f1e19a0') + + extends('R') + + depends_on('r-matrix', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-mgcv/package.py b/var/spack/repos/builtin/packages/r-mgcv/package.py new file mode 100644 index 00000000000..4c49462ba23 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-mgcv/package.py @@ -0,0 +1,47 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. 
+# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RMgcv(Package): + """GAMs, GAMMs and other generalized ridge regression with multiple + smoothing parameter estimation by GCV, REML or UBRE/AIC. Includes a gam() + function, a wide variety of smoothers, JAGS support and distributions + beyond the exponential family.""" + + homepage = "https://cran.r-project.org/package=mgcv" + url = "https://cran.r-project.org/src/contrib/mgcv_1.8-13.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/mgcv" + + version('1.8-13', '30607be3aaf44b13bd8c81fc32e8c984') + + extends('R') + + depends_on('r-nlme', type=nolink) + depends_on('r-matrix', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-minqa/package.py b/var/spack/repos/builtin/packages/r-minqa/package.py new file mode 100644 index 00000000000..16cff20b410 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-minqa/package.py @@ -0,0 +1,44 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RMinqa(Package): + """Derivative-free optimization by quadratic approximation based on an + interface to Fortran implementations by M. J. D. 
Powell.""" + + homepage = "http://optimizer.r-forge.r-project.org/" + url = "https://cran.r-project.org/src/contrib/minqa_1.2.4.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/minqa" + + version('1.2.4', 'bcaae4fdba60a33528f2116e2fd51105') + + extends('R') + + depends_on('r-rcpp', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-nlme/package.py b/var/spack/repos/builtin/packages/r-nlme/package.py new file mode 100644 index 00000000000..1b6bb114e1b --- /dev/null +++ b/var/spack/repos/builtin/packages/r-nlme/package.py @@ -0,0 +1,43 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RNlme(Package): + """Fit and compare Gaussian linear and nonlinear mixed-effects models.""" + + homepage = "https://cran.r-project.org/package=nlme" + url = "https://cran.r-project.org/src/contrib/nlme_3.1-128.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/nlme" + + version('3.1-128', '3d75ae7380bf123761b95a073eb55008') + + extends('R') + + depends_on('r-lattice', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-nloptr/package.py b/var/spack/repos/builtin/packages/r-nloptr/package.py new file mode 100644 index 00000000000..58cb585c491 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-nloptr/package.py @@ -0,0 +1,47 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. 
+# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RNloptr(Package): + """nloptr is an R interface to NLopt. NLopt is a free/open-source library + for nonlinear optimization, providing a common interface for a number of + different free optimization routines available online as well as original + implementations of various other algorithms. See + http://ab-initio.mit.edu/wiki/index.php/NLopt_Introduction for more + information on the available algorithms. During installation on Unix the + NLopt code is downloaded and compiled from the NLopt website.""" + + homepage = "https://cran.r-project.org/package=nloptr" + url = "https://cran.r-project.org/src/contrib/nloptr_1.0.4.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/nloptr" + + version('1.0.4', '9af69a613349b236fd377d0a107f484c') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-nnet/package.py b/var/spack/repos/builtin/packages/r-nnet/package.py new file mode 100644 index 00000000000..be048f5ad04 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-nnet/package.py @@ -0,0 +1,42 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RNnet(Package): + """Software for feed-forward neural networks with a single hidden layer, + and for multinomial log-linear models.""" + + homepage = "http://www.stats.ox.ac.uk/pub/MASS4/" + url = "https://cran.r-project.org/src/contrib/nnet_7.3-12.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/nnet" + + version('7.3-12', 'dc7c6f0d0de53d8fc72b44554400a74e') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-pbkrtest/package.py b/var/spack/repos/builtin/packages/r-pbkrtest/package.py new file mode 100644 index 00000000000..40b6f96927f --- /dev/null +++ b/var/spack/repos/builtin/packages/r-pbkrtest/package.py @@ -0,0 +1,49 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RPbkrtest(Package): + """Test in mixed effects models. Attention is on mixed effects models as + implemented in the 'lme4' package. 
This package implements a parametric + bootstrap test and a Kenward Roger modification of F-tests for linear mixed + effects models and a parametric bootstrap test for generalized linear mixed + models.""" + + homepage = "http://people.math.aau.dk/~sorenh/software/pbkrtest/" + url = "https://cran.r-project.org/src/contrib/pbkrtest_0.4-6.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/pbkrtest" + + version('0.4-6', '0a7d9ff83b8d131af9b2335f35781ef9') + + extends('R') + + depends_on('r-lme4', type=nolink) + depends_on('r-matrix', type=nolink) + depends_on('r-mass', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-quantreg/package.py b/var/spack/repos/builtin/packages/r-quantreg/package.py new file mode 100644 index 00000000000..89a26070ba2 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-quantreg/package.py @@ -0,0 +1,50 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RQuantreg(Package): + """Estimation and inference methods for models of conditional quantiles: + Linear and nonlinear parametric and non-parametric (total variation + penalized) models for conditional quantiles of a univariate response + and several methods for handling censored survival data. 
Portfolio + selection methods based on expected shortfall risk are also + included.""" + + homepage = "https://cran.r-project.org/package=quantreg" + url = "https://cran.r-project.org/src/contrib/quantreg_5.26.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/quantreg" + + version('5.26', '1d89ed932fb4d67ae2d5da0eb8c2989f') + + extends('R') + + depends_on('r-sparsem', type=nolink) + depends_on('r-matrix', type=nolink) + depends_on('r-matrixmodels', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-sparsem/package.py b/var/spack/repos/builtin/packages/r-sparsem/package.py new file mode 100644 index 00000000000..c4dabf5c153 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-sparsem/package.py @@ -0,0 +1,43 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RSparsem(Package): + """Some basic linear algebra functionality for sparse matrices is provided: + including Cholesky decomposition and backsolving as well as standard R + subsetting and Kronecker products.""" + + homepage = "http://www.econ.uiuc.edu/~roger/research/sparse/sparse.html" + url = "https://cran.r-project.org/src/contrib/SparseM_1.7.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/SparseM" + + version('1.7', '7b5b0ab166a0929ef6dcfe1d97643601') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) From 8dc26bbcd957a7a00c4a370c434136e59b52aa87 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sat, 30 Jul 2016 15:07:18 -0500 Subject: [PATCH 107/284] Have R extensions build in parallel. Set `MAKEFLAGS` so R extensions can be built in parallel if that is set in spack. 
--- var/spack/repos/builtin/packages/R/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/R/package.py b/var/spack/repos/builtin/packages/R/package.py index 554adc77935..e880a3aa668 100644 --- a/var/spack/repos/builtin/packages/R/package.py +++ b/var/spack/repos/builtin/packages/R/package.py @@ -139,6 +139,10 @@ def setup_dependent_environment(self, spack_env, run_env, extension_spec): spack_env.set('R_MAKEVARS_SITE', join_path(self.etcdir, 'Makeconf.spack')) + # Use the number of make_jobs set in spack. The make program will + # determine how many jobs can actually be started. + spack_env.set('MAKEFLAGS', '-j{0}'.format(make_jobs)) + # For run time environment set only the path for extension_spec and # prepend it to R_LIBS if extension_spec.package.extends(self.spec): From 12167e8f375da357983967a5803ef9045313cc4d Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sat, 30 Jul 2016 15:39:28 -0500 Subject: [PATCH 108/284] New package r-randomforest Breiman and Cutler's Random Forests for Classification and Regression. --- .../packages/r-randomforest/package.py | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-randomforest/package.py diff --git a/var/spack/repos/builtin/packages/r-randomforest/package.py b/var/spack/repos/builtin/packages/r-randomforest/package.py new file mode 100644 index 00000000000..1066c217f44 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-randomforest/package.py @@ -0,0 +1,42 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RRandomforest(Package): + """Classification and regression based on a forest of trees using random + inputs.""" + + homepage = "https://www.stat.berkeley.edu/~breiman/RandomForests/" + url = "https://cran.r-project.org/src/contrib/randomForest_4.6-12.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/randomForest" + + version('4.6-12', '071c03af974198e861f1475c5bab9e7a') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) From fc1804974c2a6444e119008bc93282cdd48f3670 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sat, 30 Jul 2016 16:09:57 -0500 Subject: [PATCH 109/284] New package - r-multcomp Simultaneous Inference in General Parametric Models. --- .../builtin/packages/r-multcomp/package.py | 51 +++++++++++++++++++ .../builtin/packages/r-mvtnorm/package.py | 42 +++++++++++++++ .../builtin/packages/r-sandwich/package.py | 44 ++++++++++++++++ .../builtin/packages/r-survival/package.py | 45 ++++++++++++++++ .../builtin/packages/r-thdata/package.py | 44 ++++++++++++++++ 5 files changed, 226 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-multcomp/package.py create mode 100644 var/spack/repos/builtin/packages/r-mvtnorm/package.py create mode 100644 var/spack/repos/builtin/packages/r-sandwich/package.py create mode 100644 var/spack/repos/builtin/packages/r-survival/package.py create mode 100644 var/spack/repos/builtin/packages/r-thdata/package.py diff --git a/var/spack/repos/builtin/packages/r-multcomp/package.py b/var/spack/repos/builtin/packages/r-multcomp/package.py new file mode 100644 index 00000000000..54416502d95 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-multcomp/package.py @@ -0,0 +1,51 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RMultcomp(Package): + """Simultaneous tests and confidence intervals for general linear + hypotheses in parametric models, including linear, generalized linear, + linear mixed effects, and survival models. The package includes demos + reproducing analyzes presented in the book "Multiple Comparisons Using R" + (Bretz, Hothorn, Westfall, 2010, CRC Press).""" + + homepage = "http://multcomp.r-forge.r-project.org/" + url = "https://cran.r-project.org/src/contrib/multcomp_1.4-6.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/multcomp" + + version('1.4-6', 'f1353ede2ed78b23859a7f1f1f9ebe88') + + extends('R') + + depends_on('r-mvtnorm', type=nolink) + depends_on('r-survival', type=nolink) + depends_on('r-thdata', type=nolink) + depends_on('r-sandwich', type=nolink) + depends_on('r-codetools', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-mvtnorm/package.py b/var/spack/repos/builtin/packages/r-mvtnorm/package.py new file mode 100644 index 00000000000..b3f7db60c60 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-mvtnorm/package.py @@ -0,0 +1,42 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RMvtnorm(Package): + """Computes multivariate normal and t probabilities, quantiles, random + deviates and densities.""" + + homepage = "http://mvtnorm.r-forge.r-project.org/" + url = "https://cran.r-project.org/src/contrib/mvtnorm_1.0-5.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/mvtnorm" + + version('1.0-5', '5894dd3969bbfa26f4862c45f9a48a52') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-sandwich/package.py b/var/spack/repos/builtin/packages/r-sandwich/package.py new file mode 100644 index 00000000000..ffd62b1b147 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-sandwich/package.py @@ -0,0 +1,44 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RSandwich(Package): + """Model-robust standard error estimators for cross-sectional, time series, + and longitudinal data.""" + + homepage = "https://cran.r-project.org/package=sandwich" + url = "https://cran.r-project.org/src/contrib/sandwich_2.3-4.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/sandwich" + + version('2.3-4', 'a621dbd8a57b6e1e036496642aadc2e5') + + extends('R') + + depends_on('r-zoo', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-survival/package.py b/var/spack/repos/builtin/packages/r-survival/package.py new file mode 100644 index 00000000000..cfba9298fe1 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-survival/package.py @@ -0,0 +1,45 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. 
+# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RSurvival(Package): + """Contains the core survival analysis routines, including definition of + Surv objects, Kaplan-Meier and Aalen-Johansen (multi-state) curves, Cox + models, and parametric accelerated failure time models.""" + + homepage = "https://cran.r-project.org/package=survival" + url = "https://cran.r-project.org/src/contrib/survival_2.39-5.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/survival" + + version('2.39-5', 'a3cc6b5762e8c5c0bb9e64a276710be2') + + extends('R') + + depends_on('r-matrix', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-thdata/package.py b/var/spack/repos/builtin/packages/r-thdata/package.py new file mode 100644 index 00000000000..e1cf0501811 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-thdata/package.py @@ -0,0 +1,44 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RThdata(Package): + """Contains data sets used in other packages Torsten Hothorn maintains.""" + + homepage = "https://cran.r-project.org/package=TH.data" + url = "https://cran.r-project.org/src/contrib/TH.data_1.0-7.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/TH.data" + + version('1.0-7', '3e8b6b1a4699544f175215aed7039a94') + + extends('R') + + depends_on('r-survival', type=nolink) + depends_on('r-mass', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) From e9944150a5023161fb040de418a2fd58edfc528c Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sat, 30 Jul 2016 16:28:43 -0500 Subject: [PATCH 110/284] New package - r-vcd Visualizing Categorical Data --- .../builtin/packages/r-lmtest/package.py | 45 +++++++++++++++++ .../repos/builtin/packages/r-vcd/package.py | 50 +++++++++++++++++++ 2 files changed, 95 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-lmtest/package.py create mode 100644 var/spack/repos/builtin/packages/r-vcd/package.py diff --git a/var/spack/repos/builtin/packages/r-lmtest/package.py b/var/spack/repos/builtin/packages/r-lmtest/package.py new file mode 100644 index 00000000000..31a36f1f7e4 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-lmtest/package.py @@ -0,0 +1,45 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RLmtest(Package): + """A collection of tests, data sets, and examples for diagnostic checking + in linear regression models. 
Furthermore, some generic tools for inference + in parametric models are provided.""" + + homepage = "https://cran.r-project.org/package=lmtest" + url = "https://cran.r-project.org/src/contrib/lmtest_0.9-34.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/lmtest" + + version('0.9-34', 'fcdf7286bb5ccc2ca46be00bf25ac2fe') + + extends('R') + + depends_on('r-zoo', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-vcd/package.py b/var/spack/repos/builtin/packages/r-vcd/package.py new file mode 100644 index 00000000000..06e609b1efd --- /dev/null +++ b/var/spack/repos/builtin/packages/r-vcd/package.py @@ -0,0 +1,50 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RVcd(Package): + """Visualization techniques, data sets, summary and inference procedures + aimed particularly at categorical data. Special emphasis is given to highly + extensible grid graphics. 
The package was package was originally inspired + by the book "Visualizing Categorical Data" by Michael Friendly and is now + the main support package for a new book, "Discrete Data Analysis with R" by + Michael Friendly and David Meyer (2015).""" + + homepage = "https://cran.r-project.org/package=vcd" + url = "https://cran.r-project.org/src/contrib/vcd_1.4-1.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/vcd" + + version('1.4-1', '7db150a77f173f85b69a1f86f73f8f02') + + extends('R') + + depends_on('r-mass', type=nolink) + depends_on('r-colorspace', type=nolink) + depends_on('r-lmtest', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) From 20e52e505204c59dda44d5c2ac6949f4aeb6fd23 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sat, 30 Jul 2016 16:38:00 -0500 Subject: [PATCH 111/284] New package - r-glmnet Lasso and Elastic-Net Regularized Generalized Linear Models --- .../builtin/packages/r-glmnet/package.py | 49 +++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-glmnet/package.py diff --git a/var/spack/repos/builtin/packages/r-glmnet/package.py b/var/spack/repos/builtin/packages/r-glmnet/package.py new file mode 100644 index 00000000000..af6e1d1b637 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-glmnet/package.py @@ -0,0 +1,49 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RGlmnet(Package): + """Extremely efficient procedures for fitting the entire lasso or + elastic-net regularization path for linear regression, logistic and + multinomial regression models, Poisson regression and the Cox model. Two + recent additions are the multiple-response Gaussian, and the grouped + multinomial. 
The algorithm uses cyclical coordinate descent in a path-wise + fashion, as described in the paper linked to via the URL below.""" + + homepage = "http://www.jstatsoft.org/v33/i01/" + url = "https://cran.r-project.org/src/contrib/glmnet_2.0-5.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/glmnet" + + version('2.0-5', '049b18caa29529614cd684db3beaec2a') + + extends('R') + + depends_on('r-matrix', type=nolink) + depends_on('r-foreach', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) From 7207ce2a18e489d272715c4b5a0197ea564487ed Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sat, 30 Jul 2016 16:53:54 -0500 Subject: [PATCH 112/284] New package - r-caret Classification and Regression Training --- .../repos/builtin/packages/r-caret/package.py | 50 +++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-caret/package.py diff --git a/var/spack/repos/builtin/packages/r-caret/package.py b/var/spack/repos/builtin/packages/r-caret/package.py new file mode 100644 index 00000000000..460526c7d32 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-caret/package.py @@ -0,0 +1,50 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RCaret(Package): + """Misc functions for training and plotting classification and regression + models.""" + + homepage = "https://github.com/topepo/caret/" + url = "https://cran.r-project.org/src/contrib/caret_6.0-70.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/caret" + + version('6.0-70', '202d7abb6a679af716ea69fb2573f108') + + extends('R') + + depends_on('r-lattice', type=nolink) + depends_on('r-ggplot2', type=nolink) + depends_on('r-car', type=nolink) + depends_on('r-foreach', type=nolink) + depends_on('r-plyr', type=nolink) + depends_on('r-nlme', type=nolink) + depends_on('r-reshape2', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) From 386f0e577ae732ff0139dc17c83cef23fa384cc4 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sat, 30 Jul 2016 17:26:31 -0500 Subject: [PATCH 113/284] New package - r-maptools Tools for reading and handling spatial objects. 
--- .../builtin/packages/r-maptools/package.py | 49 +++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-maptools/package.py diff --git a/var/spack/repos/builtin/packages/r-maptools/package.py b/var/spack/repos/builtin/packages/r-maptools/package.py new file mode 100644 index 00000000000..8d045a4ed44 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-maptools/package.py @@ -0,0 +1,49 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RMaptools(Package): + """Set of tools for manipulating and reading geographic data, in particular + ESRI shapefiles; C code used from shapelib. It includes binary access to + GSHHG shoreline files. The package also provides interface wrappers for + exchanging spatial objects with packages such as PBSmapping, spatstat, + maps, RArcInfo, Stata tmap, WinBUGS, Mondrian, and others.""" + + homepage = "http://r-forge.r-project.org/projects/maptools/" + url = "https://cran.r-project.org/src/contrib/maptools_0.8-39.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/maptools" + + version('0.8-39', '3690d96afba8ef22c8e27ae540ffb836') + + extends('R') + + depends_on('r-sp', type=nolink) + depends_on('r-foreign', type=nolink) + depends_on('r-lattice', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) From 1bd2def41edf9ec354f889f091f26eef6416ca81 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sat, 30 Jul 2016 17:34:07 -0500 Subject: [PATCH 114/284] New package - r-maps Draw geographical maps. --- .../repos/builtin/packages/r-maps/package.py | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-maps/package.py diff --git a/var/spack/repos/builtin/packages/r-maps/package.py b/var/spack/repos/builtin/packages/r-maps/package.py new file mode 100644 index 00000000000..1e0bfd2d437 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-maps/package.py @@ -0,0 +1,42 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. 
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RMaps(Package): + """Display of maps. Projection code and larger maps are in separate + packages ('mapproj' and 'mapdata').""" + + homepage = "https://cran.r-project.org/" + url = "https://cran.r-project.org/src/contrib/maps_3.1.1.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/maps" + + version('3.1.1', 'ff045eccb6d5a658db5a539116ddf764') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) From 2913aa8d09d69c882d5307412a9d79bc7d3f3d12 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sat, 30 Jul 2016 18:14:02 -0500 Subject: [PATCH 115/284] New package - r-ggmap and dependencies Spatial visualization with ggplot2 --- .../builtin/packages/r-geosphere/package.py | 45 +++++++++++++++ .../repos/builtin/packages/r-ggmap/package.py | 57 +++++++++++++++++++ .../repos/builtin/packages/r-jpeg/package.py | 45 +++++++++++++++ .../builtin/packages/r-mapproj/package.py | 43 ++++++++++++++ .../repos/builtin/packages/r-proto/package.py | 42 ++++++++++++++ .../builtin/packages/r-rgooglemaps/package.py | 47 +++++++++++++++ .../repos/builtin/packages/r-rjson/package.py | 41 +++++++++++++ .../builtin/packages/r-rjsonio/package.py | 55 ++++++++++++++++++ 8 files changed, 375 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-geosphere/package.py create mode 100644 var/spack/repos/builtin/packages/r-ggmap/package.py create mode 100644 var/spack/repos/builtin/packages/r-jpeg/package.py create mode 100644 var/spack/repos/builtin/packages/r-mapproj/package.py create mode 100644 var/spack/repos/builtin/packages/r-proto/package.py create mode 100644 var/spack/repos/builtin/packages/r-rgooglemaps/package.py create mode 100644 var/spack/repos/builtin/packages/r-rjson/package.py create mode 100644 var/spack/repos/builtin/packages/r-rjsonio/package.py diff --git a/var/spack/repos/builtin/packages/r-geosphere/package.py b/var/spack/repos/builtin/packages/r-geosphere/package.py new file mode 100644 index 00000000000..21ae07fd41f --- /dev/null +++ b/var/spack/repos/builtin/packages/r-geosphere/package.py @@ -0,0 +1,45 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. 
+# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RGeosphere(Package): + """Spherical trigonometry for geographic applications. That is, compute + distances and related measures for angular (longitude/latitude) + locations.""" + + homepage = "https://cran.r-project.org/package=geosphere" + url = "https://cran.r-project.org/src/contrib/geosphere_1.5-5.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/geosphere" + + version('1.5-5', '28efb7a8e266c7f076cdbcf642455f3e') + + extends('R') + + depends_on('r-sp', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-ggmap/package.py b/var/spack/repos/builtin/packages/r-ggmap/package.py new file mode 100644 index 00000000000..2dfca19b51d --- /dev/null +++ b/var/spack/repos/builtin/packages/r-ggmap/package.py @@ -0,0 +1,57 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RGgmap(Package): + """A collection of functions to visualize spatial data and models on top of + static maps from various online sources (e.g Google Maps and Stamen Maps). 
+ It includes tools common to those tasks, including functions for + geolocation and routing.""" + + homepage = "https://github.com/dkahle/ggmap" + url = "https://cran.r-project.org/src/contrib/ggmap_2.6.1.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/ggmap" + + version('2.6.1', '25ad414a3a1c6d59a227a9f22601211a') + + extends('R') + + depends_on('r-ggplot2', type=nolink) + depends_on('r-proto', type=nolink) + depends_on('r-rgooglemaps', type=nolink) + depends_on('r-png', type=nolink) + depends_on('r-plyr', type=nolink) + depends_on('r-reshape2', type=nolink) + depends_on('r-rjson', type=nolink) + depends_on('r-mapproj', type=nolink) + depends_on('r-jpeg', type=nolink) + depends_on('r-geosphere', type=nolink) + depends_on('r-digest', type=nolink) + depends_on('r-scales', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-jpeg/package.py b/var/spack/repos/builtin/packages/r-jpeg/package.py new file mode 100644 index 00000000000..ef940720f83 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-jpeg/package.py @@ -0,0 +1,45 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RJpeg(Package): + """This package provides an easy and simple way to read, write and display + bitmap images stored in the JPEG format. It can read and write both files + and in-memory raw vectors.""" + + homepage = "http://www.rforge.net/jpeg/" + url = "https://cran.r-project.org/src/contrib/jpeg_0.1-8.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/jpeg" + + version('0.1-8', '696007451d14395b1ed1d0e9af667a57') + + extends('R') + + depends_on('jpeg') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-mapproj/package.py b/var/spack/repos/builtin/packages/r-mapproj/package.py new file mode 100644 index 00000000000..0f8bbe199b4 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-mapproj/package.py @@ -0,0 +1,43 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. 
+# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RMapproj(Package): + """Converts latitude/longitude into projected coordinates.""" + + homepage = "https://cran.r-project.org/package=mapproj" + url = "https://cran.r-project.org/src/contrib/mapproj_1.2-4.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/mapproj" + + version('1.2-4', '10e22bde1c790e1540672f15ddcaee71') + + extends('R') + + depends_on('r-maps', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-proto/package.py b/var/spack/repos/builtin/packages/r-proto/package.py new file mode 100644 index 00000000000..07ace3ad296 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-proto/package.py @@ -0,0 +1,42 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RProto(Package): + """An object oriented system using object-based, also called + prototype-based, rather than class-based object oriented ideas.""" + + homepage = "http://r-proto.googlecode.com/" + url = "https://cran.r-project.org/src/contrib/proto_0.3-10.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/proto" + + version('0.3-10', 'd5523943a5be6ca2f0ab557c900f8212') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-rgooglemaps/package.py b/var/spack/repos/builtin/packages/r-rgooglemaps/package.py new file mode 100644 index 00000000000..0d28b68b940 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-rgooglemaps/package.py @@ -0,0 +1,47 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RRgooglemaps(Package): + """This package serves two purposes: (i) Provide a comfortable R interface + to query the Google server for static maps, and (ii) Use the map as a + background image to overlay plots within R. 
This requires proper coordinate + scaling.""" + + homepage = "https://cran.r-project.org/package=RgoogleMaps" + url = "https://cran.r-project.org/src/contrib/RgoogleMaps_1.2.0.7.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/RgoogleMaps" + + version('1.2.0.7', '2e1df804f0331b4122d841105f0c7ea5') + + extends('R') + + depends_on('r-png', type=nolink) + depends_on('r-rjsonio', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-rjson/package.py b/var/spack/repos/builtin/packages/r-rjson/package.py new file mode 100644 index 00000000000..94ca45f485d --- /dev/null +++ b/var/spack/repos/builtin/packages/r-rjson/package.py @@ -0,0 +1,41 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RRjson(Package): + """Converts R object into JSON objects and vice-versa.""" + + homepage = "https://cran.r-project.org/package=rjson" + url = "https://cran.r-project.org/src/contrib/rjson_0.2.15.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/rjson" + + version('0.2.15', '87d0e29bc179c6aeaf312b138089f8e9') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-rjsonio/package.py b/var/spack/repos/builtin/packages/r-rjsonio/package.py new file mode 100644 index 00000000000..b56dfbe21d3 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-rjsonio/package.py @@ -0,0 +1,55 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. 
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RRjsonio(Package):
+    """This is a package that allows conversion to and from data in Javascript
+    object notation (JSON) format. This allows R objects to be inserted into
+    Javascript/ECMAScript/ActionScript code and allows R programmers to read
+    and convert JSON content to R objects. This is an alternative to rjson
+    package. Originally, that was too slow for converting large R objects to
+    JSON and was not extensible. rjson's performance is now similar to this
+    package, and perhaps slightly faster in some cases. This package uses
+    methods and is readily extensible by defining methods for different
+    classes, vectorized operations, and C code and callbacks to R functions for
+    deserializing JSON objects to R. The two packages intentionally share the
+    same basic interface. This package (RJSONIO) has many additional options to
+    allow customizing the generation and processing of JSON content. This
+    package uses libjson rather than implementing yet another JSON parser. The
+    aim is to support other general projects by building on their work,
+    providing feedback and benefit from their ongoing development."""
+
+    homepage = "https://cran.r-project.org/package=RJSONIO"
+    url = "https://cran.r-project.org/src/contrib/RJSONIO_1.3-0.tar.gz"
+    list_url = "https://cran.r-project.org/src/contrib/Archive/RJSONIO"
+
+    version('1.3-0', '72c395622ba8d1435ec43849fd32c830')
+
+    extends('R')
+
+    def install(self, spec, prefix):
+        R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir),
+          self.stage.source_path)

From 41cd8618619633fd6c40a53ff5c31caf0c3dcd14 Mon Sep 17 00:00:00 2001
From: Glenn Johnson
Date: Sat, 30 Jul 2016 18:25:43 -0500
Subject: [PATCH 116/284] New package - r-quantmod

Quantitative Financial modelling framework.
---
 .../builtin/packages/r-quantmod/package.py | 46 +++++++++++++++++++
 .../repos/builtin/packages/r-ttr/package.py | 44 ++++++++++++++++++
 2 files changed, 90 insertions(+)
 create mode 100644 var/spack/repos/builtin/packages/r-quantmod/package.py
 create mode 100644 var/spack/repos/builtin/packages/r-ttr/package.py

diff --git a/var/spack/repos/builtin/packages/r-quantmod/package.py b/var/spack/repos/builtin/packages/r-quantmod/package.py
new file mode 100644
index 00000000000..ecfbf490554
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-quantmod/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RQuantmod(Package): + """Specify, build, trade, and analyse quantitative financial trading + strategies.""" + + homepage = "http://www.quantmod.com/" + url = "https://cran.r-project.org/src/contrib/quantmod_0.4-5.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/quantmod" + + version('0.4-5', 'cab3c409e4de3df98a20f1ded60f3631') + + extends('R') + + depends_on('r-xts', type=nolink) + depends_on('r-zoo', type=nolink) + depends_on('r-ttr', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-ttr/package.py b/var/spack/repos/builtin/packages/r-ttr/package.py new file mode 100644 index 00000000000..c9b40a8262d --- /dev/null +++ b/var/spack/repos/builtin/packages/r-ttr/package.py @@ -0,0 +1,44 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RTtr(Package): + """Functions and data to construct technical trading rules with R.""" + + homepage = "https://github.com/joshuaulrich/TTR" + url = "https://cran.r-project.org/src/contrib/TTR_0.23-1.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/TTR" + + version('0.23-1', '35f693ac0d97e8ec742ebea2da222986') + + extends('R') + + depends_on('r-xts', type=nolink) + depends_on('r-zoo', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) From 1b04b8be011ff844a4c108b96fe8fa7fcb3ef698 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sun, 31 Jul 2016 14:11:04 -0500 Subject: [PATCH 117/284] New package - r-datatable Extension of Data.frame --- .../repos/builtin/packages/r-chron/package.py | 41 +++++++++++++++++ .../builtin/packages/r-datatable/package.py | 46 +++++++++++++++++++ 2 files changed, 87 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-chron/package.py create mode 100644 var/spack/repos/builtin/packages/r-datatable/package.py diff --git a/var/spack/repos/builtin/packages/r-chron/package.py b/var/spack/repos/builtin/packages/r-chron/package.py new file mode 100644 index 00000000000..9cd9d76e9e6 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-chron/package.py @@ -0,0 +1,41 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RChron(Package):
+    """Chronological objects which can handle dates and times."""
+
+    homepage = "https://cran.r-project.org/package=chron"
+    url = "https://cran.r-project.org/src/contrib/chron_2.3-47.tar.gz"
+    list_url = "https://cran.r-project.org/src/contrib/Archive/chron"
+
+    version('2.3-47', 'b8890cdc5f2337f8fd775b0becdcdd1f')
+
+    extends('R')
+
+    def install(self, spec, prefix):
+        R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir),
+          self.stage.source_path)
diff --git a/var/spack/repos/builtin/packages/r-datatable/package.py b/var/spack/repos/builtin/packages/r-datatable/package.py
new file mode 100644
index 00000000000..8b506433418
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-datatable/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RDatatable(Package):
+    """Fast aggregation of large data (e.g. 100GB in RAM), fast ordered joins,
+    fast add/modify/delete of columns by group using no copies at all, list
+    columns and a fast file reader (fread). Offers a natural and flexible
+    syntax, for faster development."""
+
+    homepage = "https://github.com/Rdatatable/data.table/wiki"
+    url = "https://cran.r-project.org/src/contrib/data.table_1.9.6.tar.gz"
+    list_url = "https://cran.r-project.org/src/contrib/Archive/data.table"
+
+    version('1.9.6', 'b1c0c7cce490bdf42ab288541cc55372')
+
+    extends('R')
+
+    depends_on('r-chron', type=nolink)
+
+    def install(self, spec, prefix):
+        R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir),
+          self.stage.source_path)

From 0662b953f0b6479efddf7eefd0386f058695b478 Mon Sep 17 00:00:00 2001
From: Glenn Johnson
Date: Sun, 31 Jul 2016 14:25:17 -0500
Subject: [PATCH 118/284] New package - r-xml

Tools for parsing and generating XML within R and S-Plus.
---
 .../repos/builtin/packages/r-xml/package.py | 45 +++++++++++++++++++
 1 file changed, 45 insertions(+)
 create mode 100644 var/spack/repos/builtin/packages/r-xml/package.py

diff --git a/var/spack/repos/builtin/packages/r-xml/package.py b/var/spack/repos/builtin/packages/r-xml/package.py
new file mode 100644
index 00000000000..591c887f320
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-xml/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RXml(Package):
+    """Many approaches for both reading and creating XML (and HTML) documents
+    (including DTDs), both local and accessible via HTTP or FTP. Also offers
+    access to an 'XPath' "interpreter"."""
+
+    homepage = "http://www.omegahat.net/RSXML"
+    url = "https://cran.r-project.org/src/contrib/XML_3.98-1.4.tar.gz"
+    list_url = "https://cran.r-project.org/src/contrib/Archive/XML"
+
+    version('3.98-1.4', '1a7f3ce6f264eeb109bfa57bedb26c14')
+
+    extends('R')
+
+    depends_on('libxml2')
+
+    def install(self, spec, prefix):
+        R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir),
+          self.stage.source_path)

From 58a6039c0a136566c6649223de025aa08a9ec341 Mon Sep 17 00:00:00 2001
From: Glenn Johnson
Date: Sun, 31 Jul 2016 14:40:46 -0500
Subject: [PATCH 119/284] New package - r-testthat

A unit testing system for R.
---
 .../builtin/packages/r-crayon/package.py | 45 +++++++++++++++++
 .../builtin/packages/r-praise/package.py | 41 ++++++++++++++++
 .../builtin/packages/r-testthat/package.py | 48 +++++++++++++++++++
 3 files changed, 134 insertions(+)
 create mode 100644 var/spack/repos/builtin/packages/r-crayon/package.py
 create mode 100644 var/spack/repos/builtin/packages/r-praise/package.py
 create mode 100644 var/spack/repos/builtin/packages/r-testthat/package.py

diff --git a/var/spack/repos/builtin/packages/r-crayon/package.py b/var/spack/repos/builtin/packages/r-crayon/package.py
new file mode 100644
index 00000000000..1e0befbca91
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-crayon/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RCrayon(Package): + """Colored terminal output on terminals that support 'ANSI' color and + highlight codes. It also works in 'Emacs' 'ESS'. 'ANSI' color support is + automatically detected. Colors and highlighting can be combined and nested. + New styles can also be created easily. This package was inspired by the + 'chalk' 'JavaScript' project.""" + + homepage = "https://github.com/gaborcsardi/crayon" + url = "https://cran.r-project.org/src/contrib/crayon_1.3.2.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/crayon" + + version('1.3.2', 'fe29c6204d2d6ff4c2f9d107a03d0cb9') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-praise/package.py b/var/spack/repos/builtin/packages/r-praise/package.py new file mode 100644 index 00000000000..102d86d2fad --- /dev/null +++ b/var/spack/repos/builtin/packages/r-praise/package.py @@ -0,0 +1,41 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RPraise(Package): + """Build friendly R packages that praise their users if they have done + something good, or they just need it to feel better.""" + + homepage = "https://github.com/gaborcsardi/praise" + url = "https://cran.r-project.org/src/contrib/praise_1.0.0.tar.gz" + + version('1.0.0', '9318724cec0454884b5f762bee2da6a1') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-testthat/package.py b/var/spack/repos/builtin/packages/r-testthat/package.py new file mode 100644 index 00000000000..60dfd2afcd5 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-testthat/package.py @@ -0,0 +1,48 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RTestthat(Package): + """A unit testing system designed to be fun, flexible and easy to set + up.""" + + homepage = "https://github.com/hadley/testthat" + url = "https://cran.r-project.org/src/contrib/testthat_1.0.2.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/testthat" + + version('1.0.2', '6c6a90c8db860292df5784a70e07b8dc') + + extends('R') + + depends_on('r-digest', type=nolink) + depends_on('r-crayon', type=nolink) + depends_on('r-praise', type=nolink) + depends_on('r-magrittr', type=nolink) + depends_on('r-R6', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) From f39e570f36fed12500b2577b471230dfa8a7f451 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sun, 31 Jul 2016 14:52:16 -0500 Subject: [PATCH 120/284] New package - r-roxygen2 In-source documentation for R. 
--- .../repos/builtin/packages/r-brew/package.py | 43 +++++++++++++++++ .../builtin/packages/r-roxygen2/package.py | 48 +++++++++++++++++++ 2 files changed, 91 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-brew/package.py create mode 100644 var/spack/repos/builtin/packages/r-roxygen2/package.py diff --git a/var/spack/repos/builtin/packages/r-brew/package.py b/var/spack/repos/builtin/packages/r-brew/package.py new file mode 100644 index 00000000000..111606d5769 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-brew/package.py @@ -0,0 +1,43 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RBrew(Package): + """brew implements a templating framework for mixing text and R code for + report generation. brew template syntax is similar to PHP, Ruby's erb + module, Java Server Pages, and Python's psp module.""" + + homepage = "https://cran.r-project.org/package=brew" + url = "https://cran.r-project.org/src/contrib/brew_1.0-6.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/brew" + + version('1.0-6', '4aaca5e6ec145e0fc0fe6375ce1f3806') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-roxygen2/package.py b/var/spack/repos/builtin/packages/r-roxygen2/package.py new file mode 100644 index 00000000000..4f4b8dcafaa --- /dev/null +++ b/var/spack/repos/builtin/packages/r-roxygen2/package.py @@ -0,0 +1,48 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. 
+# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RRoxygen2(Package): + """A 'Doxygen'-like in-source documentation system for Rd, collation, and + 'NAMESPACE' files.""" + + homepage = "https://github.com/klutometis/roxygen" + url = "https://cran.r-project.org/src/contrib/roxygen2_5.0.1.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/roxygen2" + + version('5.0.1', 'df5bdbc12fda372e427710ef1cd92ed7') + + extends('R') + + depends_on('r-stringr', type=nolink) + depends_on('r-stringi', type=nolink) + depends_on('r-brew', type=nolink) + depends_on('r-digest', type=nolink) + depends_on('r-rcpp', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) From dbf799bbf90c1a2e77ca9911ee55a666748504d7 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sun, 31 Jul 2016 15:07:40 -0500 Subject: [PATCH 121/284] New package - r-gdata Data manipulation tools. --- .../repos/builtin/packages/r-gdata/package.py | 59 ++++++++++++++++++ .../builtin/packages/r-gtools/package.py | 60 +++++++++++++++++++ 2 files changed, 119 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-gdata/package.py create mode 100644 var/spack/repos/builtin/packages/r-gtools/package.py diff --git a/var/spack/repos/builtin/packages/r-gdata/package.py b/var/spack/repos/builtin/packages/r-gdata/package.py new file mode 100644 index 00000000000..0a09a1145bd --- /dev/null +++ b/var/spack/repos/builtin/packages/r-gdata/package.py @@ -0,0 +1,59 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RGdata(Package): + """Various R programming tools for data manipulation, including: - medical + unit conversions ('ConvertMedUnits', 'MedUnits'), - combining objects + ('bindData', 'cbindX', 'combine', 'interleave'), - character vector + operations ('centerText', 'startsWith', 'trim'), - factor manipulation + ('levels', 'reorder.factor', 'mapLevels'), - obtaining information about R + objects ('object.size', 'elem', 'env', 'humanReadable', 'is.what', 'll', + 'keep', 'ls.funs', 'Args','nPairs', 'nobs'), - manipulating MS-Excel + formatted files ('read.xls', 'installXLSXsupport', 'sheetCount', + 'xlsFormats'), - generating fixed-width format files ('write.fwf'), - + extricating components of date & time objects ('getYear', 'getMonth', + 'getDay', 'getHour', 'getMin', 'getSec'), - operations on columns of data + frames ('matchcols', 'rename.vars'), - matrix operations ('unmatrix', + 'upperTriangle', 'lowerTriangle'), - operations on vectors ('case', + 'unknownToNA', 'duplicated2', 'trimSum'), - operations on data frames + ('frameApply', 'wideByFactor'), - value of last evaluated expression + ('ans'), and - wrapper for 'sample' that ensures consistent behavior for + both scalar and vector arguments ('resample').""" + + homepage = "https://cran.r-project.org/package=gdata" + url = "https://cran.r-project.org/src/contrib/gdata_2.17.0.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/gdata" + + version('2.17.0', 'c716b663b9dc16ad8cafe6acc781a75f') + + extends('R') + + depends_on('r-gtools', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-gtools/package.py b/var/spack/repos/builtin/packages/r-gtools/package.py new file mode 100644 index 00000000000..367bb30f870 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-gtools/package.py @@ -0,0 +1,60 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RGtools(Package): + """Functions to assist in R programming, including: - assist in developing, + updating, and maintaining R and R packages ('ask', 'checkRVersion', + 'getDependencies', 'keywords', 'scat'), - calculate the logit and inverse + logit transformations ('logit', 'inv.logit'), - test if a value is missing, + empty or contains only NA and NULL values ('invalid'), - manipulate R's + .Last function ('addLast'), - define macros ('defmacro'), - detect odd and + even integers ('odd', 'even'), - convert strings containing non-ASCII + characters (like single quotes) to plain ASCII ('ASCIIfy'), - perform a + binary search ('binsearch'), - sort strings containing both numeric and + character components ('mixedsort'), - create a factor variable from the + quantiles of a continuous variable ('quantcut'), - enumerate permutations + and combinations ('combinations', 'permutation'), - calculate and convert + between fold-change and log-ratio ('foldchange', 'logratio2foldchange', + 'foldchange2logratio'), - calculate probabilities and generate random + numbers from Dirichlet distributions ('rdirichlet', 'ddirichlet'), - apply + a function over adjacent subsets of a vector ('running'), - modify the + TCP\_NODELAY ('de-Nagle') flag for socket objects, - efficient 'rbind' of + data frames, even if the column names don't match ('smartbind'), - generate + significance stars from p-values ('stars.pval'), - convert characters + to/from ASCII codes.""" + + homepage = "https://cran.r-project.org/package=gtools" + url = "https://cran.r-project.org/src/contrib/gtools_3.5.0.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/gtools" + + version('3.5.0', '45f8800c0336d35046641fbacc56bdbb') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) From 05e5276aec35003989f3002127d14a8e3d6758d8 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sun, 31 Jul 2016 15:15:55 -0500 Subject: [PATCH 122/284] New package - r-plotrix Various plotting functions. --- .../builtin/packages/r-plotrix/package.py | 41 +++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-plotrix/package.py diff --git a/var/spack/repos/builtin/packages/r-plotrix/package.py b/var/spack/repos/builtin/packages/r-plotrix/package.py new file mode 100644 index 00000000000..d1d61dbc4dd --- /dev/null +++ b/var/spack/repos/builtin/packages/r-plotrix/package.py @@ -0,0 +1,41 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. 
+# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RPlotrix(Package): + """Lots of plots, various labeling, axis and color scaling functions.""" + + homepage = "https://cran.r-project.org/package=plotrix" + url = "https://cran.r-project.org/src/contrib/plotrix_3.6-3.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/plotrix" + + version('3.6-3', '23e3e022a13a596e9b77b40afcb4a2ef') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) From be2f2e42db66e67f09a744fbd152f2c0b21bb9b6 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sun, 31 Jul 2016 15:26:13 -0500 Subject: [PATCH 123/284] New package - r-e1071 Misc functions of the Department of Statistics, Probability Theory Group (Formerly: E1071), TU Wien. --- .../repos/builtin/packages/r-class/package.py | 44 ++++++++++++++++++ .../repos/builtin/packages/r-e1071/package.py | 45 +++++++++++++++++++ 2 files changed, 89 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-class/package.py create mode 100644 var/spack/repos/builtin/packages/r-e1071/package.py diff --git a/var/spack/repos/builtin/packages/r-class/package.py b/var/spack/repos/builtin/packages/r-class/package.py new file mode 100644 index 00000000000..f541ea36113 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-class/package.py @@ -0,0 +1,44 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RClass(Package): + """Various functions for classification, including k-nearest neighbour, + Learning Vector Quantization and Self-Organizing Maps.""" + + homepage = "http://www.stats.ox.ac.uk/pub/MASS4/" + url = "https://cran.r-project.org/src/contrib/class_7.3-14.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/class" + + version('7.3-14', '6a21dd206fe4ea29c55faeb65fb2b71e') + + extends('R') + + depends_on('r-mass', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-e1071/package.py b/var/spack/repos/builtin/packages/r-e1071/package.py new file mode 100644 index 00000000000..c2e71664037 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-e1071/package.py @@ -0,0 +1,45 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RE1071(Package): + """Functions for latent class analysis, short time Fourier transform, fuzzy + clustering, support vector machines, shortest path computation, bagged + clustering, naive Bayes classifier, ...""" + + homepage = "https://cran.r-project.org/package=e1071" + url = "https://cran.r-project.org/src/contrib/e1071_1.6-7.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/e1071" + + version('1.6-7', 'd109a7e3dd0c905d420e327a9a921f5a') + + extends('R') + + depends_on('r-class', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) From a9cc6a7d084c150d99ce41f5bfb88a126e1c1502 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sun, 31 Jul 2016 15:39:09 -0500 Subject: [PATCH 124/284] New package - r-tarifx A collection of various utility and convenience functions. 
--- .../builtin/packages/r-tarifx/package.py | 44 +++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-tarifx/package.py diff --git a/var/spack/repos/builtin/packages/r-tarifx/package.py b/var/spack/repos/builtin/packages/r-tarifx/package.py new file mode 100644 index 00000000000..a85aa8baefe --- /dev/null +++ b/var/spack/repos/builtin/packages/r-tarifx/package.py @@ -0,0 +1,44 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RTarifx(Package): + """A collection of various utility and convenience functions.""" + + homepage = "https://cran.r-project.org/package=taRifx" + url = "https://cran.r-project.org/src/contrib/taRifx_1.0.6.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/taRifx" + + version('1.0.6', '7e782e04bd69d929b29f91553382e6a2') + + extends('R') + + depends_on('r-reshape2', type=nolink) + depends_on('r-plyr', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) From f14eb07dc182b27db26747eb58c6aa005cd03b09 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sun, 31 Jul 2016 15:50:19 -0500 Subject: [PATCH 125/284] New package - r-survey Analysis of complex survey samples. --- .../builtin/packages/r-survey/package.py | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-survey/package.py diff --git a/var/spack/repos/builtin/packages/r-survey/package.py b/var/spack/repos/builtin/packages/r-survey/package.py new file mode 100644 index 00000000000..646793f7a44 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-survey/package.py @@ -0,0 +1,47 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RSurvey(Package): + """Summary statistics, two-sample tests, rank tests, generalised linear + models, cumulative link models, Cox models, loglinear models, and general + maximum pseudolikelihood estimation for multistage stratified, + cluster-sampled, unequally weighted survey samples. Variances by Taylor + series linearisation or replicate weights. Post-stratification, + calibration, and raking. Two-phase subsampling designs. Graphics. PPS + sampling without replacement. Principal components, factor analysis.""" + + homepage = "http://r-survey.r-forge.r-project.org/survey/" + url = "https://cran.r-project.org/src/contrib/survey_3.30-3.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/survey" + + version('3.30-3', 'c70cdae9cb43d35abddd11173d64cad0') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) From bfd03db12a3ebb1f0646704feaec703b66f818b9 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sun, 31 Jul 2016 16:18:55 -0500 Subject: [PATCH 126/284] New package - r-np Nonparametric kernel smoothing methods for mixed data types. --- .../repos/builtin/packages/r-boot/package.py | 43 ++++++++++++++++ .../builtin/packages/r-cubature/package.py | 41 +++++++++++++++ .../repos/builtin/packages/r-np/package.py | 50 +++++++++++++++++++ 3 files changed, 134 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-boot/package.py create mode 100644 var/spack/repos/builtin/packages/r-cubature/package.py create mode 100644 var/spack/repos/builtin/packages/r-np/package.py diff --git a/var/spack/repos/builtin/packages/r-boot/package.py b/var/spack/repos/builtin/packages/r-boot/package.py new file mode 100644 index 00000000000..1f2f541fec6 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-boot/package.py @@ -0,0 +1,43 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RBoot(Package): + """Functions and datasets for bootstrapping from the book "Bootstrap + Methods and Their Application" by A. C. Davison and D. V. Hinkley (1997, + CUP), originally written by Angelo Canty for S.""" + + homepage = "https://cran.r-project.org/package=boot" + url = "https://cran.r-project.org/src/contrib/boot_1.3-18.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/boot" + + version('1.3-18', '711dd58af14e1027eb8377d9202e9b6f') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-cubature/package.py b/var/spack/repos/builtin/packages/r-cubature/package.py new file mode 100644 index 00000000000..1d8f837922b --- /dev/null +++ b/var/spack/repos/builtin/packages/r-cubature/package.py @@ -0,0 +1,41 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RCubature(Package): + """Adaptive multivariate integration over hypercubes""" + + homepage = "https://cran.r-project.org/package=cubature" + url = "https://cran.r-project.org/src/contrib/cubature_1.1-2.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/cubature" + + version('1.1-2', '5617e1d82baa803a3814d92461da45c9') + + extends('R') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) diff --git a/var/spack/repos/builtin/packages/r-np/package.py b/var/spack/repos/builtin/packages/r-np/package.py new file mode 100644 index 00000000000..bff7d0f1952 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-np/package.py @@ -0,0 +1,50 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. 
+# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RNp(Package): + """This package provides a variety of nonparametric (and semiparametric) + kernel methods that seamlessly handle a mix of continuous, unordered, and + ordered factor data types. We would like to gratefully acknowledge support + from the Natural Sciences and Engineering Research Council of Canada + (NSERC:www.nserc.ca), the Social Sciences and Humanities Research Council + of Canada (SSHRC:www.sshrc.ca), and the Shared Hierarchical Academic + Research Computing Network (SHARCNET:www.sharcnet.ca).""" + + homepage = "https://github.com/JeffreyRacine/R-Package-np/" + url = "https://cran.r-project.org/src/contrib/np_0.60-2.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/np" + + version('0.60-2', 'e094d52ddff7280272b41e6cb2c74389') + + extends('R') + + depends_on('r-boot', type=nolink) + depends_on('r-cubature', type=nolink) + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) From 8815f0a0b5d557b871b1ca01084aec18379c112b Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sun, 31 Jul 2016 17:12:29 -0500 Subject: [PATCH 127/284] New package - r-ncdf4 Interface to Unidata netCDF (version 4 or earlier) format data files. --- .../repos/builtin/packages/r-ncdf4/package.py | 56 +++++++++++++++++++ 1 file changed, 56 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-ncdf4/package.py diff --git a/var/spack/repos/builtin/packages/r-ncdf4/package.py b/var/spack/repos/builtin/packages/r-ncdf4/package.py new file mode 100644 index 00000000000..11bf7abb38d --- /dev/null +++ b/var/spack/repos/builtin/packages/r-ncdf4/package.py @@ -0,0 +1,56 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. 
+# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RNcdf4(Package): + """Provides a high-level R interface to data files written using Unidata's + netCDF library (version 4 or earlier), which are binary data files that are + portable across platforms and include metadata information in addition to + the data sets. Using this package, netCDF files (either version 4 or + "classic" version 3) can be opened and data sets read in easily. It is also + easy to create new netCDF dimensions, variables, and files, in either + version 3 or 4 format, and manipulate existing netCDF files. This package + replaces the former ncdf package, which only worked with netcdf version 3 + files. For various reasons the names of the functions have had to be + changed from the names in the ncdf package. The old ncdf package is still + available at the URL given below, if you need to have backward + compatibility. It should be possible to have both the ncdf and ncdf4 + packages installed simultaneously without a problem. However, the ncdf + package does not provide an interface for netcdf version 4 files.""" + + homepage = "http://cirrus.ucsd.edu/~pierce/ncdf" + url = "https://cran.r-project.org/src/contrib/ncdf4_1.15.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/ncdf4" + + version('1.15', 'cd60dadbae3be31371e1ed40ddeb420a') + + extends('R') + + depends_on('netcdf') + + def install(self, spec, prefix): + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path) From ec09dfe5d13531a8d2f78ded5e03b0615f51e59c Mon Sep 17 00:00:00 2001 From: alalazo Date: Sat, 30 Jul 2016 11:00:47 +0200 Subject: [PATCH 128/284] cp2k : added package --- .../repos/builtin/packages/cp2k/package.py | 161 ++++++++++++++++++ 1 file changed, 161 insertions(+) create mode 100644 var/spack/repos/builtin/packages/cp2k/package.py diff --git a/var/spack/repos/builtin/packages/cp2k/package.py b/var/spack/repos/builtin/packages/cp2k/package.py new file mode 100644 index 00000000000..8fdd1e0ca35 --- /dev/null +++ b/var/spack/repos/builtin/packages/cp2k/package.py @@ -0,0 +1,161 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import os +import shutil +import copy + +from spack import * + + +class Cp2k(Package): + """CP2K is a quantum chemistry and solid state physics software package + that can perform atomistic simulations of solid state, liquid, molecular, + periodic, material, crystal, and biological systems + """ + homepage = 'https://www.cp2k.org' + url = 'https://sourceforge.net/projects/cp2k/files/cp2k-3.0.tar.bz2' + + version('3.0', 'c05bc47335f68597a310b1ed75601d35') + + variant('mpi', default=True, description='Enable MPI support') + + depends_on('python') # Build dependency + + depends_on('lapack') + depends_on('blas') + depends_on('fftw') + + depends_on('mpi', when='+mpi') + depends_on('scalapack', when='+mpi') + + # TODO : add dependency on libint + # TODO : add dependency on libsmm, libxsmm + # TODO : add dependency on elpa + # TODO : add dependency on CUDA + # TODO : add dependency on PEXSI + # TODO : add dependency on QUIP + # TODO : add dependency on plumed + # TODO : add dependency on libwannier90 + + parallel = False + + def install(self, spec, prefix): + # Construct a proper filename for the architecture file + cp2k_architecture = '{0.architecture}-{0.compiler.name}'.format(spec) + cp2k_version = 'sopt' if '~mpi' in spec else 'popt' + makefile_basename = '.'.join([cp2k_architecture, cp2k_version]) + makefile = join_path('arch', makefile_basename) + + # Write the custom makefile + with open(makefile, 'w') as mkf: + mkf.write('CC = {0.compiler.cc}\n'.format(self)) + if '%intel' in self.spec: + # CPP is a commented command in Intel arch of CP2K + # This is the hack through which cp2k developers avoid doing : + # + # ${CPP} .F > .f90 + # + # and use `-fpp` instead + mkf.write('CPP = # {0.compiler.cc} -P\n'.format(self)) + mkf.write('AR = xiar -r\n') + else: + mkf.write('CPP = {0.compiler.cc} -E\n'.format(self)) + mkf.write('AR = ar -r\n') + fc = self.compiler.fc if '~mpi' in spec else self.spec['mpi'].mpifc + mkf.write('FC = {0}\n'.format(fc)) + mkf.write('LD = {0}\n'.format(fc)) + # Optimization flags + optflags = { + 'gcc': ['-O2', + '-ffast-math', + '-ffree-form', + '-ffree-line-length-none', + '-ftree-vectorize', + '-funroll-loops', + '-mtune=native'], + 'intel': ['-O2', + '-pc64', + '-unroll', + '-heap-arrays 64'] + } + cppflags = [ + '-D__FFTW3', + '-I' + spec['fftw'].prefix.include + ] + fcflags = copy.deepcopy(optflags[self.spec.compiler.name]) + fcflags.extend([ + '-I' + spec['fftw'].prefix.include + ]) + ldflags = ['-L' + spec['fftw'].prefix.lib] + libs = [] + # Intel + if '%intel' in self.spec: + cppflags.extend([ + '-D__INTEL_COMPILER', + '-D__MKL' + ]) + fcflags.extend([ + '-diag-disable 8290,8291,10010,10212,11060', + '-free', + '-fpp' + ]) + # MPI + if '+mpi' in self.spec: + cppflags.extend([ + '-D__parallel', + '-D__SCALAPACK' + ]) + ldflags.extend([ + '-L' + spec['scalapack'].prefix.lib + ]) + libs.extend(spec['scalapack'].scalapack_shared_libs) + + # LAPACK / BLAS + ldflags.extend([ + '-L' + spec['lapack'].prefix.lib, + '-L' + spec['blas'].prefix.lib + ]) + libs.extend([ + join_path(spec['fftw'].prefix.lib, 'libfftw3.so'), + spec['lapack'].lapack_shared_lib, + spec['blas'].blas_shared_lib + ]) + 
+ # Write compiler flags to file + mkf.write('CPPFLAGS = {0}\n'.format(' '.join(cppflags))) + mkf.write('FCFLAGS = {0}\n'.format(' '.join(fcflags))) + mkf.write('LDFLAGS = {0}\n'.format(' '.join(ldflags))) + mkf.write('LIBS = {0}\n'.format(' '.join(libs))) + + with working_dir('makefiles'): + # Apparently the Makefile bases its paths on PWD + # so we need to set PWD = os.getcwd() + pwd_backup = env['PWD'] + env['PWD'] = os.getcwd() + make('ARCH={0}'.format(cp2k_architecture), + 'VERSION={0}'.format(cp2k_version)) + env['PWD'] = pwd_backup + exe_dir = join_path('exe', cp2k_architecture) + shutil.copytree(exe_dir, self.prefix.bin) From cca240c8f9e437cc9c570518b88075e0ebd48965 Mon Sep 17 00:00:00 2001 From: Paul Hopkins Date: Mon, 25 Jul 2016 09:43:36 +0100 Subject: [PATCH 129/284] Add concretize_preferences tests --- lib/spack/spack/test/__init__.py | 6 +- .../spack/test/concretize_preferences.py | 106 ++++++++++++++++++ .../builtin.mock/packages/mpileaks/package.py | 3 + 3 files changed, 112 insertions(+), 3 deletions(-) create mode 100644 lib/spack/spack/test/concretize_preferences.py diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index a849d5f3502..3439764ee6e 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -39,9 +39,9 @@ 'pattern', 'python_version', 'git_fetch', 'svn_fetch', 'hg_fetch', 'mirror', 'modules', 'url_extrapolate', 'cc', 'link_tree', 'spec_yaml', 'optional_deps', 'make_executable', 'build_system_guess', 'lock', - 'database', 'namespace_trie', 'yaml', 'sbang', 'environment', 'cmd.find', - 'cmd.uninstall', 'cmd.test_install', 'cmd.test_compiler_cmd', - 'cmd.module' + 'database', 'namespace_trie', 'yaml', 'sbang', 'environment', + 'concretize_preferences', 'cmd.find', 'cmd.uninstall', 'cmd.test_install', + 'cmd.test_compiler_cmd', 'cmd.module' ] diff --git a/lib/spack/spack/test/concretize_preferences.py b/lib/spack/spack/test/concretize_preferences.py new file mode 100644 index 00000000000..2c8bedc33fa --- /dev/null +++ b/lib/spack/spack/test/concretize_preferences.py @@ -0,0 +1,106 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import spack +import spack.architecture +from spack.test.mock_packages_test import * +from tempfile import mkdtemp + + +class ConcretizePreferencesTest(MockPackagesTest): + """Test concretization preferences are being applied correctly. + """ + + def setUp(self): + """Create config section to store concretization preferences + """ + super(ConcretizePreferencesTest, self).setUp() + self.tmp_dir = mkdtemp('.tmp', 'spack-config-test-') + spack.config.ConfigScope('concretize', + os.path.join(self.tmp_dir, 'concretize')) + + def tearDown(self): + super(ConcretizePreferencesTest, self).tearDown() + shutil.rmtree(self.tmp_dir, True) + spack.pkgsort = spack.PreferredPackages() + + def concretize(self, abstract_spec): + return Spec(abstract_spec).concretized() + + def update_packages(self, pkgname, section, value): + """Update config and reread package list""" + conf = {pkgname: {section: value}} + spack.config.update_config('packages', conf, 'concretize') + spack.pkgsort = spack.PreferredPackages() + + def assert_variant_values(self, spec, **variants): + concrete = self.concretize(spec) + for variant, value in variants.items(): + self.assertEqual(concrete.variants[variant].value, value) + + def test_preferred_variants(self): + """Test preferred variants are applied correctly + """ + self.update_packages('mpileaks', 'variants', + '~debug~opt+shared+static') + self.assert_variant_values('mpileaks', debug=False, opt=False, + shared=True, static=True) + + self.update_packages('mpileaks', 'variants', + ['+debug', '+opt', '~shared', '-static']) + self.assert_variant_values('mpileaks', debug=True, opt=True, + shared=False, static=False) + + def test_preferred_compilers(self): + """Test preferred compilers are applied correctly + """ + self.update_packages('mpileaks', 'compiler', ['clang@3.3']) + spec = self.concretize('mpileaks') + self.assertEqual(spec.compiler, spack.spec.CompilerSpec('clang@3.3')) + + self.update_packages('mpileaks', 'compiler', ['gcc@4.5.0']) + spec = self.concretize('mpileaks') + self.assertEqual(spec.compiler, spack.spec.CompilerSpec('gcc@4.5.0')) + + def test_preferred_versions(self): + """Test preferred package versions are applied correctly + """ + self.update_packages('mpileaks', 'version', ['2.3']) + spec = self.concretize('mpileaks') + self.assertEqual(spec.version, spack.spec.Version('2.3')) + + self.update_packages('mpileaks', 'version', ['2.2']) + spec = self.concretize('mpileaks') + self.assertEqual(spec.version, spack.spec.Version('2.2')) + + def test_preferred_providers(self): + """Test preferred providers of virtual packages are applied correctly + """ + self.update_packages('all', 'providers', {'mpi': ['mpich']}) + spec = self.concretize('mpileaks') + self.assertTrue('mpich' in spec) + + self.update_packages('all', 'providers', {'mpi': ['zmpi']}) + spec = self.concretize('mpileaks') + self.assertTrue('zmpi', spec) diff --git a/var/spack/repos/builtin.mock/packages/mpileaks/package.py b/var/spack/repos/builtin.mock/packages/mpileaks/package.py index bc26f539ba4..10fbf3845eb 100644 --- a/var/spack/repos/builtin.mock/packages/mpileaks/package.py +++ b/var/spack/repos/builtin.mock/packages/mpileaks/package.py @@ -24,6 +24,7 @@ 
############################################################################## from spack import * + class Mpileaks(Package): homepage = "http://www.llnl.gov" url = "http://www.llnl.gov/mpileaks-1.0.tar.gz" @@ -35,6 +36,8 @@ class Mpileaks(Package): variant('debug', default=False, description='Debug variant') variant('opt', default=False, description='Optimized variant') + variant('shared', default=True, description='Build shared library') + variant('static', default=True, description='Build static library') depends_on("mpi") depends_on("callpath") From aaa5c9e8a473cccbe3b5e7258bc5f9b7ff4d8c6c Mon Sep 17 00:00:00 2001 From: Matt Belhorn Date: Mon, 1 Aug 2016 12:05:29 -0400 Subject: [PATCH 130/284] Writes default module list to terminal when debugging. --- lib/spack/spack/platforms/cray.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/lib/spack/spack/platforms/cray.py b/lib/spack/spack/platforms/cray.py index fd58915c575..d43580df06d 100644 --- a/lib/spack/spack/platforms/cray.py +++ b/lib/spack/spack/platforms/cray.py @@ -1,6 +1,7 @@ import os import re import spack.config +import llnl.util.tty as tty from spack.util.executable import which from spack.architecture import Platform, Target, NoPlatformError from spack.operating_systems.linux_distro import LinuxDistro @@ -19,7 +20,6 @@ def _target_from_clean_env(name): ''' # Based on the incantation: # echo "$(env - USER=$USER /bin/bash -l -c 'module list -lt')" - default_modules = [] targets = [] if name != 'front_end': env = which('env') @@ -30,16 +30,14 @@ def _target_from_clean_env(name): '/bin/sh', '--noprofile', '-c', 'source /etc/profile; module list -lt', output=str, error=str) + default_modules = [i for i in output.splitlines() + if len(i.split()) == 1] + tty.debug("Found default modules:", + *[" " + mod for mod in default_modules]) pattern = 'craype-(?!{0})(\S*)'.format('|'.join(NON_TARGETS)) - for line in output.splitlines(): - if 'craype-' in line: - targets.extend(re.findall(pattern, line)) - if len(line.split()) == 1: - default_modules.append(line) - # if default_modules: - # print 'Found default modules:' - # for defmod in default_modules: - # print ' ', defmod + for mod in default_modules: + if 'craype-' in mod: + targets.extend(re.findall(pattern, mod)) return targets[0] if targets else None From 31b7580b76873a256eab32ba0492aba1489a48b8 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Tue, 26 Jul 2016 13:15:37 -0400 Subject: [PATCH 131/284] Fix jdk package's use of distutils See issue #1364. @citibeth's fix works fine. --- var/spack/repos/builtin/packages/jdk/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/jdk/package.py b/var/spack/repos/builtin/packages/jdk/package.py index 593a6d83404..794966f1c39 100644 --- a/var/spack/repos/builtin/packages/jdk/package.py +++ b/var/spack/repos/builtin/packages/jdk/package.py @@ -25,7 +25,7 @@ # # Author: Justin Too # -import distutils +import distutils.dir_util import spack from spack import * From 1552ed943ac76716cd577aa70ce5f8020f0b2917 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Mon, 25 Jul 2016 13:28:56 -0400 Subject: [PATCH 132/284] Add package for cask Add a package for cask, the emacs project/dependency management tool. Based on [Homebrew's formula][brew]. 
[brew]: https://github.com/Homebrew/homebrew-core/blob/master/Formula/cask.rb --- .../repos/builtin/packages/cask/package.py | 52 +++++++++++++++++++ 1 file changed, 52 insertions(+) create mode 100644 var/spack/repos/builtin/packages/cask/package.py diff --git a/var/spack/repos/builtin/packages/cask/package.py b/var/spack/repos/builtin/packages/cask/package.py new file mode 100644 index 00000000000..b37904eeb45 --- /dev/null +++ b/var/spack/repos/builtin/packages/cask/package.py @@ -0,0 +1,52 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +# +# Based on Homebrew's formula: +# https://github.com/Homebrew/homebrew-core/blob/master/Formula/cask.rb +# +from spack import * +from glob import glob + +class Cask(Package): + """Cask is a project management tool for Emacs Lisp to automate the package + development cycle; development, dependencies, testing, building, + packaging and more.""" + homepage = "http://cask.readthedocs.io/en/latest/" + url = "https://github.com/cask/cask/archive/v0.7.4.tar.gz" + + version('0.7.4', 'c973a7db43bc980dd83759a5864a1260') + + depends_on('emacs', type=nolink) + + def install(self, spec, prefix): + mkdirp(prefix.bin) + install('bin/cask', prefix.bin) + install_tree('templates', join_path(prefix, 'templates')) + for el_file in glob("*.el"): + install(el_file, prefix) + for misc_file in ['COPYING', 'cask.png', 'README.md']: + install(misc_file, prefix) + # disable cask's automatic upgrading feature + touch(join_path(prefix, ".no-upgrade")) From 7d303afd64cc2b9dc758d7c6535d514d50f5d82e Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Mon, 1 Aug 2016 13:42:55 -0400 Subject: [PATCH 133/284] Address flake8 issue --- var/spack/repos/builtin/packages/cask/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/cask/package.py b/var/spack/repos/builtin/packages/cask/package.py index b37904eeb45..67cc48ab2a7 100644 --- a/var/spack/repos/builtin/packages/cask/package.py +++ b/var/spack/repos/builtin/packages/cask/package.py @@ -29,6 +29,7 @@ from spack import * from glob import glob + class Cask(Package): """Cask is a project management tool for Emacs Lisp to automate the package development cycle; development, dependencies, testing, building, From 7e53f4328fcb94190bf7c54287aee95df563af01 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Fri, 1 Jul 2016 16:30:11 -0500 Subject: [PATCH 134/284] Add py-meep package and dependencies --- .../repos/builtin/packages/gc/package.py | 52 ++++++++++ .../repos/builtin/packages/gettext/package.py | 96 +++++++++++++++---- .../repos/builtin/packages/gmp/package.py | 12 ++- .../repos/builtin/packages/guile/package.py | 54 +++++++++++ .../repos/builtin/packages/harminv/package.py | 53 ++++++++++ .../builtin/packages/libatomic-ops/package.py | 41 ++++++++ .../repos/builtin/packages/libctl/package.py | 47 +++++++++ .../builtin/packages/libiconv/package.py | 41 ++++++++ .../builtin/packages/libunistring/package.py | 44 +++++++++ .../repos/builtin/packages/meep/package.py | 96 +++++++++++++++++++ .../builtin/packages/pkg-config/package.py | 21 ++-- .../repos/builtin/packages/py-meep/package.py | 54 +++++++++++ 12 files changed, 580 insertions(+), 31 deletions(-) create mode 100644 var/spack/repos/builtin/packages/gc/package.py create mode 100644 var/spack/repos/builtin/packages/guile/package.py create mode 100644 var/spack/repos/builtin/packages/harminv/package.py create mode 100644 var/spack/repos/builtin/packages/libatomic-ops/package.py create mode 100644 var/spack/repos/builtin/packages/libctl/package.py create mode 100644 var/spack/repos/builtin/packages/libiconv/package.py create mode 100644 var/spack/repos/builtin/packages/libunistring/package.py create mode 100644 var/spack/repos/builtin/packages/meep/package.py create mode 100644 var/spack/repos/builtin/packages/py-meep/package.py diff --git a/var/spack/repos/builtin/packages/gc/package.py b/var/spack/repos/builtin/packages/gc/package.py new file mode 100644 index 00000000000..f03f139410c --- /dev/null +++ b/var/spack/repos/builtin/packages/gc/package.py @@ -0,0 +1,52 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Gc(Package): + """The Boehm-Demers-Weiser conservative garbage collector is a garbage + collecting replacement for C malloc or C++ new.""" + + homepage = "http://www.hboehm.info/gc/" + url = "http://www.hboehm.info/gc/gc_source/gc-7.4.4.tar.gz" + + version('7.4.4', '96d18b0448a841c88d56e4ab3d180297') + + variant('libatomic-ops', default=True, description='Use external libatomic-ops') + + depends_on('libatomic-ops', when='+libatomic-ops') + + def install(self, spec, prefix): + config_args = [ + '--prefix={0}'.format(prefix), + '--with-libatomic-ops={0}'.format( + 'yes' if '+libatomic-ops' in spec else 'no') + ] + + configure(*config_args) + + make() + make('check') + make('install') diff --git a/var/spack/repos/builtin/packages/gettext/package.py b/var/spack/repos/builtin/packages/gettext/package.py index df301aea3cb..9a67c4d6086 100644 --- a/var/spack/repos/builtin/packages/gettext/package.py +++ b/var/spack/repos/builtin/packages/gettext/package.py @@ -24,31 +24,93 @@ ############################################################################## from spack import * + class Gettext(Package): """GNU internationalization (i18n) and localization (l10n) library.""" homepage = "https://www.gnu.org/software/gettext/" url = "http://ftpmirror.gnu.org/gettext/gettext-0.19.7.tar.xz" - version('0.19.7', 'f81e50556da41b44c1d59ac93474dca5') + version('0.19.8.1', 'df3f5690eaa30fd228537b00cb7b7590') + version('0.19.7', 'f81e50556da41b44c1d59ac93474dca5') + + # Recommended variants + variant('libiconv', default=True, description='Use libiconv') + variant('curses', default=True, description='Use libncurses') + variant('libxml2', default=True, description='Use libxml2') + variant('git', default=True, description='Enable git support') + variant('tar', default=True, description='Enable tar support') + variant('gzip', default=True, description='Enable gzip support') + variant('bzip2', default=True, description='Enable bzip2 support') + variant('xz', default=True, description='Enable xz support') + + # Optional variants + variant('libunistring', default=False, description='Use libunistring') + + # Recommended dependencies + depends_on('libiconv', when='+libiconv') + depends_on('ncurses', when='+curses') + depends_on('libxml2', when='+libxml2') + # Java runtime and compiler (e.g. GNU gcj or kaffe) + # C# runtime and compiler (e.g. pnet or mono) + depends_on('git@1.6:', when='+git') + depends_on('tar', when='+tar') + depends_on('gzip', when='+gzip') + depends_on('bzip2', when='+bzip2') + depends_on('xz', when='+xz') + + # Optional dependencies + # depends_on('glib') # circular dependency? 
+ # depends_on('libcroco@0.6.1:') + depends_on('libunistring', when='+libunistring') + # depends_on('cvs') def install(self, spec, prefix): - options = ['--disable-dependency-tracking', - '--disable-silent-rules', - '--disable-debug', - '--prefix=%s' % prefix, - '--with-included-gettext', - '--with-included-glib', - '--with-included-libcroco', - '--with-included-libunistring', - '--with-emacs', - '--with-lispdir=%s/emacs/site-lisp/gettext' % prefix.share, - '--disable-java', - '--disable-csharp', - '--without-git', # Don't use VCS systems to create these archives - '--without-cvs', - '--without-xz'] + config_args = [ + '--prefix={0}'.format(prefix), + '--disable-java', + '--disable-csharp', + '--with-included-glib', + '--with-included-gettext', + '--with-included-libcroco', + '--without-emacs', + '--with-lispdir=%s/emacs/site-lisp/gettext' % prefix.share, + '--without-cvs' + ] - configure(*options) + if '+libiconv' in spec: + config_args.append('--with-libiconv-prefix={0}'.format( + spec['libiconv'].prefix)) + else: + config_args.append('--without-libiconv-prefix') + + if '+curses' in spec: + config_args.append('--with-ncurses-prefix={0}'.format( + spec['ncurses'].prefix)) + else: + config_args.append('--disable-curses') + + if '+libxml2' in spec: + config_args.append('--with-libxml2-prefix={0}'.format( + spec['libxml2'].prefix)) + else: + config_args.append('--with-included-libxml') + + if '+git' not in spec: + config_args.append('--without-git') + + if '+bzip2' not in spec: + config_args.append('--without-bzip2') + + if '+xz' not in spec: + config_args.append('--without-xz') + + if '+libunistring' in spec: + config_args.append('--with-libunistring-prefix={0}'.format( + spec['libunistring'].prefix)) + else: + config_args.append('--with-included-libunistring') + + configure(*config_args) make() make("install") diff --git a/var/spack/repos/builtin/packages/gmp/package.py b/var/spack/repos/builtin/packages/gmp/package.py index e2c2892f18c..d85330dd6ea 100644 --- a/var/spack/repos/builtin/packages/gmp/package.py +++ b/var/spack/repos/builtin/packages/gmp/package.py @@ -24,16 +24,18 @@ ############################################################################## from spack import * + class Gmp(Package): - """GMP is a free library for arbitrary precision arithmetic, - operating on signed integers, rational numbers, and - floating-point numbers.""" + """GMP is a free library for arbitrary precision arithmetic, operating + on signed integers, rational numbers, and floating-point numbers.""" + homepage = "https://gmplib.org" url = "https://gmplib.org/download/gmp/gmp-6.0.0a.tar.bz2" - version('6.1.0' , '86ee6e54ebfc4a90b643a65e402c4048') + version('6.1.1', '4c175f86e11eb32d8bf9872ca3a8e11d') + version('6.1.0', '86ee6e54ebfc4a90b643a65e402c4048') version('6.0.0a', 'b7ff2d88cae7f8085bd5006096eed470') - version('6.0.0' , '6ef5869ae735db9995619135bd856b84') + version('6.0.0', '6ef5869ae735db9995619135bd856b84') depends_on("m4", type='build') diff --git a/var/spack/repos/builtin/packages/guile/package.py b/var/spack/repos/builtin/packages/guile/package.py new file mode 100644 index 00000000000..0392fd43e04 --- /dev/null +++ b/var/spack/repos/builtin/packages/guile/package.py @@ -0,0 +1,54 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. 
+# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Guile(Package): + """Guile is designed to help programmers create flexible applications + that can be extended by users or other programmers with plug-ins, + modules, or scripts.""" + + homepage = "https://www.gnu.org/software/guile/" + url = "ftp://ftp.gnu.org/gnu/guile/guile-2.0.11.tar.gz" + + version('2.0.11', 'e532c68c6f17822561e3001136635ddd') + + variant('readline', default=True, description='Use the readline library') + + depends_on('gmp@4.2:') + depends_on('libiconv') + depends_on('gettext') + depends_on('libtool@1.5.6:') + depends_on('libunistring@0.9.3:') + depends_on('gc@7.0:') + depends_on('libffi') + depends_on('readline', when='+readline') + depends_on('pkg-config') + + def install(self, spec, prefix): + configure('--prefix={0}'.format(prefix)) + + make() + make('install') diff --git a/var/spack/repos/builtin/packages/harminv/package.py b/var/spack/repos/builtin/packages/harminv/package.py new file mode 100644 index 00000000000..4b07d6687c2 --- /dev/null +++ b/var/spack/repos/builtin/packages/harminv/package.py @@ -0,0 +1,53 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Harminv(Package): + """Harminv is a free program (and accompanying library) to solve the + problem of harmonic inversion - given a discrete-time, finite-length + signal that consists of a sum of finitely-many sinusoids (possibly + exponentially decaying) in a given bandwidth, it determines the + frequencies, decay constants, amplitudes, and phases of those sinusoids.""" + + homepage = "http://ab-initio.mit.edu/wiki/index.php/Harminv" + url = "http://ab-initio.mit.edu/harminv/harminv-1.4.tar.gz" + + version('1.4', 'b95e24a9bc7e07d3d2202d1605e9e86f') + + depends_on('blas') + depends_on('lapack') + + def install(self, spec, prefix): + config_args = [ + '--prefix={0}'.format(prefix), + '--with-blas={0}'.format(spec['blas'].prefix.lib), + '--with-lapack={0}'.format(spec['lapack'].prefix.lib) + ] + + configure(*config_args) + + make() + make('install') diff --git a/var/spack/repos/builtin/packages/libatomic-ops/package.py b/var/spack/repos/builtin/packages/libatomic-ops/package.py new file mode 100644 index 00000000000..bc9be5cc64d --- /dev/null +++ b/var/spack/repos/builtin/packages/libatomic-ops/package.py @@ -0,0 +1,41 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class LibatomicOps(Package): + """This package provides semi-portable access to hardware-provided + atomic memory update operations on a number architectures.""" + + homepage = "https://github.com/ivmai/libatomic_ops" + url = "http://www.hboehm.info/gc/gc_source/libatomic_ops-7.4.4.tar.gz" + + version('7.4.4', '426d804baae12c372967a6d183e25af2') + + def install(self, spec, prefix): + configure('--prefix={0}'.format(prefix)) + + make() + make('install') diff --git a/var/spack/repos/builtin/packages/libctl/package.py b/var/spack/repos/builtin/packages/libctl/package.py new file mode 100644 index 00000000000..e0b509c2e7d --- /dev/null +++ b/var/spack/repos/builtin/packages/libctl/package.py @@ -0,0 +1,47 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Libctl(Package): + """libctl is a free Guile-based library implementing flexible + control files for scientific simulations.""" + + homepage = "http://ab-initio.mit.edu/wiki/index.php/Libctl" + url = "http://ab-initio.mit.edu/libctl/libctl-3.2.2.tar.gz" + + version('3.2.2', '5fd7634dc9ae8e7fa70a68473b9cbb68') + + depends_on('guile') + + def install(self, spec, prefix): + configure('--prefix={0}'.format(prefix) + 'GUILE={0}'.format(spec['guile'].prefix)) + #GUILE_CONFIG=/path/to/guile-config + + make() + make('check') + make('install') + make('installcheck') diff --git a/var/spack/repos/builtin/packages/libiconv/package.py b/var/spack/repos/builtin/packages/libiconv/package.py new file mode 100644 index 00000000000..a1e7e966c9f --- /dev/null +++ b/var/spack/repos/builtin/packages/libiconv/package.py @@ -0,0 +1,41 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. 
+# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Libiconv(Package): + """GNU libiconv provides an implementation of the iconv() function + and the iconv program for character set conversion.""" + + homepage = "https://www.gnu.org/software/libiconv/" + url = "http://ftp.gnu.org/pub/gnu/libiconv/libiconv-1.14.tar.gz" + + version('1.14', 'e34509b1623cec449dfeb73d7ce9c6c6') + + def install(self, spec, prefix): + configure('--prefix={0}'.format(prefix)) + + make() + make('install') diff --git a/var/spack/repos/builtin/packages/libunistring/package.py b/var/spack/repos/builtin/packages/libunistring/package.py new file mode 100644 index 00000000000..585590be551 --- /dev/null +++ b/var/spack/repos/builtin/packages/libunistring/package.py @@ -0,0 +1,44 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Libunistring(Package): + """This library provides functions for manipulating Unicode strings + and for manipulating C strings according to the Unicode standard.""" + + homepage = "https://www.gnu.org/software/libunistring/" + url = "http://ftp.gnu.org/gnu/libunistring/libunistring-0.9.6.tar.xz" + + version('0.9.6', 'cb09c398020c27edac10ca590e9e9ef3') + + depends_on('libiconv') + + def install(self, spec, prefix): + configure('--prefix={0}'.format(prefix), + '--with-libiconv-prefix={0}'.format(spec['libiconv'].prefix)) + + make() + make('install') diff --git a/var/spack/repos/builtin/packages/meep/package.py b/var/spack/repos/builtin/packages/meep/package.py new file mode 100644 index 00000000000..e47e9dbc45b --- /dev/null +++ b/var/spack/repos/builtin/packages/meep/package.py @@ -0,0 +1,96 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Meep(Package): + """Meep (or MEEP) is a free finite-difference time-domain (FDTD) simulation + software package developed at MIT to model electromagnetic systems.""" + + homepage = "http://ab-initio.mit.edu/wiki/index.php/Meep" + + version('1.3', '18a5b9e18008627a0411087e0bb60db5') + version('1.1.1', '415e0cd312b6caa22b5dd612490e1ccf') + + variant('blas', default=True, description='Enable BLAS support') + variant('lapack', default=True, description='Enable LAPACK support') + variant('harminv', default=True, description='Enable Harminv support') + variant('guile', default=True, description='Enable Guilde support') + variant('libctl', default=True, description='Enable libctl support') + variant('mpi', default=True, description='Enable MPI support') + variant('hdf5', default=True, description='Enable HDF5 support') + + # Recommended dependencies + depends_on('blas', when='+blas') + depends_on('lapack', when='+lapack') + depends_on('harminv', when='+harminv') + depends_on('guile', when='+guile') + depends_on('libctl@3.2:', when='+libctl') + depends_on('mpi', when='+mpi') + depends_on('hdf5', when='+hdf5') + + def url_for_version(self, version): + base_url = "http://ab-initio.mit.edu/meep" + if version == Version('1.3'): + return "{0}/meep-{1}.tar.gz".format(base_url, version) + else: + return "{0}/old/meep-{1}.tar.gz".format(base_url, version) + + def install(self, spec, prefix): + config_args = ['--prefix={0}'.format(prefix)] + + if '+blas' in spec: + config_args.append('--with-blas={0}'.format( + spec['blas'].prefix.lib)) + else: + config_args.append('--without-blas') + + if '+lapack' in spec: + config_args.append('--with-lapack={0}'.format( + spec['lapack'].prefix.lib)) + else: + config_args.append('--without-lapack') + + if '+libctl' in spec: + config_args.append('--with-libctl={0}'.format( + spec['libctl'].prefix)) + else: + config_args.append('--without-libctl') + + if '+mpi' in spec: + config_args.append('--with-mpi') + else: + config_args.append('--without-mpi') + + if '+hdf5' in spec: + config_args.append('--with-hdf5') + else: + config_args.append('--without-hdf5') + + configure(*config_args) + + make() + make('check') + make('install') diff --git a/var/spack/repos/builtin/packages/pkg-config/package.py b/var/spack/repos/builtin/packages/pkg-config/package.py index ddbc151767d..9227931a0f3 100644 --- a/var/spack/repos/builtin/packages/pkg-config/package.py +++ b/var/spack/repos/builtin/packages/pkg-config/package.py @@ -24,23 +24,26 @@ ############################################################################## from spack import * + class PkgConfig(Package): - """pkg-config is a helper tool used when compiling applications and libraries""" + """pkg-config is a helper tool used when compiling applications + and libraries""" + homepage = "http://www.freedesktop.org/wiki/Software/pkg-config/" url = "http://pkgconfig.freedesktop.org/releases/pkg-config-0.28.tar.gz" - version('0.28', 'aa3c86e67551adc3ac865160e34a2a0d') + version('0.29.1', 'f739a28cae4e0ca291f82d1d41ef107d') + version('0.28', 'aa3c86e67551adc3ac865160e34a2a0d') parallel = False def install(self, spec, prefix): - configure("--prefix=%s" %prefix, - "--enable-shared", - "--with-internal-glib") # 
There's a bootstrapping problem here; - # glib uses pkg-config as well, so - # break the cycle by using the internal - # glib. + configure("--prefix={0}".format(prefix), + "--enable-shared", + # There's a bootstrapping problem here; + # glib uses pkg-config as well, so break + # the cycle by using the internal glib. + "--with-internal-glib") make() make("install") - diff --git a/var/spack/repos/builtin/packages/py-meep/package.py b/var/spack/repos/builtin/packages/py-meep/package.py new file mode 100644 index 00000000000..aefa854fc26 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-meep/package.py @@ -0,0 +1,54 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class PyMeep(Package): + """Python-meep is a wrapper around libmeep. It allows the scripting of + Meep-simulations with Python""" + + homepage = "https://launchpad.net/python-meep" + url = "https://launchpad.net/python-meep/1.4/1.4/+download/python-meep-1.4.2.tar" + + version('1.4.2', 'f8913542d18b0dda92ebc64f0a10ce56') + + variant('mpi', default=True, description='Enable MPI support') + + extends('python') + depends_on('meep@1.1.1') # must be compiled with -fPIC + depends_on('swig@1.3.39:') + depends_on('py-numpy') + depends_on('py-scipy') + depends_on('py-matplotlib') + depends_on('mpi', when='+mpi') # OpenMPI 1.3.3 is recommended + # depends_on('hdf5+mpi', when='+mpi') # ??? + + def install(self, spec, prefix): + setup = 'setup-mpi.py' if '+mpi' in spec else 'setup.py' + + python(setup, 'clean', '--all') + python(setup, 'build_ext') + python(setup, 'install', '--prefix={0}'.format(prefix)) + python(setup, 'bdist') From 9e05fdf4a1c5949fbe2958a954a733eceb426b45 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Tue, 5 Jul 2016 11:12:26 -0500 Subject: [PATCH 135/284] Bug fixes --- var/spack/repos/builtin/packages/gettext/package.py | 3 +-- var/spack/repos/builtin/packages/libctl/package.py | 4 ++-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/gettext/package.py b/var/spack/repos/builtin/packages/gettext/package.py index 9a67c4d6086..8583ae4b5ea 100644 --- a/var/spack/repos/builtin/packages/gettext/package.py +++ b/var/spack/repos/builtin/packages/gettext/package.py @@ -39,7 +39,6 @@ class Gettext(Package): variant('libxml2', default=True, description='Use libxml2') variant('git', default=True, description='Enable git support') variant('tar', default=True, description='Enable tar support') - variant('gzip', default=True, description='Enable gzip support') variant('bzip2', default=True, description='Enable bzip2 support') variant('xz', default=True, description='Enable xz support') @@ -54,7 +53,7 @@ class Gettext(Package): # C# runtime and compiler (e.g. pnet or mono) depends_on('git@1.6:', when='+git') depends_on('tar', when='+tar') - depends_on('gzip', when='+gzip') + # depends_on('gzip', when='+gzip') depends_on('bzip2', when='+bzip2') depends_on('xz', when='+xz') diff --git a/var/spack/repos/builtin/packages/libctl/package.py b/var/spack/repos/builtin/packages/libctl/package.py index e0b509c2e7d..d20eff4f403 100644 --- a/var/spack/repos/builtin/packages/libctl/package.py +++ b/var/spack/repos/builtin/packages/libctl/package.py @@ -37,9 +37,9 @@ class Libctl(Package): depends_on('guile') def install(self, spec, prefix): - configure('--prefix={0}'.format(prefix) + configure('--prefix={0}'.format(prefix), 'GUILE={0}'.format(spec['guile'].prefix)) - #GUILE_CONFIG=/path/to/guile-config + # GUILE_CONFIG=/path/to/guile-config make() make('check') From 3b4820f2904e1a20e6b00c80e901261d354e181f Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Wed, 6 Jul 2016 13:24:23 -0500 Subject: [PATCH 136/284] Patch pkg-config and update installs for various packages --- .../repos/builtin/packages/guile/package.py | 25 +++++++++++--- .../repos/builtin/packages/libctl/package.py | 1 - .../builtin/packages/libiconv/package.py | 4 ++- .../repos/builtin/packages/meep/package.py | 15 +++++---- .../packages/pkg-config/g_date_strftime.patch | 33 +++++++++++++++++++ .../builtin/packages/pkg-config/package.py | 3 ++ .../repos/builtin/packages/py-meep/package.py | 4 +-- 7 files changed, 70 insertions(+), 15 deletions(-) create mode 100644 var/spack/repos/builtin/packages/pkg-config/g_date_strftime.patch diff --git a/var/spack/repos/builtin/packages/guile/package.py b/var/spack/repos/builtin/packages/guile/package.py index 0392fd43e04..4b5f2c57e2f 100644 --- a/var/spack/repos/builtin/packages/guile/package.py +++ b/var/spack/repos/builtin/packages/guile/package.py @@ -26,9 +26,8 @@ class Guile(Package): - """Guile is designed to help programmers create flexible applications - that can be extended by users or other programmers with plug-ins, - modules, or scripts.""" + """Guile is the GNU Ubiquitous Intelligent Language for Extensions, + the official extension language for the GNU operating system.""" homepage = "https://www.gnu.org/software/guile/" url = "ftp://ftp.gnu.org/gnu/guile/guile-2.0.11.tar.gz" @@ -48,7 +47,25 @@ class Guile(Package): depends_on('pkg-config') def install(self, spec, prefix): - configure('--prefix={0}'.format(prefix)) + config_args = [ + '--prefix={0}'.format(prefix), + '--with-libiconv-prefix={0}'.format(spec['libiconv'].prefix), + '--with-libunistring-prefix={0}'.format( + spec['libunistring'].prefix), + '--with-libltdl-prefix={0}'.format(spec['libtool'].prefix), + '--with-libgmp-prefix={0}'.format(spec['gmp'].prefix), + '--with-libintl-prefix={0}'.format(spec['gettext'].prefix) + ] + + if '+readline' in spec: + config_args.append('--with-libreadline-prefix={0}'.format( + spec['readline'].prefix)) + else: + config_args.append('--without-libreadline-prefix') + + configure(*config_args) make() + make('check') make('install') + make('installcheck') diff --git a/var/spack/repos/builtin/packages/libctl/package.py b/var/spack/repos/builtin/packages/libctl/package.py index d20eff4f403..1b48d4dc290 100644 --- a/var/spack/repos/builtin/packages/libctl/package.py +++ b/var/spack/repos/builtin/packages/libctl/package.py @@ -39,7 +39,6 @@ class Libctl(Package): def install(self, spec, prefix): configure('--prefix={0}'.format(prefix), 'GUILE={0}'.format(spec['guile'].prefix)) - # GUILE_CONFIG=/path/to/guile-config make() make('check') diff --git a/var/spack/repos/builtin/packages/libiconv/package.py b/var/spack/repos/builtin/packages/libiconv/package.py index a1e7e966c9f..f2eeb07b264 100644 --- a/var/spack/repos/builtin/packages/libiconv/package.py +++ b/var/spack/repos/builtin/packages/libiconv/package.py @@ -35,7 +35,9 @@ class Libiconv(Package): version('1.14', 'e34509b1623cec449dfeb73d7ce9c6c6') def install(self, spec, prefix): - configure('--prefix={0}'.format(prefix)) + configure('--prefix={0}'.format(prefix), + '--enable-extra-encodings') make() + make('check') make('install') diff --git a/var/spack/repos/builtin/packages/meep/package.py b/var/spack/repos/builtin/packages/meep/package.py index e47e9dbc45b..b88cec5a9c0 100644 --- a/var/spack/repos/builtin/packages/meep/package.py +++ b/var/spack/repos/builtin/packages/meep/package.py @@ -34,13 +34,13 @@ class Meep(Package): version('1.3', 
'18a5b9e18008627a0411087e0bb60db5') version('1.1.1', '415e0cd312b6caa22b5dd612490e1ccf') - variant('blas', default=True, description='Enable BLAS support') - variant('lapack', default=True, description='Enable LAPACK support') - variant('harminv', default=True, description='Enable Harminv support') - variant('guile', default=True, description='Enable Guilde support') - variant('libctl', default=True, description='Enable libctl support') - variant('mpi', default=True, description='Enable MPI support') - variant('hdf5', default=True, description='Enable HDF5 support') + variant('blas', default=True, description='Enable BLAS support') + variant('lapack', default=True, description='Enable LAPACK support') + variant('harminv', default=True, description='Enable Harminv support') + variant('guile', default=False, description='Enable Guilde support') + variant('libctl', default=False, description='Enable libctl support') + variant('mpi', default=True, description='Enable MPI support') + variant('hdf5', default=True, description='Enable HDF5 support') # Recommended dependencies depends_on('blas', when='+blas') @@ -50,6 +50,7 @@ class Meep(Package): depends_on('libctl@3.2:', when='+libctl') depends_on('mpi', when='+mpi') depends_on('hdf5', when='+hdf5') + depends_on('hdf5+mpi', when='+hdf5+mpi') def url_for_version(self, version): base_url = "http://ab-initio.mit.edu/meep" diff --git a/var/spack/repos/builtin/packages/pkg-config/g_date_strftime.patch b/var/spack/repos/builtin/packages/pkg-config/g_date_strftime.patch new file mode 100644 index 00000000000..578cbf4d7c4 --- /dev/null +++ b/var/spack/repos/builtin/packages/pkg-config/g_date_strftime.patch @@ -0,0 +1,33 @@ +From 00148329967adb196138372771052a3f606a6ea3 Mon Sep 17 00:00:00 2001 +From: coypu +Date: Wed, 2 Mar 2016 19:43:10 +0200 +Subject: [PATCH 2/2] gdate: Suppress string format literal warning + +Newer versions of GCC emit an error here, but we know it's safe. 
+https://bugzilla.gnome.org/761550 +--- + glib/glib/gdate.c | 5 +++++ + 1 file changed, 5 insertions(+) + +diff --git a/glib/glib/gdate.c b/glib/glib/gdate.c +index 4aece02..92c34d2 100644 +--- a/glib/glib/gdate.c ++++ b/glib/glib/gdate.c +@@ -2439,6 +2439,9 @@ win32_strftime_helper (const GDate *d, + * + * Returns: number of characters written to the buffer, or 0 the buffer was too small + */ ++#pragma GCC diagnostic push ++#pragma GCC diagnostic ignored "-Wformat-nonliteral" ++ + gsize + g_date_strftime (gchar *s, + gsize slen, +@@ -2549,3 +2552,5 @@ g_date_strftime (gchar *s, + return retval; + #endif + } ++ ++#pragma GCC diagnostic pop +-- +2.7.1 diff --git a/var/spack/repos/builtin/packages/pkg-config/package.py b/var/spack/repos/builtin/packages/pkg-config/package.py index 9227931a0f3..45e8e8b74ef 100644 --- a/var/spack/repos/builtin/packages/pkg-config/package.py +++ b/var/spack/repos/builtin/packages/pkg-config/package.py @@ -37,6 +37,9 @@ class PkgConfig(Package): parallel = False + # The following patch is needed for gcc-6.1 + patch('g_date_strftime.patch') + def install(self, spec, prefix): configure("--prefix={0}".format(prefix), "--enable-shared", diff --git a/var/spack/repos/builtin/packages/py-meep/package.py b/var/spack/repos/builtin/packages/py-meep/package.py index aefa854fc26..311d94b6bd6 100644 --- a/var/spack/repos/builtin/packages/py-meep/package.py +++ b/var/spack/repos/builtin/packages/py-meep/package.py @@ -37,13 +37,13 @@ class PyMeep(Package): variant('mpi', default=True, description='Enable MPI support') extends('python') + depends_on('mpi', when='+mpi') # OpenMPI 1.3.3 is recommended depends_on('meep@1.1.1') # must be compiled with -fPIC + depends_on('meep+mpi', when='+mpi') depends_on('swig@1.3.39:') depends_on('py-numpy') depends_on('py-scipy') depends_on('py-matplotlib') - depends_on('mpi', when='+mpi') # OpenMPI 1.3.3 is recommended - # depends_on('hdf5+mpi', when='+mpi') # ??? def install(self, spec, prefix): setup = 'setup-mpi.py' if '+mpi' in spec else 'setup.py' From 880cbb22172f32e4e9a8c1dfc1c11ce275419a1e Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Fri, 8 Jul 2016 12:56:08 -0500 Subject: [PATCH 137/284] Better testing for MEEP --- var/spack/repos/builtin/packages/meep/package.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/meep/package.py b/var/spack/repos/builtin/packages/meep/package.py index b88cec5a9c0..cf4b72663d0 100644 --- a/var/spack/repos/builtin/packages/meep/package.py +++ b/var/spack/repos/builtin/packages/meep/package.py @@ -41,8 +41,8 @@ class Meep(Package): variant('libctl', default=False, description='Enable libctl support') variant('mpi', default=True, description='Enable MPI support') variant('hdf5', default=True, description='Enable HDF5 support') + variant('gsl', default=False, description='Build with GSL (only necessary for testing)') - # Recommended dependencies depends_on('blas', when='+blas') depends_on('lapack', when='+lapack') depends_on('harminv', when='+harminv') @@ -51,6 +51,7 @@ class Meep(Package): depends_on('mpi', when='+mpi') depends_on('hdf5', when='+hdf5') depends_on('hdf5+mpi', when='+hdf5+mpi') + depends_on('gsl', when='+gsl') def url_for_version(self, version): base_url = "http://ab-initio.mit.edu/meep" @@ -93,5 +94,12 @@ def install(self, spec, prefix): configure(*config_args) make() - make('check') + + # aniso_disp test fails unless installed with harminv + # near2far test fails unless installed with gsl + if '+harminv' in spec and '+gsl' in spec: + # Most tests fail when run in parallel + # 2D_convergence tests still fails to converge for unknown reasons + make('check', parallel=False) + make('install') From 8d0758fc4cf863ed82d6d4bacd9819e5fd69b3a4 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Fri, 8 Jul 2016 14:36:54 -0500 Subject: [PATCH 138/284] Allow py-meep to link properly --- .../repos/builtin/packages/meep/package.py | 5 ++++ .../repos/builtin/packages/py-meep/package.py | 26 +++++++++++++++---- .../builtin/packages/py-numpy/package.py | 13 ++++++++++ .../repos/builtin/packages/swig/package.py | 3 ++- 4 files changed, 41 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/meep/package.py b/var/spack/repos/builtin/packages/meep/package.py index cf4b72663d0..e651d80a0b8 100644 --- a/var/spack/repos/builtin/packages/meep/package.py +++ b/var/spack/repos/builtin/packages/meep/package.py @@ -61,6 +61,11 @@ def url_for_version(self, version): return "{0}/old/meep-{1}.tar.gz".format(base_url, version) def install(self, spec, prefix): + # Must be compiled with -fPIC for py-meep + env['CFLAGS'] = '-fPIC' + env['CXXFLAGS'] = '-fPIC' + env['FFLAGS'] = '-fPIC' + config_args = ['--prefix={0}'.format(prefix)] if '+blas' in spec: diff --git a/var/spack/repos/builtin/packages/py-meep/package.py b/var/spack/repos/builtin/packages/py-meep/package.py index 311d94b6bd6..9582c818781 100644 --- a/var/spack/repos/builtin/packages/py-meep/package.py +++ b/var/spack/repos/builtin/packages/py-meep/package.py @@ -37,18 +37,34 @@ class PyMeep(Package): variant('mpi', default=True, description='Enable MPI support') extends('python') - depends_on('mpi', when='+mpi') # OpenMPI 1.3.3 is recommended - depends_on('meep@1.1.1') # must be compiled with -fPIC - depends_on('meep+mpi', when='+mpi') - depends_on('swig@1.3.39:') depends_on('py-numpy') depends_on('py-scipy') depends_on('py-matplotlib') + depends_on('mpi', when='+mpi') # OpenMPI 1.3.3 is recommended + depends_on('meep') # must be compiled with -fPIC + depends_on('meep+mpi', when='+mpi') + + # As of SWIG 3.0.3, Python-style comments are now 
treated as + # pre-processor directives. Use older SWIG. But not too old, + # or else it can't handle newer C++ compilers and flags. + depends_on('swig@1.3.39:3.0.2') + def install(self, spec, prefix): setup = 'setup-mpi.py' if '+mpi' in spec else 'setup.py' + include_dirs = [ + spec['meep'].prefix.include, + spec['py-numpy'].include + ] + + library_dirs = [ + spec['meep'].prefix.lib + ] + python(setup, 'clean', '--all') - python(setup, 'build_ext') + python(setup, 'build_ext', + '-I{0}'.format(','.join(include_dirs)), + '-L{0}'.format(','.join(library_dirs))) python(setup, 'install', '--prefix={0}'.format(prefix)) python(setup, 'bdist') diff --git a/var/spack/repos/builtin/packages/py-numpy/package.py b/var/spack/repos/builtin/packages/py-numpy/package.py index 2febdac6589..f800b5e8347 100644 --- a/var/spack/repos/builtin/packages/py-numpy/package.py +++ b/var/spack/repos/builtin/packages/py-numpy/package.py @@ -23,6 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * +import platform class PyNumpy(Package): @@ -48,6 +49,18 @@ class PyNumpy(Package): depends_on('blas', when='+blas') depends_on('lapack', when='+lapack') + def setup_dependent_package(self, module, dep_spec): + python_version = self.spec['python'].version.up_to(2) + arch = '{0}-{1}'.format(platform.system().lower(), platform.machine()) + + self.spec.include = join_path( + self.prefix.lib, + 'python{0}'.format(python_version), + 'site-packages', + 'numpy-{0}-py{1}-{2}.egg'.format( + self.spec.version, python_version, arch), + 'numpy/core/include') + def install(self, spec, prefix): libraries = [] library_dirs = [] diff --git a/var/spack/repos/builtin/packages/swig/package.py b/var/spack/repos/builtin/packages/swig/package.py index f5526218212..b43246dcee2 100644 --- a/var/spack/repos/builtin/packages/swig/package.py +++ b/var/spack/repos/builtin/packages/swig/package.py @@ -22,9 +22,9 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## - from spack import * + class Swig(Package): """SWIG is an interface compiler that connects programs written in C and C++ with scripting languages such as Perl, Python, Ruby, @@ -38,6 +38,7 @@ class Swig(Package): homepage = "http://www.swig.org" url = "http://prdownloads.sourceforge.net/swig/swig-3.0.8.tar.gz" + version('3.0.10', 'bb4ab8047159469add7d00910e203124') version('3.0.8', 'c96a1d5ecb13d38604d7e92148c73c97') version('3.0.2', '62f9b0d010cef36a13a010dc530d0d41') version('2.0.12', 'c3fb0b2d710cc82ed0154b91e43085a4') From a2578c21e4a96e7734981570675dfb85cde74e93 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Fri, 8 Jul 2016 16:08:53 -0500 Subject: [PATCH 139/284] Supply flags to bdist --- var/spack/repos/builtin/packages/py-meep/package.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-meep/package.py b/var/spack/repos/builtin/packages/py-meep/package.py index 9582c818781..a8e11a12ffe 100644 --- a/var/spack/repos/builtin/packages/py-meep/package.py +++ b/var/spack/repos/builtin/packages/py-meep/package.py @@ -62,9 +62,10 @@ def install(self, spec, prefix): spec['meep'].prefix.lib ] + include_flags = '-I{0}'.format(','.join(include_dirs)) + library_flags = '-L{0}'.format(','.join(library_dirs)) + python(setup, 'clean', '--all') - python(setup, 'build_ext', - '-I{0}'.format(','.join(include_dirs)), - '-L{0}'.format(','.join(library_dirs))) + python(setup, 'build_ext', include_flags, library_flags) python(setup, 'install', '--prefix={0}'.format(prefix)) - python(setup, 'bdist') + python(setup, 'bdist', include_flags, library_flags) From 5b1a882fc22483ed4cbc7ac993dea6babff358d1 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 11 Jul 2016 11:42:36 -0500 Subject: [PATCH 140/284] Update meep's url_for_version --- var/spack/repos/builtin/packages/meep/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/meep/package.py b/var/spack/repos/builtin/packages/meep/package.py index e651d80a0b8..73ed09af055 100644 --- a/var/spack/repos/builtin/packages/meep/package.py +++ b/var/spack/repos/builtin/packages/meep/package.py @@ -32,6 +32,7 @@ class Meep(Package): homepage = "http://ab-initio.mit.edu/wiki/index.php/Meep" version('1.3', '18a5b9e18008627a0411087e0bb60db5') + version('1.2.1', '9be2e743c3a832ae922de9d955d016c5') version('1.1.1', '415e0cd312b6caa22b5dd612490e1ccf') variant('blas', default=True, description='Enable BLAS support') @@ -55,7 +56,7 @@ class Meep(Package): def url_for_version(self, version): base_url = "http://ab-initio.mit.edu/meep" - if version == Version('1.3'): + if version > Version('1.1.1'): return "{0}/meep-{1}.tar.gz".format(base_url, version) else: return "{0}/old/meep-{1}.tar.gz".format(base_url, version) From 6db99a4fe707e935127e98b0d85fa2f783f0ab45 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Wed, 13 Jul 2016 14:36:35 -0500 Subject: [PATCH 141/284] Rename gc => bdw-gc, fix libunistring bug --- var/spack/repos/builtin/packages/{gc => bdw-gc}/package.py | 2 +- var/spack/repos/builtin/packages/guile/package.py | 2 +- var/spack/repos/builtin/packages/libunistring/package.py | 6 ++---- 3 files changed, 4 insertions(+), 6 deletions(-) rename var/spack/repos/builtin/packages/{gc => bdw-gc}/package.py (98%) diff --git a/var/spack/repos/builtin/packages/gc/package.py b/var/spack/repos/builtin/packages/bdw-gc/package.py similarity index 98% rename from var/spack/repos/builtin/packages/gc/package.py rename to var/spack/repos/builtin/packages/bdw-gc/package.py index f03f139410c..2c61c21b43c 100644 --- a/var/spack/repos/builtin/packages/gc/package.py +++ b/var/spack/repos/builtin/packages/bdw-gc/package.py @@ -25,7 +25,7 @@ from spack import * -class Gc(Package): +class BdwGc(Package): """The Boehm-Demers-Weiser conservative garbage collector is a garbage collecting replacement for C malloc or C++ new.""" diff --git a/var/spack/repos/builtin/packages/guile/package.py b/var/spack/repos/builtin/packages/guile/package.py index 4b5f2c57e2f..03bdca440ac 100644 --- a/var/spack/repos/builtin/packages/guile/package.py +++ b/var/spack/repos/builtin/packages/guile/package.py @@ -41,7 +41,7 @@ class Guile(Package): depends_on('gettext') depends_on('libtool@1.5.6:') depends_on('libunistring@0.9.3:') - depends_on('gc@7.0:') + depends_on('bdw-gc@7.0:') depends_on('libffi') depends_on('readline', when='+readline') depends_on('pkg-config') diff --git a/var/spack/repos/builtin/packages/libunistring/package.py b/var/spack/repos/builtin/packages/libunistring/package.py index 585590be551..5b8837e72b2 100644 --- a/var/spack/repos/builtin/packages/libunistring/package.py +++ b/var/spack/repos/builtin/packages/libunistring/package.py @@ -34,11 +34,9 @@ class Libunistring(Package): version('0.9.6', 'cb09c398020c27edac10ca590e9e9ef3') - depends_on('libiconv') - def install(self, spec, prefix): - configure('--prefix={0}'.format(prefix), - '--with-libiconv-prefix={0}'.format(spec['libiconv'].prefix)) + configure('--prefix={0}'.format(prefix)) make() + # make('check') # test-verify fails for me, contacted developers make('install') From 1e3a5d4e26a8da593dcb5334673c4541ab31baa7 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Wed, 13 Jul 2016 15:51:55 -0500 Subject: [PATCH 142/284] Final fixes to get guile to build --- var/spack/repos/builtin/packages/gettext/package.py | 8 -------- var/spack/repos/builtin/packages/guile/package.py | 3 --- 2 files changed, 11 deletions(-) diff --git a/var/spack/repos/builtin/packages/gettext/package.py b/var/spack/repos/builtin/packages/gettext/package.py index 8583ae4b5ea..0a0b163a746 100644 --- a/var/spack/repos/builtin/packages/gettext/package.py +++ b/var/spack/repos/builtin/packages/gettext/package.py @@ -34,7 +34,6 @@ class Gettext(Package): version('0.19.7', 'f81e50556da41b44c1d59ac93474dca5') # Recommended variants - variant('libiconv', default=True, description='Use libiconv') variant('curses', default=True, description='Use libncurses') variant('libxml2', default=True, description='Use libxml2') variant('git', default=True, description='Enable git support') @@ -46,7 +45,6 @@ class Gettext(Package): variant('libunistring', default=False, description='Use libunistring') # Recommended dependencies - depends_on('libiconv', when='+libiconv') depends_on('ncurses', when='+curses') depends_on('libxml2', when='+libxml2') # Java runtime and compiler (e.g. 
GNU gcj or kaffe) @@ -76,12 +74,6 @@ def install(self, spec, prefix): '--without-cvs' ] - if '+libiconv' in spec: - config_args.append('--with-libiconv-prefix={0}'.format( - spec['libiconv'].prefix)) - else: - config_args.append('--without-libiconv-prefix') - if '+curses' in spec: config_args.append('--with-ncurses-prefix={0}'.format( spec['ncurses'].prefix)) diff --git a/var/spack/repos/builtin/packages/guile/package.py b/var/spack/repos/builtin/packages/guile/package.py index 03bdca440ac..936ad154bda 100644 --- a/var/spack/repos/builtin/packages/guile/package.py +++ b/var/spack/repos/builtin/packages/guile/package.py @@ -37,7 +37,6 @@ class Guile(Package): variant('readline', default=True, description='Use the readline library') depends_on('gmp@4.2:') - depends_on('libiconv') depends_on('gettext') depends_on('libtool@1.5.6:') depends_on('libunistring@0.9.3:') @@ -49,7 +48,6 @@ class Guile(Package): def install(self, spec, prefix): config_args = [ '--prefix={0}'.format(prefix), - '--with-libiconv-prefix={0}'.format(spec['libiconv'].prefix), '--with-libunistring-prefix={0}'.format( spec['libunistring'].prefix), '--with-libltdl-prefix={0}'.format(spec['libtool'].prefix), @@ -68,4 +66,3 @@ def install(self, spec, prefix): make() make('check') make('install') - make('installcheck') From 0903ae0599b6a2a9ff91d3f7939b56a6dc476032 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Wed, 13 Jul 2016 16:34:11 -0500 Subject: [PATCH 143/284] Final fixes to get libctl working --- var/spack/repos/builtin/packages/libctl/package.py | 7 ++++--- var/spack/repos/builtin/packages/meep/package.py | 4 ++-- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/libctl/package.py b/var/spack/repos/builtin/packages/libctl/package.py index 1b48d4dc290..946fc112712 100644 --- a/var/spack/repos/builtin/packages/libctl/package.py +++ b/var/spack/repos/builtin/packages/libctl/package.py @@ -38,9 +38,10 @@ class Libctl(Package): def install(self, spec, prefix): configure('--prefix={0}'.format(prefix), - 'GUILE={0}'.format(spec['guile'].prefix)) + 'GUILE={0}'.format(join_path( + spec['guile'].prefix.bin, 'guile')), + 'GUILE_CONFIG={0}'.format(join_path( + spec['guile'].prefix.bin, 'guile-config'))) make() - make('check') make('install') - make('installcheck') diff --git a/var/spack/repos/builtin/packages/meep/package.py b/var/spack/repos/builtin/packages/meep/package.py index 73ed09af055..184b6f438ec 100644 --- a/var/spack/repos/builtin/packages/meep/package.py +++ b/var/spack/repos/builtin/packages/meep/package.py @@ -38,8 +38,8 @@ class Meep(Package): variant('blas', default=True, description='Enable BLAS support') variant('lapack', default=True, description='Enable LAPACK support') variant('harminv', default=True, description='Enable Harminv support') - variant('guile', default=False, description='Enable Guilde support') - variant('libctl', default=False, description='Enable libctl support') + variant('guile', default=True, description='Enable Guilde support') + variant('libctl', default=True, description='Enable libctl support') variant('mpi', default=True, description='Enable MPI support') variant('hdf5', default=True, description='Enable HDF5 support') variant('gsl', default=False, description='Build with GSL (only necessary for testing)') From bdb64c2e390c4d2295f6d037201da2f5961512f8 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Thu, 14 Jul 2016 09:46:09 -0500 Subject: [PATCH 144/284] Tell MEEP where to find libctl --- var/spack/repos/builtin/packages/meep/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/meep/package.py b/var/spack/repos/builtin/packages/meep/package.py index 184b6f438ec..603b916da48 100644 --- a/var/spack/repos/builtin/packages/meep/package.py +++ b/var/spack/repos/builtin/packages/meep/package.py @@ -83,7 +83,7 @@ def install(self, spec, prefix): if '+libctl' in spec: config_args.append('--with-libctl={0}'.format( - spec['libctl'].prefix)) + join_path(spec['libctl'].prefix.share, 'libctl'))) else: config_args.append('--without-libctl') From 5cac0a528cd39d082d53af9e241763f24ddf0cd3 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Wed, 20 Jul 2016 12:57:43 -0500 Subject: [PATCH 145/284] Fix undefined symbols by building shared libraries --- .../repos/builtin/packages/harminv/package.py | 3 +- .../builtin/packages/libatomic-ops/package.py | 3 +- .../repos/builtin/packages/libctl/package.py | 1 + .../repos/builtin/packages/meep/package.py | 30 +++++++++---------- .../repos/builtin/packages/py-meep/package.py | 4 +-- 5 files changed, 21 insertions(+), 20 deletions(-) diff --git a/var/spack/repos/builtin/packages/harminv/package.py b/var/spack/repos/builtin/packages/harminv/package.py index 4b07d6687c2..184535ebb0c 100644 --- a/var/spack/repos/builtin/packages/harminv/package.py +++ b/var/spack/repos/builtin/packages/harminv/package.py @@ -44,7 +44,8 @@ def install(self, spec, prefix): config_args = [ '--prefix={0}'.format(prefix), '--with-blas={0}'.format(spec['blas'].prefix.lib), - '--with-lapack={0}'.format(spec['lapack'].prefix.lib) + '--with-lapack={0}'.format(spec['lapack'].prefix.lib), + '--enable-shared' ] configure(*config_args) diff --git a/var/spack/repos/builtin/packages/libatomic-ops/package.py b/var/spack/repos/builtin/packages/libatomic-ops/package.py index bc9be5cc64d..0167fbcb338 100644 --- a/var/spack/repos/builtin/packages/libatomic-ops/package.py +++ b/var/spack/repos/builtin/packages/libatomic-ops/package.py @@ -35,7 +35,8 @@ class LibatomicOps(Package): version('7.4.4', '426d804baae12c372967a6d183e25af2') def install(self, spec, prefix): - configure('--prefix={0}'.format(prefix)) + configure('--prefix={0}'.format(prefix), + '--enable-shared') make() make('install') diff --git a/var/spack/repos/builtin/packages/libctl/package.py b/var/spack/repos/builtin/packages/libctl/package.py index 946fc112712..53d30ce5c3c 100644 --- a/var/spack/repos/builtin/packages/libctl/package.py +++ b/var/spack/repos/builtin/packages/libctl/package.py @@ -38,6 +38,7 @@ class Libctl(Package): def install(self, spec, prefix): configure('--prefix={0}'.format(prefix), + '--enable-shared', 'GUILE={0}'.format(join_path( spec['guile'].prefix.bin, 'guile')), 'GUILE_CONFIG={0}'.format(join_path( diff --git a/var/spack/repos/builtin/packages/meep/package.py b/var/spack/repos/builtin/packages/meep/package.py index 603b916da48..2c1018e711f 100644 --- a/var/spack/repos/builtin/packages/meep/package.py +++ b/var/spack/repos/builtin/packages/meep/package.py @@ -35,14 +35,14 @@ class Meep(Package): version('1.2.1', '9be2e743c3a832ae922de9d955d016c5') version('1.1.1', '415e0cd312b6caa22b5dd612490e1ccf') - variant('blas', default=True, description='Enable BLAS support') - variant('lapack', default=True, description='Enable LAPACK support') - variant('harminv', default=True, description='Enable Harminv support') - variant('guile', 
default=True, description='Enable Guilde support') - variant('libctl', default=True, description='Enable libctl support') - variant('mpi', default=True, description='Enable MPI support') - variant('hdf5', default=True, description='Enable HDF5 support') - variant('gsl', default=False, description='Build with GSL (only necessary for testing)') + variant('blas', default=True, description='Enable BLAS support') + variant('lapack', default=True, description='Enable LAPACK support') + variant('harminv', default=True, description='Enable Harminv support') + variant('guile', default=True, description='Enable Guilde support') + variant('libctl', default=True, description='Enable libctl support') + variant('mpi', default=True, description='Enable MPI support') + variant('hdf5', default=True, description='Enable HDF5 support') + variant('gsl', default=True, description='Enable GSL support') depends_on('blas', when='+blas') depends_on('lapack', when='+lapack') @@ -50,7 +50,7 @@ class Meep(Package): depends_on('guile', when='+guile') depends_on('libctl@3.2:', when='+libctl') depends_on('mpi', when='+mpi') - depends_on('hdf5', when='+hdf5') + depends_on('hdf5~mpi', when='+hdf5~mpi') depends_on('hdf5+mpi', when='+hdf5+mpi') depends_on('gsl', when='+gsl') @@ -62,12 +62,10 @@ def url_for_version(self, version): return "{0}/old/meep-{1}.tar.gz".format(base_url, version) def install(self, spec, prefix): - # Must be compiled with -fPIC for py-meep - env['CFLAGS'] = '-fPIC' - env['CXXFLAGS'] = '-fPIC' - env['FFLAGS'] = '-fPIC' - - config_args = ['--prefix={0}'.format(prefix)] + config_args = [ + '--prefix={0}'.format(prefix), + '--enable-shared' + ] if '+blas' in spec: config_args.append('--with-blas={0}'.format( @@ -103,7 +101,7 @@ def install(self, spec, prefix): # aniso_disp test fails unless installed with harminv # near2far test fails unless installed with gsl - if '+harminv' in spec and '+gsl' in spec: + if self.run_tests and '+harminv' in spec and '+gsl' in spec: # Most tests fail when run in parallel # 2D_convergence tests still fails to converge for unknown reasons make('check', parallel=False) diff --git a/var/spack/repos/builtin/packages/py-meep/package.py b/var/spack/repos/builtin/packages/py-meep/package.py index a8e11a12ffe..5170111e1f2 100644 --- a/var/spack/repos/builtin/packages/py-meep/package.py +++ b/var/spack/repos/builtin/packages/py-meep/package.py @@ -41,8 +41,8 @@ class PyMeep(Package): depends_on('py-scipy') depends_on('py-matplotlib') - depends_on('mpi', when='+mpi') # OpenMPI 1.3.3 is recommended - depends_on('meep') # must be compiled with -fPIC + depends_on('mpi', when='+mpi') + depends_on('meep~mpi', when='~mpi') depends_on('meep+mpi', when='+mpi') # As of SWIG 3.0.3, Python-style comments are now treated as From f5aed63b0b85cab6eb19958f608e242bf5723d5b Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Wed, 20 Jul 2016 13:42:07 -0500 Subject: [PATCH 146/284] pkg-config should be a build dependency only --- var/spack/repos/builtin/packages/guile/package.py | 2 +- var/spack/repos/builtin/packages/py-meep/package.py | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/guile/package.py b/var/spack/repos/builtin/packages/guile/package.py index 936ad154bda..22aff1bddf3 100644 --- a/var/spack/repos/builtin/packages/guile/package.py +++ b/var/spack/repos/builtin/packages/guile/package.py @@ -43,7 +43,7 @@ class Guile(Package): depends_on('bdw-gc@7.0:') depends_on('libffi') depends_on('readline', when='+readline') - depends_on('pkg-config') + depends_on('pkg-config', type='build') def install(self, spec, prefix): config_args = [ diff --git a/var/spack/repos/builtin/packages/py-meep/package.py b/var/spack/repos/builtin/packages/py-meep/package.py index 5170111e1f2..2331571d3e3 100644 --- a/var/spack/repos/builtin/packages/py-meep/package.py +++ b/var/spack/repos/builtin/packages/py-meep/package.py @@ -62,6 +62,10 @@ def install(self, spec, prefix): spec['meep'].prefix.lib ] + if '+mpi' in spec: + include_dirs.append(spec['mpi'].prefix.include) + library_dirs.append(spec['mpi'].prefix.lib) + include_flags = '-I{0}'.format(','.join(include_dirs)) library_flags = '-L{0}'.format(','.join(library_dirs)) From b4cd2a85df0557c72f2d47add84589ac19483e9a Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Fri, 22 Jul 2016 09:45:22 -0500 Subject: [PATCH 147/284] Re-add trailing whitespace in patch From 4014a29d2b163aaa3906bee1adeb2ca778ddd620 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Fri, 22 Jul 2016 11:38:12 -0500 Subject: [PATCH 148/284] Use proper deptype so that spack activate works --- var/spack/repos/builtin/packages/py-meep/package.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-meep/package.py b/var/spack/repos/builtin/packages/py-meep/package.py index 2331571d3e3..269ac55c677 100644 --- a/var/spack/repos/builtin/packages/py-meep/package.py +++ b/var/spack/repos/builtin/packages/py-meep/package.py @@ -37,9 +37,9 @@ class PyMeep(Package): variant('mpi', default=True, description='Enable MPI support') extends('python') - depends_on('py-numpy') - depends_on('py-scipy') - depends_on('py-matplotlib') + depends_on('py-numpy', type=nolink) + depends_on('py-scipy', type=nolink) + depends_on('py-matplotlib', type=nolink) depends_on('mpi', when='+mpi') depends_on('meep~mpi', when='~mpi') From ab885a53972cb9de8f8658c340883f35aef071c5 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Tue, 26 Jul 2016 09:47:45 -0500 Subject: [PATCH 149/284] Allow git to depend on gettext now that it works --- .../repos/builtin/packages/git/package.py | 25 ++++++++----------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/var/spack/repos/builtin/packages/git/package.py b/var/spack/repos/builtin/packages/git/package.py index 4cbbaac3424..01364580a0e 100644 --- a/var/spack/repos/builtin/packages/git/package.py +++ b/var/spack/repos/builtin/packages/git/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Git(Package): """Git is a free and open source distributed version control system designed to handle everything from small to very large @@ -36,31 +37,27 @@ class Git(Package): version('2.7.3', 'fa1c008b56618c355a32ba4a678305f6') version('2.7.1', 'bf0706b433a8dedd27a63a72f9a66060') - # See here for info on vulnerable Git versions: # http://www.theregister.co.uk/2016/03/16/git_server_client_patch_now/ # All the following are vulnerable - #version('2.6.3', 'b711be7628a4a2c25f38d859ee81b423') - #version('2.6.2', 'da293290da69f45a86a311ad3cd43dc8') - #version('2.6.1', '4c62ee9c5991fe93d99cf2a6b68397fd') - #version('2.6.0', 'eb76a07148d94802a1745d759716a57e') - #version('2.5.4', '3eca2390cf1fa698b48e2a233563a76b') - #version('2.2.1', 'ff41fdb094eed1ec430aed8ee9b9849c') - + # version('2.6.3', 'b711be7628a4a2c25f38d859ee81b423') + # version('2.6.2', 'da293290da69f45a86a311ad3cd43dc8') + # version('2.6.1', '4c62ee9c5991fe93d99cf2a6b68397fd') + # version('2.6.0', 'eb76a07148d94802a1745d759716a57e') + # version('2.5.4', '3eca2390cf1fa698b48e2a233563a76b') + # version('2.2.1', 'ff41fdb094eed1ec430aed8ee9b9849c') depends_on("openssl") depends_on("autoconf", type='build') depends_on("curl") depends_on("expat") - - # Also depends_on gettext: apt-get install gettext (Ubuntu) + depends_on("gettext") + depends_on("zlib") # Use system perl for now. # depends_on("perl") # depends_on("pcre") - depends_on("zlib") - def install(self, spec, prefix): configure_args = [ "--prefix=%s" % prefix, @@ -68,8 +65,8 @@ def install(self, spec, prefix): "--with-openssl=%s" % spec['openssl'].prefix, "--with-zlib=%s" % spec['zlib'].prefix, "--with-curl=%s" % spec['curl'].prefix, - "--with-expat=%s" % spec['expat'].prefix, - ] + "--with-expat=%s" % spec['expat'].prefix + ] which('autoreconf')('-i') configure(*configure_args) From 19578d954fd9b99db657cb4e9dc0da7290191093 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 1 Aug 2016 13:37:42 -0500 Subject: [PATCH 150/284] Fix flake8 line length errors --- lib/spack/spack/stage.py | 85 ++++++++++++++++++++++++---------------- 1 file changed, 52 insertions(+), 33 deletions(-) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 8f3f0e163a0..1a8b1a169a6 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -52,10 +52,12 @@ class Stage(object): lifecycle looks like this: ``` - with Stage() as stage: # Context manager creates and destroys the stage directory + with Stage() as stage: # Context manager creates and destroys the + # stage directory stage.fetch() # Fetch a source archive into the stage. stage.expand_archive() # Expand the source archive. - # Build and install the archive. (handled by user of Stage) + # Build and install the archive. (handled by + # user of Stage) ``` When used as a context manager, the stage is automatically @@ -72,7 +74,8 @@ class Stage(object): stage.create() # Explicitly create the stage directory. 
stage.fetch() # Fetch a source archive into the stage. stage.expand_archive() # Expand the source archive. - # Build and install the archive. (handled by user of Stage) + # Build and install the archive. (handled by + # user of Stage) finally: stage.destroy() # Explicitly destroy the stage directory. ``` @@ -121,13 +124,17 @@ def __init__(self, url_or_fetch_strategy, elif isinstance(url_or_fetch_strategy, fs.FetchStrategy): self.fetcher = url_or_fetch_strategy else: - raise ValueError("Can't construct Stage without url or fetch strategy") + raise ValueError( + "Can't construct Stage without url or fetch strategy") self.fetcher.set_stage(self) - self.default_fetcher = self.fetcher # self.fetcher can change with mirrors. - self.skip_checksum_for_mirror = True # used for mirrored archives of repositories. + # self.fetcher can change with mirrors. + self.default_fetcher = self.fetcher + # used for mirrored archives of repositories. + self.skip_checksum_for_mirror = True - # TODO : this uses a protected member of tempfile, but seemed the only way to get a temporary name - # TODO : besides, the temporary link name won't be the same as the temporary stage area in tmp_root + # TODO : this uses a protected member of tempfile, but seemed the only + # TODO : way to get a temporary name besides, the temporary link name + # TODO : won't be the same as the temporary stage area in tmp_root self.name = name if name is None: self.name = STAGE_PREFIX + next(tempfile._get_candidate_names()) @@ -175,8 +182,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): def _need_to_create_path(self): """Makes sure nothing weird has happened since the last time we looked at path. Returns True if path already exists and is ok. - Returns False if path needs to be created. - """ + Returns False if path needs to be created.""" # Path doesn't exist yet. Will need to create it. if not os.path.exists(self.path): return True @@ -194,7 +200,8 @@ def _need_to_create_path(self): if spack.use_tmp_stage: # If we're using a tmp dir, it's a link, and it points at the # right spot, then keep it. - if (real_path.startswith(real_tmp) and os.path.exists(real_path)): + if (real_path.startswith(real_tmp) and + os.path.exists(real_path)): return False else: # otherwise, just unlink it and start over. @@ -202,7 +209,8 @@ def _need_to_create_path(self): return True else: - # If we're not tmp mode, then it's a link and we want a directory. + # If we're not tmp mode, then it's a link and we want a + # directory. 
os.unlink(self.path) return True @@ -213,10 +221,12 @@ def expected_archive_files(self): """Possible archive file paths.""" paths = [] if isinstance(self.fetcher, fs.URLFetchStrategy): - paths.append(os.path.join(self.path, os.path.basename(self.fetcher.url))) + paths.append(os.path.join( + self.path, os.path.basename(self.fetcher.url))) if self.mirror_path: - paths.append(os.path.join(self.path, os.path.basename(self.mirror_path))) + paths.append(os.path.join( + self.path, os.path.basename(self.mirror_path))) return paths @@ -225,10 +235,12 @@ def archive_file(self): """Path to the source archive within this stage directory.""" paths = [] if isinstance(self.fetcher, fs.URLFetchStrategy): - paths.append(os.path.join(self.path, os.path.basename(self.fetcher.url))) + paths.append(os.path.join( + self.path, os.path.basename(self.fetcher.url))) if self.mirror_path: - paths.append(os.path.join(self.path, os.path.basename(self.mirror_path))) + paths.append(os.path.join( + self.path, os.path.basename(self.mirror_path))) for path in paths: if os.path.exists(path): @@ -260,7 +272,8 @@ def source_path(self): return None def chdir(self): - """Changes directory to the stage path. Or dies if it is not set up.""" + """Changes directory to the stage path. Or dies if it is not set + up.""" if os.path.isdir(self.path): os.chdir(self.path) else: @@ -335,7 +348,8 @@ def fetch(self, mirror_only=False): def check(self): """Check the downloaded archive against a checksum digest. No-op if this stage checks code out of a repository.""" - if self.fetcher is not self.default_fetcher and self.skip_checksum_for_mirror: + if self.fetcher is not self.default_fetcher and \ + self.skip_checksum_for_mirror: tty.warn("Fetching from mirror without a checksum!", "This package is normally checked out from a version " "control system, but it has been archived on a spack " @@ -350,9 +364,8 @@ def cache_local(self): def expand_archive(self): """Changes to the stage directory and attempt to expand the downloaded - archive. Fail if the stage is not set up or if the archive is not yet - downloaded. - """ + archive. 
Fail if the stage is not set up or if the archive is not yet + downloaded.""" archive_dir = self.source_path if not archive_dir: self.fetcher.expand() @@ -394,8 +407,8 @@ def create(self): # Create the top-level stage directory mkdirp(spack.stage_path) remove_dead_links(spack.stage_path) - # If a tmp_root exists then create a directory there and then link it in the stage area, - # otherwise create the stage directory in self.path + # If a tmp_root exists then create a directory there and then link it + # in the stage area, otherwise create the stage directory in self.path if self._need_to_create_path(): if self.tmp_root: tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root) @@ -417,6 +430,7 @@ def destroy(self): class ResourceStage(Stage): + def __init__(self, url_or_fetch_strategy, root, resource, **kwargs): super(ResourceStage, self).__init__(url_or_fetch_strategy, **kwargs) self.root_stage = root @@ -426,12 +440,15 @@ def expand_archive(self): super(ResourceStage, self).expand_archive() root_stage = self.root_stage resource = self.resource - placement = os.path.basename(self.source_path) if resource.placement is None else resource.placement + placement = os.path.basename(self.source_path) \ + if resource.placement is None \ + else resource.placement if not isinstance(placement, dict): placement = {'': placement} # Make the paths in the dictionary absolute and link for key, value in placement.iteritems(): - target_path = join_path(root_stage.source_path, resource.destination) + target_path = join_path( + root_stage.source_path, resource.destination) destination_path = join_path(target_path, value) source_path = join_path(self.source_path, key) @@ -445,21 +462,23 @@ def expand_archive(self): if not os.path.exists(destination_path): # Create a symlink - tty.info('Moving resource stage\n\tsource : {stage}\n\tdestination : {destination}'.format( - stage=source_path, destination=destination_path - )) + tty.info('Moving resource stage\n\tsource : ' + '{stage}\n\tdestination : {destination}'.format( + stage=source_path, destination=destination_path + )) shutil.move(source_path, destination_path) -@pattern.composite(method_list=['fetch', 'create', 'check', 'expand_archive', 'restage', 'destroy', 'cache_local']) +@pattern.composite(method_list=['fetch', 'create', 'check', 'expand_archive', + 'restage', 'destroy', 'cache_local']) class StageComposite: - """ - Composite for Stage type objects. The first item in this composite is considered to be the root package, and - operations that return a value are forwarded to it. - """ + """Composite for Stage type objects. The first item in this composite is + considered to be the root package, and operations that return a value are + forwarded to it.""" # # __enter__ and __exit__ delegate to all stages in the composite. # + def __enter__(self): for item in self: item.__enter__() From fed7ba1dab8db3eeb1fa821f5e0c26e58c2a282d Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Mon, 25 Jul 2016 16:45:02 -0500 Subject: [PATCH 151/284] Add latest version of PGI --- var/spack/repos/builtin/packages/pgi/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/pgi/package.py b/var/spack/repos/builtin/packages/pgi/package.py index 7170c65303a..c58d5636823 100644 --- a/var/spack/repos/builtin/packages/pgi/package.py +++ b/var/spack/repos/builtin/packages/pgi/package.py @@ -41,6 +41,7 @@ class Pgi(Package): homepage = "http://www.pgroup.com/" url = "file://%s/pgi-16.3.tar.gz" % os.getcwd() + version('16.5', 'a40e8852071b5d600cb42f31631b3de1') version('16.3', '618cb7ddbc57d4e4ed1f21a0ab25f427') variant('network', default=True, From 2d51ea5da4e751bdbba5d2fc3854b43962f1c76e Mon Sep 17 00:00:00 2001 From: Mitchell Devlin Date: Mon, 1 Aug 2016 14:38:16 -0500 Subject: [PATCH 152/284] fixed libxsmm makefile filter --- var/spack/repos/builtin/packages/libxsmm/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/libxsmm/package.py b/var/spack/repos/builtin/packages/libxsmm/package.py index 961e1717149..a7364906005 100644 --- a/var/spack/repos/builtin/packages/libxsmm/package.py +++ b/var/spack/repos/builtin/packages/libxsmm/package.py @@ -46,7 +46,7 @@ def patch(self): makefile.filter('CC = icc', 'CC ?= icc', **kwargs) makefile.filter('CC = gcc', 'CC ?= gcc', **kwargs) makefile.filter('CXX = icpc', 'CXX ?= icpc', **kwargs) - makefile.filter('CXX = g.*', 'CXX ?= g++', **kwargs) + makefile.filter('CXX = g++', 'CXX ?= g++', **kwargs) makefile.filter('FC = ifort', 'FC ?= ifort', **kwargs) makefile.filter('FC = gfortran', 'FC ?= gfortran', **kwargs) From 5b79f0d04a766a7df459dff51ccff93a6edaf8ec Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 1 Aug 2016 15:35:02 -0500 Subject: [PATCH 153/284] Fix backup=True for filter_file --- lib/spack/llnl/util/filesystem.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 6e4cd338fe5..f416fc6fd9e 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -29,8 +29,9 @@ import stat import errno import getpass -from contextlib import contextmanager, closing +from contextlib import contextmanager import subprocess +import fileinput import llnl.util.tty as tty @@ -85,13 +86,14 @@ def groupid_to_group(x): if ignore_absent and not os.path.exists(filename): continue - shutil.copy(filename, backup_filename) + # Create backup file. Don't overwrite an existing backup + # file in case this file is being filtered multiple times. + if not os.path.exists(backup_filename): + shutil.copy(filename, backup_filename) + try: - with closing(open(backup_filename)) as infile: - with closing(open(filename, 'w')) as outfile: - for line in infile: - foo = re.sub(regex, repl, line) - outfile.write(foo) + for line in fileinput.input(filename, inplace=True): + print(re.sub(regex, repl, line.rstrip())) except: # clean up the original file on failure. shutil.move(backup_filename, filename) From e864d2764171b2dbd539bac16dbb7a28910407a9 Mon Sep 17 00:00:00 2001 From: Matt Belhorn Date: Mon, 1 Aug 2016 17:03:50 -0400 Subject: [PATCH 154/284] Replaces bash-ism `source` for POSIX-compliant `.` Change is made in order to use `/bin/sh` on systems where `/bin/sh` is not simply an alias for `/bin/bash --norc`. 
--- lib/spack/spack/platforms/cray.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/platforms/cray.py b/lib/spack/spack/platforms/cray.py index d43580df06d..68f1453edf1 100644 --- a/lib/spack/spack/platforms/cray.py +++ b/lib/spack/spack/platforms/cray.py @@ -28,7 +28,7 @@ def _target_from_clean_env(name): # There may be other variables needed for general success. output = env('USER=%s' % os.environ['USER'], '/bin/sh', '--noprofile', '-c', - 'source /etc/profile; module list -lt', + '. /etc/profile; module list -lt', output=str, error=str) default_modules = [i for i in output.splitlines() if len(i.split()) == 1] From ae167c09fc4d3dc4b4ff90840bd70cd8648261ab Mon Sep 17 00:00:00 2001 From: Matt Belhorn Date: Mon, 1 Aug 2016 17:17:24 -0400 Subject: [PATCH 155/284] Prefers `bash` over `sh`. Assuming a bash interactive environment will be correctly formed on login, we should prefer to probe the environment using a shell that reports itself as `bash` instead of `sh` which may not source files that set the environment modules in statements like: ``` case "$is" in bash) test -s /etc/bash.bashrc.local && . /etc/bash.bashrc.local ;; ksh) test -s /etc/ksh.kshrc.local && . /etc/ksh.kshrc.local ;; zsh) test -s /etc/zsh.zshrc.local && . /etc/zsh.zshrc.local ;; ash) test -s /etc/ash.ashrc.local && . /etc/ash.ashrc.local esac test -s /etc/sh.shrc.local && . /etc/sh.shrc.local ``` --- lib/spack/spack/platforms/cray.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/platforms/cray.py b/lib/spack/spack/platforms/cray.py index 68f1453edf1..2883a35ec7b 100644 --- a/lib/spack/spack/platforms/cray.py +++ b/lib/spack/spack/platforms/cray.py @@ -27,7 +27,7 @@ def _target_from_clean_env(name): # CAUTION - $USER is generally needed to initialize the environment. # There may be other variables needed for general success. output = env('USER=%s' % os.environ['USER'], - '/bin/sh', '--noprofile', '-c', + '/bin/bash', '--noprofile', '--norc', '-c', '. /etc/profile; module list -lt', output=str, error=str) default_modules = [i for i in output.splitlines() From 661708b7facbcc4c4fab7b7592e605e1d63de7b4 Mon Sep 17 00:00:00 2001 From: Matt Belhorn Date: Mon, 1 Aug 2016 17:27:33 -0400 Subject: [PATCH 156/284] Fixes Flake8 indentation errors. --- lib/spack/spack/platforms/cray.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/platforms/cray.py b/lib/spack/spack/platforms/cray.py index 2883a35ec7b..2bd2a404634 100644 --- a/lib/spack/spack/platforms/cray.py +++ b/lib/spack/spack/platforms/cray.py @@ -27,9 +27,9 @@ def _target_from_clean_env(name): # CAUTION - $USER is generally needed to initialize the environment. # There may be other variables needed for general success. output = env('USER=%s' % os.environ['USER'], - '/bin/bash', '--noprofile', '--norc', '-c', - '. /etc/profile; module list -lt', - output=str, error=str) + '/bin/bash', '--noprofile', '--norc', '-c', + '. 
/etc/profile; module list -lt', + output=str, error=str) default_modules = [i for i in output.splitlines() if len(i.split()) == 1] tty.debug("Found default modules:", From 0ce98d4d6578ddf05979732d9db97f5a040e4a6c Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Mon, 1 Aug 2016 15:43:11 -0700 Subject: [PATCH 157/284] Removed vestigial cray_xc platform in favor of combined cray platform --- lib/spack/spack/platforms/cray_xc.py | 72 ---------------------------- 1 file changed, 72 deletions(-) delete mode 100644 lib/spack/spack/platforms/cray_xc.py diff --git a/lib/spack/spack/platforms/cray_xc.py b/lib/spack/spack/platforms/cray_xc.py deleted file mode 100644 index 03d0383cc51..00000000000 --- a/lib/spack/spack/platforms/cray_xc.py +++ /dev/null @@ -1,72 +0,0 @@ -import os -import spack -from spack.architecture import Platform, Target -from spack.operating_systems.linux_distro import LinuxDistro -from spack.operating_systems.cnl import Cnl -from spack.util.executable import which -from llnl.util.filesystem import join_path - - -class CrayXc(Platform): - priority = 20 - front_end = 'sandybridge' - back_end = 'ivybridge' - default = 'ivybridge' - - back_os = "CNL10" - default_os = "CNL10" - - def __init__(self): - ''' Since cori doesn't have ivybridge as a front end it's better - if we use CRAY_CPU_TARGET as the default. This will ensure - that if we're on a XC-40 or XC-30 then we can detect the target - ''' - super(CrayXc, self).__init__('cray_xc') - - # Handle the default here so we can check for a key error - if 'CRAY_CPU_TARGET' in os.environ: - self.default = os.environ['CRAY_CPU_TARGET'] - - # Change the defaults to haswell if we're on an XC40 - if self.default == 'haswell': - self.front_end = self.default - self.back_end = self.default - - # Could switch to use modules and fe targets for front end - # Currently using compilers by path for front end. - self.add_target('sandybridge', Target('sandybridge')) - self.add_target('ivybridge', - Target('ivybridge', 'craype-ivybridge')) - self.add_target('haswell', - Target('haswell', 'craype-haswell')) - - # Front end of the cray platform is a linux distro. 
- linux_dist = LinuxDistro() - self.front_os = str(linux_dist) - self.add_operating_system(str(linux_dist), linux_dist) - self.add_operating_system('CNL10', Cnl()) - - @classmethod - def setup_platform_environment(self, pkg, env): - """ Change the linker to default dynamic to be more - similar to linux/standard linker behavior - """ - env.set('CRAYPE_LINK_TYPE', 'dynamic') - cray_wrapper_names = join_path(spack.build_env_path, 'cray') - if os.path.isdir(cray_wrapper_names): - env.prepend_path('PATH', cray_wrapper_names) - env.prepend_path('SPACK_ENV_PATHS', cray_wrapper_names) - - @classmethod - def detect(self): - try: - cc_verbose = which('ftn') - text = cc_verbose('-craype-verbose', - output=str, error=str, - ignore_errors=True).split() - if '-D__CRAYXC' in text: - return True - else: - return False - except: - return False From 0425f5d523aaff998785254bf67fea8afacfcfab Mon Sep 17 00:00:00 2001 From: Mario Melara Date: Mon, 1 Aug 2016 15:59:30 -0700 Subject: [PATCH 158/284] Add import statement Add missing import statement for join_path --- lib/spack/spack/platforms/cray.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/platforms/cray.py b/lib/spack/spack/platforms/cray.py index 2bd2a404634..2a3b81cf9cd 100644 --- a/lib/spack/spack/platforms/cray.py +++ b/lib/spack/spack/platforms/cray.py @@ -6,7 +6,7 @@ from spack.architecture import Platform, Target, NoPlatformError from spack.operating_systems.linux_distro import LinuxDistro from spack.operating_systems.cnl import Cnl - +from llnl.util.filesystem import join_path # Craype- module prefixes that are not valid CPU targets. NON_TARGETS = ('hugepages', 'network', 'target', 'accel', 'xtpe') From 0cf1f917d54f081f66dd090ee484e35b0b579887 Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Mon, 1 Aug 2016 16:17:31 -0700 Subject: [PATCH 159/284] fixed architecture test --- lib/spack/spack/test/architecture.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/test/architecture.py b/lib/spack/spack/test/architecture.py index 09bdb021af7..42dd9f4c040 100644 --- a/lib/spack/spack/test/architecture.py +++ b/lib/spack/spack/test/architecture.py @@ -31,7 +31,7 @@ import spack import spack.architecture from spack.spec import * -from spack.platforms.cray_xc import CrayXc +from spack.platforms.cray import Cray from spack.platforms.linux import Linux from spack.platforms.bgq import Bgq from spack.platforms.darwin import Darwin @@ -76,7 +76,7 @@ def test_dict_functions_for_architecture(self): def test_platform(self): output_platform_class = spack.architecture.platform() if os.path.exists('/opt/cray/craype'): - my_platform_class = CrayXc() + my_platform_class = Cray() elif os.path.exists('/bgsys'): my_platform_class = Bgq() elif 'Linux' in py_platform.system(): From 2705f2c0e3c92aa4c0a099bac3b973d1bf229663 Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Mon, 1 Aug 2016 17:11:10 -0700 Subject: [PATCH 160/284] changed error raised in concretize compiler to provide better error message --- lib/spack/spack/concretize.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 5180f3cf042..71f734ebfbb 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -343,7 +343,7 @@ def _proper_compiler_style(cspec, arch): while not _proper_compiler_style(matches[index], spec.architecture): index += 1 if index == len(matches) - 1: - raise NoValidVersionError(spec) + raise 
UnavailableCompilerVersionError(spec) spec.compiler = matches[index].copy() assert(spec.compiler.concrete) return True # things changed. From 31042e8ed10a8464b1a6cbbc165be8ca7e9510bc Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Mon, 1 Aug 2016 17:16:06 -0700 Subject: [PATCH 161/284] further improved error message --- lib/spack/spack/concretize.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 71f734ebfbb..4fafee3a83d 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -336,14 +336,16 @@ def _proper_compiler_style(cspec, arch): spack.pkgsort.compiler_compare, other_spec.name) matches = sorted(compiler_list, cmp=cmp_compilers) if not matches: - raise UnavailableCompilerVersionError(other_compiler) + raise UnavailableCompilerVersionError(other_compiler, + spec.architecture.operating_system) # copy concrete version into other_compiler index = 0 while not _proper_compiler_style(matches[index], spec.architecture): index += 1 if index == len(matches) - 1: - raise UnavailableCompilerVersionError(spec) + raise UnavailableCompilerVersionError(spec.compiler, + spec.architecture.operating_system) spec.compiler = matches[index].copy() assert(spec.compiler.concrete) return True # things changed. @@ -489,9 +491,9 @@ class UnavailableCompilerVersionError(spack.error.SpackError): """Raised when there is no available compiler that satisfies a compiler spec.""" - def __init__(self, compiler_spec): + def __init__(self, compiler_spec, operating_system): super(UnavailableCompilerVersionError, self).__init__( - "No available compiler version matches '%s'" % compiler_spec, + "No available compiler version matches '%s' on operating_system %s" % compiler_spec, operating_system, # NOQA: ignore=E501 "Run 'spack compilers' to see available compiler Options.") From 679ceabf36bbe87e853141dd86936bc5cda4a337 Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Mon, 1 Aug 2016 17:22:13 -0700 Subject: [PATCH 162/284] fixed flake8 errors --- lib/spack/spack/concretize.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 4fafee3a83d..eced9917c96 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -336,16 +336,18 @@ def _proper_compiler_style(cspec, arch): spack.pkgsort.compiler_compare, other_spec.name) matches = sorted(compiler_list, cmp=cmp_compilers) if not matches: + arch = spec.architecture raise UnavailableCompilerVersionError(other_compiler, - spec.architecture.operating_system) + arch.platform_os) # copy concrete version into other_compiler index = 0 while not _proper_compiler_style(matches[index], spec.architecture): index += 1 if index == len(matches) - 1: + arch = spec.architecture raise UnavailableCompilerVersionError(spec.compiler, - spec.architecture.operating_system) + arch.platform_os) spec.compiler = matches[index].copy() assert(spec.compiler.concrete) return True # things changed. From 17f0eb51483b13270219e70b0c7e405f191b9d86 Mon Sep 17 00:00:00 2001 From: "Kelly (KT) Thompson" Date: Mon, 1 Aug 2016 22:10:54 -0600 Subject: [PATCH 163/284] Provide correct download urls and additional dependencies. 
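The two `url_for_version()` additions below both encode GNOME's mirror layout, where tarballs are grouped under a major.minor series directory. A hedged restatement of that pattern as a standalone helper (the function name is invented; only the URL scheme comes from the diffs, and `version.up_to(2)` is Spack's way of truncating a version to its major.minor series):

```
# e.g. gnome_source_url('glib', Version('2.49.1'))
#   -> http://ftp.gnome.org/pub/gnome/sources/glib/2.49/glib-2.49.1.tar.xz
def gnome_source_url(name, version):
    base = 'http://ftp.gnome.org/pub/gnome/sources/%s' % name
    return '%s/%s/%s-%s.tar.xz' % (base, version.up_to(2), name, version)
```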
--- var/spack/repos/builtin/packages/atk/package.py | 8 +++++++- var/spack/repos/builtin/packages/glib/package.py | 5 +++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/atk/package.py b/var/spack/repos/builtin/packages/atk/package.py index 8da562f1225..361ea24b960 100644 --- a/var/spack/repos/builtin/packages/atk/package.py +++ b/var/spack/repos/builtin/packages/atk/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Atk(Package): """ATK provides the set of accessibility interfaces that are implemented by other toolkits and applications. Using the ATK @@ -35,7 +36,12 @@ class Atk(Package): version('2.20.0', '5187b0972f4d3905f285540b31395e20') version('2.14.0', 'ecb7ca8469a5650581b1227d78051b8b') - depends_on("glib") + depends_on('glib') + depends_on('pkg-config', type='build') + + def url_for_version(self, version): + """Handle atk's version-based custom URLs.""" + return 'http://ftp.gnome.org/pub/gnome/sources/atk/%s/atk-%s.tar.xz' % (version.up_to(2), version) def install(self, spec, prefix): configure("--prefix=%s" % prefix) diff --git a/var/spack/repos/builtin/packages/glib/package.py b/var/spack/repos/builtin/packages/glib/package.py index 512f4466172..3450791124f 100644 --- a/var/spack/repos/builtin/packages/glib/package.py +++ b/var/spack/repos/builtin/packages/glib/package.py @@ -42,10 +42,15 @@ class Glib(Package): depends_on('pkg-config', type='build') depends_on('gettext', when=sys.platform == 'darwin') depends_on('pcre+utf', when='@2.49:') + depends_on('gettext', when='@2.49:') # The following patch is needed for gcc-6.1 patch('g_date_strftime.patch') + def url_for_version(self, version): + """Handle glib's version-based custom URLs.""" + return 'http://ftp.gnome.org/pub/gnome/sources/glib/%s/glib-%s.tar.xz' % (version.up_to(2), version) + def install(self, spec, prefix): configure("--prefix=%s" % prefix) make() From 9433e8477657c54e8377b6131e0b3eeb56386c79 Mon Sep 17 00:00:00 2001 From: "Kelly (KT) Thompson" Date: Mon, 1 Aug 2016 22:54:51 -0600 Subject: [PATCH 164/284] Formatting changes for flake8. 
--- var/spack/repos/builtin/packages/atk/package.py | 3 ++- var/spack/repos/builtin/packages/glib/package.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/atk/package.py b/var/spack/repos/builtin/packages/atk/package.py index 361ea24b960..d5b6933ec3e 100644 --- a/var/spack/repos/builtin/packages/atk/package.py +++ b/var/spack/repos/builtin/packages/atk/package.py @@ -41,7 +41,8 @@ class Atk(Package): def url_for_version(self, version): """Handle atk's version-based custom URLs.""" - return 'http://ftp.gnome.org/pub/gnome/sources/atk/%s/atk-%s.tar.xz' % (version.up_to(2), version) + url = 'http://ftp.gnome.org/pub/gnome/sources/atk' + return 'url+/%s/atk-%s.tar.xz' % (version.up_to(2), version) def install(self, spec, prefix): configure("--prefix=%s" % prefix) diff --git a/var/spack/repos/builtin/packages/glib/package.py b/var/spack/repos/builtin/packages/glib/package.py index 3450791124f..98704eaa4ce 100644 --- a/var/spack/repos/builtin/packages/glib/package.py +++ b/var/spack/repos/builtin/packages/glib/package.py @@ -49,7 +49,8 @@ class Glib(Package): def url_for_version(self, version): """Handle glib's version-based custom URLs.""" - return 'http://ftp.gnome.org/pub/gnome/sources/glib/%s/glib-%s.tar.xz' % (version.up_to(2), version) + url = 'http://ftp.gnome.org/pub/gnome/sources/glib' + return url+'/%s/glib-%s.tar.xz' % (version.up_to(2), version) def install(self, spec, prefix): configure("--prefix=%s" % prefix) From a4e59c27586190543b847f5a4005a4b471ecd087 Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Thu, 23 Jun 2016 08:45:20 +0200 Subject: [PATCH 165/284] libxau: add missing dependency --- var/spack/repos/builtin/packages/libxau/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/libxau/package.py b/var/spack/repos/builtin/packages/libxau/package.py index 55816ecdbd4..d6d3421f1de 100644 --- a/var/spack/repos/builtin/packages/libxau/package.py +++ b/var/spack/repos/builtin/packages/libxau/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Libxau(Package): """The libXau package contains a library implementing the X11 Authorization Protocol. This is useful for restricting client @@ -34,11 +35,10 @@ class Libxau(Package): version('1.0.8', '685f8abbffa6d145c0f930f00703b21b') depends_on('xproto') + depends_on('pkg-config') def install(self, spec, prefix): - # FIXME: Modify the configure line to suit your build system here. 
configure('--prefix=%s' % prefix) - # FIXME: Add logic to build and install here make() make("install") From 450c750214267db6b5f4178273a1f46f7d3d207d Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Thu, 23 Jun 2016 08:45:44 +0200 Subject: [PATCH 166/284] kdiff3: initial version of the package --- .../repos/builtin/packages/kdiff3/package.py | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 var/spack/repos/builtin/packages/kdiff3/package.py diff --git a/var/spack/repos/builtin/packages/kdiff3/package.py b/var/spack/repos/builtin/packages/kdiff3/package.py new file mode 100644 index 00000000000..8c67ef59c87 --- /dev/null +++ b/var/spack/repos/builtin/packages/kdiff3/package.py @@ -0,0 +1,20 @@ +from spack import * + + +class Kdiff3(Package): + """Compare and merge 2 or 3 files or directories.""" + homepage = "http://kdiff3.sourceforge.net/" + url = "https://downloads.sourceforge.net/project/kdiff3/kdiff3/0.9.98/kdiff3-0.9.98.tar.gz" + + version('0.9.98', 'b52f99f2cf2ea75ed5719315cbf77446') + + depends_on("qt@5.2.0:") + + def install(self, spec, prefix): + # make is done inside + configure('qt4') + + # there is no make install, bummer... + mkdirp(self.prefix.bin) + install(join_path(self.stage.source_path, 'releaseQt', 'kdiff3'), + self.prefix.bin) From 0aa513ad70e894c95145b510e97da17b06f860da Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Tue, 2 Aug 2016 13:50:46 +0200 Subject: [PATCH 167/284] libxau: mark pkg-config as build-type dependency --- var/spack/repos/builtin/packages/libxau/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/libxau/package.py b/var/spack/repos/builtin/packages/libxau/package.py index d6d3421f1de..b9215bc6013 100644 --- a/var/spack/repos/builtin/packages/libxau/package.py +++ b/var/spack/repos/builtin/packages/libxau/package.py @@ -35,7 +35,7 @@ class Libxau(Package): version('1.0.8', '685f8abbffa6d145c0f930f00703b21b') depends_on('xproto') - depends_on('pkg-config') + depends_on('pkg-config', type='build') def install(self, spec, prefix): configure('--prefix=%s' % prefix) From 6f7e12d49b65be4e91ff01a857682c6af0782648 Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Wed, 13 Jul 2016 14:19:13 +0200 Subject: [PATCH 168/284] atlas: fix urls and use lapack 3.6.0 --- var/spack/repos/builtin/packages/atlas/package.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/atlas/package.py b/var/spack/repos/builtin/packages/atlas/package.py index c43d92c34f4..6e91f218151 100644 --- a/var/spack/repos/builtin/packages/atlas/package.py +++ b/var/spack/repos/builtin/packages/atlas/package.py @@ -36,15 +36,15 @@ class Atlas(Package): homepage = "http://math-atlas.sourceforge.net/" version('3.10.2', 'a4e21f343dec8f22e7415e339f09f6da', - url='http://downloads.sourceforge.net/project/math-atlas/Stable/3.10.2/atlas3.10.2.tar.bz2', preferred=True) + url='https://sourceforge.net/projects/math-atlas/files/Stable/3.10.2/atlas3.10.2.tar.bz2', preferred=True) resource(name='lapack', - url='http://www.netlib.org/lapack/lapack-3.5.0.tgz', - md5='b1d3e3e425b2e44a06760ff173104bdf', + url='http://www.netlib.org/lapack/lapack-3.6.0.tgz', + md5='f2f6c67134e851fe189bb3ca1fbb5101', destination='spack-resource-lapack', when='@3:') version('3.11.34', '0b6c5389c095c4c8785fd0f724ec6825', - url='http://sourceforge.net/projects/math-atlas/files/Developer%20%28unstable%29/3.11.34/atlas3.11.34.tar.bz2/download') + 
url='http://sourceforge.net/projects/math-atlas/files/Developer%20%28unstable%29/3.11.34/atlas3.11.34.tar.bz2') variant('shared', default=True, description='Builds shared library') From 4ecf481337de9ed92737aa0ce1213070a47debf3 Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Wed, 13 Jul 2016 22:07:27 +0200 Subject: [PATCH 169/284] atlas: fix shared libs; set compilers --- .../repos/builtin/packages/atlas/package.py | 23 +++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/atlas/package.py b/var/spack/repos/builtin/packages/atlas/package.py index 6e91f218151..662a2825480 100644 --- a/var/spack/repos/builtin/packages/atlas/package.py +++ b/var/spack/repos/builtin/packages/atlas/package.py @@ -66,9 +66,24 @@ def install(self, spec, prefix): options = [] if '+shared' in spec: - options.append('--shared') + options.extend([ + '--shared' + ]) + # TODO: for non GNU add '-Fa', 'alg', '-fPIC' ? - # Lapack resource + # configure for 64-bit build + options.extend([ + '-b', '64' + ]) + + # set compilers: + options.extend([ + '-C', 'ic', spack_cc, + '-C', 'if', spack_f77 + ]) + + # Lapack resource to provide full lapack build. Note that + # ATLAS only provides a few LAPACK routines natively. lapack_stage = self.stage[1] lapack_tarfile = os.path.basename(lapack_stage.fetcher.url) lapack_tarfile_path = join_path(lapack_stage.path, lapack_tarfile) @@ -81,4 +96,8 @@ def install(self, spec, prefix): make('check') make('ptcheck') make('time') + if '+shared' in spec: + with working_dir('lib'): + make('shared_all') + make("install") From 0c0b37800d53cf9c56646ebd48e74723996a2aa4 Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Wed, 13 Jul 2016 22:15:06 +0200 Subject: [PATCH 170/284] atlas: set (blas|lapack)_shared_lib --- var/spack/repos/builtin/packages/atlas/package.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/var/spack/repos/builtin/packages/atlas/package.py b/var/spack/repos/builtin/packages/atlas/package.py index 662a2825480..20ac0a7879a 100644 --- a/var/spack/repos/builtin/packages/atlas/package.py +++ b/var/spack/repos/builtin/packages/atlas/package.py @@ -101,3 +101,16 @@ def install(self, spec, prefix): make('shared_all') make("install") + + def setup_dependent_package(self, module, dspec): + # libsatlas.[so,dylib,dll ] contains all serial APIs (serial lapack, + # serial BLAS), and all ATLAS symbols needed to support them. Whereas + # libtatlas.[so,dylib,dll ] is parallel (multithreaded) version. 
+ name = 'libsatlas.%s' % dso_suffix + libdir = find_library_path(name, + self.prefix.lib64, + self.prefix.lib) + + if '+shared' in self.spec: + self.spec.blas_shared_lib = join_path(libdir, name) + self.spec.lapack_shared_lib = self.spec.blas_shared_lib From f6a4a6b00f9e5a112f450b2e5b278003bfa10250 Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Wed, 13 Jul 2016 23:29:11 +0200 Subject: [PATCH 171/284] atlas: add install_test --- .../repos/builtin/packages/atlas/package.py | 16 ++++++ .../builtin/packages/atlas/test_cblas_dgemm.c | 49 +++++++++++++++++++ .../packages/atlas/test_cblas_dgemm.output | 12 +++++ 3 files changed, 77 insertions(+) create mode 100644 var/spack/repos/builtin/packages/atlas/test_cblas_dgemm.c create mode 100644 var/spack/repos/builtin/packages/atlas/test_cblas_dgemm.output diff --git a/var/spack/repos/builtin/packages/atlas/package.py b/var/spack/repos/builtin/packages/atlas/package.py index 20ac0a7879a..0cb15de4111 100644 --- a/var/spack/repos/builtin/packages/atlas/package.py +++ b/var/spack/repos/builtin/packages/atlas/package.py @@ -23,9 +23,11 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * +from spack.package_test import * from spack.util.executable import Executable import os.path + class Atlas(Package): """ Automatically Tuned Linear Algebra Software, generic shared ATLAS is an approach for the automatic generation and @@ -101,6 +103,7 @@ def install(self, spec, prefix): make('shared_all') make("install") + self.install_test() def setup_dependent_package(self, module, dspec): # libsatlas.[so,dylib,dll ] contains all serial APIs (serial lapack, @@ -114,3 +117,16 @@ def setup_dependent_package(self, module, dspec): if '+shared' in self.spec: self.spec.blas_shared_lib = join_path(libdir, name) self.spec.lapack_shared_lib = self.spec.blas_shared_lib + + def install_test(self): + source_file = join_path(os.path.dirname(self.module.__file__), + 'test_cblas_dgemm.c') + blessed_file = join_path(os.path.dirname(self.module.__file__), + 'test_cblas_dgemm.output') + + include_flags = ["-I%s" % join_path(self.spec.prefix, "include")] + link_flags = ["-L%s" % join_path(self.spec.prefix, "lib"), + "-lsatlas"] + + output = compile_c_and_execute(source_file, include_flags, link_flags) + compare_output_file(output, blessed_file) diff --git a/var/spack/repos/builtin/packages/atlas/test_cblas_dgemm.c b/var/spack/repos/builtin/packages/atlas/test_cblas_dgemm.c new file mode 100644 index 00000000000..2cb90fb8830 --- /dev/null +++ b/var/spack/repos/builtin/packages/atlas/test_cblas_dgemm.c @@ -0,0 +1,49 @@ +#include +#include + +double m[] = { + 3, 1, 3, + 1, 5, 9, + 2, 6, 5 +}; + +double x[] = { + -1, 3, -3 +}; + +#ifdef __cplusplus +extern "C" { +#endif + + void dgesv_(int *n, int *nrhs, double *a, int *lda, + int *ipivot, double *b, int *ldb, int *info); + +#ifdef __cplusplus +} +#endif + +int main(void) { + int i; + // blas: + double A[6] = {1.0, 2.0, 1.0, -3.0, 4.0, -1.0}; + double B[6] = {1.0, 2.0, 1.0, -3.0, 4.0, -1.0}; + double C[9] = {.5, .5, .5, .5, .5, .5, .5, .5, .5}; + cblas_dgemm(CblasColMajor, CblasNoTrans, CblasTrans, + 3, 3, 2, 1, A, 3, B, 3, 2, C, 3); + for (i = 0; i < 9; i++) + printf("%f\n", C[i]); + + // lapack: + int ipiv[3]; + int j; + int info; + int n = 1; + int nrhs = 1; + int lda = 3; + int ldb = 3; + dgesv_(&n,&nrhs, &m[0], &lda, ipiv, &x[0], &ldb, &info); + for (i=0; i<3; ++i) + printf("%5.1f\n", x[i]); + + return 0; +} 
diff --git a/var/spack/repos/builtin/packages/atlas/test_cblas_dgemm.output b/var/spack/repos/builtin/packages/atlas/test_cblas_dgemm.output new file mode 100644 index 00000000000..01404462c4b --- /dev/null +++ b/var/spack/repos/builtin/packages/atlas/test_cblas_dgemm.output @@ -0,0 +1,12 @@ +11.000000 +-9.000000 +5.000000 +-9.000000 +21.000000 +-1.000000 +5.000000 +-1.000000 +3.000000 + -0.3 + 3.0 + -3.0 From bef7e2645a624523e5ee269814650f0f39e9498d Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Wed, 13 Jul 2016 23:29:54 +0200 Subject: [PATCH 172/284] atlas: flake8 fixes --- var/spack/repos/builtin/packages/atlas/package.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/atlas/package.py b/var/spack/repos/builtin/packages/atlas/package.py index 0cb15de4111..f3b3887885e 100644 --- a/var/spack/repos/builtin/packages/atlas/package.py +++ b/var/spack/repos/builtin/packages/atlas/package.py @@ -29,11 +29,11 @@ class Atlas(Package): - """ - Automatically Tuned Linear Algebra Software, generic shared ATLAS is an approach for the automatic generation and - optimization of numerical software. Currently ATLAS supplies optimized versions for the complete set of linear - algebra kernels known as the Basic Linear Algebra Subroutines (BLAS), and a subset of the linear algebra routines - in the LAPACK library. + """Automatically Tuned Linear Algebra Software, generic shared ATLAS is an + approach for the automatic generation and optimization of numerical + software. Currently ATLAS supplies optimized versions for the complete set + of linear algebra kernels known as the Basic Linear Algebra Subroutines + (BLAS), and a subset of the linear algebra routines in the LAPACK library. """ homepage = "http://math-atlas.sourceforge.net/" From 565bd5f51e418a0e0da26e5a81eb98e3896a9c62 Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Sat, 16 Jul 2016 07:19:56 +0200 Subject: [PATCH 173/284] atlas: revert to lapack 3.5.0 --- var/spack/repos/builtin/packages/atlas/package.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/atlas/package.py b/var/spack/repos/builtin/packages/atlas/package.py index f3b3887885e..f9d5da61660 100644 --- a/var/spack/repos/builtin/packages/atlas/package.py +++ b/var/spack/repos/builtin/packages/atlas/package.py @@ -39,9 +39,11 @@ class Atlas(Package): version('3.10.2', 'a4e21f343dec8f22e7415e339f09f6da', url='https://sourceforge.net/projects/math-atlas/files/Stable/3.10.2/atlas3.10.2.tar.bz2', preferred=True) + # not all packages (e.g. Trilinos@12.6.3) stopped using deprecated in 3.6.0 + # Lapack routines. Stick with 3.5.0 until this is fixed. 
resource(name='lapack', - url='http://www.netlib.org/lapack/lapack-3.6.0.tgz', - md5='f2f6c67134e851fe189bb3ca1fbb5101', + url='http://www.netlib.org/lapack/lapack-3.5.0.tgz', + md5='b1d3e3e425b2e44a06760ff173104bdf', destination='spack-resource-lapack', when='@3:') From dea7bbb4a09dcdc4f74b2080c2ac66e13c558b82 Mon Sep 17 00:00:00 2001 From: Gilles Fourestey Date: Tue, 2 Aug 2016 14:58:31 +0200 Subject: [PATCH 174/284] added plumed * plumed : first version * plumed : added dependencies and docs --- .../repos/builtin/packages/plumed/package.py | 82 +++++++++++++++++++ 1 file changed, 82 insertions(+) create mode 100644 var/spack/repos/builtin/packages/plumed/package.py diff --git a/var/spack/repos/builtin/packages/plumed/package.py b/var/spack/repos/builtin/packages/plumed/package.py new file mode 100644 index 00000000000..e8cd6d18947 --- /dev/null +++ b/var/spack/repos/builtin/packages/plumed/package.py @@ -0,0 +1,82 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## + +from spack import * + + +class Plumed(Package): + """PLUMED is an open source library for free energy calculations in + molecular systems which works together with some of the most popular + molecular dynamics engines. + + Free energy calculations can be performed as a function of many order + parameters with a particular focus on biological problems, using state + of the art methods such as metadynamics, umbrella sampling and + Jarzynski-equation based steered MD. + + The software, written in C++, can be easily interfaced with both fortran + and C/C++ codes. 
+ """ + homepage = 'http://www.plumed.org/' + url = 'https://github.com/plumed/plumed2/archive/v2.2.3.tar.gz' + + version('2.2.3', 'a6e3863e40aac07eb8cf739cbd14ecf8') + + + variant('shared', default=True, description='Builds shared libraries') + variant('mpi', default=True, description='Activates MPI support') + variant('gsl', default=True, description='Activates GSL support') + + depends_on('zlib') + depends_on('blas') + depends_on('lapack') + + depends_on('mpi', when='+mpi') + depends_on('gsl', when='+gsl') + + def setup_dependent_package(self, module, ext_spec): + # Make plumed visible from dependent packages + module.plumed = Executable(join_path(self.spec.prefix.bin, 'plumed')) + + def install(self, spec, prefix): + # From plumed docs : + # Also consider that this is different with respect to what some other + # configure script does in that variables such as MPICXX are + # completely ignored here. In case you work on a machine where CXX is + # set to a serial compiler and MPICXX to a MPI compiler, to compile with + # MPI you should use: + # + # > ./configure CXX="$MPICXX" + configure_opts = ['CXX={0}'.format(spec['mpi'].mpicxx)] if '+mpi' in self.spec else [] + configure_opts.extend([ + '--prefix={0}'.format(prefix), + '--enable-shared={0}'.format('yes' if '+shared' in spec else 'no'), + '--enable-mpi={0}'.format('yes' if '+mpi' in spec else 'no'), + '--enable-gsl={0}'.format('yes' if '+gsl' in spec else 'no') + ]) + + configure(*configure_opts) + make() + make('install') From a1703bf70d2208841a840513e2d6dc27d74167f4 Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Tue, 2 Aug 2016 16:37:14 +0200 Subject: [PATCH 175/284] kdiff3: add license block --- .../repos/builtin/packages/kdiff3/package.py | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/var/spack/repos/builtin/packages/kdiff3/package.py b/var/spack/repos/builtin/packages/kdiff3/package.py index 8c67ef59c87..48f4b9c3799 100644 --- a/var/spack/repos/builtin/packages/kdiff3/package.py +++ b/var/spack/repos/builtin/packages/kdiff3/package.py @@ -1,3 +1,27 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## from spack import * From a57d94af038199d6b91bfebe52b5ffb5f1424768 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 25 Jul 2016 11:29:31 -0500 Subject: [PATCH 176/284] Fix flake8 issues. 
--- var/spack/repos/builtin/packages/py-networkx/package.py | 4 +++- var/spack/repos/builtin/packages/py-pytables/package.py | 5 +++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-networkx/package.py b/var/spack/repos/builtin/packages/py-networkx/package.py index c7239486107..79ad420f8fe 100644 --- a/var/spack/repos/builtin/packages/py-networkx/package.py +++ b/var/spack/repos/builtin/packages/py-networkx/package.py @@ -24,8 +24,10 @@ ############################################################################## from spack import * + class PyNetworkx(Package): - """NetworkX is a Python package for the creation, manipulation, and study of the structure, dynamics, and functions of complex networks.""" + """NetworkX is a Python package for the creation, manipulation, and study + of the structure, dynamics, and functions of complex networks.""" homepage = "http://networkx.github.io/" url = "https://pypi.python.org/packages/source/n/networkx/networkx-1.11.tar.gz" diff --git a/var/spack/repos/builtin/packages/py-pytables/package.py b/var/spack/repos/builtin/packages/py-pytables/package.py index b05af01c94e..f87e74211ff 100644 --- a/var/spack/repos/builtin/packages/py-pytables/package.py +++ b/var/spack/repos/builtin/packages/py-pytables/package.py @@ -23,10 +23,11 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import re + class PyPytables(Package): - """PyTables is a package for managing hierarchical datasets and designed to efficiently and easily cope with extremely large amounts of data.""" + """PyTables is a package for managing hierarchical datasets and designed to + efficiently and easily cope with extremely large amounts of data.""" homepage = "http://www.pytables.org/" url = "https://github.com/PyTables/PyTables/archive/v.3.2.2.tar.gz" From 4373a2b629780a9c14d97110ed014a5620d4eca6 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Tue, 12 Jul 2016 20:27:44 -0400 Subject: [PATCH 177/284] First cut package for nextflow, has issues This is my first cut at a package to support nextflow. It's also my first package. It works, but has issues. I'm going to submit a pull request and get some coaching on how to deal with it. One issue particular: if I install, then uninstall, then try to install again (which uses the cached copy of the "distribution file"), it explodes. 
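The recipe that follows uses the general Spack pattern for a tool shipped as a single pre-built script: fetch it with expand=False so Spack does not try to untar it, copy it into prefix.bin, and mark it executable. As a rough, self-contained sketch of that pattern (the package name, URL, and checksum below are placeholders, not a real package):

    from spack import *

    class Mytool(Package):
        """Hypothetical package that installs one pre-built script."""
        homepage = "http://example.com/mytool"

        # expand=False tells Spack the download is not an archive
        version('1.0', '00000000000000000000000000000000',
                url='http://example.com/releases/mytool-1.0',
                expand=False)

        def install(self, spec, prefix):
            mkdirp(prefix.bin)
            install('mytool-1.0', join_path(prefix.bin, 'mytool'))
            set_executable(join_path(prefix.bin, 'mytool'))

The reinstall-from-cache failure described above is a separate Spack problem (see the reference to issue #1308 later in this series).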
--- .../builtin/packages/nextflow/package.py | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 var/spack/repos/builtin/packages/nextflow/package.py diff --git a/var/spack/repos/builtin/packages/nextflow/package.py b/var/spack/repos/builtin/packages/nextflow/package.py new file mode 100644 index 00000000000..92e11bbc9f6 --- /dev/null +++ b/var/spack/repos/builtin/packages/nextflow/package.py @@ -0,0 +1,24 @@ +from spack import * +from glob import glob +import os + +class Nextflow(Package): + """Data-driven computational pipelines""" + + homepage = "http://www.nextflow.io" + + version('0.20.1', '0e4e0e3eca1c2c97f9b4bffd944b923a', + url='https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow', + expand=False) + + depends_on('jdk') + + def unpack(self): + pass + + def install(self, spec, prefix): + chmod = which('chmod') + + mkdirp(prefix.bin) + install("nextflow", join_path(prefix.bin, "nextflow")) + chmod('+x', join_path(prefix.bin, "nextflow")) From bfcec696308ee8bfd226a54c17a7e15d49e2aed7 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Mon, 25 Jul 2016 14:17:44 -0400 Subject: [PATCH 178/284] Add standard header, use spack helpers Added the standard header (stolen from R). Touched up the install to use set_executable rather than doing it myself. --- .../builtin/packages/nextflow/package.py | 31 ++++++++++++++++--- 1 file changed, 26 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/nextflow/package.py b/var/spack/repos/builtin/packages/nextflow/package.py index 92e11bbc9f6..a91badd5edc 100644 --- a/var/spack/repos/builtin/packages/nextflow/package.py +++ b/var/spack/repos/builtin/packages/nextflow/package.py @@ -1,6 +1,29 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## from spack import * -from glob import glob -import os + class Nextflow(Package): """Data-driven computational pipelines""" @@ -17,8 +40,6 @@ def unpack(self): pass def install(self, spec, prefix): - chmod = which('chmod') - mkdirp(prefix.bin) install("nextflow", join_path(prefix.bin, "nextflow")) - chmod('+x', join_path(prefix.bin, "nextflow")) + set_executable( join_path(prefix.bin, "nextflow")) From 6f332c7e4cc10cdbd3c1fa8571bcc276a512b066 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Mon, 25 Jul 2016 14:20:06 -0400 Subject: [PATCH 179/284] Fix whitespace, make flake8 happ{y,ier}. --- var/spack/repos/builtin/packages/nextflow/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/nextflow/package.py b/var/spack/repos/builtin/packages/nextflow/package.py index a91badd5edc..865534c2f80 100644 --- a/var/spack/repos/builtin/packages/nextflow/package.py +++ b/var/spack/repos/builtin/packages/nextflow/package.py @@ -42,4 +42,4 @@ def unpack(self): def install(self, spec, prefix): mkdirp(prefix.bin) install("nextflow", join_path(prefix.bin, "nextflow")) - set_executable( join_path(prefix.bin, "nextflow")) + set_executable(join_path(prefix.bin, "nextflow")) From 12d126398066379471f395529a3eac0476c9fdb9 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Tue, 26 Jul 2016 12:57:33 -0400 Subject: [PATCH 180/284] Update several R packages (r-shiny prereqs) r-jsonlite 0.0.21 -> 1.0 r-mime 0.4 -> 0.5 rcpp 0.12.5 -> 0.12.6 CRAN is funny. The older versions of these packages are still available in package specific directories but the current version is not there, so I don't see any way to make the older versions work. 
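The CRAN quirk described here is a layout issue: only the current release of a package sits directly under src/contrib, while superseded releases are kept in a per-package Archive directory, which is why these recipes carry both a url and a list_url. A minimal sketch of that layout for a hypothetical CRAN package (names and checksum are placeholders; the install method is omitted since only the URL handling is being illustrated):

    from spack import *

    class RExample(Package):
        """Hypothetical CRAN package showing url vs. list_url."""
        homepage = "https://cran.r-project.org/package=example"
        # current release: fetched straight from src/contrib
        url      = "https://cran.r-project.org/src/contrib/example_1.0.tar.gz"
        # older releases: only reachable under src/contrib/Archive/<package>
        list_url = "https://cran.r-project.org/src/contrib/Archive/example"

        version('1.0', '00000000000000000000000000000000')

        extends('R')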
--- var/spack/repos/builtin/packages/r-jsonlite/package.py | 4 ++-- var/spack/repos/builtin/packages/r-mime/package.py | 4 ++-- var/spack/repos/builtin/packages/r-rcpp/package.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/r-jsonlite/package.py b/var/spack/repos/builtin/packages/r-jsonlite/package.py index 6e231ed345b..b47cb5a71aa 100644 --- a/var/spack/repos/builtin/packages/r-jsonlite/package.py +++ b/var/spack/repos/builtin/packages/r-jsonlite/package.py @@ -38,10 +38,10 @@ class RJsonlite(Package): use with dynamic data in systems and applications.""" homepage = "https://github.com/jeroenooms/jsonlite" - url = "https://cran.r-project.org/src/contrib/jsonlite_0.9.21.tar.gz" + url = "https://cran.r-project.org/src/contrib/jsonlite_1.0.tar.gz" list_url = "https://cran.r-project.org/src/contrib/Archive/jsonlite" - version('0.9.21', '4fc382747f88a79ff0718a0d06bed45d') + version('1.0', 'c8524e086de22ab39b8ac8000220cc87') extends('R') diff --git a/var/spack/repos/builtin/packages/r-mime/package.py b/var/spack/repos/builtin/packages/r-mime/package.py index fb079f44c5b..6831fc3b606 100644 --- a/var/spack/repos/builtin/packages/r-mime/package.py +++ b/var/spack/repos/builtin/packages/r-mime/package.py @@ -30,10 +30,10 @@ class RMime(Package): from /etc/mime.types in UNIX-type systems.""" homepage = "https://github.com/yihui/mime" - url = "https://cran.r-project.org/src/contrib/mime_0.4.tar.gz" + url = "https://cran.r-project.org/src/contrib/mime_0.5.tar.gz" list_url = "https://cran.r-project.org/src/contrib/Archive/mime" - version('0.4', '789cb33e41db2206c6fc7c3e9fbc2c02') + version('0.5', '87e00b6d57b581465c19ae869a723c4d') extends('R') diff --git a/var/spack/repos/builtin/packages/r-rcpp/package.py b/var/spack/repos/builtin/packages/r-rcpp/package.py index 2428f4af3ba..0e84f8829b7 100644 --- a/var/spack/repos/builtin/packages/r-rcpp/package.py +++ b/var/spack/repos/builtin/packages/r-rcpp/package.py @@ -37,10 +37,10 @@ class RRcpp(Package): last two.""" homepage = "http://dirk.eddelbuettel.com/code/rcpp.html" - url = "https://cran.r-project.org/src/contrib/Rcpp_0.12.5.tar.gz" + url = "https://cran.r-project.org/src/contrib/Rcpp_0.12.6.tar.gz" list_url = "https://cran.r-project.org/src/contrib/Archive/Rcpp" - version('0.12.5', 'f03ec05b4e391cc46e7ce330e82ff5e2') + version('0.12.6', 'db4280fb0a79cd19be73a662c33b0a8b') extends('R') From 63121a0c4982bb163b064a4b97fbf820f5179be3 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Mon, 25 Jul 2016 15:08:12 -0400 Subject: [PATCH 181/284] Add package for ack Add a package for [ack](http://beyondgrep.com/install/). Simply install the fatpacked script. It uses '#!/usr/bin/env perl' and it very much not choosy about what perl it needs. For now just trust that there's one available, perhaps someday we can/should uncomment the depends_on('perl'). Follows the methodolgy I used in nextflow. Has the same uninstall/install problem that nextflow has, there is an issue in progress for that: https://github.com/LLNL/spack/issues/1308. Tested on CentOS7. 
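The script's '#!/usr/bin/env perl' line is what eventually makes a real depends_on('perl') worthwhile: once the perl dependency exists, the shebang can be rewritten to point at it before the script is installed, which is exactly what a later patch in this series does. A sketch of that step inside install(), assuming the downloaded file is named ack-2.14-single-file:

    # Rewrite the shebang so the script runs with the perl dependency,
    # not whatever /usr/bin/env finds first.
    ack = 'ack-{0}-single-file'.format(self.version)
    shbang = '#!' + join_path(spec['perl'].prefix.bin, 'perl')
    filter_file(r'^#!/usr/bin/env perl', shbang, ack)

    install(ack, join_path(prefix.bin, 'ack'))
    set_executable(join_path(prefix.bin, 'ack'))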
--- .../repos/builtin/packages/ack/package.py | 57 +++++++++++++++++++ 1 file changed, 57 insertions(+) create mode 100644 var/spack/repos/builtin/packages/ack/package.py diff --git a/var/spack/repos/builtin/packages/ack/package.py b/var/spack/repos/builtin/packages/ack/package.py new file mode 100644 index 00000000000..8c97e034560 --- /dev/null +++ b/var/spack/repos/builtin/packages/ack/package.py @@ -0,0 +1,57 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * +import os +import re + + +class Ack(Package): + """ack 2.14 is a tool like grep, optimized for programmers. + + Designed for programmers with large heterogeneous trees of + source code, ack is written purely in portable Perl 5 and takes + advantage of the power of Perl's regular expressions.""" + + homepage = "http://beyondgrep.com/" + url = "http://beyondgrep.com/ack-2.14-single-file" + + version('2.14', 'e74150a1609d28a70b450ef9cc2ed56b', expand=False) + + # trust that there's a system perl for now, but perhaps someday we + # should: + # depends_on('perl') + + def unpack(self): + pass + + def install(self, spec, prefix): + mkdirp(prefix.bin) + # find the file named like ack-2.14-single-file in a version + # independent manner (there should be only one )and install it + # as `ack`. + for f in os.listdir('.'): + if re.match('ack-\d*\.\d*-single-file', f): + install(f, join_path(prefix.bin, "ack")) + set_executable(join_path(prefix.bin, "ack")) From 6c3623422fe3d1354f14d67c54a32070e2df2d58 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Mon, 25 Jul 2016 17:50:06 -0400 Subject: [PATCH 182/284] Use cleaner mech to install script Use @adamjstewart's nicer bit of python code in the install method. --- var/spack/repos/builtin/packages/ack/package.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/var/spack/repos/builtin/packages/ack/package.py b/var/spack/repos/builtin/packages/ack/package.py index 8c97e034560..41100a5e66d 100644 --- a/var/spack/repos/builtin/packages/ack/package.py +++ b/var/spack/repos/builtin/packages/ack/package.py @@ -48,10 +48,6 @@ def unpack(self): def install(self, spec, prefix): mkdirp(prefix.bin) - # find the file named like ack-2.14-single-file in a version - # independent manner (there should be only one )and install it - # as `ack`. 
- for f in os.listdir('.'): - if re.match('ack-\d*\.\d*-single-file', f): - install(f, join_path(prefix.bin, "ack")) - set_executable(join_path(prefix.bin, "ack")) + ack = 'ack-{0}-single-file'.format(self.version) + install(ack, join_path(prefix.bin, "ack")) + set_executable(join_path(prefix.bin, "ack")) From bf467c5df374092d231ddb752c33943dfaa4d09d Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Mon, 25 Jul 2016 18:12:22 -0400 Subject: [PATCH 183/284] Remove override of `unpack(self)` I cargo culted that from my *nextflow* package. I [thought I] needed it to work around Spack trying to use tar to unpack something that was neither a tar ball nor unpackable. This package works fine without it. In retrospect, the error that I was seeing in the *nextflow* package was probably this problem #1308. --- var/spack/repos/builtin/packages/ack/package.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/ack/package.py b/var/spack/repos/builtin/packages/ack/package.py index 41100a5e66d..af7d206c29a 100644 --- a/var/spack/repos/builtin/packages/ack/package.py +++ b/var/spack/repos/builtin/packages/ack/package.py @@ -43,9 +43,6 @@ class Ack(Package): # should: # depends_on('perl') - def unpack(self): - pass - def install(self, spec, prefix): mkdirp(prefix.bin) ack = 'ack-{0}-single-file'.format(self.version) From 114da813a37ec0847376ecfb99fd4446d7dd1d7c Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Mon, 25 Jul 2016 18:50:52 -0400 Subject: [PATCH 184/284] Remove unnecessary imports. Now that it's sporting a cleaner install method, these imports are unnecessary. --- var/spack/repos/builtin/packages/ack/package.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/ack/package.py b/var/spack/repos/builtin/packages/ack/package.py index af7d206c29a..db07e5e246d 100644 --- a/var/spack/repos/builtin/packages/ack/package.py +++ b/var/spack/repos/builtin/packages/ack/package.py @@ -23,8 +23,6 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import os -import re class Ack(Package): From 857a03c127236f8f3b9c697fbe8cc2870a2276cc Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Mon, 25 Jul 2016 19:06:13 -0400 Subject: [PATCH 185/284] Ack should depends_on('perl') Add a depends_on('perl') and rewrite the script's `#!` line to refer to that Perl's `perl` executable. --- var/spack/repos/builtin/packages/ack/package.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/ack/package.py b/var/spack/repos/builtin/packages/ack/package.py index db07e5e246d..70249aebac4 100644 --- a/var/spack/repos/builtin/packages/ack/package.py +++ b/var/spack/repos/builtin/packages/ack/package.py @@ -37,12 +37,15 @@ class Ack(Package): version('2.14', 'e74150a1609d28a70b450ef9cc2ed56b', expand=False) - # trust that there's a system perl for now, but perhaps someday we - # should: - # depends_on('perl') + depends_on('perl') def install(self, spec, prefix): mkdirp(prefix.bin) ack = 'ack-{0}-single-file'.format(self.version) + + # rewrite the script's #! line to call the perl dependency + shbang = '#!' 
+ join_path(spec['perl'].prefix.bin, 'perl') + filter_file(r'^#!/usr/bin/env perl', shbang, ack) + install(ack, join_path(prefix.bin, "ack")) set_executable(join_path(prefix.bin, "ack")) From 949621eb7fa33d62557d5341f2f43a63d42ba121 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Wed, 27 Jul 2016 18:42:36 -0400 Subject: [PATCH 186/284] Add package for texlive --- .../repos/builtin/packages/texlive/package.py | 56 +++++++++++++++++++ 1 file changed, 56 insertions(+) create mode 100644 var/spack/repos/builtin/packages/texlive/package.py diff --git a/var/spack/repos/builtin/packages/texlive/package.py b/var/spack/repos/builtin/packages/texlive/package.py new file mode 100644 index 00000000000..f4a4acf3ba3 --- /dev/null +++ b/var/spack/repos/builtin/packages/texlive/package.py @@ -0,0 +1,56 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * +import os + +class Texlive(Package): + """TeX Live is a free software distribution for the TeX typesetting + system""" + + homepage = "http://www.tug.org/texlive" + + version('live', 'e671eea7f142c438959493cc42a2a59b', url = "http://mirror.ctan.org/systems/texlive/tlnet/install-tl-unx.tar.gz") + + # There does not seem to be a complete list of schemes. + # Examples include: + # full scheme (everything) + # medium scheme (small + more packages and languages) + # small scheme (basic + xetex, metapost, a few languages) + # basic scheme (plain and latex) + # minimal scheme (plain only) + # See: + # https://www.tug.org/texlive/doc/texlive-en/texlive-en.html#x1-25025r6 + variant('scheme', default="small", + description='Package subset to install (e.g. full, small, basic)') + + depends_on('perl') + + def install(self, spec, prefix): + env = os.environ + env['TEXLIVE_INSTALL_PREFIX'] = prefix + perl = which('perl') + scheme = spec.variants['scheme'].value + perl('./install-tl', '-scheme', scheme, + '-portable', '-profile', '/dev/null') From a5a4525bed678b989130583a1ea54c63f36fdf56 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Fri, 22 Jul 2016 13:39:10 -0400 Subject: [PATCH 187/284] Add perl package Add perl package, based on [work by justintoo](https://github.com/LLNL/spack/pull/105). He had too many things pulled into that pull request, this just adds a perl package. Support the current releases on the past three minor branches. 
Run perl's tests before installing. Install cpanm into the core (makes building on top of this perl *much* simpler). Controlled by a variant. --- .../builtin/packages/perl/cpanm-installer.pl | 1075 +++++++++++++++++ .../repos/builtin/packages/perl/package.py | 42 + 2 files changed, 1117 insertions(+) create mode 100644 var/spack/repos/builtin/packages/perl/cpanm-installer.pl create mode 100644 var/spack/repos/builtin/packages/perl/package.py diff --git a/var/spack/repos/builtin/packages/perl/cpanm-installer.pl b/var/spack/repos/builtin/packages/perl/cpanm-installer.pl new file mode 100644 index 00000000000..97f56162ffa --- /dev/null +++ b/var/spack/repos/builtin/packages/perl/cpanm-installer.pl @@ -0,0 +1,1075 @@ +#!/usr/bin/env perl +# +# This is a pre-compiled source code for the cpanm (cpanminus) program. +# For more details about how to install cpanm, go to the following URL: +# +# https://github.com/miyagawa/cpanminus +# +# Quickstart: Run the following command and it will install itself for +# you. You might want to run it as a root with sudo if you want to install +# to places like /usr/local/bin. +# +# % curl -L https://cpanmin.us | perl - App::cpanminus +# +# If you don't have curl but wget, replace `curl -L` with `wget -O -`. + +# DO NOT EDIT -- this is an auto generated file + +# This chunk of stuff was generated by App::FatPacker. To find the original +# file's code, look for the end of this BEGIN block or the string 'FATPACK' +BEGIN { +my %fatpacked; + +$fatpacked{"App/cpanminus.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'APP_CPANMINUS'; + package App::cpanminus;our$VERSION="1.7042";1; +APP_CPANMINUS + +$fatpacked{"App/cpanminus/Dependency.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'APP_CPANMINUS_DEPENDENCY'; + package App::cpanminus::Dependency;use strict;use CPAN::Meta::Requirements;sub from_prereqs {my($class,$prereqs,$phases,$types)=@_;my@deps;for my$type (@$types){push@deps,$class->from_versions($prereqs->merged_requirements($phases,[$type])->as_string_hash,$type,)}return@deps}sub from_versions {my($class,$versions,$type)=@_;my@deps;while (my($module,$version)=each %$versions){push@deps,$class->new($module,$version,$type)}@deps}sub merge_with {my($self,$requirements)=@_;$self->{original_version}=$self->version;eval {$requirements->add_string_requirement($self->module,$self->version)};if ($@ =~ /illegal requirements/){warn sprintf("Can't merge requirements for %s: '%s' and '%s'",$self->module,$self->version,$requirements->requirements_for_module($self->module))}$self->{version}=$requirements->requirements_for_module($self->module)}sub new {my($class,$module,$version,$type)=@_;bless {module=>$module,version=>$version,type=>$type || 'requires',},$class}sub module {$_[0]->{module}}sub version {$_[0]->{version}}sub type {$_[0]->{type}}sub requires_version {my$self=shift;if (defined$self->{original_version}){return$self->{original_version}}$self->version}sub is_requirement {$_[0]->{type}eq 'requires'}1; +APP_CPANMINUS_DEPENDENCY + +$fatpacked{"App/cpanminus/script.pm"} = '#line '.(1+__LINE__).' 
"'.__FILE__."\"\n".<<'APP_CPANMINUS_SCRIPT'; + package App::cpanminus::script;use strict;use Config;use Cwd ();use App::cpanminus;use App::cpanminus::Dependency;use File::Basename ();use File::Find ();use File::Path ();use File::Spec ();use File::Copy ();use File::Temp ();use Getopt::Long ();use Symbol ();use String::ShellQuote ();use version ();use constant WIN32=>$^O eq 'MSWin32';use constant BAD_TAR=>($^O eq 'solaris' || $^O eq 'hpux');use constant CAN_SYMLINK=>eval {symlink("","");1};our$VERSION=$App::cpanminus::VERSION;if ($INC{"App/FatPacker/Trace.pm"}){require version::vpp}my$quote=WIN32 ? q/"/ : q/'/;sub agent {my$self=shift;my$agent="cpanminus/$VERSION";$agent .= " perl/$]" if$self->{report_perl_version};$agent}sub determine_home {my$class=shift;my$homedir=$ENV{HOME}|| eval {require File::HomeDir;File::HomeDir->my_home}|| join('',@ENV{qw(HOMEDRIVE HOMEPATH)});if (WIN32){require Win32;$homedir=Win32::GetShortPathName($homedir)}return "$homedir/.cpanm"}sub new {my$class=shift;bless {home=>$class->determine_home,cmd=>'install',seen=>{},notest=>undef,test_only=>undef,installdeps=>undef,force=>undef,sudo=>undef,make=>undef,verbose=>undef,quiet=>undef,interactive=>undef,log=>undef,mirrors=>[],mirror_only=>undef,mirror_index=>undef,cpanmetadb=>"http://cpanmetadb.plackperl.org/v1.0/",perl=>$^X,argv=>[],local_lib=>undef,self_contained=>undef,exclude_vendor=>undef,prompt_timeout=>0,prompt=>undef,configure_timeout=>60,build_timeout=>3600,test_timeout=>1800,try_lwp=>1,try_wget=>1,try_curl=>1,uninstall_shadows=>($] < 5.012),skip_installed=>1,skip_satisfied=>0,auto_cleanup=>7,pod2man=>1,installed_dists=>0,install_types=>['requires'],with_develop=>0,with_configure=>0,showdeps=>0,scandeps=>0,scandeps_tree=>[],format=>'tree',save_dists=>undef,skip_configure=>0,verify=>0,report_perl_version=>!$class->maybe_ci,build_args=>{},features=>{},pure_perl=>0,cpanfile_path=>'cpanfile',@_,},$class}sub env {my($self,$key)=@_;$ENV{"PERL_CPANM_" .$key}}sub maybe_ci {my$class=shift;grep$ENV{$_},qw(TRAVIS CI AUTOMATED_TESTING AUTHOR_TESTING)}sub install_type_handlers {my$self=shift;my@handlers;for my$type (qw(recommends suggests)){push@handlers,"with-$type"=>sub {my%uniq;$self->{install_types}=[grep!$uniq{$_}++,@{$self->{install_types}},$type ]};push@handlers,"without-$type"=>sub {$self->{install_types}=[grep $_ ne $type,@{$self->{install_types}}]}}@handlers}sub build_args_handlers {my$self=shift;my@handlers;for my$phase (qw(configure build test install)){push@handlers,"$phase-args=s"=>\($self->{build_args}{$phase})}@handlers}sub parse_options {my$self=shift;local@ARGV=@{$self->{argv}};push@ARGV,grep length,split /\s+/,$self->env('OPT');push@ARGV,@_;Getopt::Long::Configure("bundling");Getopt::Long::GetOptions('f|force'=>sub {$self->{skip_installed}=0;$self->{force}=1},'n|notest!'=>\$self->{notest},'test-only'=>sub {$self->{notest}=0;$self->{skip_installed}=0;$self->{test_only}=1},'S|sudo!'=>\$self->{sudo},'v|verbose'=>\$self->{verbose},'verify!'=>\$self->{verify},'q|quiet!'=>\$self->{quiet},'h|help'=>sub {$self->{action}='show_help'},'V|version'=>sub {$self->{action}='show_version'},'perl=s'=>sub {$self->diag("--perl is deprecated since it's known to be fragile in figuring out dependencies. 
Run `$_[1] -S cpanm` instead.\n",1);$self->{perl}=$_[1]},'l|local-lib=s'=>sub {$self->{local_lib}=$self->maybe_abs($_[1])},'L|local-lib-contained=s'=>sub {$self->{local_lib}=$self->maybe_abs($_[1]);$self->{self_contained}=1;$self->{pod2man}=undef},'self-contained!'=>\$self->{self_contained},'exclude-vendor!'=>\$self->{exclude_vendor},'mirror=s@'=>$self->{mirrors},'mirror-only!'=>\$self->{mirror_only},'mirror-index=s'=>sub {$self->{mirror_index}=$self->maybe_abs($_[1])},'M|from=s'=>sub {$self->{mirrors}=[$_[1]];$self->{mirror_only}=1},'cpanmetadb=s'=>\$self->{cpanmetadb},'cascade-search!'=>\$self->{cascade_search},'prompt!'=>\$self->{prompt},'installdeps'=>\$self->{installdeps},'skip-installed!'=>\$self->{skip_installed},'skip-satisfied!'=>\$self->{skip_satisfied},'reinstall'=>sub {$self->{skip_installed}=0},'interactive!'=>\$self->{interactive},'i|install'=>sub {$self->{cmd}='install'},'info'=>sub {$self->{cmd}='info'},'look'=>sub {$self->{cmd}='look';$self->{skip_installed}=0},'U|uninstall'=>sub {$self->{cmd}='uninstall'},'self-upgrade'=>sub {$self->{action}='self_upgrade'},'uninst-shadows!'=>\$self->{uninstall_shadows},'lwp!'=>\$self->{try_lwp},'wget!'=>\$self->{try_wget},'curl!'=>\$self->{try_curl},'auto-cleanup=s'=>\$self->{auto_cleanup},'man-pages!'=>\$self->{pod2man},'scandeps'=>\$self->{scandeps},'showdeps'=>sub {$self->{showdeps}=1;$self->{skip_installed}=0},'format=s'=>\$self->{format},'save-dists=s'=>sub {$self->{save_dists}=$self->maybe_abs($_[1])},'skip-configure!'=>\$self->{skip_configure},'dev!'=>\$self->{dev_release},'metacpan!'=>\$self->{metacpan},'report-perl-version!'=>\$self->{report_perl_version},'configure-timeout=i'=>\$self->{configure_timeout},'build-timeout=i'=>\$self->{build_timeout},'test-timeout=i'=>\$self->{test_timeout},'with-develop'=>\$self->{with_develop},'without-develop'=>sub {$self->{with_develop}=0},'with-configure'=>\$self->{with_configure},'without-configure'=>sub {$self->{with_configure}=0},'with-feature=s'=>sub {$self->{features}{$_[1]}=1},'without-feature=s'=>sub {$self->{features}{$_[1]}=0},'with-all-features'=>sub {$self->{features}{__all}=1},'pp|pureperl!'=>\$self->{pure_perl},"cpanfile=s"=>\$self->{cpanfile_path},$self->install_type_handlers,$self->build_args_handlers,);if (!@ARGV && $0 ne '-' &&!-t STDIN){push@ARGV,$self->load_argv_from_fh(\*STDIN);$self->{load_from_stdin}=1}$self->{argv}=\@ARGV}sub check_upgrade {my$self=shift;my$install_base=$ENV{PERL_LOCAL_LIB_ROOT}? 
$self->local_lib_target($ENV{PERL_LOCAL_LIB_ROOT}): $Config{installsitebin};if ($0 eq '-'){return}elsif ($0 !~ /^$install_base/){if ($0 =~ m!perlbrew/bin!){die <{_checked}++;$self->bootstrap_local_lib}sub setup_verify {my$self=shift;my$has_modules=eval {require Module::Signature;require Digest::SHA;1};$self->{cpansign}=$self->which('cpansign');unless ($has_modules && $self->{cpansign}){warn "WARNING: Module::Signature and Digest::SHA is required for distribution verifications.\n";$self->{verify}=0}}sub parse_module_args {my($self,$module)=@_;$module =~ s/^([A-Za-z0-9_:]+)@([v\d\._]+)$/$1~== $2/;if ($module =~ /\~[v\d\._,\!<>= ]+$/){return split /\~/,$module,2}else {return$module,undef}}sub doit {my$self=shift;my$code;eval {$code=($self->_doit==0)};if (my$e=$@){warn$e;$code=1}return$code}sub _doit {my$self=shift;$self->setup_home;$self->init_tools;$self->setup_verify if$self->{verify};if (my$action=$self->{action}){$self->$action()and return 1}return$self->show_help(1)unless @{$self->{argv}}or $self->{load_from_stdin};$self->configure_mirrors;my$cwd=Cwd::cwd;my@fail;for my$module (@{$self->{argv}}){if ($module =~ s/\.pm$//i){my ($volume,$dirs,$file)=File::Spec->splitpath($module);$module=join '::',grep {$_}File::Spec->splitdir($dirs),$file}($module,my$version)=$self->parse_module_args($module);$self->chdir($cwd);if ($self->{cmd}eq 'uninstall'){$self->uninstall_module($module)or push@fail,$module}else {$self->install_module($module,0,$version)or push@fail,$module}}if ($self->{base}&& $self->{auto_cleanup}){$self->cleanup_workdirs}if ($self->{installed_dists}){my$dists=$self->{installed_dists}> 1 ? "distributions" : "distribution";$self->diag("$self->{installed_dists} $dists installed\n",1)}if ($self->{scandeps}){$self->dump_scandeps()}$self->chdir($cwd);return!@fail}sub setup_home {my$self=shift;$self->{home}=$self->env('HOME')if$self->env('HOME');unless (_writable($self->{home})){die "Can't write to cpanm home '$self->{home}': You should fix it with chown/chmod first.\n"}$self->{base}="$self->{home}/work/" .time .".$$";File::Path::mkpath([$self->{base}],0,0777);$self->{log}=File::Spec->catfile($self->{base},"build.log");my$final_log="$self->{home}/build.log";{open my$out,">$self->{log}" or die "$self->{log}: $!"}if (CAN_SYMLINK){my$build_link="$self->{home}/latest-build";unlink$build_link;symlink$self->{base},$build_link;unlink$final_log;symlink$self->{log},$final_log}else {my$log=$self->{log};my$home=$self->{home};$self->{at_exit}=sub {my$self=shift;my$temp_log="$home/build.log." .time .".$$";File::Copy::copy($log,$temp_log)&& unlink($final_log);rename($temp_log,$final_log)}}$self->chat("cpanm (App::cpanminus) $VERSION on perl $] built for $Config{archname}\n" ."Work directory is $self->{base}\n")}sub package_index_for {my ($self,$mirror)=@_;return$self->source_for($mirror)."/02packages.details.txt"}sub generate_mirror_index {my ($self,$mirror)=@_;my$file=$self->package_index_for($mirror);my$gz_file=$file .'.gz';my$index_mtime=(stat$gz_file)[9];unless (-e $file && (stat$file)[9]>= $index_mtime){$self->chat("Uncompressing index file...\n");if (eval {require Compress::Zlib}){my$gz=Compress::Zlib::gzopen($gz_file,"rb")or do {$self->diag_fail("$Compress::Zlib::gzerrno opening compressed index");return};open my$fh,'>',$file or do {$self->diag_fail("$! opening uncompressed index for write");return};my$buffer;while (my$status=$gz->gzread($buffer)){if ($status < 0){$self->diag_fail($gz->gzerror ." 
reading compressed index");return}print$fh $buffer}}else {if (system("gunzip -c $gz_file > $file")){$self->diag_fail("Cannot uncompress -- please install gunzip or Compress::Zlib");return}}utime$index_mtime,$index_mtime,$file}return 1}sub search_mirror_index {my ($self,$mirror,$module,$version)=@_;$self->search_mirror_index_file($self->package_index_for($mirror),$module,$version)}sub search_mirror_index_file {my($self,$file,$module,$version)=@_;open my$fh,'<',$file or return;my$found;while (<$fh>){if (m!^\Q$module\E\s+([\w\.]+)\s+(\S*)!m){$found=$self->cpan_module($module,$2,$1);last}}return$found unless$self->{cascade_search};if ($found){if ($self->satisfy_version($module,$found->{module_version},$version)){return$found}else {$self->chat("Found $module $found->{module_version} which doesn't satisfy $version.\n")}}return}sub with_version_range {my($self,$version)=@_;defined($version)&& $version =~ /(?:<|!=|==)/}sub encode_json {my($self,$data)=@_;require JSON::PP;my$json=JSON::PP::encode_json($data);$json =~ s/([^a-zA-Z0-9_\-.])/uc sprintf("%%%02x",ord($1))/eg;$json}sub version_to_query {my($self,$module,$version)=@_;require CPAN::Meta::Requirements;my$requirements=CPAN::Meta::Requirements->new;$requirements->add_string_requirement($module,$version || '0');my$req=$requirements->requirements_for_module($module);if ($req =~ s/^==\s*//){return {term=>{'module.version'=>$req },}}elsif ($req !~ /\s/){return {range=>{'module.version_numified'=>{'gte'=>$self->numify_ver_metacpan($req)}},}}else {my%ops=qw(< lt <= lte > gt >= gte);my(%range,@exclusion);my@requirements=split /,\s*/,$req;for my$r (@requirements){if ($r =~ s/^([<>]=?)\s*//){$range{$ops{$1}}=$self->numify_ver_metacpan($r)}elsif ($r =~ s/\!=\s*//){push@exclusion,$self->numify_ver_metacpan($r)}}my@filters=({range=>{'module.version_numified'=>\%range }},);if (@exclusion){push@filters,{not=>{or=>[map {+{term=>{'module.version_numified'=>$self->numify_ver_metacpan($_)}}}@exclusion ]},}}return@filters}}sub numify_ver_metacpan {my($self,$ver)=@_;$ver =~ s/_//g;version->new($ver)->numify}sub numify_ver {my($self,$ver)=@_;eval version->new($ver)->numify}sub maturity_filter {my($self,$module,$version)=@_;if ($version =~ /==/){return}elsif ($self->{dev_release}){return +{not=>{term=>{status=>'backpan' }}}}else {return ({not=>{term=>{status=>'backpan' }}},{term=>{maturity=>'released' }},)}}sub by_version {my%s=qw(latest 3 cpan 2 backpan 1);$b->{_score}<=> $a->{_score}|| $s{$b->{fields}{status}}<=> $s{$a->{fields}{status}}}sub by_first_come {$a->{fields}{date}cmp $b->{fields}{date}}sub by_date {$b->{fields}{date}cmp $a->{fields}{date}}sub find_best_match {my($self,$match,$version)=@_;return unless$match && @{$match->{hits}{hits}|| []};my@hits=$self->{dev_release}? sort {by_version || by_date}@{$match->{hits}{hits}}: sort {by_version || by_first_come}@{$match->{hits}{hits}};$hits[0]->{fields}}sub search_metacpan {my($self,$module,$version)=@_;require JSON::PP;$self->chat("Searching $module ($version) on metacpan ...\n");my$metacpan_uri='http://api.metacpan.org/v0';my@filter=$self->maturity_filter($module,$version);my$query={filtered=>{(@filter ? 
(filter=>{and=>\@filter }): ()),query=>{nested=>{score_mode=>'max',path=>'module',query=>{custom_score=>{metacpan_script=>"score_version_numified",query=>{constant_score=>{filter=>{and=>[{term=>{'module.authorized'=>JSON::PP::true()}},{term=>{'module.indexed'=>JSON::PP::true()}},{term=>{'module.name'=>$module }},$self->version_to_query($module,$version),]}}},}},}},}};my$module_uri="$metacpan_uri/file/_search?source=";$module_uri .= $self->encode_json({query=>$query,fields=>['date','release','author','module','status' ],});my($release,$author,$module_version);my$module_json=$self->get($module_uri);my$module_meta=eval {JSON::PP::decode_json($module_json)};my$match=$self->find_best_match($module_meta);if ($match){$release=$match->{release};$author=$match->{author};my$module_matched=(grep {$_->{name}eq $module}@{$match->{module}})[0];$module_version=$module_matched->{version}}unless ($release){$self->chat("! Could not find a release matching $module ($version) on MetaCPAN.\n");return}my$dist_uri="$metacpan_uri/release/_search?source=";$dist_uri .= $self->encode_json({filter=>{and=>[{term=>{'release.name'=>$release }},{term=>{'release.author'=>$author }},]},fields=>['download_url','stat','status' ],});my$dist_json=$self->get($dist_uri);my$dist_meta=eval {JSON::PP::decode_json($dist_json)};if ($dist_meta){$dist_meta=$dist_meta->{hits}{hits}[0]{fields}}if ($dist_meta && $dist_meta->{download_url}){(my$distfile=$dist_meta->{download_url})=~ s!.+/authors/id/!!;local$self->{mirrors}=$self->{mirrors};if ($dist_meta->{status}eq 'backpan'){$self->{mirrors}=['http://backpan.perl.org' ]}elsif ($dist_meta->{stat}{mtime}> time()-24*60*60){$self->{mirrors}=['http://cpan.metacpan.org' ]}return$self->cpan_module($module,$distfile,$module_version)}$self->diag_fail("Finding $module on metacpan failed.");return}sub search_database {my($self,$module,$version)=@_;my$found;if ($self->{dev_release}or $self->{metacpan}){$found=$self->search_metacpan($module,$version)and return$found;$found=$self->search_cpanmetadb($module,$version)and return$found}else {$found=$self->search_cpanmetadb($module,$version)and return$found;$found=$self->search_metacpan($module,$version)and return$found}}sub search_cpanmetadb {my($self,$module,$version)=@_;$self->chat("Searching $module ($version) on cpanmetadb ...\n");if ($self->with_version_range($version)){return$self->search_cpanmetadb_history($module,$version)}else {return$self->search_cpanmetadb_package($module,$version)}}sub search_cpanmetadb_package {my($self,$module,$version)=@_;require CPAN::Meta::YAML;(my$uri=$self->{cpanmetadb})=~ s{/?$}{/package/$module};my$yaml=$self->get($uri);my$meta=eval {CPAN::Meta::YAML::Load($yaml)};if ($meta && $meta->{distfile}){return$self->cpan_module($module,$meta->{distfile},$meta->{version})}$self->diag_fail("Finding $module on cpanmetadb failed.");return}sub search_cpanmetadb_history {my($self,$module,$version)=@_;(my$uri=$self->{cpanmetadb})=~ s{/?$}{/history/$module};my$content=$self->get($uri)or return;my@found;for my$line (split /\r?\n/,$content){if ($line =~ /^$module\s+(\S+)\s+(\S+)$/){push@found,{version=>$1,version_obj=>version::->parse($1),distfile=>$2,}}}return unless@found;$found[-1]->{latest}=1;my$match;for my$try (sort {$b->{version_obj}cmp $a->{version_obj}}@found){if ($self->satisfy_version($module,$try->{version_obj},$version)){local$self->{mirrors}=$self->{mirrors};unshift @{$self->{mirrors}},'http://backpan.perl.org' 
unless$try->{latest};return$self->cpan_module($module,$try->{distfile},$try->{version})}}$self->diag_fail("Finding $module ($version) on cpanmetadb failed.");return}sub search_module {my($self,$module,$version)=@_;if ($self->{mirror_index}){$self->mask_output(chat=>"Searching $module on mirror index $self->{mirror_index} ...\n");my$pkg=$self->search_mirror_index_file($self->{mirror_index},$module,$version);return$pkg if$pkg;unless ($self->{cascade_search}){$self->mask_output(diag_fail=>"Finding $module ($version) on mirror index $self->{mirror_index} failed.");return}}unless ($self->{mirror_only}){my$found=$self->search_database($module,$version);return$found if$found}MIRROR: for my$mirror (@{$self->{mirrors}}){$self->mask_output(chat=>"Searching $module on mirror $mirror ...\n");my$name='02packages.details.txt.gz';my$uri="$mirror/modules/$name";my$gz_file=$self->package_index_for($mirror).'.gz';unless ($self->{pkgs}{$uri}){$self->mask_output(chat=>"Downloading index file $uri ...\n");$self->mirror($uri,$gz_file);$self->generate_mirror_index($mirror)or next MIRROR;$self->{pkgs}{$uri}="!!retrieved!!"}my$pkg=$self->search_mirror_index($mirror,$module,$version);return$pkg if$pkg;$self->mask_output(diag_fail=>"Finding $module ($version) on mirror $mirror failed.")}return}sub source_for {my($self,$mirror)=@_;$mirror =~ s/[^\w\.\-]+/%/g;my$dir="$self->{home}/sources/$mirror";File::Path::mkpath([$dir ],0,0777);return$dir}sub load_argv_from_fh {my($self,$fh)=@_;my@argv;while(defined(my$line=<$fh>)){chomp$line;$line =~ s/#.+$//;$line =~ s/^\s+//;$line =~ s/\s+$//;push@argv,split ' ',$line if$line}return@argv}sub show_version {my$self=shift;print "cpanm (App::cpanminus) version $VERSION ($0)\n";print "perl version $] ($^X)\n\n";print " \%Config:\n";for my$key (qw(archname installsitelib installsitebin installman1dir installman3dir sitearchexp sitelibexp vendorarch vendorlibexp archlibexp privlibexp)){print " $key=$Config{$key}\n" if$Config{$key}}print " \%ENV:\n";for my$key (grep /^PERL/,sort keys%ENV){print " $key=$ENV{$key}\n"}print " \@INC:\n";for my$inc (@INC){print " $inc\n" unless ref($inc)eq 'CODE'}return 1}sub show_help {my$self=shift;if ($_[0]){print <splitdir($dir);while (@dir){$dir=File::Spec->catdir(@dir);if (-e $dir){return -w _}pop@dir}return}sub maybe_abs {my($self,$lib)=@_;if ($lib eq '_' or $lib =~ /^~/ or File::Spec->file_name_is_absolute($lib)){return$lib}else {return File::Spec->canonpath(File::Spec->catdir(Cwd::cwd(),$lib))}}sub local_lib_target {my($self,$root)=@_;(grep {$_ ne ''}split /\Q$Config{path_sep}/,$root)[0]}sub bootstrap_local_lib {my$self=shift;if ($self->{local_lib}){return$self->setup_local_lib($self->{local_lib})}if ($ENV{PERL_LOCAL_LIB_ROOT}&& $ENV{PERL_MM_OPT}){return$self->setup_local_lib($self->local_lib_target($ENV{PERL_LOCAL_LIB_ROOT}),1)}return if$self->{sudo}or (_writable($Config{installsitelib})and _writable($Config{installsitebin}));if ($ENV{PERL_MM_OPT}and ($ENV{MODULEBUILDRC}or $ENV{PERL_MB_OPT})){return}$self->setup_local_lib;$self->diag(<module=>$_}@$config_deps;my$reqs=CPAN::Meta::Requirements->from_string_hash({'Module::Build'=>'0.38','ExtUtils::MakeMaker'=>'6.58','ExtUtils::Install'=>'1.46',});if ($deps{"ExtUtils::MakeMaker"}){$deps{"ExtUtils::MakeMaker"}->merge_with($reqs)}elsif ($deps{"Module::Build"}){$deps{"Module::Build"}->merge_with($reqs);$deps{"ExtUtils::Install"}||= App::cpanminus::Dependency->new("ExtUtils::Install",0,'configure');$deps{"ExtUtils::Install"}->merge_with($reqs)}@$config_deps=values%deps}sub _core_only_inc 
{my($self,$base)=@_;require local::lib;(local::lib->resolve_path(local::lib->install_base_arch_path($base)),local::lib->resolve_path(local::lib->install_base_perl_path($base)),(!$self->{exclude_vendor}? grep {$_}@Config{qw(vendorarch vendorlibexp)}: ()),@Config{qw(archlibexp privlibexp)},)}sub _diff {my($self,$old,$new)=@_;my@diff;my%old=map {$_=>1}@$old;for my$n (@$new){push@diff,$n unless exists$old{$n}}@diff}sub _setup_local_lib_env {my($self,$base)=@_;$self->diag(<setup_env_hash_for($base,0)}sub setup_local_lib {my($self,$base,$no_env)=@_;$base=undef if$base eq '_';require local::lib;{local $0='cpanm';$base ||= "~/perl5";$base=local::lib->resolve_path($base);if ($self->{self_contained}){my@inc=$self->_core_only_inc($base);$self->{search_inc}=[@inc ]}else {$self->{search_inc}=[local::lib->install_base_arch_path($base),local::lib->install_base_perl_path($base),@INC,]}$self->_setup_local_lib_env($base)unless$no_env;$self->{local_lib}=$base}}sub prompt_bool {my($self,$mess,$def)=@_;my$val=$self->prompt($mess,$def);return lc$val eq 'y'}sub prompt {my($self,$mess,$def)=@_;my$isa_tty=-t STDIN && (-t STDOUT ||!(-f STDOUT || -c STDOUT));my$dispdef=defined$def ? "[$def] " : " ";$def=defined$def ? $def : "";if (!$self->{prompt}|| (!$isa_tty && eof STDIN)){return$def}local $|=1;local $\;my$ans;eval {local$SIG{ALRM}=sub {undef$ans;die "alarm\n"};print STDOUT "$mess $dispdef";alarm$self->{prompt_timeout}if$self->{prompt_timeout};$ans=;alarm 0};if (defined$ans){chomp$ans}else {print STDOUT "\n"}return (!defined$ans || $ans eq '')? $def : $ans}sub diag_ok {my($self,$msg)=@_;chomp$msg;$msg ||= "OK";if ($self->{in_progress}){$self->_diag("$msg\n");$self->{in_progress}=0}$self->log("-> $msg\n")}sub diag_fail {my($self,$msg,$always)=@_;chomp$msg;if ($self->{in_progress}){$self->_diag("FAIL\n");$self->{in_progress}=0}if ($msg){$self->_diag("! $msg\n",$always,1);$self->log("-> FAIL $msg\n")}}sub diag_progress {my($self,$msg)=@_;chomp$msg;$self->{in_progress}=1;$self->_diag("$msg ... ");$self->log("$msg\n")}sub _diag {my($self,$msg,$always,$error)=@_;my$fh=$error ? *STDERR : *STDOUT;print {$fh}$msg if$always or $self->{verbose}or!$self->{quiet}}sub diag {my($self,$msg,$always)=@_;$self->_diag($msg,$always);$self->log($msg)}sub chat {my$self=shift;print STDERR @_ if$self->{verbose};$self->log(@_)}sub mask_output {my$self=shift;my$method=shift;$self->$method($self->mask_uri_passwords(@_))}sub log {my$self=shift;open my$out,">>$self->{log}";print$out @_}sub run {my($self,$cmd)=@_;if (WIN32){$cmd=$self->shell_quote(@$cmd)if ref$cmd eq 'ARRAY';unless ($self->{verbose}){$cmd .= " >> " .$self->shell_quote($self->{log})." 2>&1"}!system$cmd}else {my$pid=fork;if ($pid){waitpid$pid,0;return!$?}else {$self->run_exec($cmd)}}}sub run_exec {my($self,$cmd)=@_;if (ref$cmd eq 'ARRAY'){unless ($self->{verbose}){open my$logfh,">>",$self->{log};open STDERR,'>&',$logfh;open STDOUT,'>&',$logfh;close$logfh}exec @$cmd}else {unless ($self->{verbose}){$cmd .= " >> " .$self->shell_quote($self->{log})." 2>&1"}exec$cmd}}sub run_timeout {my($self,$cmd,$timeout)=@_;return$self->run($cmd)if WIN32 || $self->{verbose}||!$timeout;my$pid=fork;if ($pid){eval {local$SIG{ALRM}=sub {die "alarm\n"};alarm$timeout;waitpid$pid,0;alarm 0};if ($@ && $@ eq "alarm\n"){$self->diag_fail("Timed out (> ${timeout}s). Use --verbose to retry.");local$SIG{TERM}='IGNORE';kill TERM=>0;waitpid$pid,0;return}return!$?}elsif ($pid==0){$self->run_exec($cmd)}else {$self->chat("! 
fork failed: falling back to system()\n");$self->run($cmd)}}sub append_args {my($self,$cmd,$phase)=@_;if (my$args=$self->{build_args}{$phase}){$cmd=join ' ',$self->shell_quote(@$cmd),$args}$cmd}sub configure {my($self,$cmd,$depth)=@_;local$ENV{PERL5_CPAN_IS_RUNNING}=local$ENV{PERL5_CPANPLUS_IS_RUNNING}=$$;local$ENV{PERL5_CPANM_IS_RUNNING}=$$;my$use_default=!$self->{interactive};local$ENV{PERL_MM_USE_DEFAULT}=$use_default;local$ENV{PERL_MM_OPT}=$ENV{PERL_MM_OPT};local$ENV{PERL_MB_OPT}=$ENV{PERL_MB_OPT};unless ($self->{pod2man}){$ENV{PERL_MM_OPT}.= " INSTALLMAN1DIR=none INSTALLMAN3DIR=none";$ENV{PERL_MB_OPT}.= " --config installman1dir= --config installsiteman1dir= --config installman3dir= --config installsiteman3dir="}if ($self->{pure_perl}){$ENV{PERL_MM_OPT}.= " PUREPERL_ONLY=1";$ENV{PERL_MB_OPT}.= " --pureperl-only"}$cmd=$self->append_args($cmd,'configure')if$depth==0;local$self->{verbose}=$self->{verbose}|| $self->{interactive};$self->run_timeout($cmd,$self->{configure_timeout})}sub build {my($self,$cmd,$distname,$depth)=@_;local$ENV{PERL_MM_USE_DEFAULT}=!$self->{interactive};$cmd=$self->append_args($cmd,'build')if$depth==0;return 1 if$self->run_timeout($cmd,$self->{build_timeout});while (1){my$ans=lc$self->prompt("Building $distname failed.\nYou can s)kip, r)etry, e)xamine build log, or l)ook ?","s");return if$ans eq 's';return$self->build($cmd,$distname,$depth)if$ans eq 'r';$self->show_build_log if$ans eq 'e';$self->look if$ans eq 'l'}}sub test {my($self,$cmd,$distname,$depth)=@_;return 1 if$self->{notest};local$ENV{PERL_MM_USE_DEFAULT}=!$self->{interactive};local$ENV{NONINTERACTIVE_TESTING}=!$self->{interactive};$cmd=$self->append_args($cmd,'test')if$depth==0;return 1 if$self->run_timeout($cmd,$self->{test_timeout});if ($self->{force}){$self->diag_fail("Testing $distname failed but installing it anyway.");return 1}else {$self->diag_fail;while (1){my$ans=lc$self->prompt("Testing $distname failed.\nYou can s)kip, r)etry, f)orce install, e)xamine build log, or l)ook ?","s");return if$ans eq 's';return$self->test($cmd,$distname,$depth)if$ans eq 'r';return 1 if$ans eq 'f';$self->show_build_log if$ans eq 'e';$self->look if$ans eq 'l'}}}sub install {my($self,$cmd,$uninst_opts,$depth)=@_;if ($depth==0 && $self->{test_only}){return 1}if ($self->{sudo}){unshift @$cmd,"sudo"}if ($self->{uninstall_shadows}&&!$ENV{PERL_MM_OPT}){push @$cmd,@$uninst_opts}$cmd=$self->append_args($cmd,'install')if$depth==0;$self->run($cmd)}sub look {my$self=shift;my$shell=$ENV{SHELL};$shell ||= $ENV{COMSPEC}if WIN32;if ($shell){my$cwd=Cwd::cwd;$self->diag("Entering $cwd with $shell\n");system$shell}else {$self->diag_fail("You don't seem to have a SHELL :/")}}sub show_build_log {my$self=shift;my@pagers=($ENV{PAGER},(WIN32 ? (): ('less')),'more');my$pager;while (@pagers){$pager=shift@pagers;next unless$pager;$pager=$self->which($pager);next unless$pager;last}if ($pager){system("$pager < $self->{log}")}else {$self->diag_fail("You don't seem to have a PAGER :/")}}sub chdir {my$self=shift;Cwd::chdir(File::Spec->canonpath($_[0]))or die "$_[0]: $!"}sub configure_mirrors {my$self=shift;unless (@{$self->{mirrors}}){$self->{mirrors}=['http://www.cpan.org' ]}for (@{$self->{mirrors}}){s!^/!file:///!;s!/$!!}}sub self_upgrade {my$self=shift;$self->check_upgrade;$self->{argv}=['App::cpanminus' ];return}sub install_module {my($self,$module,$depth,$version)=@_;$self->check_libs;if ($self->{seen}{$module}++){$self->chat("Already tried $module. 
Skipping.\n");return 1}if ($self->{skip_satisfied}){my($ok,$local)=$self->check_module($module,$version || 0);if ($ok){$self->diag("You have $module ($local)\n",1);return 1}}my$dist=$self->resolve_name($module,$version);unless ($dist){my$what=$module .($version ? " ($version)" : "");$self->diag_fail("Couldn't find module or a distribution $what",1);return}if ($dist->{distvname}&& $self->{seen}{$dist->{distvname}}++){$self->chat("Already tried $dist->{distvname}. Skipping.\n");return 1}if ($self->{cmd}eq 'info'){print$self->format_dist($dist),"\n";return 1}$dist->{depth}=$depth;if ($dist->{module}){unless ($self->satisfy_version($dist->{module},$dist->{module_version},$version)){$self->diag("Found $dist->{module} $dist->{module_version} which doesn't satisfy $version.\n",1);return}my$cmp=$version ? "==" : "";my$requirement=$dist->{module_version}? "$cmp$dist->{module_version}" : 0;my($ok,$local)=$self->check_module($dist->{module},$requirement);if ($self->{skip_installed}&& $ok){$self->diag("$dist->{module} is up to date. ($local)\n",1);return 1}}if ($dist->{dist}eq 'perl'){$self->diag("skipping $dist->{pathname}\n");return 1}$self->diag("--> Working on $module\n");$dist->{dir}||= $self->fetch_module($dist);unless ($dist->{dir}){$self->diag_fail("Failed to fetch distribution $dist->{distvname}",1);return}$self->chat("Entering $dist->{dir}\n");$self->chdir($self->{base});$self->chdir($dist->{dir});if ($self->{cmd}eq 'look'){$self->look;return 1}return$self->build_stuff($module,$dist,$depth)}sub uninstall_search_path {my$self=shift;$self->{local_lib}? (local::lib->install_base_arch_path($self->{local_lib}),local::lib->install_base_perl_path($self->{local_lib})): @Config{qw(installsitearch installsitelib)}}sub uninstall_module {my ($self,$module)=@_;$self->check_libs;my@inc=$self->uninstall_search_path;my($metadata,$packlist)=$self->packlists_containing($module,\@inc);unless ($packlist){$self->diag_fail(<uninstall_target($metadata,$packlist);$self->ask_permission($module,\@uninst_files)or return;$self->uninstall_files(@uninst_files,$packlist);$self->diag("Successfully uninstalled $module\n",1);return 1}sub packlists_containing {my($self,$module,$inc)=@_;require Module::Metadata;my$metadata=Module::Metadata->new_from_module($module,inc=>$inc)or return;my$packlist;my$wanted=sub {return unless $_ eq '.packlist' && -f $_;for my$file ($self->unpack_packlist($File::Find::name)){$packlist ||= $File::Find::name if$file eq $metadata->filename}};{require File::pushd;my$pushd=File::pushd::pushd();my@search=grep -d $_,map File::Spec->catdir($_,'auto'),@$inc;File::Find::find($wanted,@search)}return$metadata,$packlist}sub uninstall_target {my($self,$metadata,$packlist)=@_;if ($self->has_shadow_install($metadata)or $self->{local_lib}){grep$self->should_unlink($_),$self->unpack_packlist($packlist)}else {$self->unpack_packlist($packlist)}}sub has_shadow_install {my($self,$metadata)=@_;my@shadow=grep defined,map Module::Metadata->new_from_module($metadata->name,inc=>[$_]),@INC;@shadow >= 2}sub should_unlink {my($self,$file)=@_;if ($self->{local_lib}){$file =~ /^\Q$self->{local_lib}\E/}else {!(grep$file =~ /^\Q$_\E/,@Config{qw(installbin installscript installman1dir installman3dir)})}}sub ask_permission {my ($self,$module,$files)=@_;$self->diag("$module contains the following files:\n\n");for my$file (@$files){$self->diag(" $file\n")}$self->diag("\n");return 'force uninstall' if$self->{force};local$self->{prompt}=1;return$self->prompt_bool("Are you sure you want to uninstall $module?",'y')}sub unpack_packlist 
{my ($self,$packlist)=@_;open my$fh,'<',$packlist or die "$packlist: $!";map {chomp;$_}<$fh>}sub uninstall_files {my ($self,@files)=@_;$self->diag("\n");for my$file (@files){$self->diag("Unlink: $file\n");unlink$file or $self->diag_fail("$!: $file")}$self->diag("\n");return 1}sub format_dist {my($self,$dist)=@_;return "$dist->{cpanid}/$dist->{filename}"}sub trim {local $_=shift;tr/\n/ /d;s/^\s*|\s*$//g;$_}sub fetch_module {my($self,$dist)=@_;$self->chdir($self->{base});for my$uri (@{$dist->{uris}}){$self->mask_output(diag_progress=>"Fetching $uri");my$filename=$dist->{filename}|| $uri;my$name=File::Basename::basename($filename);my$cancelled;my$fetch=sub {my$file;eval {local$SIG{INT}=sub {$cancelled=1;die "SIGINT\n"};$self->mirror($uri,$name);$file=$name if -e $name};$self->diag("ERROR: " .trim("$@")."\n",1)if $@ && $@ ne "SIGINT\n";return$file};my($try,$file);while ($try++ < 3){$file=$fetch->();last if$cancelled or $file;$self->mask_output(diag_fail=>"Download $uri failed. Retrying ... ")}if ($cancelled){$self->diag_fail("Download cancelled.");return}unless ($file){$self->mask_output(diag_fail=>"Failed to download $uri");next}$self->diag_ok;$dist->{local_path}=File::Spec->rel2abs($name);my$dir=$self->unpack($file,$uri,$dist);next unless$dir;if (my$save=$self->{save_dists}){my$path=$dist->{pathname}? "$save/authors/id/$dist->{pathname}" : "$save/vendor/$file";$self->chat("Copying $name to $path\n");File::Path::mkpath([File::Basename::dirname($path)],0,0777);File::Copy::copy($file,$path)or warn $!}return$dist,$dir}}sub unpack {my($self,$file,$uri,$dist)=@_;if ($self->{verify}){$self->verify_archive($file,$uri,$dist)or return}$self->chat("Unpacking $file\n");my$dir=$file =~ /\.zip/i ? $self->unzip($file): $self->untar($file);unless ($dir){$self->diag_fail("Failed to unpack $file: no directory")}return$dir}sub verify_checksums_signature {my($self,$chk_file)=@_;require Module::Signature;$self->chat("Verifying the signature of CHECKSUMS\n");my$rv=eval {local$SIG{__WARN__}=sub {};my$v=Module::Signature::_verify($chk_file);$v==Module::Signature::SIGNATURE_OK()};if ($rv){$self->chat("Verified OK!\n")}else {$self->diag_fail("Verifying CHECKSUMS signature failed: $rv\n");return}return 1}sub verify_archive {my($self,$file,$uri,$dist)=@_;unless ($dist->{cpanid}){$self->chat("Archive '$file' does not seem to be from PAUSE. Skip verification.\n");return 1}(my$mirror=$uri)=~ s!/authors/id.*$!!;(my$chksum_uri=$uri)=~ s!/[^/]*$!/CHECKSUMS!;my$chk_file=$self->source_for($mirror)."/$dist->{cpanid}.CHECKSUMS";$self->mask_output(diag_progress=>"Fetching $chksum_uri");$self->mirror($chksum_uri,$chk_file);unless (-e $chk_file){$self->diag_fail("Fetching $chksum_uri failed.\n");return}$self->diag_ok;$self->verify_checksums_signature($chk_file)or return;$self->verify_checksum($file,$chk_file)}sub verify_checksum {my($self,$file,$chk_file)=@_;$self->chat("Verifying the SHA1 for $file\n");open my$fh,"<$chk_file" or die "$chk_file: $!";my$data=join '',<$fh>;$data =~ s/\015?\012/\n/g;require Safe;my$chksum=Safe->new->reval($data);if (!ref$chksum or ref$chksum ne 'HASH'){$self->diag_fail("! 
Checksum file downloaded from $chk_file is broken.\n");return}if (my$sha=$chksum->{$file}{sha256}){my$hex=$self->sha1_for($file);if ($hex eq $sha){$self->chat("Checksum for $file: Verified!\n")}else {$self->diag_fail("Checksum mismatch for $file\n");return}}else {$self->chat("Checksum for $file not found in CHECKSUMS.\n");return}}sub sha1_for {my($self,$file)=@_;require Digest::SHA;open my$fh,"<",$file or die "$file: $!";my$dg=Digest::SHA->new(256);my($data);while (read($fh,$data,4096)){$dg->add($data)}return$dg->hexdigest}sub verify_signature {my($self,$dist)=@_;$self->diag_progress("Verifying the SIGNATURE file");my$out=`$self->{cpansign} -v --skip 2>&1`;$self->log($out);if ($out =~ /Signature verified OK/){$self->diag_ok("Verified OK");return 1}else {$self->diag_fail("SIGNATURE verificaion for $dist->{filename} failed\n");return}}sub resolve_name {my($self,$module,$version)=@_;if ($module =~ /(?:^git:|\.git(?:@.+)?$)/){return$self->git_uri($module)}if ($module =~ /^(ftp|https?|file):/){if ($module =~ m!authors/id/(.*)!){return$self->cpan_dist($1,$module)}else {return {uris=>[$module ]}}}if ($module =~ m!^[\./]! && -d $module){return {source=>'local',dir=>Cwd::abs_path($module),}}if (-f $module){return {source=>'local',uris=>["file://" .Cwd::abs_path($module)],}}if ($module =~ s!^cpan:///distfile/!!){return$self->cpan_dist($module)}if ($module =~ m!^(?:[A-Z]/[A-Z]{2}/)?([A-Z]{2}[\-A-Z0-9]*/.*)$!){return$self->cpan_dist($1)}return$self->search_module($module,$version)}sub cpan_module {my($self,$module,$dist,$version)=@_;my$dist=$self->cpan_dist($dist);$dist->{module}=$module;$dist->{module_version}=$version if$version && $version ne 'undef';return$dist}sub cpan_dist {my($self,$dist,$url)=@_;$dist =~ s!^([A-Z]{2})!substr($1,0,1)."/".substr($1,0,2)."/".$1!e;require CPAN::DistnameInfo;my$d=CPAN::DistnameInfo->new($dist);if ($url){$url=[$url ]unless ref$url eq 'ARRAY'}else {my$id=$d->cpanid;my$fn=substr($id,0,1)."/" .substr($id,0,2)."/" .$id ."/" .$d->filename;my@mirrors=@{$self->{mirrors}};my@urls=map "$_/authors/id/$fn",@mirrors;$url=\@urls,}return {$d->properties,source=>'cpan',uris=>$url,}}sub git_uri {my ($self,$uri)=@_;($uri,my$commitish)=split /(?<=\.git)@/i,$uri,2;my$dir=File::Temp::tempdir(CLEANUP=>1);$self->mask_output(diag_progress=>"Cloning $uri");$self->run(['git','clone',$uri,$dir ]);unless (-e "$dir/.git"){$self->diag_fail("Failed cloning git repository $uri",1);return}if ($commitish){require File::pushd;my$dir=File::pushd::pushd($dir);unless ($self->run(['git','checkout',$commitish ])){$self->diag_fail("Failed to checkout '$commitish' in git repository $uri\n");return}}$self->diag_ok;return {source=>'local',dir=>$dir,}}sub setup_module_build_patch {my$self=shift;open my$out,">$self->{base}/ModuleBuildSkipMan.pm" or die $!;print$out <{search_inc}||= do {if (defined$::Bin){[grep!/^\Q$::Bin\E\/..\/(?:fat)?lib$/,@INC]}else {[@INC]}}}sub check_module {my($self,$mod,$want_ver)=@_;require Module::Metadata;my$meta=Module::Metadata->new_from_module($mod,inc=>$self->search_inc)or return 0,undef;my$version=$meta->version;if ($self->{self_contained}&& $self->loaded_from_perl_lib($meta)){$version=$self->core_version_for($mod);return 0,undef if$version && $version==-1}$self->{local_versions}{$mod}=$version;if ($self->is_deprecated($meta)){return 0,$version}elsif ($self->satisfy_version($mod,$version,$want_ver)){return 1,($version || 'undef')}else {return 0,$version}}sub satisfy_version {my($self,$mod,$version,$want_ver)=@_;$want_ver='0' unless defined($want_ver)&& length($want_ver);require 
CPAN::Meta::Requirements;my$requirements=CPAN::Meta::Requirements->new;$requirements->add_string_requirement($mod,$want_ver);$requirements->accepts_module($mod,$version)}sub unsatisfy_how {my($self,$ver,$want_ver)=@_;if ($want_ver =~ /^[v0-9\.\_]+$/){return "$ver < $want_ver"}else {return "$ver doesn't satisfy $want_ver"}}sub is_deprecated {my($self,$meta)=@_;my$deprecated=eval {require Module::CoreList;Module::CoreList::is_deprecated($meta->{module})};return$deprecated && $self->loaded_from_perl_lib($meta)}sub loaded_from_perl_lib {my($self,$meta)=@_;require Config;my@dirs=qw(archlibexp privlibexp);if ($self->{self_contained}&&!$self->{exclude_vendor}&& $Config{vendorarch}){unshift@dirs,qw(vendorarch vendorlibexp)}for my$dir (@dirs){my$confdir=$Config{$dir};if ($confdir eq substr($meta->filename,0,length($confdir))){return 1}}return}sub should_install {my($self,$mod,$ver)=@_;$self->chat("Checking if you have $mod $ver ... ");my($ok,$local)=$self->check_module($mod,$ver);if ($ok){$self->chat("Yes ($local)\n")}elsif ($local){$self->chat("No (" .$self->unsatisfy_how($local,$ver).")\n")}else {$self->chat("No\n")}return$mod unless$ok;return}sub check_perl_version {my($self,$version)=@_;require CPAN::Meta::Requirements;my$req=CPAN::Meta::Requirements->from_string_hash({perl=>$version });$req->accepts_module(perl=>$])}sub install_deps {my($self,$dir,$depth,@deps)=@_;my(@install,%seen,@fail);for my$dep (@deps){next if$seen{$dep->module};if ($dep->module eq 'perl'){if ($dep->is_requirement &&!$self->check_perl_version($dep->version)){$self->diag("Needs perl @{[$dep->version]}, you have $]\n");push@fail,'perl'}}elsif ($self->should_install($dep->module,$dep->version)){push@install,$dep;$seen{$dep->module}=1}}if (@install){$self->diag("==> Found dependencies: " .join(", ",map $_->module,@install)."\n")}for my$dep (@install){$self->install_module($dep->module,$depth + 1,$dep->version)}$self->chdir($self->{base});$self->chdir($dir)if$dir;if ($self->{scandeps}){return 1}my@not_ok=$self->unsatisfied_deps(@deps);if (@not_ok){return 0,\@not_ok}else {return 1}}sub unsatisfied_deps {my($self,@deps)=@_;require CPAN::Meta::Check;require CPAN::Meta::Requirements;my$reqs=CPAN::Meta::Requirements->new;for my$dep (grep $_->is_requirement,@deps){$reqs->add_string_requirement($dep->module=>$dep->requires_version || '0')}my$ret=CPAN::Meta::Check::check_requirements($reqs,'requires',$self->{search_inc});grep defined,values %$ret}sub install_deps_bailout {my($self,$target,$dir,$depth,@deps)=@_;my($ok,$fail)=$self->install_deps($dir,$depth,@deps);if (!$ok){$self->diag_fail("Installing the dependencies failed: " .join(", ",@$fail),1);unless ($self->prompt_bool("Do you want to continue building $target anyway?","n")){$self->diag_fail("Bailing out the installation for $target.",1);return}}return 1}sub build_stuff {my($self,$stuff,$dist,$depth)=@_;if ($self->{verify}&& -e 'SIGNATURE'){$self->verify_signature($dist)or return}require CPAN::Meta;my($meta_file)=grep -f,qw(META.json META.yml);if ($meta_file){$self->chat("Checking configure dependencies from $meta_file\n");$dist->{cpanmeta}=eval {CPAN::Meta->load_file($meta_file)}}elsif ($dist->{dist}&& $dist->{version}){$self->chat("META.yml/json not found. Creating skeleton for it.\n");$dist->{cpanmeta}=CPAN::Meta->new({name=>$dist->{dist},version=>$dist->{version}})}$dist->{meta}=$dist->{cpanmeta}? 
$dist->{cpanmeta}->as_struct : {};my@config_deps;if ($dist->{cpanmeta}){push@config_deps,App::cpanminus::Dependency->from_prereqs($dist->{cpanmeta}->effective_prereqs,['configure'],$self->{install_types},)}if (-e 'Build.PL' &&!$self->should_use_mm($dist->{dist})&&!@config_deps){push@config_deps,App::cpanminus::Dependency->from_versions({'Module::Build'=>'0.38' },'configure',)}$self->merge_with_cpanfile($dist,\@config_deps);$self->upgrade_toolchain(\@config_deps);my$target=$dist->{meta}{name}? "$dist->{meta}{name}-$dist->{meta}{version}" : $dist->{dir};{$self->install_deps_bailout($target,$dist->{dir},$depth,@config_deps)or return}$self->diag_progress("Configuring $target");my$configure_state=$self->configure_this($dist,$depth);$self->diag_ok($configure_state->{configured_ok}? "OK" : "N/A");if ($dist->{cpanmeta}&& $dist->{source}eq 'cpan'){$dist->{provides}=$dist->{cpanmeta}{provides}|| $self->extract_packages($dist->{cpanmeta},".")}my$root_target=(($self->{installdeps}or $self->{showdeps})and $depth==0);$dist->{want_phases}=$self->{notest}&&!$root_target ? [qw(build runtime)]: [qw(build test runtime)];push @{$dist->{want_phases}},'develop' if$self->{with_develop}&& $depth==0;push @{$dist->{want_phases}},'configure' if$self->{with_configure}&& $depth==0;my@deps=$self->find_prereqs($dist);my$module_name=$self->find_module_name($configure_state)|| $dist->{meta}{name};$module_name =~ s/-/::/g;if ($self->{showdeps}){for my$dep (@config_deps,@deps){print$dep->module,($dep->version ? ("~".$dep->version): ""),"\n"}return 1}my$distname=$dist->{meta}{name}? "$dist->{meta}{name}-$dist->{meta}{version}" : $stuff;my$walkup;if ($self->{scandeps}){$walkup=$self->scandeps_append_child($dist)}$self->install_deps_bailout($distname,$dist->{dir},$depth,@deps)or return;if ($self->{scandeps}){unless ($configure_state->{configured_ok}){my$diag=<{scandeps_tree}};$diag .= "!\n" .join("",map "! * $_->[0]{module}\n",@tree[0..$#tree-1])if@tree}$self->diag("!\n$diag!\n",1)}$walkup->();return 1}if ($self->{installdeps}&& $depth==0){if ($configure_state->{configured_ok}){$self->diag("<== Installed dependencies for $stuff. Finishing.\n");return 1}else {$self->diag("! Configuring $distname failed. See $self->{log} for details.\n",1);return}}my$installed;if ($configure_state->{use_module_build}&& -e 'Build' && -f _){$self->diag_progress("Building " .($self->{notest}? "" : "and testing ").$distname);$self->build([$self->{perl},"./Build" ],$distname,$depth)&& $self->test([$self->{perl},"./Build","test" ],$distname,$depth)&& $self->install([$self->{perl},"./Build","install" ],["--uninst",1 ],$depth)&& $installed++}elsif ($self->{make}&& -e 'Makefile'){$self->diag_progress("Building " .($self->{notest}? "" : "and testing ").$distname);$self->build([$self->{make}],$distname,$depth)&& $self->test([$self->{make},"test" ],$distname,$depth)&& $self->install([$self->{make},"install" ],["UNINST=1" ],$depth)&& $installed++}else {my$why;my$configure_failed=$configure_state->{configured}&&!$configure_state->{configured_ok};if ($configure_failed){$why="Configure failed for $distname."}elsif ($self->{make}){$why="The distribution doesn't have a proper Makefile.PL/Build.PL"}else {$why="Can't configure the distribution. 
You probably need to have 'make'."}$self->diag_fail("$why See $self->{log} for details.",1);return}if ($installed && $self->{test_only}){$self->diag_ok;$self->diag("Successfully tested $distname\n",1)}elsif ($installed){my$local=$self->{local_versions}{$dist->{module}|| ''};my$version=$dist->{module_version}|| $dist->{meta}{version}|| $dist->{version};my$reinstall=$local && ($local eq $version);my$action=$local &&!$reinstall ? $self->numify_ver($version)< $self->numify_ver($local)? "downgraded" : "upgraded" : undef;my$how=$reinstall ? "reinstalled $distname" : $local ? "installed $distname ($action from $local)" : "installed $distname" ;my$msg="Successfully $how";$self->diag_ok;$self->diag("$msg\n",1);$self->{installed_dists}++;$self->save_meta($stuff,$dist,$module_name,\@config_deps,\@deps);return 1}else {my$what=$self->{test_only}? "Testing" : "Installing";$self->diag_fail("$what $stuff failed. See $self->{log} for details. Retry with --force to force install it.",1);return}}sub perl_requirements {my($self,@requires)=@_;my@perl;for my$requires (grep defined,@requires){if (exists$requires->{perl}){push@perl,App::cpanminus::Dependency->new(perl=>$requires->{perl})}}return@perl}sub should_use_mm {my($self,$dist)=@_;my%should_use_mm=map {$_=>1}qw(version ExtUtils-ParseXS ExtUtils-Install ExtUtils-Manifest);$should_use_mm{$dist}}sub configure_this {my($self,$dist,$depth)=@_;if (-e $self->{cpanfile_path}&& $self->{installdeps}&& $depth==0){require Module::CPANfile;$dist->{cpanfile}=eval {Module::CPANfile->load($self->{cpanfile_path})};$self->diag_fail($@,1)if $@;return {configured=>1,configured_ok=>!!$dist->{cpanfile},use_module_build=>0,}}if ($self->{skip_configure}){my$eumm=-e 'Makefile';my$mb=-e 'Build' && -f _;return {configured=>1,configured_ok=>$eumm || $mb,use_module_build=>$mb,}}my$state={};my$try_eumm=sub {if (-e 'Makefile.PL'){$self->chat("Running Makefile.PL\n");if ($self->configure([$self->{perl},"Makefile.PL" ],$depth)){$state->{configured_ok}=-e 'Makefile'}$state->{configured}++}};my$try_mb=sub {if (-e 'Build.PL'){$self->chat("Running Build.PL\n");if ($self->configure([$self->{perl},"Build.PL" ],$depth)){$state->{configured_ok}=-e 'Build' && -f _}$state->{use_module_build}++;$state->{configured}++}};my@try;if ($dist->{dist}&& $self->should_use_mm($dist->{dist})){@try=($try_eumm,$try_mb)}else {@try=($try_mb,$try_eumm)}for my$try (@try){$try->();last if$state->{configured_ok}}unless ($state->{configured_ok}){while (1){my$ans=lc$self->prompt("Configuring $dist->{dist} failed.\nYou can s)kip, r)etry, e)xamine build log, or l)ook ?","s");last if$ans eq 's';return$self->configure_this($dist,$depth)if$ans eq 'r';$self->show_build_log if$ans eq 'e';$self->look if$ans eq 'l'}}return$state}sub find_module_name {my($self,$state)=@_;return unless$state->{configured_ok};if ($state->{use_module_build}&& -e "_build/build_params"){my$params=do {open my$in,"_build/build_params";$self->safe_eval(join "",<$in>)};return eval {$params->[2]{module_name}}|| undef}elsif (-e "Makefile"){open my$mf,"Makefile";while (<$mf>){if (/^\#\s+NAME\s+=>\s+(.*)/){return$self->safe_eval($1)}}}return}sub list_files {my$self=shift;if (-e 'MANIFEST'){require ExtUtils::Manifest;my$manifest=eval {ExtUtils::Manifest::manifind()}|| {};return sort {lc$a cmp lc$b}keys %$manifest}else {require File::Find;my@files;my$finder=sub {my$name=$File::Find::name;$name =~ s!\.[/\\]!!;push@files,$name};File::Find::find($finder,".");return sort {lc$a cmp lc$b}@files}}sub extract_packages {my($self,$meta,$dir)=@_;my$try=sub 
{my$file=shift;return 0 if$file =~ m!^(?:x?t|inc|local|perl5|fatlib|_build)/!;return 1 unless$meta->{no_index};return 0 if grep {$file =~ m!^$_/!}@{$meta->{no_index}{directory}|| []};return 0 if grep {$file eq $_}@{$meta->{no_index}{file}|| []};return 1};require Parse::PMFile;my@files=grep {/\.pm(?:\.PL)?$/ && $try->($_)}$self->list_files;my$provides={};for my$file (@files){my$parser=Parse::PMFile->new($meta,{UNSAFE=>1,ALLOW_DEV_VERSION=>1 });my$packages=$parser->parse($file);while (my($package,$meta)=each %$packages){$provides->{$package}||= {file=>$meta->{infile},($meta->{version}eq 'undef')? (): (version=>$meta->{version}),}}}return$provides}sub save_meta {my($self,$module,$dist,$module_name,$config_deps,$build_deps)=@_;return unless$dist->{distvname}&& $dist->{source}eq 'cpan';my$base=($ENV{PERL_MM_OPT}|| '')=~ /INSTALL_BASE=/ ? ($self->install_base($ENV{PERL_MM_OPT})."/lib/perl5"): $Config{sitelibexp};my$provides=$dist->{provides};File::Path::mkpath("blib/meta",0,0777);my$local={name=>$module_name,target=>$module,version=>exists$provides->{$module_name}? ($provides->{$module_name}{version}|| $dist->{version}): $dist->{version},dist=>$dist->{distvname},pathname=>$dist->{pathname},provides=>$provides,};require JSON::PP;open my$fh,">","blib/meta/install.json" or die $!;print$fh JSON::PP::encode_json($local);if (-e "MYMETA.json"){File::Copy::copy("MYMETA.json","blib/meta/MYMETA.json")}my@cmd=(($self->{sudo}? 'sudo' : ()),$^X,'-MExtUtils::Install=install','-e',qq[install({ 'blib/meta' => '$base/$Config{archname}/.meta/$dist->{distvname}' })],);$self->run(\@cmd)}sub _merge_hashref {my($self,@hashrefs)=@_;my%hash;for my$h (@hashrefs){%hash=(%hash,%$h)}return \%hash}sub install_base {my($self,$mm_opt)=@_;$mm_opt =~ /INSTALL_BASE=(\S+)/ and return $1;die "Your PERL_MM_OPT doesn't contain INSTALL_BASE"}sub safe_eval {my($self,$code)=@_;eval$code}sub configure_features {my($self,$dist,@features)=@_;map $_->identifier,grep {$self->effective_feature($dist,$_)}@features}sub effective_feature {my($self,$dist,$feature)=@_;if ($dist->{depth}==0){my$value=$self->{features}{$feature->identifier};return$value if defined$value;return 1 if$self->{features}{__all}}if ($self->{interactive}){require CPAN::Meta::Requirements;$self->diag("[@{[ $feature->description ]}]\n",1);my$req=CPAN::Meta::Requirements->new;for my$phase (@{$dist->{want_phases}}){for my$type (@{$self->{install_types}}){$req->add_requirements($feature->prereqs->requirements_for($phase,$type))}}my$reqs=$req->as_string_hash;my@missing;for my$module (keys %$reqs){if ($self->should_install($module,$req->{$module})){push@missing,$module}}if (@missing){my$howmany=@missing;$self->diag("==> Found missing dependencies: " .join(", ",@missing)."\n",1);local$self->{prompt}=1;return$self->prompt_bool("Install the $howmany optional module(s)?","y")}}return}sub find_prereqs {my($self,$dist)=@_;my@deps=$self->extract_meta_prereqs($dist);if ($dist->{module}=~ /^Bundle::/i){push@deps,$self->bundle_deps($dist)}$self->merge_with_cpanfile($dist,\@deps);return@deps}sub merge_with_cpanfile {my($self,$dist,$deps)=@_;if ($self->{cpanfile_requirements}&&!$dist->{cpanfile}){for my$dep (@$deps){$dep->merge_with($self->{cpanfile_requirements})}}}sub extract_meta_prereqs {my($self,$dist)=@_;if ($dist->{cpanfile}){my@features=$self->configure_features($dist,$dist->{cpanfile}->features);my$prereqs=$dist->{cpanfile}->prereqs_with(@features);$self->{cpanfile_requirements}=$prereqs->merged_requirements($dist->{want_phases},['requires']);return 
App::cpanminus::Dependency->from_prereqs($prereqs,$dist->{want_phases},$self->{install_types})}require CPAN::Meta;my@deps;my($meta_file)=grep -f,qw(MYMETA.json MYMETA.yml);if ($meta_file){$self->chat("Checking dependencies from $meta_file ...\n");my$mymeta=eval {CPAN::Meta->load_file($meta_file,{lazy_validation=>1 })};if ($mymeta){$dist->{meta}{name}=$mymeta->name;$dist->{meta}{version}=$mymeta->version;return$self->extract_prereqs($mymeta,$dist)}}if (-e '_build/prereqs'){$self->chat("Checking dependencies from _build/prereqs ...\n");my$prereqs=do {open my$in,"_build/prereqs";$self->safe_eval(join "",<$in>)};my$meta=CPAN::Meta->new({name=>$dist->{meta}{name},version=>$dist->{meta}{version},%$prereqs },{lazy_validation=>1 },);@deps=$self->extract_prereqs($meta,$dist)}elsif (-e 'Makefile'){$self->chat("Finding PREREQ from Makefile ...\n");open my$mf,"Makefile";while (<$mf>){if (/^\#\s+PREREQ_PM => \{\s*(.*?)\s*\}/){my@all;my@pairs=split ', ',$1;for (@pairs){my ($pkg,$v)=split '=>',$_;push@all,[$pkg,$v ]}my$list=join ", ",map {"'$_->[0]' => $_->[1]"}@all;my$prereq=$self->safe_eval("no strict; +{ $list }");push@deps,App::cpanminus::Dependency->from_versions($prereq)if$prereq;last}}}return@deps}sub bundle_deps {my($self,$dist)=@_;my@files;File::Find::find({wanted=>sub {push@files,File::Spec->rel2abs($_)if /\.pm/i},no_chdir=>1,},'.');my@deps;for my$file (@files){open my$pod,"<",$file or next;my$in_contents;while (<$pod>){if (/^=head\d\s+CONTENTS/){$in_contents=1}elsif (/^=/){$in_contents=0}elsif ($in_contents){/^(\S+)\s*(\S+)?/ and push@deps,App::cpanminus::Dependency->new($1,$self->maybe_version($2))}}}return@deps}sub maybe_version {my($self,$string)=@_;return$string && $string =~ /^\.?\d/ ? $string : undef}sub extract_prereqs {my($self,$meta,$dist)=@_;my@features=$self->configure_features($dist,$meta->features);my$prereqs=$self->soften_makemaker_prereqs($meta->effective_prereqs(\@features)->clone);return App::cpanminus::Dependency->from_prereqs($prereqs,$dist->{want_phases},$self->{install_types})}sub soften_makemaker_prereqs {my($self,$prereqs)=@_;return$prereqs unless -e "inc/Module/Install.pm";for my$phase (qw(build test runtime)){my$reqs=$prereqs->requirements_for($phase,'requires');if ($reqs->requirements_for_module('ExtUtils::MakeMaker')){$reqs->clear_requirement('ExtUtils::MakeMaker');$reqs->add_minimum('ExtUtils::MakeMaker'=>0)}}$prereqs}sub cleanup_workdirs {my$self=shift;my$expire=time - 24 * 60 * 60 * $self->{auto_cleanup};my@targets;opendir my$dh,"$self->{home}/work";while (my$e=readdir$dh){next if$e !~ /^(\d+)\.\d+$/;my$time=$1;if ($time < $expire){push@targets,"$self->{home}/work/$e"}}if (@targets){if (@targets >= 64){$self->diag("Expiring " .scalar(@targets)." work directories. This might take a while...\n")}else {$self->chat("Expiring " .scalar(@targets)." 
work directories.\n")}File::Path::rmtree(\@targets,0,0)}}sub scandeps_append_child {my($self,$dist)=@_;my$new_node=[$dist,[]];my$curr_node=$self->{scandeps_current}|| [undef,$self->{scandeps_tree}];push @{$curr_node->[1]},$new_node;$self->{scandeps_current}=$new_node;return sub {$self->{scandeps_current}=$curr_node}}sub dump_scandeps {my$self=shift;if ($self->{format}eq 'tree'){$self->walk_down(sub {my($dist,$depth)=@_;if ($depth==0){print "$dist->{distvname}\n"}else {print " " x ($depth - 1);print "\\_ $dist->{distvname}\n"}},1)}elsif ($self->{format}=~ /^dists?$/){$self->walk_down(sub {my($dist,$depth)=@_;print$self->format_dist($dist),"\n"},0)}elsif ($self->{format}eq 'json'){require JSON::PP;print JSON::PP::encode_json($self->{scandeps_tree})}elsif ($self->{format}eq 'yaml'){require YAML;print YAML::Dump($self->{scandeps_tree})}else {$self->diag("Unknown format: $self->{format}\n")}}sub walk_down {my($self,$cb,$pre)=@_;$self->_do_walk_down($self->{scandeps_tree},$cb,0,$pre)}sub _do_walk_down {my($self,$children,$cb,$depth,$pre)=@_;for my$node (@$children){$cb->($node->[0],$depth)if$pre;$self->_do_walk_down($node->[1],$cb,$depth + 1,$pre);$cb->($node->[0],$depth)unless$pre}}sub DESTROY {my$self=shift;$self->{at_exit}->($self)if$self->{at_exit}}sub shell_quote {my($self,@stuff)=@_;if (WIN32){join ' ',map {/^${quote}.+${quote}$/ ? $_ : ($quote .$_ .$quote)}@stuff}else {String::ShellQuote::shell_quote_best_effort(@stuff)}}sub which {my($self,$name)=@_;if (File::Spec->file_name_is_absolute($name)){if (-x $name &&!-d _){return$name}}my$exe_ext=$Config{_exe};for my$dir (File::Spec->path){my$fullpath=File::Spec->catfile($dir,$name);if ((-x $fullpath || -x ($fullpath .= $exe_ext))&&!-d _){if ($fullpath =~ /\s/){$fullpath=$self->shell_quote($fullpath)}return$fullpath}}return}sub get {my($self,$uri)=@_;if ($uri =~ /^file:/){$self->file_get($uri)}else {$self->{_backends}{get}->(@_)}}sub mirror {my($self,$uri,$local)=@_;if ($uri =~ /^file:/){$self->file_mirror($uri,$local)}else {$self->{_backends}{mirror}->(@_)}}sub untar {$_[0]->{_backends}{untar}->(@_)};sub unzip {$_[0]->{_backends}{unzip}->(@_)};sub uri_to_file {my($self,$uri)=@_;if ($uri =~ s!file:/+!!){$uri="/$uri" unless$uri =~ m![a-zA-Z]:!}return$uri}sub file_get {my($self,$uri)=@_;my$file=$self->uri_to_file($uri);open my$fh,"<$file" or return;join '',<$fh>}sub file_mirror {my($self,$uri,$path)=@_;my$file=$self->uri_to_file($uri);my$source_mtime=(stat$file)[9];return if -e $path && (stat$path)[9]>= $source_mtime;File::Copy::copy($file,$path);utime$source_mtime,$source_mtime,$path}sub has_working_lwp {my($self,$mirrors)=@_;my$https=grep /^https:/,@$mirrors;eval {require LWP::UserAgent;LWP::UserAgent->VERSION(5.802);require LWP::Protocol::https if$https;1}}sub init_tools {my$self=shift;return if$self->{initialized}++;if ($self->{make}=$self->which($Config{make})){$self->chat("You have make $self->{make}\n")}if ($self->{try_lwp}&& $self->has_working_lwp($self->{mirrors})){$self->chat("You have LWP $LWP::VERSION\n");my$ua=sub {LWP::UserAgent->new(parse_head=>0,env_proxy=>1,agent=>$self->agent,timeout=>30,@_,)};$self->{_backends}{get}=sub {my$self=shift;my$res=$ua->()->request(HTTP::Request->new(GET=>$_[0]));return unless$res->is_success;return$res->decoded_content};$self->{_backends}{mirror}=sub {my$self=shift;my$res=$ua->()->mirror(@_);die$res->content if$res->code==501;$res->code}}elsif ($self->{try_wget}and my$wget=$self->which('wget')){$self->chat("You have 
$wget\n");my@common=('--user-agent',$self->agent,'--retry-connrefused',($self->{verbose}? (): ('-q')),);$self->{_backends}{get}=sub {my($self,$uri)=@_;$self->safeexec(my$fh,$wget,$uri,@common,'-O','-')or die "wget $uri: $!";local $/;<$fh>};$self->{_backends}{mirror}=sub {my($self,$uri,$path)=@_;$self->safeexec(my$fh,$wget,$uri,@common,'-O',$path)or die "wget $uri: $!";local $/;<$fh>}}elsif ($self->{try_curl}and my$curl=$self->which('curl')){$self->chat("You have $curl\n");my@common=('--location','--user-agent',$self->agent,($self->{verbose}? (): '-s'),);$self->{_backends}{get}=sub {my($self,$uri)=@_;$self->safeexec(my$fh,$curl,@common,$uri)or die "curl $uri: $!";local $/;<$fh>};$self->{_backends}{mirror}=sub {my($self,$uri,$path)=@_;$self->safeexec(my$fh,$curl,@common,$uri,'-#','-o',$path)or die "curl $uri: $!";local $/;<$fh>}}else {require HTTP::Tiny;$self->chat("Falling back to HTTP::Tiny $HTTP::Tiny::VERSION\n");my%common=(agent=>$self->agent,);$self->{_backends}{get}=sub {my$self=shift;my$res=HTTP::Tiny->new(%common)->get($_[0]);return unless$res->{success};return$res->{content}};$self->{_backends}{mirror}=sub {my$self=shift;my$res=HTTP::Tiny->new(%common)->mirror(@_);return$res->{status}}}my$tar=$self->which('tar');my$tar_ver;my$maybe_bad_tar=sub {WIN32 || BAD_TAR || (($tar_ver=`$tar --version 2>/dev/null`)=~ /GNU.*1\.13/i)};if ($tar &&!$maybe_bad_tar->()){chomp$tar_ver;$self->chat("You have $tar: $tar_ver\n");$self->{_backends}{untar}=sub {my($self,$tarfile)=@_;my$xf=($self->{verbose}? 'v' : '')."xf";my$ar=$tarfile =~ /bz2$/ ? 'j' : 'z';my($root,@others)=`$tar ${ar}tf $tarfile` or return undef;FILE: {chomp$root;$root =~ s!^\./!!;$root =~ s{^(.+?)/.*$}{$1};if (!length($root)){$root=shift(@others);redo FILE if$root}}system "$tar $ar$xf $tarfile";return$root if -d $root;$self->diag_fail("Bad archive: $tarfile");return undef}}elsif ($tar and my$gzip=$self->which('gzip')and my$bzip2=$self->which('bzip2')){$self->chat("You have $tar, $gzip and $bzip2\n");$self->{_backends}{untar}=sub {my($self,$tarfile)=@_;my$x="x" .($self->{verbose}? 'v' : '')."f -";my$ar=$tarfile =~ /bz2$/ ? $bzip2 : $gzip;my($root,@others)=`$ar -dc $tarfile | $tar tf -` or return undef;FILE: {chomp$root;$root =~ s!^\./!!;$root =~ s{^(.+?)/.*$}{$1};if (!length($root)){$root=shift(@others);redo FILE if$root}}system "$ar -dc $tarfile | $tar $x";return$root if -d $root;$self->diag_fail("Bad archive: $tarfile");return undef}}elsif (eval {require Archive::Tar}){$self->chat("Falling back to Archive::Tar $Archive::Tar::VERSION\n");$self->{_backends}{untar}=sub {my$self=shift;my$t=Archive::Tar->new($_[0]);my($root,@others)=$t->list_files;FILE: {$root =~ s!^\./!!;$root =~ s{^(.+?)/.*$}{$1};if (!length($root)){$root=shift(@others);redo FILE if$root}}$t->extract;return -d $root ? $root : undef}}else {$self->{_backends}{untar}=sub {die "Failed to extract $_[1] - You need to have tar or Archive::Tar installed.\n"}}if (my$unzip=$self->which('unzip')){$self->chat("You have $unzip\n");$self->{_backends}{unzip}=sub {my($self,$zipfile)=@_;my$opt=$self->{verbose}? 
'' : '-q';my(undef,$root,@others)=`$unzip -t $zipfile` or return undef;chomp$root;$root =~ s{^\s+testing:\s+([^/]+)/.*?\s+OK$}{$1};system "$unzip $opt $zipfile";return$root if -d $root;$self->diag_fail("Bad archive: [$root] $zipfile");return undef}}else {$self->{_backends}{unzip}=sub {eval {require Archive::Zip}or die "Failed to extract $_[1] - You need to have unzip or Archive::Zip installed.\n";my($self,$file)=@_;my$zip=Archive::Zip->new();my$status;$status=$zip->read($file);$self->diag_fail("Read of file[$file] failed")if$status!=Archive::Zip::AZ_OK();my@members=$zip->members();for my$member (@members){my$af=$member->fileName();next if ($af =~ m!^(/|\.\./)!);$status=$member->extractToFileNamed($af);$self->diag_fail("Extracting of file[$af] from zipfile[$file failed")if$status!=Archive::Zip::AZ_OK()}my ($root)=$zip->membersMatching(qr<^[^/]+/$>);$root &&= $root->fileName;return -d $root ? $root : undef}}}sub safeexec {my$self=shift;my$rdr=$_[0]||= Symbol::gensym();if (WIN32){my$cmd=$self->shell_quote(@_[1..$#_]);return open($rdr,"$cmd |")}if (my$pid=open($rdr,'-|')){return$pid}elsif (defined$pid){exec(@_[1 .. $#_ ]);exit 1}else {return}}sub mask_uri_passwords {my($self,@strings)=@_;s{ (https?://) ([^:/]+) : [^@/]+ @ }{$1$2:********@}gx for@strings;return@strings}1; + It appears your cpanm executable was installed via `perlbrew install-cpanm`. + cpanm --self-upgrade won't upgrade the version of cpanm you're running. + + Run the following command to get it upgraded. + + perlbrew install-cpanm + + DIE + You are running cpanm from the path where your current perl won't install executables to. + Because of that, cpanm --self-upgrade won't upgrade the version of cpanm you're running. + + cpanm path : $0 + Install path : $Config{installsitebin} + + It means you either installed cpanm globally with system perl, or use distro packages such + as rpm or apt-get, and you have to use them again to upgrade cpanm. + DIE + Usage: cpanm [options] Module [...] + + Try `cpanm --help` or `man cpanm` for more options. + USAGE + Usage: cpanm [options] Module [...] + + Options: + -v,--verbose Turns on chatty output + -q,--quiet Turns off the most output + --interactive Turns on interactive configure (required for Task:: modules) + -f,--force force install + -n,--notest Do not run unit tests + --test-only Run tests only, do not install + -S,--sudo sudo to run install commands + --installdeps Only install dependencies + --showdeps Only display direct dependencies + --reinstall Reinstall the distribution even if you already have the latest version installed + --mirror Specify the base URL for the mirror (e.g. http://cpan.cpantesters.org/) + --mirror-only Use the mirror's index file instead of the CPAN Meta DB + -M,--from Use only this mirror base URL and its index file + --prompt Prompt when configure/build/test fails + -l,--local-lib Specify the install base to install modules + -L,--local-lib-contained Specify the install base to install all non-core modules + --self-contained Install all non-core modules, even if they're already installed. + --auto-cleanup Number of days that cpanm's work directories expire in. 
Defaults to 7 + + Commands: + --self-upgrade upgrades itself + --info Displays distribution info on CPAN + --look Opens the distribution with your SHELL + -U,--uninstall Uninstalls the modules (EXPERIMENTAL) + -V,--version Displays software version + + Examples: + + cpanm Test::More # install Test::More + cpanm MIYAGAWA/Plack-0.99_05.tar.gz # full distribution path + cpanm http://example.org/LDS/CGI.pm-3.20.tar.gz # install from URL + cpanm ~/dists/MyCompany-Enterprise-1.00.tar.gz # install from a local file + cpanm --interactive Task::Kensho # Configure interactively + cpanm . # install from local directory + cpanm --installdeps . # install all the deps for the current directory + cpanm -L extlib Plack # install Plack and all non-core deps into extlib + cpanm --mirror http://cpan.cpantesters.org/ DBI # use the fast-syncing mirror + cpanm -M https://cpan.metacpan.org App::perlbrew # use only this secure mirror and its index + + You can also specify the default options in PERL_CPANM_OPT environment variable in the shell rc: + + export PERL_CPANM_OPT="--prompt --reinstall -l ~/perl --mirror http://cpan.cpantesters.org" + + Type `man cpanm` or `perldoc cpanm` for the more detailed explanation of the options. + + HELP + ! + ! Can't write to $Config{installsitelib} and $Config{installsitebin}: Installing modules to $ENV{HOME}/perl5 + ! To turn off this warning, you have to do one of the following: + ! - run me as a root or with --sudo option (to install to $Config{installsitelib} and $Config{installsitebin}) + ! - Configure local::lib in your existing shell to set PERL_MM_OPT etc. + ! - Install local::lib by running the following commands + ! + ! cpanm --local-lib=~/perl5 local::lib && eval \$(perl -I ~/perl5/lib/perl5/ -Mlocal::lib) + ! + DIAG + WARNING: Your lib directory name ($base) contains a space in it. It's known to cause issues with perl builder tools such as local::lib and MakeMaker. You're recommended to rename your directory. + WARN + $module is not found in the following directories and can't be uninstalled. + + @{[ join(" \n", map " $_", @inc) ]} + + DIAG + package ModuleBuildSkipMan; + CHECK { + if (%Module::Build::) { + no warnings 'redefine'; + *Module::Build::Base::ACTION_manpages = sub {}; + *Module::Build::Base::ACTION_docs = sub {}; + } + } + 1; + EOF + ! Configuring $distname failed. See $self->{log} for details. + ! You might have to install the following modules first to get --scandeps working correctly. + DIAG +APP_CPANMINUS_SCRIPT + +$fatpacked{"CPAN/DistnameInfo.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'CPAN_DISTNAMEINFO'; + package CPAN::DistnameInfo;$VERSION="0.12";use strict;sub distname_info {my$file=shift or return;my ($dist,$version)=$file =~ /^ + ((?:[-+.]*(?:[A-Za-z0-9]+|(?<=\D)_|_(?=\D))* + (?: + [A-Za-z](?=[^A-Za-z]|$) + | + \d(?=-) + )(? 6 and $1 & 1)or ($2 and $2 >= 50))or $3}elsif ($version =~ /\d\D\d+_\d/ or $version =~ /-TRIAL/){$dev=1}}else {$version=undef}($dist,$version,$dev)}sub new {my$class=shift;my$distfile=shift;$distfile =~ s,//+,/,g;my%info=(pathname=>$distfile);($info{filename}=$distfile)=~ s,^(((.*?/)?authors/)?id/)?([A-Z])/(\4[A-Z])/(\5[-A-Z0-9]*)/,, and $info{cpanid}=$6;if ($distfile =~ m,([^/]+)\.(tar\.(?:g?z|bz2)|zip|tgz)$,i){$info{distvname}=$1;$info{extension}=$2}@info{qw(dist version beta)}=distname_info($info{distvname});$info{maturity}=delete$info{beta}? 
'developer' : 'released';return bless \%info,$class}sub dist {shift->{dist}}sub version {shift->{version}}sub maturity {shift->{maturity}}sub filename {shift->{filename}}sub cpanid {shift->{cpanid}}sub distvname {shift->{distvname}}sub extension {shift->{extension}}sub pathname {shift->{pathname}}sub properties {%{$_[0]}}1; +CPAN_DISTNAMEINFO + +$fatpacked{"CPAN/Meta.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'CPAN_META'; + use 5.006;use strict;use warnings;package CPAN::Meta;our$VERSION='2.150005';use Carp qw(carp croak);use CPAN::Meta::Feature;use CPAN::Meta::Prereqs;use CPAN::Meta::Converter;use CPAN::Meta::Validator;use Parse::CPAN::Meta 1.4414 ();BEGIN {*_dclone=\&CPAN::Meta::Converter::_dclone}BEGIN {my@STRING_READERS=qw(abstract description dynamic_config generated_by name release_status version);no strict 'refs';for my$attr (@STRING_READERS){*$attr=sub {$_[0]{$attr }}}}BEGIN {my@LIST_READERS=qw(author keywords license);no strict 'refs';for my$attr (@LIST_READERS){*$attr=sub {my$value=$_[0]{$attr };croak "$attr must be called in list context" unless wantarray;return @{_dclone($value)}if ref$value;return$value}}}sub authors {$_[0]->author}sub licenses {$_[0]->license}BEGIN {my@MAP_READERS=qw(meta-spec resources provides no_index prereqs optional_features);no strict 'refs';for my$attr (@MAP_READERS){(my$subname=$attr)=~ s/-/_/;*$subname=sub {my$value=$_[0]{$attr };return _dclone($value)if$value;return {}}}}sub custom_keys {return grep {/^x_/i}keys %{$_[0]}}sub custom {my ($self,$attr)=@_;my$value=$self->{$attr};return _dclone($value)if ref$value;return$value}sub _new {my ($class,$struct,$options)=@_;my$self;if ($options->{lazy_validation}){my$cmc=CPAN::Meta::Converter->new($struct);$self=$cmc->convert(version=>2);return bless$self,$class}else {my$cmv=CPAN::Meta::Validator->new($struct);unless ($cmv->is_valid){die "Invalid metadata structure. Errors: " .join(", ",$cmv->errors)."\n"}}my$version=$struct->{'meta-spec'}{version}|| '1.0';if ($version==2){$self=$struct}else {my$cmc=CPAN::Meta::Converter->new($struct);$self=$cmc->convert(version=>2)}return bless$self,$class}sub new {my ($class,$struct,$options)=@_;my$self=eval {$class->_new($struct,$options)};croak($@)if $@;return$self}sub create {my ($class,$struct,$options)=@_;my$version=__PACKAGE__->VERSION || 2;$struct->{generated_by}||= __PACKAGE__ ." 
version $version" ;$struct->{'meta-spec'}{version}||= int($version);my$self=eval {$class->_new($struct,$options)};croak ($@)if $@;return$self}sub load_file {my ($class,$file,$options)=@_;$options->{lazy_validation}=1 unless exists$options->{lazy_validation};croak "load_file() requires a valid, readable filename" unless -r $file;my$self;eval {my$struct=Parse::CPAN::Meta->load_file($file);$self=$class->_new($struct,$options)};croak($@)if $@;return$self}sub load_yaml_string {my ($class,$yaml,$options)=@_;$options->{lazy_validation}=1 unless exists$options->{lazy_validation};my$self;eval {my ($struct)=Parse::CPAN::Meta->load_yaml_string($yaml);$self=$class->_new($struct,$options)};croak($@)if $@;return$self}sub load_json_string {my ($class,$json,$options)=@_;$options->{lazy_validation}=1 unless exists$options->{lazy_validation};my$self;eval {my$struct=Parse::CPAN::Meta->load_json_string($json);$self=$class->_new($struct,$options)};croak($@)if $@;return$self}sub load_string {my ($class,$string,$options)=@_;$options->{lazy_validation}=1 unless exists$options->{lazy_validation};my$self;eval {my$struct=Parse::CPAN::Meta->load_string($string);$self=$class->_new($struct,$options)};croak($@)if $@;return$self}sub save {my ($self,$file,$options)=@_;my$version=$options->{version}|| '2';my$layer=$] ge '5.008001' ? ':utf8' : '';if ($version ge '2'){carp "'$file' should end in '.json'" unless$file =~ m{\.json$}}else {carp "'$file' should end in '.yml'" unless$file =~ m{\.yml$}}my$data=$self->as_string($options);open my$fh,">$layer",$file or die "Error opening '$file' for writing: $!\n";print {$fh}$data;close$fh or die "Error closing '$file': $!\n";return 1}sub meta_spec_version {my ($self)=@_;return$self->meta_spec->{version}}sub effective_prereqs {my ($self,$features)=@_;$features ||= [];my$prereq=CPAN::Meta::Prereqs->new($self->prereqs);return$prereq unless @$features;my@other=map {;$self->feature($_)->prereqs}@$features;return$prereq->with_merged_prereqs(\@other)}sub should_index_file {my ($self,$filename)=@_;for my$no_index_file (@{$self->no_index->{file}|| []}){return if$filename eq $no_index_file}for my$no_index_dir (@{$self->no_index->{directory}}){$no_index_dir =~ s{$}{/} unless$no_index_dir =~ m{/\z};return if index($filename,$no_index_dir)==0}return 1}sub should_index_package {my ($self,$package)=@_;for my$no_index_pkg (@{$self->no_index->{package}|| []}){return if$package eq $no_index_pkg}for my$no_index_ns (@{$self->no_index->{namespace}}){return if index($package,"${no_index_ns}::")==0}return 1}sub features {my ($self)=@_;my$opt_f=$self->optional_features;my@features=map {;CPAN::Meta::Feature->new($_=>$opt_f->{$_ })}keys %$opt_f;return@features}sub feature {my ($self,$ident)=@_;croak "no feature named $ident" unless my$f=$self->optional_features->{$ident };return CPAN::Meta::Feature->new($ident,$f)}sub as_struct {my ($self,$options)=@_;my$struct=_dclone($self);if ($options->{version}){my$cmc=CPAN::Meta::Converter->new($struct);$struct=$cmc->convert(version=>$options->{version})}return$struct}sub as_string {my ($self,$options)=@_;my$version=$options->{version}|| '2';my$struct;if ($self->meta_spec_version ne $version){my$cmc=CPAN::Meta::Converter->new($self->as_struct);$struct=$cmc->convert(version=>$version)}else {$struct=$self->as_struct}my ($data,$backend);if ($version ge '2'){$backend=Parse::CPAN::Meta->json_backend();local$struct->{x_serialization_backend}=sprintf '%s version %s',$backend,$backend->VERSION;$data=$backend->new->pretty->canonical->encode($struct)}else 
{$backend=Parse::CPAN::Meta->yaml_backend();local$struct->{x_serialization_backend}=sprintf '%s version %s',$backend,$backend->VERSION;$data=eval {no strict 'refs';&{"$backend\::Dump"}($struct)};if ($@){croak$backend->can('errstr')? $backend->errstr : $@}}return$data}sub TO_JSON {return {%{$_[0]}}}1; +CPAN_META + +$fatpacked{"CPAN/Meta/Check.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'CPAN_META_CHECK'; + package CPAN::Meta::Check;$CPAN::Meta::Check::VERSION='0.012';use strict;use warnings;use base 'Exporter';our@EXPORT=qw//;our@EXPORT_OK=qw/check_requirements requirements_for verify_dependencies/;our%EXPORT_TAGS=(all=>[@EXPORT,@EXPORT_OK ]);use CPAN::Meta::Prereqs '2.132830';use CPAN::Meta::Requirements 2.121;use Module::Metadata 1.000023;sub _check_dep {my ($reqs,$module,$dirs)=@_;$module eq 'perl' and return ($reqs->accepts_module($module,$])? (): sprintf "Your Perl (%s) is not in the range '%s'",$],$reqs->requirements_for_module($module));my$metadata=Module::Metadata->new_from_module($module,inc=>$dirs);return "Module '$module' is not installed" if not defined$metadata;my$version=eval {$metadata->version};return "Missing version info for module '$module'" if$reqs->requirements_for_module($module)and not $version;return sprintf 'Installed version (%s) of %s is not in range \'%s\'',$version,$module,$reqs->requirements_for_module($module)if not $reqs->accepts_module($module,$version || 0);return}sub _check_conflict {my ($reqs,$module,$dirs)=@_;my$metadata=Module::Metadata->new_from_module($module,inc=>$dirs);return if not defined$metadata;my$version=eval {$metadata->version};return "Missing version info for module '$module'" if not $version;return sprintf 'Installed version (%s) of %s is in range \'%s\'',$version,$module,$reqs->requirements_for_module($module)if$reqs->accepts_module($module,$version);return}sub requirements_for {my ($meta,$phases,$type)=@_;my$prereqs=ref($meta)eq 'CPAN::Meta' ? $meta->effective_prereqs : $meta;return$prereqs->merged_requirements(ref($phases)? $phases : [$phases ],[$type ])}sub check_requirements {my ($reqs,$type,$dirs)=@_;return +{map {$_=>$type ne 'conflicts' ? scalar _check_dep($reqs,$_,$dirs): scalar _check_conflict($reqs,$_,$dirs)}$reqs->required_modules }}sub verify_dependencies {my ($meta,$phases,$type,$dirs)=@_;my$reqs=requirements_for($meta,$phases,$type);my$issues=check_requirements($reqs,$type,$dirs);return grep {defined}values %{$issues}}1; +CPAN_META_CHECK + +$fatpacked{"CPAN/Meta/Converter.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'CPAN_META_CONVERTER'; + use 5.006;use strict;use warnings;package CPAN::Meta::Converter;our$VERSION='2.150005';use CPAN::Meta::Validator;use CPAN::Meta::Requirements;use Parse::CPAN::Meta 1.4400 ();BEGIN {eval "use version ()";if (my$err=$@){eval "use ExtUtils::MakeMaker::version" or die$err}}*_is_qv=version->can('is_qv')? 
sub {$_[0]->is_qv}: sub {exists $_[0]->{qv}};sub _dclone {my$ref=shift;no warnings 'once';no warnings 'redefine';local*UNIVERSAL::TO_JSON=sub {"$_[0]"};my$json=Parse::CPAN::Meta->json_backend()->new ->utf8 ->allow_blessed ->convert_blessed;$json->decode($json->encode($ref))}my%known_specs=('2'=>'http://search.cpan.org/perldoc?CPAN::Meta::Spec','1.4'=>'http://module-build.sourceforge.net/META-spec-v1.4.html','1.3'=>'http://module-build.sourceforge.net/META-spec-v1.3.html','1.2'=>'http://module-build.sourceforge.net/META-spec-v1.2.html','1.1'=>'http://module-build.sourceforge.net/META-spec-v1.1.html','1.0'=>'http://module-build.sourceforge.net/META-spec-v1.0.html');my@spec_list=sort {$a <=> $b}keys%known_specs;my ($LOWEST,$HIGHEST)=@spec_list[0,-1];sub _keep {$_[0]}sub _keep_or_one {defined($_[0])? $_[0]: 1}sub _keep_or_zero {defined($_[0])? $_[0]: 0}sub _keep_or_unknown {defined($_[0])&& length($_[0])? $_[0]: "unknown"}sub _generated_by {my$gen=shift;my$sig=__PACKAGE__ ." version " .(__PACKAGE__->VERSION || "");return$sig unless defined$gen and length$gen;return$gen if$gen =~ /\Q$sig/;return "$gen, $sig"}sub _listify {!defined $_[0]? undef : ref $_[0]eq 'ARRAY' ? $_[0]: [$_[0]]}sub _prefix_custom {my$key=shift;$key =~ s/^(?!x_) # Unless it already starts with x_ + (?:x-?)? # Remove leading x- or x (if present) + /x_/ix;return$key}sub _ucfirst_custom {my$key=shift;$key=ucfirst$key unless$key =~ /[A-Z]/;return$key}sub _no_prefix_ucfirst_custom {my$key=shift;$key =~ s/^x_//;return _ucfirst_custom($key)}sub _change_meta_spec {my ($element,undef,undef,$version)=@_;return {version=>$version,url=>$known_specs{$version},}}my@open_source=('perl','gpl','apache','artistic','artistic_2','lgpl','bsd','gpl','mit','mozilla','open_source',);my%is_open_source=map {;$_=>1}@open_source;my@valid_licenses_1=(@open_source,'unrestricted','restrictive','unknown',);my%license_map_1=((map {$_=>$_}@valid_licenses_1),artistic2=>'artistic_2',);sub _license_1 {my ($element)=@_;return 'unknown' unless defined$element;if ($license_map_1{lc$element}){return$license_map_1{lc$element}}else {return 'unknown'}}my@valid_licenses_2=qw(agpl_3 apache_1_1 apache_2_0 artistic_1 artistic_2 bsd freebsd gfdl_1_2 gfdl_1_3 gpl_1 gpl_2 gpl_3 lgpl_2_1 lgpl_3_0 mit mozilla_1_0 mozilla_1_1 openssl perl_5 qpl_1_0 ssleay sun zlib open_source restricted unrestricted unknown);my%license_map_2=((map {$_=>$_}@valid_licenses_2),apache=>'apache_2_0',artistic=>'artistic_1',artistic2=>'artistic_2',gpl=>'open_source',lgpl=>'open_source',mozilla=>'open_source',perl=>'perl_5',restrictive=>'restricted',);sub _license_2 {my ($element)=@_;return ['unknown' ]unless defined$element;$element=[$element ]unless ref$element eq 'ARRAY';my@new_list;for my$lic (@$element){next unless defined$lic;if (my$new=$license_map_2{lc$lic}){push@new_list,$new}}return@new_list ? 
\@new_list : ['unknown' ]}my%license_downgrade_map=qw(agpl_3 open_source apache_1_1 apache apache_2_0 apache artistic_1 artistic artistic_2 artistic_2 bsd bsd freebsd open_source gfdl_1_2 open_source gfdl_1_3 open_source gpl_1 gpl gpl_2 gpl gpl_3 gpl lgpl_2_1 lgpl lgpl_3_0 lgpl mit mit mozilla_1_0 mozilla mozilla_1_1 mozilla openssl open_source perl_5 perl qpl_1_0 open_source ssleay open_source sun open_source zlib open_source open_source open_source restricted restrictive unrestricted unrestricted unknown unknown);sub _downgrade_license {my ($element)=@_;if (!defined$element){return "unknown"}elsif(ref$element eq 'ARRAY'){if (@$element > 1){if (grep {!$is_open_source{$license_downgrade_map{lc $_}|| 'unknown' }}@$element){return 'unknown'}else {return 'open_source'}}elsif (@$element==1){return$license_downgrade_map{lc$element->[0]}|| "unknown"}}elsif (!ref$element){return$license_downgrade_map{lc$element}|| "unknown"}return "unknown"}my$no_index_spec_1_2={'file'=>\&_listify,'dir'=>\&_listify,'package'=>\&_listify,'namespace'=>\&_listify,};my$no_index_spec_1_3={'file'=>\&_listify,'directory'=>\&_listify,'package'=>\&_listify,'namespace'=>\&_listify,};my$no_index_spec_2={'file'=>\&_listify,'directory'=>\&_listify,'package'=>\&_listify,'namespace'=>\&_listify,':custom'=>\&_prefix_custom,};sub _no_index_1_2 {my (undef,undef,$meta)=@_;my$no_index=$meta->{no_index}|| $meta->{private};return unless$no_index;if (!ref$no_index){my$item=$no_index;$no_index={dir=>[$item ],file=>[$item ]}}elsif (ref$no_index eq 'ARRAY'){my$list=$no_index;$no_index={dir=>[@$list ],file=>[@$list ]}}if (exists$no_index->{files}){$no_index->{file}=delete$no_index->{files}}if (exists$no_index->{modules}){$no_index->{module}=delete$no_index->{modules}}return _convert($no_index,$no_index_spec_1_2)}sub _no_index_directory {my ($element,$key,$meta,$version)=@_;return unless$element;if (!ref$element){my$item=$element;$element={directory=>[$item ],file=>[$item ]}}elsif (ref$element eq 'ARRAY'){my$list=$element;$element={directory=>[@$list ],file=>[@$list ]}}if (exists$element->{dir}){$element->{directory}=delete$element->{dir}}if (exists$element->{files}){$element->{file}=delete$element->{files}}if (exists$element->{modules}){$element->{module}=delete$element->{modules}}my$spec=$version==2 ? $no_index_spec_2 : $no_index_spec_1_3;return _convert($element,$spec)}sub _is_module_name {my$mod=shift;return unless defined$mod && length$mod;return$mod =~ m{^[A-Za-z][A-Za-z0-9_]*(?:::[A-Za-z0-9_]+)*$}}sub _clean_version {my ($element)=@_;return 0 if!defined$element;$element =~ s{^\s*}{};$element =~ s{\s*$}{};$element =~ s{^\.}{0.};return 0 if!length$element;return 0 if ($element eq 'undef' || $element eq '');my$v=eval {version->new($element)};if (defined$v){return _is_qv($v)? $v->normal : $element}else {return 0}}sub _bad_version_hook {my ($v)=@_;$v =~ s{^\s*}{};$v =~ s{\s*$}{};$v =~ s{[a-z]+$}{};my$vobj=eval {version->new($v)};return defined($vobj)? 
$vobj : version->new(0)}sub _version_map {my ($element)=@_;return unless defined$element;if (ref$element eq 'HASH'){my$new_map=CPAN::Meta::Requirements->new({bad_version_hook=>\&_bad_version_hook });while (my ($k,$v)=each %$element){next unless _is_module_name($k);if (!defined($v)||!length($v)|| $v eq 'undef' || $v eq ''){$v=0}if (_is_module_name($v)&&!version::is_lax($v)){$new_map->add_minimum($k=>0);$new_map->add_minimum($v=>0)}$new_map->add_string_requirement($k=>$v)}return$new_map->as_string_hash}elsif (ref$element eq 'ARRAY'){my$hashref={map {$_=>0}@$element };return _version_map($hashref)}elsif (ref$element eq '' && length$element){return {$element=>0 }}return}sub _prereqs_from_1 {my (undef,undef,$meta)=@_;my$prereqs={};for my$phase (qw/build configure/){my$key="${phase}_requires";$prereqs->{$phase}{requires}=_version_map($meta->{$key})if$meta->{$key}}for my$rel (qw/requires recommends conflicts/){$prereqs->{runtime}{$rel}=_version_map($meta->{$rel})if$meta->{$rel}}return$prereqs}my$prereqs_spec={configure=>\&_prereqs_rel,build=>\&_prereqs_rel,test=>\&_prereqs_rel,runtime=>\&_prereqs_rel,develop=>\&_prereqs_rel,':custom'=>\&_prefix_custom,};my$relation_spec={requires=>\&_version_map,recommends=>\&_version_map,suggests=>\&_version_map,conflicts=>\&_version_map,':custom'=>\&_prefix_custom,};sub _cleanup_prereqs {my ($prereqs,$key,$meta,$to_version)=@_;return unless$prereqs && ref$prereqs eq 'HASH';return _convert($prereqs,$prereqs_spec,$to_version)}sub _prereqs_rel {my ($relation,$key,$meta,$to_version)=@_;return unless$relation && ref$relation eq 'HASH';return _convert($relation,$relation_spec,$to_version)}BEGIN {my@old_prereqs=qw(requires configure_requires recommends conflicts);for (@old_prereqs){my$sub="_get_$_";my ($phase,$type)=split qr/_/,$_;if (!defined$type){$type=$phase;$phase='runtime'}no strict 'refs';*{$sub}=sub {_extract_prereqs($_[2]->{prereqs},$phase,$type)}}}sub _get_build_requires {my ($data,$key,$meta)=@_;my$test_h=_extract_prereqs($_[2]->{prereqs},qw(test requires))|| {};my$build_h=_extract_prereqs($_[2]->{prereqs},qw(build requires))|| {};my$test_req=CPAN::Meta::Requirements->from_string_hash($test_h);my$build_req=CPAN::Meta::Requirements->from_string_hash($build_h);$test_req->add_requirements($build_req)->as_string_hash}sub _extract_prereqs {my ($prereqs,$phase,$type)=@_;return unless ref$prereqs eq 'HASH';return scalar _version_map($prereqs->{$phase}{$type})}sub _downgrade_optional_features {my (undef,undef,$meta)=@_;return unless exists$meta->{optional_features};my$origin=$meta->{optional_features};my$features={};for my$name (keys %$origin){$features->{$name}={description=>$origin->{$name}{description},requires=>_extract_prereqs($origin->{$name}{prereqs},'runtime','requires'),configure_requires=>_extract_prereqs($origin->{$name}{prereqs},'runtime','configure_requires'),build_requires=>_extract_prereqs($origin->{$name}{prereqs},'runtime','build_requires'),recommends=>_extract_prereqs($origin->{$name}{prereqs},'runtime','recommends'),conflicts=>_extract_prereqs($origin->{$name}{prereqs},'runtime','conflicts'),};for my$k (keys %{$features->{$name}}){delete$features->{$name}{$k}unless defined$features->{$name}{$k}}}return$features}sub _upgrade_optional_features {my (undef,undef,$meta)=@_;return unless exists$meta->{optional_features};my$origin=$meta->{optional_features};my$features={};for my$name (keys 
%$origin){$features->{$name}={description=>$origin->{$name}{description},prereqs=>_prereqs_from_1(undef,undef,$origin->{$name}),};delete$features->{$name}{prereqs}{configure}}return$features}my$optional_features_2_spec={description=>\&_keep,prereqs=>\&_cleanup_prereqs,':custom'=>\&_prefix_custom,};sub _feature_2 {my ($element,$key,$meta,$to_version)=@_;return unless$element && ref$element eq 'HASH';_convert($element,$optional_features_2_spec,$to_version)}sub _cleanup_optional_features_2 {my ($element,$key,$meta,$to_version)=@_;return unless$element && ref$element eq 'HASH';my$new_data={};for my$k (keys %$element){$new_data->{$k}=_feature_2($element->{$k},$k,$meta,$to_version)}return unless keys %$new_data;return$new_data}sub _optional_features_1_4 {my ($element)=@_;return unless$element;$element=_optional_features_as_map($element);for my$name (keys %$element){for my$drop (qw/requires_packages requires_os excluded_os/){delete$element->{$name}{$drop}}}return$element}sub _optional_features_as_map {my ($element)=@_;return unless$element;if (ref$element eq 'ARRAY'){my%map;for my$feature (@$element){my (@parts)=%$feature;$map{$parts[0]}=$parts[1]}$element=\%map}return$element}sub _is_urlish {defined $_[0]&& $_[0]=~ m{\A[-+.a-z0-9]+:.+}i}sub _url_or_drop {my ($element)=@_;return$element if _is_urlish($element);return}sub _url_list {my ($element)=@_;return unless$element;$element=_listify($element);$element=[grep {_is_urlish($_)}@$element ];return unless @$element;return$element}sub _author_list {my ($element)=@_;return ['unknown' ]unless$element;$element=_listify($element);$element=[map {defined $_ && length $_ ? $_ : 'unknown'}@$element ];return ['unknown' ]unless @$element;return$element}my$resource2_upgrade={license=>sub {return _is_urlish($_[0])? _listify($_[0]): undef},homepage=>\&_url_or_drop,bugtracker=>sub {my ($item)=@_;return unless$item;if ($item =~ m{^mailto:(.*)$}){return {mailto=>$1 }}elsif(_is_urlish($item)){return {web=>$item }}else {return}},repository=>sub {return _is_urlish($_[0])? {url=>$_[0]}: undef},':custom'=>\&_prefix_custom,};sub _upgrade_resources_2 {my (undef,undef,$meta,$version)=@_;return unless exists$meta->{resources};return _convert($meta->{resources},$resource2_upgrade)}my$bugtracker2_spec={web=>\&_url_or_drop,mailto=>\&_keep,':custom'=>\&_prefix_custom,};sub _repo_type {my ($element,$key,$meta,$to_version)=@_;return$element if defined$element;return unless exists$meta->{url};my$repo_url=$meta->{url};for my$type (qw/git svn/){return$type if$repo_url =~ m{\A$type}}return}my$repository2_spec={web=>\&_url_or_drop,url=>\&_url_or_drop,type=>\&_repo_type,':custom'=>\&_prefix_custom,};my$resources2_cleanup={license=>\&_url_list,homepage=>\&_url_or_drop,bugtracker=>sub {ref $_[0]? _convert($_[0],$bugtracker2_spec): undef},repository=>sub {my$data=shift;ref$data ? 
_convert($data,$repository2_spec): undef},':custom'=>\&_prefix_custom,};sub _cleanup_resources_2 {my ($resources,$key,$meta,$to_version)=@_;return unless$resources && ref$resources eq 'HASH';return _convert($resources,$resources2_cleanup,$to_version)}my$resource1_spec={license=>\&_url_or_drop,homepage=>\&_url_or_drop,bugtracker=>\&_url_or_drop,repository=>\&_url_or_drop,':custom'=>\&_keep,};sub _resources_1_3 {my (undef,undef,$meta,$version)=@_;return unless exists$meta->{resources};return _convert($meta->{resources},$resource1_spec)}*_resources_1_4=*_resources_1_3;sub _resources_1_2 {my (undef,undef,$meta)=@_;my$resources=$meta->{resources}|| {};if ($meta->{license_url}&&!$resources->{license}){$resources->{license}=$meta->{license_url}if _is_urlish($meta->{license_url})}return unless keys %$resources;return _convert($resources,$resource1_spec)}my$resource_downgrade_spec={license=>sub {return ref $_[0]? $_[0]->[0]: $_[0]},homepage=>\&_url_or_drop,bugtracker=>sub {return $_[0]->{web}},repository=>sub {return $_[0]->{url}|| $_[0]->{web}},':custom'=>\&_no_prefix_ucfirst_custom,};sub _downgrade_resources {my (undef,undef,$meta,$version)=@_;return unless exists$meta->{resources};return _convert($meta->{resources},$resource_downgrade_spec)}sub _release_status {my ($element,undef,$meta)=@_;return$element if$element && $element =~ m{\A(?:stable|testing|unstable)\z};return _release_status_from_version(undef,undef,$meta)}sub _release_status_from_version {my (undef,undef,$meta)=@_;my$version=$meta->{version}|| '';return ($version =~ /_/)? 'testing' : 'stable'}my$provides_spec={file=>\&_keep,version=>\&_keep,};my$provides_spec_2={file=>\&_keep,version=>\&_keep,':custom'=>\&_prefix_custom,};sub _provides {my ($element,$key,$meta,$to_version)=@_;return unless defined$element && ref$element eq 'HASH';my$spec=$to_version==2 ? 
$provides_spec_2 : $provides_spec;my$new_data={};for my$k (keys %$element){$new_data->{$k}=_convert($element->{$k},$spec,$to_version);$new_data->{$k}{version}=_clean_version($element->{$k}{version})if exists$element->{$k}{version}}return$new_data}sub _convert {my ($data,$spec,$to_version,$is_fragment)=@_;my$new_data={};for my$key (keys %$spec){next if$key eq ':custom' || $key eq ':drop';next unless my$fcn=$spec->{$key};if ($is_fragment && $key eq 'generated_by'){$fcn=\&_keep}die "spec for '$key' is not a coderef" unless ref$fcn && ref$fcn eq 'CODE';my$new_value=$fcn->($data->{$key},$key,$data,$to_version);$new_data->{$key}=$new_value if defined$new_value}my$drop_list=$spec->{':drop'};my$customizer=$spec->{':custom'}|| \&_keep;for my$key (keys %$data){next if$drop_list && grep {$key eq $_}@$drop_list;next if exists$spec->{$key};$new_data->{$customizer->($key)}=$data->{$key}}return$new_data}my%up_convert=('2-from-1.4'=>{'abstract'=>\&_keep_or_unknown,'author'=>\&_author_list,'generated_by'=>\&_generated_by,'license'=>\&_license_2,'meta-spec'=>\&_change_meta_spec,'name'=>\&_keep,'version'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'release_status'=>\&_release_status,'keywords'=>\&_keep,'no_index'=>\&_no_index_directory,'optional_features'=>\&_upgrade_optional_features,'provides'=>\&_provides,'resources'=>\&_upgrade_resources_2,'description'=>\&_keep,'prereqs'=>\&_prereqs_from_1,':drop'=>[qw(build_requires configure_requires conflicts distribution_type license_url private recommends requires) ],':custom'=>\&_prefix_custom,},'1.4-from-1.3'=>{'abstract'=>\&_keep_or_unknown,'author'=>\&_author_list,'generated_by'=>\&_generated_by,'license'=>\&_license_1,'meta-spec'=>\&_change_meta_spec,'name'=>\&_keep,'version'=>\&_keep,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'keywords'=>\&_keep,'no_index'=>\&_no_index_directory,'optional_features'=>\&_optional_features_1_4,'provides'=>\&_provides,'recommends'=>\&_version_map,'requires'=>\&_version_map,'resources'=>\&_resources_1_4,'configure_requires'=>\&_keep,':drop'=>[qw(license_url private)],':custom'=>\&_keep },'1.3-from-1.2'=>{'abstract'=>\&_keep_or_unknown,'author'=>\&_author_list,'generated_by'=>\&_generated_by,'license'=>\&_license_1,'meta-spec'=>\&_change_meta_spec,'name'=>\&_keep,'version'=>\&_keep,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'keywords'=>\&_keep,'no_index'=>\&_no_index_directory,'optional_features'=>\&_optional_features_as_map,'provides'=>\&_provides,'recommends'=>\&_version_map,'requires'=>\&_version_map,'resources'=>\&_resources_1_3,':drop'=>[qw(license_url private)],':custom'=>\&_keep },'1.2-from-1.1'=>{'version'=>\&_keep,'license'=>\&_license_1,'name'=>\&_keep,'generated_by'=>\&_generated_by,'abstract'=>\&_keep_or_unknown,'author'=>\&_author_list,'meta-spec'=>\&_change_meta_spec,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'recommends'=>\&_version_map,'requires'=>\&_version_map,'keywords'=>\&_keep,'no_index'=>\&_no_index_1_2,'optional_features'=>\&_optional_features_as_map,'provides'=>\&_provides,'resources'=>\&_resources_1_2,':drop'=>[qw(license_url private)],':custom'=>\&_keep 
},'1.1-from-1.0'=>{'version'=>\&_keep,'name'=>\&_keep,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'generated_by'=>\&_generated_by,'license'=>\&_license_1,'recommends'=>\&_version_map,'requires'=>\&_version_map,'license_url'=>\&_url_or_drop,'private'=>\&_keep,':custom'=>\&_keep },);my%down_convert=('1.4-from-2'=>{'abstract'=>\&_keep_or_unknown,'author'=>\&_author_list,'generated_by'=>\&_generated_by,'license'=>\&_downgrade_license,'meta-spec'=>\&_change_meta_spec,'name'=>\&_keep,'version'=>\&_keep,'build_requires'=>\&_get_build_requires,'configure_requires'=>\&_get_configure_requires,'conflicts'=>\&_get_conflicts,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'keywords'=>\&_keep,'no_index'=>\&_no_index_directory,'optional_features'=>\&_downgrade_optional_features,'provides'=>\&_provides,'recommends'=>\&_get_recommends,'requires'=>\&_get_requires,'resources'=>\&_downgrade_resources,':drop'=>[qw(description prereqs release_status)],':custom'=>\&_keep },'1.3-from-1.4'=>{'abstract'=>\&_keep_or_unknown,'author'=>\&_author_list,'generated_by'=>\&_generated_by,'license'=>\&_license_1,'meta-spec'=>\&_change_meta_spec,'name'=>\&_keep,'version'=>\&_keep,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'keywords'=>\&_keep,'no_index'=>\&_no_index_directory,'optional_features'=>\&_optional_features_as_map,'provides'=>\&_provides,'recommends'=>\&_version_map,'requires'=>\&_version_map,'resources'=>\&_resources_1_3,':drop'=>[qw(configure_requires)],':custom'=>\&_keep,},'1.2-from-1.3'=>{'abstract'=>\&_keep_or_unknown,'author'=>\&_author_list,'generated_by'=>\&_generated_by,'license'=>\&_license_1,'meta-spec'=>\&_change_meta_spec,'name'=>\&_keep,'version'=>\&_keep,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'keywords'=>\&_keep,'no_index'=>\&_no_index_1_2,'optional_features'=>\&_optional_features_as_map,'provides'=>\&_provides,'recommends'=>\&_version_map,'requires'=>\&_version_map,'resources'=>\&_resources_1_3,':custom'=>\&_keep,},'1.1-from-1.2'=>{'version'=>\&_keep,'name'=>\&_keep,'meta-spec'=>\&_change_meta_spec,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'generated_by'=>\&_generated_by,'license'=>\&_license_1,'private'=>\&_keep,'recommends'=>\&_version_map,'requires'=>\&_version_map,':drop'=>[qw(abstract author provides no_index keywords resources)],':custom'=>\&_keep,},'1.0-from-1.1'=>{'name'=>\&_keep,'meta-spec'=>\&_change_meta_spec,'version'=>\&_keep,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'generated_by'=>\&_generated_by,'license'=>\&_license_1,'recommends'=>\&_version_map,'requires'=>\&_version_map,':custom'=>\&_keep,},);my%cleanup=('2'=>{'abstract'=>\&_keep_or_unknown,'author'=>\&_author_list,'generated_by'=>\&_generated_by,'license'=>\&_license_2,'meta-spec'=>\&_change_meta_spec,'name'=>\&_keep,'version'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'release_status'=>\&_release_status,'keywords'=>\&_keep,'no_index'=>\&_no_index_directory,'optional_features'=>\&_cleanup_optional_features_2,'provides'=>\&_provides,'resources'=>\&_cleanup_resources_2,'description'=>\&_keep,'prereqs'=>\&_cleanup_prereqs,':drop'=>[qw(build_requires configure_requires conflicts distribution_type license_url private 
recommends requires) ],':custom'=>\&_prefix_custom,},'1.4'=>{'abstract'=>\&_keep_or_unknown,'author'=>\&_author_list,'generated_by'=>\&_generated_by,'license'=>\&_license_1,'meta-spec'=>\&_change_meta_spec,'name'=>\&_keep,'version'=>\&_keep,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'keywords'=>\&_keep,'no_index'=>\&_no_index_directory,'optional_features'=>\&_optional_features_1_4,'provides'=>\&_provides,'recommends'=>\&_version_map,'requires'=>\&_version_map,'resources'=>\&_resources_1_4,'configure_requires'=>\&_keep,':custom'=>\&_keep },'1.3'=>{'abstract'=>\&_keep_or_unknown,'author'=>\&_author_list,'generated_by'=>\&_generated_by,'license'=>\&_license_1,'meta-spec'=>\&_change_meta_spec,'name'=>\&_keep,'version'=>\&_keep,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'keywords'=>\&_keep,'no_index'=>\&_no_index_directory,'optional_features'=>\&_optional_features_as_map,'provides'=>\&_provides,'recommends'=>\&_version_map,'requires'=>\&_version_map,'resources'=>\&_resources_1_3,':custom'=>\&_keep },'1.2'=>{'version'=>\&_keep,'license'=>\&_license_1,'name'=>\&_keep,'generated_by'=>\&_generated_by,'abstract'=>\&_keep_or_unknown,'author'=>\&_author_list,'meta-spec'=>\&_change_meta_spec,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'recommends'=>\&_version_map,'requires'=>\&_version_map,'keywords'=>\&_keep,'no_index'=>\&_no_index_1_2,'optional_features'=>\&_optional_features_as_map,'provides'=>\&_provides,'resources'=>\&_resources_1_2,':custom'=>\&_keep },'1.1'=>{'version'=>\&_keep,'name'=>\&_keep,'meta-spec'=>\&_change_meta_spec,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'generated_by'=>\&_generated_by,'license'=>\&_license_1,'recommends'=>\&_version_map,'requires'=>\&_version_map,'license_url'=>\&_url_or_drop,'private'=>\&_keep,':custom'=>\&_keep },'1.0'=>{'name'=>\&_keep,'meta-spec'=>\&_change_meta_spec,'version'=>\&_keep,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'generated_by'=>\&_generated_by,'license'=>\&_license_1,'recommends'=>\&_version_map,'requires'=>\&_version_map,':custom'=>\&_keep,},);my%fragments_generate=('2'=>{'abstract'=>'abstract','author'=>'author','generated_by'=>'generated_by','license'=>'license','name'=>'name','version'=>'version','dynamic_config'=>'dynamic_config','release_status'=>'release_status','keywords'=>'keywords','no_index'=>'no_index','optional_features'=>'optional_features','provides'=>'provides','resources'=>'resources','description'=>'description','prereqs'=>'prereqs',},'1.4'=>{'abstract'=>'abstract','author'=>'author','generated_by'=>'generated_by','license'=>'license','name'=>'name','version'=>'version','build_requires'=>'prereqs','conflicts'=>'prereqs','distribution_type'=>'distribution_type','dynamic_config'=>'dynamic_config','keywords'=>'keywords','no_index'=>'no_index','optional_features'=>'optional_features','provides'=>'provides','recommends'=>'prereqs','requires'=>'prereqs','resources'=>'resources','configure_requires'=>'prereqs',},);$fragments_generate{$_}=$fragments_generate{'1.4'}for qw/1.3 1.2 1.1 1.0/;sub new {my ($class,$data,%args)=@_;my$self={'data'=>$data,'spec'=>_extract_spec_version($data,$args{default_version}),};return bless$self,$class}sub 
_extract_spec_version {my ($data,$default)=@_;my$spec=$data->{'meta-spec'};return($default || "1.0")unless defined$spec && ref$spec eq 'HASH';my$v=$spec->{version};if (defined$v && $v =~ /^\d+(?:\.\d+)?$/){return$v if defined$v && grep {$v eq $_}keys%known_specs;return$v+0 if defined$v && grep {$v==$_}keys%known_specs}return "2" if exists$data->{prereqs};return "1.4" if exists$data->{configure_requires};return($default || "1.2")}sub convert {my ($self,%args)=@_;my$args={%args };my$new_version=$args->{version}|| $HIGHEST;my$is_fragment=$args->{is_fragment};my ($old_version)=$self->{spec};my$converted=_dclone($self->{data});if ($old_version==$new_version){$converted=_convert($converted,$cleanup{$old_version},$old_version,$is_fragment);unless ($args->{is_fragment}){my$cmv=CPAN::Meta::Validator->new($converted);unless ($cmv->is_valid){my$errs=join("\n",$cmv->errors);die "Failed to clean-up $old_version metadata. Errors:\n$errs\n"}}return$converted}elsif ($old_version > $new_version){my@vers=sort {$b <=> $a}keys%known_specs;for my$i (0 .. $#vers-1){next if$vers[$i]> $old_version;last if$vers[$i+1]< $new_version;my$spec_string="$vers[$i+1]-from-$vers[$i]";$converted=_convert($converted,$down_convert{$spec_string},$vers[$i+1],$is_fragment);unless ($args->{is_fragment}){my$cmv=CPAN::Meta::Validator->new($converted);unless ($cmv->is_valid){my$errs=join("\n",$cmv->errors);die "Failed to downconvert metadata to $vers[$i+1]. Errors:\n$errs\n"}}}return$converted}else {my@vers=sort {$a <=> $b}keys%known_specs;for my$i (0 .. $#vers-1){next if$vers[$i]< $old_version;last if$vers[$i+1]> $new_version;my$spec_string="$vers[$i+1]-from-$vers[$i]";$converted=_convert($converted,$up_convert{$spec_string},$vers[$i+1],$is_fragment);unless ($args->{is_fragment}){my$cmv=CPAN::Meta::Validator->new($converted);unless ($cmv->is_valid){my$errs=join("\n",$cmv->errors);die "Failed to upconvert metadata to $vers[$i+1]. Errors:\n$errs\n"}}}return$converted}}sub upgrade_fragment {my ($self)=@_;my ($old_version)=$self->{spec};my%expected=map {;$_=>1}grep {defined}map {$fragments_generate{$old_version}{$_}}keys %{$self->{data}};my$converted=$self->convert(version=>$HIGHEST,is_fragment=>1);for my$key (keys %$converted){next if$key =~ /^x_/i || $key eq 'meta-spec';delete$converted->{$key}unless$expected{$key}}return$converted}1; +CPAN_META_CONVERTER + +$fatpacked{"CPAN/Meta/Feature.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'CPAN_META_FEATURE'; + use 5.006;use strict;use warnings;package CPAN::Meta::Feature;our$VERSION='2.150005';use CPAN::Meta::Prereqs;sub new {my ($class,$identifier,$spec)=@_;my%guts=(identifier=>$identifier,description=>$spec->{description},prereqs=>CPAN::Meta::Prereqs->new($spec->{prereqs}),);bless \%guts=>$class}sub identifier {$_[0]{identifier}}sub description {$_[0]{description}}sub prereqs {$_[0]{prereqs}}1; +CPAN_META_FEATURE + +$fatpacked{"CPAN/Meta/History.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'CPAN_META_HISTORY'; + use 5.006;use strict;use warnings;package CPAN::Meta::History;our$VERSION='2.150005';1; +CPAN_META_HISTORY + +$fatpacked{"CPAN/Meta/Merge.pm"} = '#line '.(1+__LINE__).' 
"'.__FILE__."\"\n".<<'CPAN_META_MERGE'; + use strict;use warnings;package CPAN::Meta::Merge;our$VERSION='2.150005';use Carp qw/croak/;use Scalar::Util qw/blessed/;use CPAN::Meta::Converter 2.141170;sub _is_identical {my ($left,$right)=@_;return (not defined$left and not defined$right)|| (defined$left and defined$right and $left eq $right)}sub _identical {my ($left,$right,$path)=@_;croak sprintf "Can't merge attribute %s: '%s' does not equal '%s'",join('.',@{$path}),$left,$right unless _is_identical($left,$right);return$left}sub _merge {my ($current,$next,$mergers,$path)=@_;for my$key (keys %{$next}){if (not exists$current->{$key}){$current->{$key}=$next->{$key}}elsif (my$merger=$mergers->{$key}){$current->{$key}=$merger->($current->{$key},$next->{$key},[@{$path},$key ])}elsif ($merger=$mergers->{':default'}){$current->{$key}=$merger->($current->{$key},$next->{$key},[@{$path},$key ])}else {croak sprintf "Can't merge unknown attribute '%s'",join '.',@{$path},$key}}return$current}sub _uniq {my%seen=();return grep {not $seen{$_}++}@_}sub _set_addition {my ($left,$right)=@_;return [+_uniq(@{$left},@{$right})]}sub _uniq_map {my ($left,$right,$path)=@_;for my$key (keys %{$right}){if (not exists$left->{$key}){$left->{$key}=$right->{$key}}elsif (_is_identical($left->{$key},$right->{$key})){1}elsif (ref$left->{$key}eq 'HASH' and ref$right->{$key}eq 'HASH'){$left->{$key}=_uniq_map($left->{$key},$right->{$key},[@{$path},$key ])}else {croak 'Duplication of element ' .join '.',@{$path},$key}}return$left}sub _improvize {my ($left,$right,$path)=@_;my ($name)=reverse @{$path};if ($name =~ /^x_/){if (ref($left)eq 'ARRAY'){return _set_addition($left,$right,$path)}elsif (ref($left)eq 'HASH'){return _uniq_map($left,$right,$path)}else {return _identical($left,$right,$path)}}croak sprintf "Can't merge '%s'",join '.',@{$path}}sub _optional_features {my ($left,$right,$path)=@_;for my$key (keys %{$right}){if (not exists$left->{$key}){$left->{$key}=$right->{$key}}else {for my$subkey (keys %{$right->{$key}}){next if$subkey eq 'prereqs';if (not exists$left->{$key}{$subkey}){$left->{$key}{$subkey}=$right->{$key}{$subkey}}else {Carp::croak "Cannot merge two optional_features named '$key' with different '$subkey' values" if do {no warnings 'uninitialized';$left->{$key}{$subkey}ne $right->{$key}{$subkey}}}}require CPAN::Meta::Prereqs;$left->{$key}{prereqs}=CPAN::Meta::Prereqs->new($left->{$key}{prereqs})->with_merged_prereqs(CPAN::Meta::Prereqs->new($right->{$key}{prereqs}))->as_string_hash}}return$left}my%default=(abstract=>\&_identical,author=>\&_set_addition,dynamic_config=>sub {my ($left,$right)=@_;return$left || $right},generated_by=>sub {my ($left,$right)=@_;return join ', ',_uniq(split(/, /,$left),split(/, /,$right))},license=>\&_set_addition,'meta-spec'=>{version=>\&_identical,url=>\&_identical },name=>\&_identical,release_status=>\&_identical,version=>\&_identical,description=>\&_identical,keywords=>\&_set_addition,no_index=>{map {($_=>\&_set_addition)}qw/file directory package namespace/ },optional_features=>\&_optional_features,prereqs=>sub {require CPAN::Meta::Prereqs;my ($left,$right)=map {CPAN::Meta::Prereqs->new($_)}@_[0,1];return$left->with_merged_prereqs($right)->as_string_hash},provides=>\&_uniq_map,resources=>{license=>\&_set_addition,homepage=>\&_identical,bugtracker=>\&_uniq_map,repository=>\&_uniq_map,':default'=>\&_improvize,},':default'=>\&_improvize,);sub new {my ($class,%arguments)=@_;croak 'default version required' if not 
exists$arguments{default_version};my%mapping=%default;my%extra=%{$arguments{extra_mappings}|| {}};for my$key (keys%extra){if (ref($mapping{$key})eq 'HASH'){$mapping{$key}={%{$mapping{$key}},%{$extra{$key}}}}else {$mapping{$key}=$extra{$key}}}return bless {default_version=>$arguments{default_version},mapping=>_coerce_mapping(\%mapping,[]),},$class}my%coderef_for=(set_addition=>\&_set_addition,uniq_map=>\&_uniq_map,identical=>\&_identical,improvize=>\&_improvize,);sub _coerce_mapping {my ($orig,$map_path)=@_;my%ret;for my$key (keys %{$orig}){my$value=$orig->{$key};if (ref($orig->{$key})eq 'CODE'){$ret{$key}=$value}elsif (ref($value)eq 'HASH'){my$mapping=_coerce_mapping($value,[@{$map_path},$key ]);$ret{$key}=sub {my ($left,$right,$path)=@_;return _merge($left,$right,$mapping,[@{$path}])}}elsif ($coderef_for{$value}){$ret{$key}=$coderef_for{$value}}else {croak "Don't know what to do with " .join '.',@{$map_path},$key}}return \%ret}sub merge {my ($self,@items)=@_;my$current={};for my$next (@items){if (blessed($next)&& $next->isa('CPAN::Meta')){$next=$next->as_struct}elsif (ref($next)eq 'HASH'){my$cmc=CPAN::Meta::Converter->new($next,default_version=>$self->{default_version});$next=$cmc->upgrade_fragment}else {croak "Don't know how to merge '$next'"}$current=_merge($current,$next,$self->{mapping},[])}return$current}1; +CPAN_META_MERGE + +$fatpacked{"CPAN/Meta/Prereqs.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'CPAN_META_PREREQS'; + use 5.006;use strict;use warnings;package CPAN::Meta::Prereqs;our$VERSION='2.150005';use Carp qw(confess);use Scalar::Util qw(blessed);use CPAN::Meta::Requirements 2.121;sub __legal_phases {qw(configure build test runtime develop)}sub __legal_types {qw(requires recommends suggests conflicts)}sub new {my ($class,$prereq_spec)=@_;$prereq_spec ||= {};my%is_legal_phase=map {;$_=>1}$class->__legal_phases;my%is_legal_type=map {;$_=>1}$class->__legal_types;my%guts;PHASE: for my$phase (keys %$prereq_spec){next PHASE unless$phase =~ /\Ax_/i or $is_legal_phase{$phase};my$phase_spec=$prereq_spec->{$phase };next PHASE unless keys %$phase_spec;TYPE: for my$type (keys %$phase_spec){next TYPE unless$type =~ /\Ax_/i or $is_legal_type{$type};my$spec=$phase_spec->{$type };next TYPE unless keys %$spec;$guts{prereqs}{$phase}{$type}=CPAN::Meta::Requirements->from_string_hash($spec)}}return bless \%guts=>$class}sub requirements_for {my ($self,$phase,$type)=@_;confess "requirements_for called without phase" unless defined$phase;confess "requirements_for called without type" unless defined$type;unless ($phase =~ /\Ax_/i or grep {$phase eq $_}$self->__legal_phases){confess "requested requirements for unknown phase: $phase"}unless ($type =~ /\Ax_/i or grep {$type eq $_}$self->__legal_types){confess "requested requirements for unknown type: $type"}my$req=($self->{prereqs}{$phase}{$type}||= CPAN::Meta::Requirements->new);$req->finalize if$self->is_finalized;return$req}sub with_merged_prereqs {my ($self,$other)=@_;my@other=blessed($other)? 
$other : @$other;my@prereq_objs=($self,@other);my%new_arg;for my$phase ($self->__legal_phases){for my$type ($self->__legal_types){my$req=CPAN::Meta::Requirements->new;for my$prereq (@prereq_objs){my$this_req=$prereq->requirements_for($phase,$type);next unless$this_req->required_modules;$req->add_requirements($this_req)}next unless$req->required_modules;$new_arg{$phase }{$type }=$req->as_string_hash}}return (ref$self)->new(\%new_arg)}sub merged_requirements {my ($self,$phases,$types)=@_;$phases=[qw/runtime build test/]unless defined$phases;$types=[qw/requires recommends/]unless defined$types;confess "merged_requirements phases argument must be an arrayref" unless ref$phases eq 'ARRAY';confess "merged_requirements types argument must be an arrayref" unless ref$types eq 'ARRAY';my$req=CPAN::Meta::Requirements->new;for my$phase (@$phases){unless ($phase =~ /\Ax_/i or grep {$phase eq $_}$self->__legal_phases){confess "requested requirements for unknown phase: $phase"}for my$type (@$types){unless ($type =~ /\Ax_/i or grep {$type eq $_}$self->__legal_types){confess "requested requirements for unknown type: $type"}$req->add_requirements($self->requirements_for($phase,$type))}}$req->finalize if$self->is_finalized;return$req}sub as_string_hash {my ($self)=@_;my%hash;for my$phase ($self->__legal_phases){for my$type ($self->__legal_types){my$req=$self->requirements_for($phase,$type);next unless$req->required_modules;$hash{$phase }{$type }=$req->as_string_hash}}return \%hash}sub is_finalized {$_[0]{finalized}}sub finalize {my ($self)=@_;$self->{finalized}=1;for my$phase (keys %{$self->{prereqs}}){$_->finalize for values %{$self->{prereqs}{$phase}}}}sub clone {my ($self)=@_;my$clone=(ref$self)->new($self->as_string_hash)}1; +CPAN_META_PREREQS + +$fatpacked{"CPAN/Meta/Requirements.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'CPAN_META_REQUIREMENTS'; + use strict;use warnings;package CPAN::Meta::Requirements;our$VERSION='2.133';use Carp ();BEGIN {eval "use version ()";if (my$err=$@){eval "use ExtUtils::MakeMaker::version" or die$err}}*_is_qv=version->can('is_qv')? sub {$_[0]->is_qv}: sub {exists $_[0]->{qv}};my$V0=version->new(0);my@valid_options=qw(bad_version_hook);sub new {my ($class,$options)=@_;$options ||= {};Carp::croak "Argument to $class\->new() must be a hash reference" unless ref$options eq 'HASH';my%self=map {;$_=>$options->{$_}}@valid_options;return bless \%self=>$class}sub _find_magic_vstring {my$value=shift;my$tvalue='';require B;my$sv=B::svref_2object(\$value);my$magic=ref($sv)eq 'B::PVMG' ? 
$sv->MAGIC : undef;while ($magic){if ($magic->TYPE eq 'V'){$tvalue=$magic->PTR;$tvalue =~ s/^v?(.+)$/v$1/;last}else {$magic=$magic->MOREMAGIC}}return$tvalue}sub _isa_version {UNIVERSAL::isa($_[0],'UNIVERSAL')&& $_[0]->isa('version')}sub _version_object {my ($self,$module,$version)=@_;my ($vobj,$err);if (not defined$version or (!ref($version)&& $version eq '0')){return$V0}elsif (ref($version)eq 'version' || _isa_version($version)){$vobj=$version}else {if ($INC{'version/vpp.pm'}|| $INC{'ExtUtils/MakeMaker/version/vpp.pm'}){my$magic=_find_magic_vstring($version);$version=$magic if length$magic}eval {local$SIG{__WARN__}=sub {die "Invalid version: $_[0]"};$vobj=version->new($version)};if (my$err=$@){my$hook=$self->{bad_version_hook};$vobj=eval {$hook->($version,$module)}if ref$hook eq 'CODE';unless (eval {$vobj->isa("version")}){$err =~ s{ at .* line \d+.*$}{};die "Can't convert '$version': $err"}}}if ($vobj =~ m{\A\.}){$vobj=version->new("0$vobj")}if (_is_qv($vobj)){$vobj=version->new($vobj->normal)}return$vobj}BEGIN {for my$type (qw(maximum exclusion exact_version)){my$method="with_$type";my$to_add=$type eq 'exact_version' ? $type : "add_$type";my$code=sub {my ($self,$name,$version)=@_;$version=$self->_version_object($name,$version);$self->__modify_entry_for($name,$method,$version);return$self};no strict 'refs';*$to_add=$code}}sub add_minimum {my ($self,$name,$version)=@_;if (not defined$version or "$version" eq '0'){return$self if$self->__entry_for($name);Carp::confess("can't add new requirements to finalized requirements")if$self->is_finalized;$self->{requirements}{$name }=CPAN::Meta::Requirements::_Range::Range->with_minimum($V0)}else {$version=$self->_version_object($name,$version);$self->__modify_entry_for($name,'with_minimum',$version)}return$self}sub add_requirements {my ($self,$req)=@_;for my$module ($req->required_modules){my$modifiers=$req->__entry_for($module)->as_modifiers;for my$modifier (@$modifiers){my ($method,@args)=@$modifier;$self->$method($module=>@args)}}return$self}sub accepts_module {my ($self,$module,$version)=@_;$version=$self->_version_object($module,$version);return 1 unless my$range=$self->__entry_for($module);return$range->_accepts($version)}sub clear_requirement {my ($self,$module)=@_;return$self unless$self->__entry_for($module);Carp::confess("can't clear requirements on finalized requirements")if$self->is_finalized;delete$self->{requirements}{$module };return$self}sub requirements_for_module {my ($self,$module)=@_;my$entry=$self->__entry_for($module);return unless$entry;return$entry->as_string}sub required_modules {keys %{$_[0]{requirements}}}sub clone {my ($self)=@_;my$new=(ref$self)->new;return$new->add_requirements($self)}sub __entry_for {$_[0]{requirements}{$_[1]}}sub __modify_entry_for {my ($self,$name,$method,$version)=@_;my$fin=$self->is_finalized;my$old=$self->__entry_for($name);Carp::confess("can't add new requirements to finalized requirements")if$fin and not $old;my$new=($old || 'CPAN::Meta::Requirements::_Range::Range')->$method($version);Carp::confess("can't modify finalized requirements")if$fin and $old->as_string ne $new->as_string;$self->{requirements}{$name }=$new}sub is_simple {my ($self)=@_;for my$module ($self->required_modules){return if$self->__entry_for($module)->as_string =~ /\s/}return 1}sub is_finalized {$_[0]{finalized}}sub finalize {$_[0]{finalized}=1}sub as_string_hash {my ($self)=@_;my%hash=map {;$_=>$self->{requirements}{$_}->as_string}$self->required_modules;return \%hash}my%methods_for_op=('=='=>[qw(exact_version) 
],'!='=>[qw(add_exclusion) ],'>='=>[qw(add_minimum) ],'<='=>[qw(add_maximum) ],'>'=>[qw(add_minimum add_exclusion) ],'<'=>[qw(add_maximum add_exclusion) ],);sub add_string_requirement {my ($self,$module,$req)=@_;unless (defined$req && length$req){$req=0;$self->_blank_carp($module)}my$magic=_find_magic_vstring($req);if (length$magic){$self->add_minimum($module=>$magic);return}my@parts=split qr{\s*,\s*},$req;for my$part (@parts){my ($op,$ver)=$part =~ m{\A\s*(==|>=|>|<=|<|!=)\s*(.*)\z};if (!defined$op){$self->add_minimum($module=>$part)}else {Carp::confess("illegal requirement string: $req")unless my$methods=$methods_for_op{$op };$self->$_($module=>$ver)for @$methods}}}sub _blank_carp {my ($self,$module)=@_;Carp::carp("Undefined requirement for $module treated as '0'")}sub from_string_hash {my ($class,$hash,$options)=@_;my$self=$class->new($options);for my$module (keys %$hash){my$req=$hash->{$module};unless (defined$req && length$req){$req=0;$class->_blank_carp($module)}$self->add_string_requirement($module,$req)}return$self}{package CPAN::Meta::Requirements::_Range::Exact;sub _new {bless {version=>$_[1]}=>$_[0]}sub _accepts {return $_[0]{version}==$_[1]}sub as_string {return "== $_[0]{version}"}sub as_modifiers {return [[exact_version=>$_[0]{version}]]}sub _clone {(ref $_[0])->_new(version->new($_[0]{version}))}sub with_exact_version {my ($self,$version)=@_;return$self->_clone if$self->_accepts($version);Carp::confess("illegal requirements: unequal exact version specified")}sub with_minimum {my ($self,$minimum)=@_;return$self->_clone if$self->{version}>= $minimum;Carp::confess("illegal requirements: minimum above exact specification")}sub with_maximum {my ($self,$maximum)=@_;return$self->_clone if$self->{version}<= $maximum;Carp::confess("illegal requirements: maximum below exact specification")}sub with_exclusion {my ($self,$exclusion)=@_;return$self->_clone unless$exclusion==$self->{version};Carp::confess("illegal requirements: excluded exact specification")}}{package CPAN::Meta::Requirements::_Range::Range;sub _self {ref($_[0])? $_[0]: (bless {}=>$_[0])}sub _clone {return (bless {}=>$_[0])unless ref $_[0];my ($s)=@_;my%guts=((exists$s->{minimum}? (minimum=>version->new($s->{minimum})): ()),(exists$s->{maximum}? (maximum=>version->new($s->{maximum})): ()),(exists$s->{exclusions}? 
(exclusions=>[map {version->new($_)}@{$s->{exclusions}}]): ()),);bless \%guts=>ref($s)}sub as_modifiers {my ($self)=@_;my@mods;push@mods,[add_minimum=>$self->{minimum}]if exists$self->{minimum};push@mods,[add_maximum=>$self->{maximum}]if exists$self->{maximum};push@mods,map {;[add_exclusion=>$_ ]}@{$self->{exclusions}|| []};return \@mods}sub as_string {my ($self)=@_;return 0 if!keys %$self;return "$self->{minimum}" if (keys %$self)==1 and exists$self->{minimum};my@exclusions=@{$self->{exclusions}|| []};my@parts;for my$pair ([qw(>= > minimum) ],[qw(<= < maximum) ],){my ($op,$e_op,$k)=@$pair;if (exists$self->{$k}){my@new_exclusions=grep {$_!=$self->{$k }}@exclusions;if (@new_exclusions==@exclusions){push@parts,"$op $self->{ $k }"}else {push@parts,"$e_op $self->{ $k }";@exclusions=@new_exclusions}}}push@parts,map {;"!= $_"}@exclusions;return join q{, },@parts}sub with_exact_version {my ($self,$version)=@_;$self=$self->_clone;Carp::confess("illegal requirements: exact specification outside of range")unless$self->_accepts($version);return CPAN::Meta::Requirements::_Range::Exact->_new($version)}sub _simplify {my ($self)=@_;if (defined$self->{minimum}and defined$self->{maximum}){if ($self->{minimum}==$self->{maximum}){Carp::confess("illegal requirements: excluded all values")if grep {$_==$self->{minimum}}@{$self->{exclusions}|| []};return CPAN::Meta::Requirements::_Range::Exact->_new($self->{minimum})}Carp::confess("illegal requirements: minimum exceeds maximum")if$self->{minimum}> $self->{maximum}}if ($self->{exclusions}){my%seen;@{$self->{exclusions}}=grep {(!defined$self->{minimum}or $_ >= $self->{minimum})and (!defined$self->{maximum}or $_ <= $self->{maximum})and !$seen{$_}++}@{$self->{exclusions}}}return$self}sub with_minimum {my ($self,$minimum)=@_;$self=$self->_clone;if (defined (my$old_min=$self->{minimum})){$self->{minimum}=(sort {$b cmp $a}($minimum,$old_min))[0]}else {$self->{minimum}=$minimum}return$self->_simplify}sub with_maximum {my ($self,$maximum)=@_;$self=$self->_clone;if (defined (my$old_max=$self->{maximum})){$self->{maximum}=(sort {$a cmp $b}($maximum,$old_max))[0]}else {$self->{maximum}=$maximum}return$self->_simplify}sub with_exclusion {my ($self,$exclusion)=@_;$self=$self->_clone;push @{$self->{exclusions}||= []},$exclusion;return$self->_simplify}sub _accepts {my ($self,$version)=@_;return if defined$self->{minimum}and $version < $self->{minimum};return if defined$self->{maximum}and $version > $self->{maximum};return if defined$self->{exclusions}and grep {$version==$_}@{$self->{exclusions}};return 1}}1; +CPAN_META_REQUIREMENTS + +$fatpacked{"CPAN/Meta/Spec.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'CPAN_META_SPEC'; + use 5.006;use strict;use warnings;package CPAN::Meta::Spec;our$VERSION='2.150005';1; +CPAN_META_SPEC + +$fatpacked{"CPAN/Meta/Validator.pm"} = '#line '.(1+__LINE__).' 
"'.__FILE__."\"\n".<<'CPAN_META_VALIDATOR'; + use 5.006;use strict;use warnings;package CPAN::Meta::Validator;our$VERSION='2.150005';my%known_specs=('1.4'=>'http://module-build.sourceforge.net/META-spec-v1.4.html','1.3'=>'http://module-build.sourceforge.net/META-spec-v1.3.html','1.2'=>'http://module-build.sourceforge.net/META-spec-v1.2.html','1.1'=>'http://module-build.sourceforge.net/META-spec-v1.1.html','1.0'=>'http://module-build.sourceforge.net/META-spec-v1.0.html');my%known_urls=map {$known_specs{$_}=>$_}keys%known_specs;my$module_map1={'map'=>{':key'=>{name=>\&module,value=>\&exversion }}};my$module_map2={'map'=>{':key'=>{name=>\&module,value=>\&version }}};my$no_index_2={'map'=>{file=>{list=>{value=>\&string }},directory=>{list=>{value=>\&string }},'package'=>{list=>{value=>\&string }},namespace=>{list=>{value=>\&string }},':key'=>{name=>\&custom_2,value=>\&anything },}};my$no_index_1_3={'map'=>{file=>{list=>{value=>\&string }},directory=>{list=>{value=>\&string }},'package'=>{list=>{value=>\&string }},namespace=>{list=>{value=>\&string }},':key'=>{name=>\&string,value=>\&anything },}};my$no_index_1_2={'map'=>{file=>{list=>{value=>\&string }},dir=>{list=>{value=>\&string }},'package'=>{list=>{value=>\&string }},namespace=>{list=>{value=>\&string }},':key'=>{name=>\&string,value=>\&anything },}};my$no_index_1_1={'map'=>{':key'=>{name=>\&string,list=>{value=>\&string }},}};my$prereq_map={map=>{':key'=>{name=>\&phase,'map'=>{':key'=>{name=>\&relation,%$module_map1,},},}},};my%definitions=('2'=>{'abstract'=>{mandatory=>1,value=>\&string },'author'=>{mandatory=>1,list=>{value=>\&string }},'dynamic_config'=>{mandatory=>1,value=>\&boolean },'generated_by'=>{mandatory=>1,value=>\&string },'license'=>{mandatory=>1,list=>{value=>\&license }},'meta-spec'=>{mandatory=>1,'map'=>{version=>{mandatory=>1,value=>\&version},url=>{value=>\&url },':key'=>{name=>\&custom_2,value=>\&anything },}},'name'=>{mandatory=>1,value=>\&string },'release_status'=>{mandatory=>1,value=>\&release_status },'version'=>{mandatory=>1,value=>\&version },'description'=>{value=>\&string },'keywords'=>{list=>{value=>\&string }},'no_index'=>$no_index_2,'optional_features'=>{'map'=>{':key'=>{name=>\&string,'map'=>{description=>{value=>\&string },prereqs=>$prereq_map,':key'=>{name=>\&custom_2,value=>\&anything },}}}},'prereqs'=>$prereq_map,'provides'=>{'map'=>{':key'=>{name=>\&module,'map'=>{file=>{mandatory=>1,value=>\&file },version=>{value=>\&version },':key'=>{name=>\&custom_2,value=>\&anything },}}}},'resources'=>{'map'=>{license=>{list=>{value=>\&url }},homepage=>{value=>\&url },bugtracker=>{'map'=>{web=>{value=>\&url },mailto=>{value=>\&string},':key'=>{name=>\&custom_2,value=>\&anything },}},repository=>{'map'=>{web=>{value=>\&url },url=>{value=>\&url },type=>{value=>\&string },':key'=>{name=>\&custom_2,value=>\&anything },}},':key'=>{value=>\&string,name=>\&custom_2 },}},':key'=>{name=>\&custom_2,value=>\&anything },},'1.4'=>{'meta-spec'=>{mandatory=>1,'map'=>{version=>{mandatory=>1,value=>\&version},url=>{mandatory=>1,value=>\&urlspec },':key'=>{name=>\&string,value=>\&anything },},},'name'=>{mandatory=>1,value=>\&string },'version'=>{mandatory=>1,value=>\&version },'abstract'=>{mandatory=>1,value=>\&string },'author'=>{mandatory=>1,list=>{value=>\&string }},'license'=>{mandatory=>1,value=>\&license },'generated_by'=>{mandatory=>1,value=>\&string },'distribution_type'=>{value=>\&string },'dynamic_config'=>{value=>\&boolean 
},'requires'=>$module_map1,'recommends'=>$module_map1,'build_requires'=>$module_map1,'configure_requires'=>$module_map1,'conflicts'=>$module_map2,'optional_features'=>{'map'=>{':key'=>{name=>\&string,'map'=>{description=>{value=>\&string },requires=>$module_map1,recommends=>$module_map1,build_requires=>$module_map1,conflicts=>$module_map2,':key'=>{name=>\&string,value=>\&anything },}}}},'provides'=>{'map'=>{':key'=>{name=>\&module,'map'=>{file=>{mandatory=>1,value=>\&file },version=>{value=>\&version },':key'=>{name=>\&string,value=>\&anything },}}}},'no_index'=>$no_index_1_3,'private'=>$no_index_1_3,'keywords'=>{list=>{value=>\&string }},'resources'=>{'map'=>{license=>{value=>\&url },homepage=>{value=>\&url },bugtracker=>{value=>\&url },repository=>{value=>\&url },':key'=>{value=>\&string,name=>\&custom_1 },}},':key'=>{name=>\&string,value=>\&anything },},'1.3'=>{'meta-spec'=>{mandatory=>1,'map'=>{version=>{mandatory=>1,value=>\&version},url=>{mandatory=>1,value=>\&urlspec },':key'=>{name=>\&string,value=>\&anything },},},'name'=>{mandatory=>1,value=>\&string },'version'=>{mandatory=>1,value=>\&version },'abstract'=>{mandatory=>1,value=>\&string },'author'=>{mandatory=>1,list=>{value=>\&string }},'license'=>{mandatory=>1,value=>\&license },'generated_by'=>{mandatory=>1,value=>\&string },'distribution_type'=>{value=>\&string },'dynamic_config'=>{value=>\&boolean },'requires'=>$module_map1,'recommends'=>$module_map1,'build_requires'=>$module_map1,'conflicts'=>$module_map2,'optional_features'=>{'map'=>{':key'=>{name=>\&string,'map'=>{description=>{value=>\&string },requires=>$module_map1,recommends=>$module_map1,build_requires=>$module_map1,conflicts=>$module_map2,':key'=>{name=>\&string,value=>\&anything },}}}},'provides'=>{'map'=>{':key'=>{name=>\&module,'map'=>{file=>{mandatory=>1,value=>\&file },version=>{value=>\&version },':key'=>{name=>\&string,value=>\&anything },}}}},'no_index'=>$no_index_1_3,'private'=>$no_index_1_3,'keywords'=>{list=>{value=>\&string }},'resources'=>{'map'=>{license=>{value=>\&url },homepage=>{value=>\&url },bugtracker=>{value=>\&url },repository=>{value=>\&url },':key'=>{value=>\&string,name=>\&custom_1 },}},':key'=>{name=>\&string,value=>\&anything },},'1.2'=>{'meta-spec'=>{mandatory=>1,'map'=>{version=>{mandatory=>1,value=>\&version},url=>{mandatory=>1,value=>\&urlspec },':key'=>{name=>\&string,value=>\&anything },},},'name'=>{mandatory=>1,value=>\&string },'version'=>{mandatory=>1,value=>\&version },'license'=>{mandatory=>1,value=>\&license },'generated_by'=>{mandatory=>1,value=>\&string },'author'=>{mandatory=>1,list=>{value=>\&string }},'abstract'=>{mandatory=>1,value=>\&string },'distribution_type'=>{value=>\&string },'dynamic_config'=>{value=>\&boolean },'keywords'=>{list=>{value=>\&string }},'private'=>$no_index_1_2,'$no_index'=>$no_index_1_2,'requires'=>$module_map1,'recommends'=>$module_map1,'build_requires'=>$module_map1,'conflicts'=>$module_map2,'optional_features'=>{'map'=>{':key'=>{name=>\&string,'map'=>{description=>{value=>\&string },requires=>$module_map1,recommends=>$module_map1,build_requires=>$module_map1,conflicts=>$module_map2,':key'=>{name=>\&string,value=>\&anything },}}}},'provides'=>{'map'=>{':key'=>{name=>\&module,'map'=>{file=>{mandatory=>1,value=>\&file },version=>{value=>\&version },':key'=>{name=>\&string,value=>\&anything },}}}},'resources'=>{'map'=>{license=>{value=>\&url },homepage=>{value=>\&url },bugtracker=>{value=>\&url },repository=>{value=>\&url },':key'=>{value=>\&string,name=>\&custom_1 
},}},':key'=>{name=>\&string,value=>\&anything },},'1.1'=>{'name'=>{value=>\&string },'version'=>{mandatory=>1,value=>\&version },'license'=>{value=>\&license },'generated_by'=>{value=>\&string },'license_uri'=>{value=>\&url },'distribution_type'=>{value=>\&string },'dynamic_config'=>{value=>\&boolean },'private'=>$no_index_1_1,'requires'=>$module_map1,'recommends'=>$module_map1,'build_requires'=>$module_map1,'conflicts'=>$module_map2,':key'=>{name=>\&string,value=>\&anything },},'1.0'=>{'name'=>{value=>\&string },'version'=>{mandatory=>1,value=>\&version },'license'=>{value=>\&license },'generated_by'=>{value=>\&string },'license_uri'=>{value=>\&url },'distribution_type'=>{value=>\&string },'dynamic_config'=>{value=>\&boolean },'requires'=>$module_map1,'recommends'=>$module_map1,'build_requires'=>$module_map1,'conflicts'=>$module_map2,':key'=>{name=>\&string,value=>\&anything },},);sub new {my ($class,$data)=@_;my$self={'data'=>$data,'spec'=>eval {$data->{'meta-spec'}{'version'}}|| "1.0",'errors'=>undef,};return bless$self,$class}sub is_valid {my$self=shift;my$data=$self->{data};my$spec_version=$self->{spec};$self->check_map($definitions{$spec_version},$data);return!$self->errors}sub errors {my$self=shift;return ()unless(defined$self->{errors});return @{$self->{errors}}}my$spec_error="Missing validation action in specification. " ."Must be one of 'map', 'list', or 'value'";sub check_map {my ($self,$spec,$data)=@_;if(ref($spec)ne 'HASH'){$self->_error("Unknown META specification, cannot validate.");return}if(ref($data)ne 'HASH'){$self->_error("Expected a map structure from string or file.");return}for my$key (keys %$spec){next unless($spec->{$key}->{mandatory});next if(defined$data->{$key});push @{$self->{stack}},$key;$self->_error("Missing mandatory field, '$key'");pop @{$self->{stack}}}for my$key (keys %$data){push @{$self->{stack}},$key;if($spec->{$key}){if($spec->{$key}{value}){$spec->{$key}{value}->($self,$key,$data->{$key})}elsif($spec->{$key}{'map'}){$self->check_map($spec->{$key}{'map'},$data->{$key})}elsif($spec->{$key}{'list'}){$self->check_list($spec->{$key}{'list'},$data->{$key})}else {$self->_error("$spec_error for '$key'")}}elsif ($spec->{':key'}){$spec->{':key'}{name}->($self,$key,$key);if($spec->{':key'}{value}){$spec->{':key'}{value}->($self,$key,$data->{$key})}elsif($spec->{':key'}{'map'}){$self->check_map($spec->{':key'}{'map'},$data->{$key})}elsif($spec->{':key'}{'list'}){$self->check_list($spec->{':key'}{'list'},$data->{$key})}else {$self->_error("$spec_error for ':key'")}}else {$self->_error("Unknown key, '$key', found in map structure")}pop @{$self->{stack}}}}sub check_list {my ($self,$spec,$data)=@_;if(ref($data)ne 'ARRAY'){$self->_error("Expected a list structure");return}if(defined$spec->{mandatory}){if(!defined$data->[0]){$self->_error("Missing entries from mandatory list")}}for my$value (@$data){push @{$self->{stack}},$value || "";if(defined$spec->{value}){$spec->{value}->($self,'list',$value)}elsif(defined$spec->{'map'}){$self->check_map($spec->{'map'},$value)}elsif(defined$spec->{'list'}){$self->check_list($spec->{'list'},$value)}elsif ($spec->{':key'}){$self->check_map($spec,$value)}else {$self->_error("$spec_error associated with '$self->{stack}[-2]'")}pop @{$self->{stack}}}}sub header {my ($self,$key,$value)=@_;if(defined$value){return 1 if($value && $value =~ /^--- #YAML:1.0/)}$self->_error("file does not have a valid YAML header.");return 0}sub release_status {my ($self,$key,$value)=@_;if(defined$value){my$version=$self->{data}{version}|| '';if ($version 
=~ /_/){return 1 if ($value =~ /\A(?:testing|unstable)\z/);$self->_error("'$value' for '$key' is invalid for version '$version'")}else {return 1 if ($value =~ /\A(?:stable|testing|unstable)\z/);$self->_error("'$value' for '$key' is invalid")}}else {$self->_error("'$key' is not defined")}return 0}sub _uri_split {return $_[0]=~ m,(?:([^:/?#]+):)?(?://([^/?#]*))?([^?#]*)(?:\?([^#]*))?(?:#(.*))?,}sub url {my ($self,$key,$value)=@_;if(defined$value){my ($scheme,$auth,$path,$query,$frag)=_uri_split($value);unless (defined$scheme && length$scheme){$self->_error("'$value' for '$key' does not have a URL scheme");return 0}unless (defined$auth && length$auth){$self->_error("'$value' for '$key' does not have a URL authority");return 0}return 1}$value ||= '';$self->_error("'$value' for '$key' is not a valid URL.");return 0}sub urlspec {my ($self,$key,$value)=@_;if(defined$value){return 1 if($value && $known_specs{$self->{spec}}eq $value);if($value && $known_urls{$value}){$self->_error('META specification URL does not match version');return 0}}$self->_error('Unknown META specification');return 0}sub anything {return 1}sub string {my ($self,$key,$value)=@_;if(defined$value){return 1 if($value || $value =~ /^0$/)}$self->_error("value is an undefined string");return 0}sub string_or_undef {my ($self,$key,$value)=@_;return 1 unless(defined$value);return 1 if($value || $value =~ /^0$/);$self->_error("No string defined for '$key'");return 0}sub file {my ($self,$key,$value)=@_;return 1 if(defined$value);$self->_error("No file defined for '$key'");return 0}sub exversion {my ($self,$key,$value)=@_;if(defined$value && ($value || $value =~ /0/)){my$pass=1;for(split(",",$value)){$self->version($key,$_)or ($pass=0)}return$pass}$value='' unless(defined$value);$self->_error("'$value' for '$key' is not a valid version.");return 0}sub version {my ($self,$key,$value)=@_;if(defined$value){return 0 unless($value || $value =~ /0/);return 1 if($value =~ /^\s*((<|<=|>=|>|!=|==)\s*)?v?\d+((\.\d+((_|\.)\d+)?)?)/)}else {$value=''}$self->_error("'$value' for '$key' is not a valid version.");return 0}sub boolean {my ($self,$key,$value)=@_;if(defined$value){return 1 if($value =~ /^(0|1|true|false)$/)}else {$value=''}$self->_error("'$value' for '$key' is not a boolean value.");return 0}my%v1_licenses=('perl'=>'http://dev.perl.org/licenses/','gpl'=>'http://www.opensource.org/licenses/gpl-license.php','apache'=>'http://apache.org/licenses/LICENSE-2.0','artistic'=>'http://opensource.org/licenses/artistic-license.php','artistic_2'=>'http://opensource.org/licenses/artistic-license-2.0.php','lgpl'=>'http://www.opensource.org/licenses/lgpl-license.php','bsd'=>'http://www.opensource.org/licenses/bsd-license.php','gpl'=>'http://www.opensource.org/licenses/gpl-license.php','mit'=>'http://opensource.org/licenses/mit-license.php','mozilla'=>'http://opensource.org/licenses/mozilla1.1.php','open_source'=>undef,'unrestricted'=>undef,'restrictive'=>undef,'unknown'=>undef,);my%v2_licenses=map {$_=>1}qw(agpl_3 apache_1_1 apache_2_0 artistic_1 artistic_2 bsd freebsd gfdl_1_2 gfdl_1_3 gpl_1 gpl_2 gpl_3 lgpl_2_1 lgpl_3_0 mit mozilla_1_0 mozilla_1_1 openssl perl_5 qpl_1_0 ssleay sun zlib open_source restricted unrestricted unknown);sub license {my ($self,$key,$value)=@_;my$licenses=$self->{spec}< 2 ? 
\%v1_licenses : \%v2_licenses;if(defined$value){return 1 if($value && exists$licenses->{$value})}else {$value=''}$self->_error("License '$value' is invalid");return 0}sub custom_1 {my ($self,$key)=@_;if(defined$key){return 1 if($key && $key =~ /^[_a-z]+$/i && $key =~ /[A-Z]/)}else {$key=''}$self->_error("Custom resource '$key' must be in CamelCase.");return 0}sub custom_2 {my ($self,$key)=@_;if(defined$key){return 1 if($key && $key =~ /^x_/i)}else {$key=''}$self->_error("Custom key '$key' must begin with 'x_' or 'X_'.");return 0}sub identifier {my ($self,$key)=@_;if(defined$key){return 1 if($key && $key =~ /^([a-z][_a-z]+)$/i)}else {$key=''}$self->_error("Key '$key' is not a legal identifier.");return 0}sub module {my ($self,$key)=@_;if(defined$key){return 1 if($key && $key =~ /^[A-Za-z0-9_]+(::[A-Za-z0-9_]+)*$/)}else {$key=''}$self->_error("Key '$key' is not a legal module name.");return 0}my@valid_phases=qw/configure build test runtime develop/;sub phase {my ($self,$key)=@_;if(defined$key){return 1 if(length$key && grep {$key eq $_}@valid_phases);return 1 if$key =~ /x_/i}else {$key=''}$self->_error("Key '$key' is not a legal phase.");return 0}my@valid_relations=qw/requires recommends suggests conflicts/;sub relation {my ($self,$key)=@_;if(defined$key){return 1 if(length$key && grep {$key eq $_}@valid_relations);return 1 if$key =~ /x_/i}else {$key=''}$self->_error("Key '$key' is not a legal prereq relationship.");return 0}sub _error {my$self=shift;my$mess=shift;$mess .= ' ('.join(' -> ',@{$self->{stack}}).')' if($self->{stack});$mess .= " [Validation: $self->{spec}]";push @{$self->{errors}},$mess}1; +CPAN_META_VALIDATOR + +$fatpacked{"CPAN/Meta/YAML.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'CPAN_META_YAML'; + use 5.008001;use strict;use warnings;package CPAN::Meta::YAML;$CPAN::Meta::YAML::VERSION='0.016';;use Exporter;our@ISA=qw{Exporter};our@EXPORT=qw{Load Dump};our@EXPORT_OK=qw{LoadFile DumpFile freeze thaw};sub Dump {return CPAN::Meta::YAML->new(@_)->_dump_string}sub Load {my$self=CPAN::Meta::YAML->_load_string(@_);if (wantarray){return @$self}else {return$self->[-1]}}BEGIN {*freeze=\&Dump;*thaw=\&Load}sub DumpFile {my$file=shift;return CPAN::Meta::YAML->new(@_)->_dump_file($file)}sub LoadFile {my$file=shift;my$self=CPAN::Meta::YAML->_load_file($file);if (wantarray){return @$self}else {return$self->[-1]}}sub new {my$class=shift;bless [@_ ],$class}sub read_string {my$self=shift;$self->_load_string(@_)}sub write_string {my$self=shift;$self->_dump_string(@_)}sub read {my$self=shift;$self->_load_file(@_)}sub write {my$self=shift;$self->_dump_file(@_)}my@UNPRINTABLE=qw(0 x01 x02 x03 x04 x05 x06 a b t n v f r x0E x0F x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x1A e x1C x1D x1E x1F);my%UNESCAPES=(0=>"\x00",z=>"\x00",N=>"\x85",a=>"\x07",b=>"\x08",t=>"\x09",n=>"\x0a",v=>"\x0b",f=>"\x0c",r=>"\x0d",e=>"\x1b",'\\'=>'\\',);my%QUOTE=map {$_=>1}qw{null true false};my$re_capture_double_quoted=qr/\"([^\\"]*(?:\\.[^\\"]*)*)\"/;my$re_capture_single_quoted=qr/\'([^\']*(?:\'\'[^\']*)*)\'/;my$re_capture_unquoted_key=qr/([^:]+(?::+\S(?:[^:]*|.*?(?=:)))*)(?=\s*\:(?:\s+|$))/;my$re_trailing_comment=qr/(?:\s+\#.*)?/;my$re_key_value_separator=qr/\s*:(?:\s+(?:\#.*)?|$)/;sub _load_file {my$class=ref $_[0]? 
ref shift : shift;my$file=shift or $class->_error('You did not specify a file name');$class->_error("File '$file' does not exist")unless -e $file;$class->_error("'$file' is a directory, not a file")unless -f _;$class->_error("Insufficient permissions to read '$file'")unless -r _;open(my$fh,"<:unix:encoding(UTF-8)",$file);unless ($fh){$class->_error("Failed to open file '$file': $!")}if (_can_flock()){flock($fh,Fcntl::LOCK_SH())or warn "Couldn't lock '$file' for reading: $!"}my$contents=eval {use warnings FATAL=>'utf8';local $/;<$fh>};if (my$err=$@){$class->_error("Error reading from file '$file': $err")}unless (close$fh){$class->_error("Failed to close file '$file': $!")}$class->_load_string($contents)}sub _load_string {my$class=ref $_[0]? ref shift : shift;my$self=bless [],$class;my$string=$_[0];eval {unless (defined$string){die \"Did not provide a string to load"}if (utf8::is_utf8($string)&&!utf8::valid($string)){die \<<'...'}utf8::upgrade($string);$string =~ s/^\x{FEFF}//;return$self unless length$string;my@lines=grep {!/^\s*(?:\#.*)?\z/}split /(?:\015{1,2}\012|\015|\012)/,$string;@lines and $lines[0]=~ /^\%YAML[: ][\d\.]+.*\z/ and shift@lines;my$in_document=0;while (@lines){if ($lines[0]=~ /^---\s*(?:(.+)\s*)?\z/){shift@lines;if (defined $1 and $1 !~ /^(?:\#.+|\%YAML[: ][\d\.]+)\z/){push @$self,$self->_load_scalar("$1",[undef ],\@lines);next}$in_document=1}if (!@lines or $lines[0]=~ /^(?:---|\.\.\.)/){push @$self,undef;while (@lines and $lines[0]!~ /^---/){shift@lines}$in_document=0}elsif (!$in_document && @$self){die \"CPAN::Meta::YAML failed to classify the line '$lines[0]'"}elsif ($lines[0]=~ /^\s*\-(?:\s|$|-+$)/){my$document=[];push @$self,$document;$self->_load_array($document,[0 ],\@lines)}elsif ($lines[0]=~ /^(\s*)\S/){my$document={};push @$self,$document;$self->_load_hash($document,[length($1)],\@lines)}else {die \"CPAN::Meta::YAML failed to classify the line '$lines[0]'"}}};my$err=$@;if (ref$err eq 'SCALAR'){$self->_error(${$err})}elsif ($err){$self->_error($err)}return$self}sub _unquote_single {my ($self,$string)=@_;return '' unless length$string;$string =~ s/\'\'/\'/g;return$string}sub _unquote_double {my ($self,$string)=@_;return '' unless length$string;$string =~ s/\\"/"/g;$string =~ s{\\([Nnever\\fartz0b]|x([0-9a-fA-F]{2}))} + Read an invalid UTF-8 string (maybe mixed UTF-8 and 8-bit character set). + Did you decode with lax ":utf8" instead of strict ":encoding(UTF-8)"? + ... 
+ {(length($1)>1)?pack("H2",$2):$UNESCAPES{$1}}gex;return$string}sub _load_scalar {my ($self,$string,$indent,$lines)=@_;$string =~ s/\s*\z//;return undef if$string eq '~';if ($string =~ /^$re_capture_single_quoted$re_trailing_comment\z/){return$self->_unquote_single($1)}if ($string =~ /^$re_capture_double_quoted$re_trailing_comment\z/){return$self->_unquote_double($1)}if ($string =~ /^[\'\"!&]/){die \"CPAN::Meta::YAML does not support a feature in line '$string'"}return {}if$string =~ /^{}(?:\s+\#.*)?\z/;return []if$string =~ /^\[\](?:\s+\#.*)?\z/;if ($string !~ /^[>|]/){die \"CPAN::Meta::YAML found illegal characters in plain scalar: '$string'" if$string =~ /^(?:-(?:\s|$)|[\@\%\`])/ or $string =~ /:(?:\s|$)/;$string =~ s/\s+#.*\z//;return$string}die \"CPAN::Meta::YAML failed to find multi-line scalar content" unless @$lines;$lines->[0]=~ /^(\s*)/;$indent->[-1]=length("$1");if (defined$indent->[-2]and $indent->[-1]<= $indent->[-2]){die \"CPAN::Meta::YAML found bad indenting in line '$lines->[0]'"}my@multiline=();while (@$lines){$lines->[0]=~ /^(\s*)/;last unless length($1)>= $indent->[-1];push@multiline,substr(shift(@$lines),length($1))}my$j=(substr($string,0,1)eq '>')? ' ' : "\n";my$t=(substr($string,1,1)eq '-')? '' : "\n";return join($j,@multiline).$t}sub _load_array {my ($self,$array,$indent,$lines)=@_;while (@$lines){if ($lines->[0]=~ /^(?:---|\.\.\.)/){while (@$lines and $lines->[0]!~ /^---/){shift @$lines}return 1}$lines->[0]=~ /^(\s*)/;if (length($1)< $indent->[-1]){return 1}elsif (length($1)> $indent->[-1]){die \"CPAN::Meta::YAML found bad indenting in line '$lines->[0]'"}if ($lines->[0]=~ /^(\s*\-\s+)[^\'\"]\S*\s*:(?:\s+|$)/){my$indent2=length("$1");$lines->[0]=~ s/-/ /;push @$array,{};$self->_load_hash($array->[-1],[@$indent,$indent2 ],$lines)}elsif ($lines->[0]=~ /^\s*\-\s*\z/){shift @$lines;unless (@$lines){push @$array,undef;return 1}if ($lines->[0]=~ /^(\s*)\-/){my$indent2=length("$1");if ($indent->[-1]==$indent2){push @$array,undef}else {push @$array,[];$self->_load_array($array->[-1],[@$indent,$indent2 ],$lines)}}elsif ($lines->[0]=~ /^(\s*)\S/){push @$array,{};$self->_load_hash($array->[-1],[@$indent,length("$1")],$lines)}else {die \"CPAN::Meta::YAML failed to classify line '$lines->[0]'"}}elsif ($lines->[0]=~ /^\s*\-(\s*)(.+?)\s*\z/){shift @$lines;push @$array,$self->_load_scalar("$2",[@$indent,undef ],$lines)}elsif (defined$indent->[-2]and $indent->[-1]==$indent->[-2]){return 1}else {die \"CPAN::Meta::YAML failed to classify line '$lines->[0]'"}}return 1}sub _load_hash {my ($self,$hash,$indent,$lines)=@_;while (@$lines){if ($lines->[0]=~ /^(?:---|\.\.\.)/){while (@$lines and $lines->[0]!~ /^---/){shift @$lines}return 1}$lines->[0]=~ /^(\s*)/;if (length($1)< $indent->[-1]){return 1}elsif (length($1)> $indent->[-1]){die \"CPAN::Meta::YAML found bad indenting in line '$lines->[0]'"}my$key;if ($lines->[0]=~ s/^\s*$re_capture_single_quoted$re_key_value_separator//){$key=$self->_unquote_single($1)}elsif ($lines->[0]=~ s/^\s*$re_capture_double_quoted$re_key_value_separator//){$key=$self->_unquote_double($1)}elsif ($lines->[0]=~ s/^\s*$re_capture_unquoted_key$re_key_value_separator//){$key=$1;$key =~ s/\s+$//}elsif ($lines->[0]=~ /^\s*\?/){die \"CPAN::Meta::YAML does not support a feature in line '$lines->[0]'"}else {die \"CPAN::Meta::YAML failed to classify line '$lines->[0]'"}if (exists$hash->{$key}){warn "CPAN::Meta::YAML found a duplicate key '$key' in line '$lines->[0]'"}if (length$lines->[0]){$hash->{$key}=$self->_load_scalar(shift(@$lines),[@$indent,undef ],$lines)}else 
{shift @$lines;unless (@$lines){$hash->{$key}=undef;return 1}if ($lines->[0]=~ /^(\s*)-/){$hash->{$key}=[];$self->_load_array($hash->{$key},[@$indent,length($1)],$lines)}elsif ($lines->[0]=~ /^(\s*)./){my$indent2=length("$1");if ($indent->[-1]>= $indent2){$hash->{$key}=undef}else {$hash->{$key}={};$self->_load_hash($hash->{$key},[@$indent,length($1)],$lines)}}}}return 1}sub _dump_file {my$self=shift;require Fcntl;my$file=shift or $self->_error('You did not specify a file name');my$fh;if (_can_flock()){my$flags=Fcntl::O_WRONLY()|Fcntl::O_CREAT();sysopen($fh,$file,$flags);unless ($fh){$self->_error("Failed to open file '$file' for writing: $!")}binmode($fh,":raw:encoding(UTF-8)");flock($fh,Fcntl::LOCK_EX())or warn "Couldn't lock '$file' for reading: $!";truncate$fh,0;seek$fh,0,0}else {open$fh,">:unix:encoding(UTF-8)",$file}print {$fh}$self->_dump_string;unless (close$fh){$self->_error("Failed to close file '$file': $!")}return 1}sub _dump_string {my$self=shift;return '' unless ref$self && @$self;my$indent=0;my@lines=();eval {for my$cursor (@$self){push@lines,'---';if (!defined$cursor){}elsif (!ref$cursor){$lines[-1].= ' ' .$self->_dump_scalar($cursor)}elsif (ref$cursor eq 'ARRAY'){unless (@$cursor){$lines[-1].= ' []';next}push@lines,$self->_dump_array($cursor,$indent,{})}elsif (ref$cursor eq 'HASH'){unless (%$cursor){$lines[-1].= ' {}';next}push@lines,$self->_dump_hash($cursor,$indent,{})}else {die \("Cannot serialize " .ref($cursor))}}};if (ref $@ eq 'SCALAR'){$self->_error(${$@})}elsif ($@){$self->_error($@)}join '',map {"$_\n"}@lines}sub _has_internal_string_value {my$value=shift;my$b_obj=B::svref_2object(\$value);return$b_obj->FLAGS & B::SVf_POK()}sub _dump_scalar {my$string=$_[1];my$is_key=$_[2];my$has_string_flag=_has_internal_string_value($string);return '~' unless defined$string;return "''" unless length$string;if (Scalar::Util::looks_like_number($string)){if ($is_key || $has_string_flag){return qq['$string']}else {return$string}}if ($string =~ /[\x00-\x09\x0b-\x0d\x0e-\x1f\x7f-\x9f\'\n]/){$string =~ s/\\/\\\\/g;$string =~ s/"/\\"/g;$string =~ s/\n/\\n/g;$string =~ s/[\x85]/\\N/g;$string =~ s/([\x00-\x1f])/\\$UNPRINTABLE[ord($1)]/g;$string =~ s/([\x7f-\x9f])/'\x' . 
sprintf("%X",ord($1))/ge;return qq|"$string"|}if ($string =~ /(?:^[~!@#%&*|>?:,'"`{}\[\]]|^-+$|\s|:\z)/ or $QUOTE{$string}){return "'$string'"}return$string}sub _dump_array {my ($self,$array,$indent,$seen)=@_;if ($seen->{refaddr($array)}++){die \"CPAN::Meta::YAML does not support circular references"}my@lines=();for my$el (@$array){my$line=(' ' x $indent).'-';my$type=ref$el;if (!$type){$line .= ' ' .$self->_dump_scalar($el);push@lines,$line}elsif ($type eq 'ARRAY'){if (@$el){push@lines,$line;push@lines,$self->_dump_array($el,$indent + 1,$seen)}else {$line .= ' []';push@lines,$line}}elsif ($type eq 'HASH'){if (keys %$el){push@lines,$line;push@lines,$self->_dump_hash($el,$indent + 1,$seen)}else {$line .= ' {}';push@lines,$line}}else {die \"CPAN::Meta::YAML does not support $type references"}}@lines}sub _dump_hash {my ($self,$hash,$indent,$seen)=@_;if ($seen->{refaddr($hash)}++){die \"CPAN::Meta::YAML does not support circular references"}my@lines=();for my$name (sort keys %$hash){my$el=$hash->{$name};my$line=(' ' x $indent).$self->_dump_scalar($name,1).":";my$type=ref$el;if (!$type){$line .= ' ' .$self->_dump_scalar($el);push@lines,$line}elsif ($type eq 'ARRAY'){if (@$el){push@lines,$line;push@lines,$self->_dump_array($el,$indent + 1,$seen)}else {$line .= ' []';push@lines,$line}}elsif ($type eq 'HASH'){if (keys %$el){push@lines,$line;push@lines,$self->_dump_hash($el,$indent + 1,$seen)}else {$line .= ' {}';push@lines,$line}}else {die \"CPAN::Meta::YAML does not support $type references"}}@lines}our$errstr='';sub _error {require Carp;$errstr=$_[1];$errstr =~ s/ at \S+ line \d+.*//;Carp::croak($errstr)}my$errstr_warned;sub errstr {require Carp;Carp::carp("CPAN::Meta::YAML->errstr and \$CPAN::Meta::YAML::errstr is deprecated")unless$errstr_warned++;$errstr}use B;my$HAS_FLOCK;sub _can_flock {if (defined$HAS_FLOCK){return$HAS_FLOCK}else {require Config;my$c=\%Config::Config;$HAS_FLOCK=grep {$c->{$_}}qw/d_flock d_fcntl_can_lock d_lockf/;require Fcntl if$HAS_FLOCK;return$HAS_FLOCK}}use Scalar::Util ();BEGIN {local $@;if (eval {Scalar::Util->VERSION(1.18)}){*refaddr=*Scalar::Util::refaddr}else {eval <<'END_PERL'}}delete$CPAN::Meta::YAML::{refaddr};1; + # Scalar::Util failed to load or too old + sub refaddr { + my $pkg = ref($_[0]) or return undef; + if ( !! UNIVERSAL::can($_[0], 'can') ) { + bless $_[0], 'Scalar::Util::Fake'; + } else { + $pkg = undef; + } + "$_[0]" =~ /0x(\w+)/; + my $i = do { no warnings 'portable'; hex $1 }; + bless $_[0], $pkg if defined $pkg; + $i; + } + END_PERL +CPAN_META_YAML + +$fatpacked{"Exporter.pm"} = '#line '.(1+__LINE__).' 
"'.__FILE__."\"\n".<<'EXPORTER'; + package Exporter;require 5.006;our$Debug=0;our$ExportLevel=0;our$Verbose ||=0;our$VERSION='5.70';our (%Cache);sub as_heavy {require Exporter::Heavy;my$c=(caller(1))[3];$c =~ s/.*:://;\&{"Exporter::Heavy::heavy_$c"}}sub export {goto &{as_heavy()}}sub import {my$pkg=shift;my$callpkg=caller($ExportLevel);if ($pkg eq "Exporter" and @_ and $_[0]eq "import"){*{$callpkg."::import"}=\&import;return}my$exports=\@{"$pkg\::EXPORT"};my$fail=${$pkg .'::'}{EXPORT_FAIL}&& \@{"$pkg\::EXPORT_FAIL"};return export$pkg,$callpkg,@_ if$Verbose or $Debug or $fail && @$fail > 1;my$export_cache=($Cache{$pkg}||={});my$args=@_ or @_=@$exports;if ($args and not %$export_cache){s/^&//,$export_cache->{$_}=1 foreach (@$exports,@{"$pkg\::EXPORT_OK"})}my$heavy;if ($args or $fail){($heavy=(/\W/ or $args and not exists$export_cache->{$_}or $fail and @$fail and $_ eq $fail->[0]))and last foreach (@_)}else {($heavy=/\W/)and last foreach (@_)}return export$pkg,$callpkg,($args ? @_ : ())if$heavy;local$SIG{__WARN__}=sub {require Carp;&Carp::carp}if not $SIG{__WARN__};*{"$callpkg\::$_"}=\&{"$pkg\::$_"}foreach @_}sub export_fail {my$self=shift;@_}sub export_to_level {goto &{as_heavy()}}sub export_tags {goto &{as_heavy()}}sub export_ok_tags {goto &{as_heavy()}}sub require_version {goto &{as_heavy()}}1; +EXPORTER + +$fatpacked{"Exporter/Heavy.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'EXPORTER_HEAVY'; + package Exporter::Heavy;use strict;no strict 'refs';require Exporter;our$VERSION=$Exporter::VERSION;sub _rebuild_cache {my ($pkg,$exports,$cache)=@_;s/^&// foreach @$exports;@{$cache}{@$exports}=(1)x @$exports;my$ok=\@{"${pkg}::EXPORT_OK"};if (@$ok){s/^&// foreach @$ok;@{$cache}{@$ok}=(1)x @$ok}}sub heavy_export {my$oldwarn=$SIG{__WARN__};local$SIG{__WARN__}=sub {local$SIG{__WARN__}=$oldwarn;my$text=shift;if ($text =~ s/ at \S*Exporter\S*.pm line \d+.*\n//){require Carp;local$Carp::CarpLevel=1;Carp::carp($text)}else {warn$text}};local$SIG{__DIE__}=sub {require Carp;local$Carp::CarpLevel=1;Carp::croak("$_[0]Illegal null symbol in \@${1}::EXPORT")if $_[0]=~ /^Unable to create sub named "(.*?)::"/};my($pkg,$callpkg,@imports)=@_;my($type,$sym,$cache_is_current,$oops);my($exports,$export_cache)=(\@{"${pkg}::EXPORT"},$Exporter::Cache{$pkg}||={});if (@imports){if (!%$export_cache){_rebuild_cache ($pkg,$exports,$export_cache);$cache_is_current=1}if (grep m{^[/!:]},@imports){my$tagsref=\%{"${pkg}::EXPORT_TAGS"};my$tagdata;my%imports;my($remove,$spec,@names,@allexports);unshift@imports,':DEFAULT' if$imports[0]=~ m/^!/;for$spec (@imports){$remove=$spec =~ s/^!//;if ($spec =~ s/^://){if ($spec eq 'DEFAULT'){@names=@$exports}elsif ($tagdata=$tagsref->{$spec}){@names=@$tagdata}else {warn qq["$spec" is not defined in %${pkg}::EXPORT_TAGS];++$oops;next}}elsif ($spec =~ m:^/(.*)/$:){my$patn=$1;@allexports=keys %$export_cache unless@allexports;@names=grep(/$patn/,@allexports)}else {@names=($spec)}warn "Import ".($remove ? 
"del":"add").": @names " if$Exporter::Verbose;if ($remove){for$sym (@names){delete$imports{$sym}}}else {@imports{@names}=(1)x @names}}@imports=keys%imports}my@carp;for$sym (@imports){if (!$export_cache->{$sym}){if ($sym =~ m/^\d/){$pkg->VERSION($sym);if (@imports==1){@imports=@$exports;last}if (@imports==2 and!$imports[1]){@imports=();last}}elsif ($sym !~ s/^&// ||!$export_cache->{$sym}){unless ($cache_is_current){%$export_cache=();_rebuild_cache ($pkg,$exports,$export_cache);$cache_is_current=1}if (!$export_cache->{$sym}){push@carp,qq["$sym" is not exported by the $pkg module\n];$oops++}}}}if ($oops){require Carp;Carp::croak("@{carp}Can't continue after import errors")}}else {@imports=@$exports}my($fail,$fail_cache)=(\@{"${pkg}::EXPORT_FAIL"},$Exporter::FailCache{$pkg}||={});if (@$fail){if (!%$fail_cache){my@expanded=map {/^\w/ ? ($_,'&'.$_): $_}@$fail;warn "${pkg}::EXPORT_FAIL cached: @expanded" if$Exporter::Verbose;@{$fail_cache}{@expanded}=(1)x @expanded}my@failed;for$sym (@imports){push(@failed,$sym)if$fail_cache->{$sym}}if (@failed){@failed=$pkg->export_fail(@failed);for$sym (@failed){require Carp;Carp::carp(qq["$sym" is not implemented by the $pkg module ],"on this architecture")}if (@failed){require Carp;Carp::croak("Can't continue after import errors")}}}warn "Importing into $callpkg from $pkg: ",join(", ",sort@imports)if$Exporter::Verbose;for$sym (@imports){(*{"${callpkg}::$sym"}=\&{"${pkg}::$sym"},next)unless$sym =~ s/^(\W)//;$type=$1;no warnings 'once';*{"${callpkg}::$sym"}=$type eq '&' ? \&{"${pkg}::$sym"}: $type eq '$' ? \${"${pkg}::$sym"}: $type eq '@' ? \@{"${pkg}::$sym"}: $type eq '%' ? \%{"${pkg}::$sym"}: $type eq '*' ? *{"${pkg}::$sym"}: do {require Carp;Carp::croak("Can't export symbol: $type$sym")}}}sub heavy_export_to_level {my$pkg=shift;my$level=shift;(undef)=shift;my$callpkg=caller($level);$pkg->export($callpkg,@_)}sub _push_tags {my($pkg,$var,$syms)=@_;my@nontag=();my$export_tags=\%{"${pkg}::EXPORT_TAGS"};push(@{"${pkg}::$var"},map {$export_tags->{$_}? @{$export_tags->{$_}}: scalar(push(@nontag,$_),$_)}(@$syms)? @$syms : keys %$export_tags);if (@nontag and $^W){require Carp;Carp::carp(join(", ",@nontag)." are not tags of $pkg")}}sub heavy_require_version {my($self,$wanted)=@_;my$pkg=ref$self || $self;return ${pkg}->VERSION($wanted)}sub heavy_export_tags {_push_tags((caller)[0],"EXPORT",\@_)}sub heavy_export_ok_tags {_push_tags((caller)[0],"EXPORT_OK",\@_)}1; +EXPORTER_HEAVY + +$fatpacked{"File/pushd.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'FILE_PUSHD'; + use strict;use warnings;package File::pushd;our$VERSION='1.009';our@EXPORT=qw(pushd tempd);our@ISA=qw(Exporter);use Exporter;use Carp;use Cwd qw(getcwd abs_path);use File::Path qw(rmtree);use File::Temp qw();use File::Spec;use overload q{""}=>sub {File::Spec->canonpath($_[0]->{_pushd})},fallback=>1;sub pushd {my ($target_dir,$options)=@_;$options->{untaint_pattern}||= qr{^([-+@\w./]+)$};$target_dir="." unless defined$target_dir;croak "Can't locate directory $target_dir" unless -d $target_dir;my$tainted_orig=getcwd;my$orig;if ($tainted_orig =~ $options->{untaint_pattern}){$orig=$1}else {$orig=$tainted_orig}my$tainted_dest;eval {$tainted_dest=$target_dir ? 
abs_path($target_dir): $orig};croak "Can't locate absolute path for $target_dir: $@" if $@;my$dest;if ($tainted_dest =~ $options->{untaint_pattern}){$dest=$1}else {$dest=$tainted_dest}if ($dest ne $orig){chdir$dest or croak "Can't chdir to $dest\: $!"}my$self=bless {_pushd=>$dest,_original=>$orig },__PACKAGE__;return$self}sub tempd {my ($options)=@_;my$dir;eval {$dir=pushd(File::Temp::tempdir(CLEANUP=>0),$options)};croak $@ if $@;$dir->{_tempd}=1;return$dir}sub preserve {my$self=shift;return 1 if!$self->{"_tempd"};if (@_==0){return$self->{_preserve}=1}else {return$self->{_preserve}=$_[0]? 1 : 0}}sub DESTROY {my ($self)=@_;my$orig=$self->{_original};chdir$orig if$orig;if ($self->{_tempd}&&!$self->{_preserve}){my$err=do {local $@;eval {rmtree($self->{_pushd})};$@};carp$err if$err}}1; +FILE_PUSHD + +$fatpacked{"HTTP/Tiny.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'HTTP_TINY'; + package HTTP::Tiny;use strict;use warnings;our$VERSION='0.056';use Carp ();my@attributes;BEGIN {@attributes=qw(cookie_jar default_headers http_proxy https_proxy keep_alive local_address max_redirect max_size proxy no_proxy timeout SSL_options verify_SSL);my%persist_ok=map {;$_=>1}qw(cookie_jar default_headers max_redirect max_size);no strict 'refs';no warnings 'uninitialized';for my$accessor (@attributes){*{$accessor}=sub {@_ > 1 ? do {delete $_[0]->{handle}if!$persist_ok{$accessor}&& $_[1]ne $_[0]->{$accessor};$_[0]->{$accessor}=$_[1]}: $_[0]->{$accessor}}}}sub agent {my($self,$agent)=@_;if(@_ > 1){$self->{agent}=(defined$agent && $agent =~ / $/)? $agent .$self->_agent : $agent}return$self->{agent}}sub new {my($class,%args)=@_;my$self={max_redirect=>5,timeout=>60,keep_alive=>1,verify_SSL=>$args{verify_SSL}|| $args{verify_ssl}|| 0,no_proxy=>$ENV{no_proxy},};bless$self,$class;$class->_validate_cookie_jar($args{cookie_jar})if$args{cookie_jar};for my$key (@attributes){$self->{$key}=$args{$key}if exists$args{$key}}$self->agent(exists$args{agent}? $args{agent}: $class->_agent);$self->_set_proxies;return$self}sub _set_proxies {my ($self)=@_;if (!exists$self->{proxy}){$self->{proxy}=$ENV{all_proxy}|| $ENV{ALL_PROXY}}if (defined$self->{proxy}){$self->_split_proxy('generic proxy'=>$self->{proxy})}else {delete$self->{proxy}}if (!exists$self->{http_proxy}){local$ENV{HTTP_PROXY}if$ENV{REQUEST_METHOD};$self->{http_proxy}=$ENV{http_proxy}|| $ENV{HTTP_PROXY}|| $self->{proxy}}if (defined$self->{http_proxy}){$self->_split_proxy(http_proxy=>$self->{http_proxy});$self->{_has_proxy}{http}=1}else {delete$self->{http_proxy}}if (!exists$self->{https_proxy}){$self->{https_proxy}=$ENV{https_proxy}|| $ENV{HTTPS_PROXY}|| $self->{proxy}}if ($self->{https_proxy}){$self->_split_proxy(https_proxy=>$self->{https_proxy});$self->{_has_proxy}{https}=1}else {delete$self->{https_proxy}}unless (ref$self->{no_proxy}eq 'ARRAY'){$self->{no_proxy}=(defined$self->{no_proxy})? 
[split /\s*,\s*/,$self->{no_proxy}]: []}return}for my$sub_name (qw/get head put post delete/){my$req_method=uc$sub_name;no strict 'refs';eval <<"HERE"}sub post_form {my ($self,$url,$data,$args)=@_;(@_==3 || @_==4 && ref$args eq 'HASH')or Carp::croak(q/Usage: $http->post_form(URL, DATAREF, [HASHREF])/ ."\n");my$headers={};while (my ($key,$value)=each %{$args->{headers}|| {}}){$headers->{lc$key}=$value}delete$args->{headers};return$self->request('POST',$url,{%$args,content=>$self->www_form_urlencode($data),headers=>{%$headers,'content-type'=>'application/x-www-form-urlencoded' },})}sub mirror {my ($self,$url,$file,$args)=@_;@_==3 || (@_==4 && ref$args eq 'HASH')or Carp::croak(q/Usage: $http->mirror(URL, FILE, [HASHREF])/ ."\n");if (-e $file and my$mtime=(stat($file))[9]){$args->{headers}{'if-modified-since'}||= $self->_http_date($mtime)}my$tempfile=$file .int(rand(2**31));require Fcntl;sysopen my$fh,$tempfile,Fcntl::O_CREAT()|Fcntl::O_EXCL()|Fcntl::O_WRONLY()or Carp::croak(qq/Error: Could not create temporary file $tempfile for downloading: $!\n/);binmode$fh;$args->{data_callback}=sub {print {$fh}$_[0]};my$response=$self->request('GET',$url,$args);close$fh or Carp::croak(qq/Error: Caught error closing temporary file $tempfile: $!\n/);if ($response->{success}){rename$tempfile,$file or Carp::croak(qq/Error replacing $file with $tempfile: $!\n/);my$lm=$response->{headers}{'last-modified'};if ($lm and my$mtime=$self->_parse_http_date($lm)){utime$mtime,$mtime,$file}}$response->{success}||= $response->{status}eq '304';unlink$tempfile;return$response}my%idempotent=map {$_=>1}qw/GET HEAD PUT DELETE OPTIONS TRACE/;sub request {my ($self,$method,$url,$args)=@_;@_==3 || (@_==4 && ref$args eq 'HASH')or Carp::croak(q/Usage: $http->request(METHOD, URL, [HASHREF])/ ."\n");$args ||= {};my$response;for (0 .. 1){$response=eval {$self->_request($method,$url,$args)};last unless $@ && $idempotent{$method}&& $@ =~ m{^(?:Socket closed|Unexpected end)}}if (my$e=$@){if (ref$e eq 'HASH' && exists$e->{status}){return$e}$e="$e";$response={url=>$url,success=>q{},status=>599,reason=>'Internal Exception',content=>$e,headers=>{'content-type'=>'text/plain','content-length'=>length$e,}}}return$response}sub www_form_urlencode {my ($self,$data)=@_;(@_==2 && ref$data)or Carp::croak(q/Usage: $http->www_form_urlencode(DATAREF)/ ."\n");(ref$data eq 'HASH' || ref$data eq 'ARRAY')or Carp::croak("form data must be a hash or array reference\n");my@params=ref$data eq 'HASH' ? %$data : @$data;@params % 2==0 or Carp::croak("form data reference must have an even number of terms\n");my@terms;while(@params){my ($key,$value)=splice(@params,0,2);if (ref$value eq 'ARRAY'){unshift@params,map {$key=>$_}@$value}else {push@terms,join("=",map {$self->_uri_escape($_)}$key,$value)}}return join("&",(ref$data eq 'ARRAY')? (@terms): (sort@terms))}sub can_ssl {my ($self)=@_;my($ok,$reason)=(1,'');unless (eval {require IO::Socket::SSL;IO::Socket::SSL->VERSION(1.42)}){$ok=0;$reason .= qq/IO::Socket::SSL 1.42 must be installed for https support\n/}unless (eval {require Net::SSLeay;Net::SSLeay->VERSION(1.49)}){$ok=0;$reason .= qq/Net::SSLeay 1.49 must be installed for https support\n/}if (ref($self)&& ($self->{verify_SSL}|| $self->{SSL_options}{SSL_verify_mode})){my$handle=HTTP::Tiny::Handle->new(SSL_options=>$self->{SSL_options},verify_SSL=>$self->{verify_SSL},);unless (eval {$handle->_find_CA_file;1}){$ok=0;$reason .= "$@"}}wantarray ? 
($ok,$reason): $ok}my%DefaultPort=(http=>80,https=>443,);sub _agent {my$class=ref($_[0])|| $_[0];(my$default_agent=$class)=~ s{::}{-}g;return$default_agent ."/" .$class->VERSION}sub _request {my ($self,$method,$url,$args)=@_;my ($scheme,$host,$port,$path_query,$auth)=$self->_split_url($url);my$request={method=>$method,scheme=>$scheme,host=>$host,port=>$port,host_port=>($port==$DefaultPort{$scheme}? $host : "$host:$port"),uri=>$path_query,headers=>{},};my$handle=delete$self->{handle};if ($handle){unless ($handle->can_reuse($scheme,$host,$port)){$handle->close;undef$handle}}$handle ||= $self->_open_handle($request,$scheme,$host,$port);$self->_prepare_headers_and_cb($request,$args,$url,$auth);$handle->write_request($request);my$response;do {$response=$handle->read_response_header}until (substr($response->{status},0,1)ne '1');$self->_update_cookie_jar($url,$response)if$self->{cookie_jar};if (my@redir_args=$self->_maybe_redirect($request,$response,$args)){$handle->close;return$self->_request(@redir_args,$args)}my$known_message_length;if ($method eq 'HEAD' || $response->{status}=~ /^[23]04/){$known_message_length=1}else {my$data_cb=$self->_prepare_data_cb($response,$args);$known_message_length=$handle->read_body($data_cb,$response)}if ($self->{keep_alive}&& $known_message_length && $response->{protocol}eq 'HTTP/1.1' && ($response->{headers}{connection}|| '')ne 'close'){$self->{handle}=$handle}else {$handle->close}$response->{success}=substr($response->{status},0,1)eq '2';$response->{url}=$url;return$response}sub _open_handle {my ($self,$request,$scheme,$host,$port)=@_;my$handle=HTTP::Tiny::Handle->new(timeout=>$self->{timeout},SSL_options=>$self->{SSL_options},verify_SSL=>$self->{verify_SSL},local_address=>$self->{local_address},keep_alive=>$self->{keep_alive});if ($self->{_has_proxy}{$scheme}&&!grep {$host =~ /\Q$_\E$/}@{$self->{no_proxy}}){return$self->_proxy_connect($request,$handle)}else {return$handle->connect($scheme,$host,$port)}}sub _proxy_connect {my ($self,$request,$handle)=@_;my@proxy_vars;if ($request->{scheme}eq 'https'){Carp::croak(qq{No https_proxy defined})unless$self->{https_proxy};@proxy_vars=$self->_split_proxy(https_proxy=>$self->{https_proxy});if ($proxy_vars[0]eq 'https'){Carp::croak(qq{Can't proxy https over https: $request->{uri} via $self->{https_proxy}})}}else {Carp::croak(qq{No http_proxy defined})unless$self->{http_proxy};@proxy_vars=$self->_split_proxy(http_proxy=>$self->{http_proxy})}my ($p_scheme,$p_host,$p_port,$p_auth)=@proxy_vars;if (length$p_auth &&!defined$request->{headers}{'proxy-authorization'}){$self->_add_basic_auth_header($request,'proxy-authorization'=>$p_auth)}$handle->connect($p_scheme,$p_host,$p_port);if ($request->{scheme}eq 'https'){$self->_create_proxy_tunnel($request,$handle)}else {$request->{uri}="$request->{scheme}://$request->{host_port}$request->{uri}"}return$handle}sub _split_proxy {my ($self,$type,$proxy)=@_;my ($scheme,$host,$port,$path_query,$auth)=eval {$self->_split_url($proxy)};unless(defined($scheme)&& length($scheme)&& length($host)&& length($port)&& $path_query eq '/'){Carp::croak(qq{$type URL must be in format http[s]://[auth@]:/\n})}return ($scheme,$host,$port,$auth)}sub _create_proxy_tunnel {my ($self,$request,$handle)=@_;$handle->_assert_ssl;my$agent=exists($request->{headers}{'user-agent'})? 
$request->{headers}{'user-agent'}: $self->{agent};my$connect_request={method=>'CONNECT',uri=>"$request->{host}:$request->{port}",headers=>{host=>"$request->{host}:$request->{port}",'user-agent'=>$agent,}};if ($request->{headers}{'proxy-authorization'}){$connect_request->{headers}{'proxy-authorization'}=delete$request->{headers}{'proxy-authorization'}}$handle->write_request($connect_request);my$response;do {$response=$handle->read_response_header}until (substr($response->{status},0,1)ne '1');unless (substr($response->{status},0,1)eq '2'){die$response}$handle->start_ssl($request->{host});return}sub _prepare_headers_and_cb {my ($self,$request,$args,$url,$auth)=@_;for ($self->{default_headers},$args->{headers}){next unless defined;while (my ($k,$v)=each %$_){$request->{headers}{lc$k}=$v}}if (exists$request->{headers}{'host'}){die(qq/The 'Host' header must not be provided as header option\n/)}$request->{headers}{'host'}=$request->{host_port};$request->{headers}{'user-agent'}||= $self->{agent};$request->{headers}{'connection'}="close" unless$self->{keep_alive};if (defined$args->{content}){if (ref$args->{content}eq 'CODE'){$request->{headers}{'content-type'}||= "application/octet-stream";$request->{headers}{'transfer-encoding'}='chunked' unless$request->{headers}{'content-length'}|| $request->{headers}{'transfer-encoding'};$request->{cb}=$args->{content}}elsif (length$args->{content}){my$content=$args->{content};if ($] ge '5.008'){utf8::downgrade($content,1)or die(qq/Wide character in request message body\n/)}$request->{headers}{'content-type'}||= "application/octet-stream";$request->{headers}{'content-length'}=length$content unless$request->{headers}{'content-length'}|| $request->{headers}{'transfer-encoding'};$request->{cb}=sub {substr$content,0,length$content,''}}$request->{trailer_cb}=$args->{trailer_callback}if ref$args->{trailer_callback}eq 'CODE'}if ($self->{cookie_jar}){my$cookies=$self->cookie_jar->cookie_header($url);$request->{headers}{cookie}=$cookies if length$cookies}if (length$auth &&!defined$request->{headers}{authorization}){$self->_add_basic_auth_header($request,'authorization'=>$auth)}return}sub _add_basic_auth_header {my ($self,$request,$header,$auth)=@_;require MIME::Base64;$request->{headers}{$header}="Basic " .MIME::Base64::encode_base64($auth,"");return}sub _prepare_data_cb {my ($self,$response,$args)=@_;my$data_cb=$args->{data_callback};$response->{content}='';if (!$data_cb || $response->{status}!~ /^2/){if (defined$self->{max_size}){$data_cb=sub {$_[1]->{content}.= $_[0];die(qq/Size of response body exceeds the maximum allowed of $self->{max_size}\n/)if length $_[1]->{content}> $self->{max_size}}}else {$data_cb=sub {$_[1]->{content}.= $_[0]}}}return$data_cb}sub _update_cookie_jar {my ($self,$url,$response)=@_;my$cookies=$response->{headers}->{'set-cookie'};return unless defined$cookies;my@cookies=ref$cookies ? @$cookies : $cookies;$self->cookie_jar->add($url,$_)for@cookies;return}sub _validate_cookie_jar {my ($class,$jar)=@_;for my$method (qw/add cookie_header/){Carp::croak(qq/Cookie jar must provide the '$method' method\n/)unless ref($jar)&& ref($jar)->can($method)}return}sub _maybe_redirect {my ($self,$request,$response,$args)=@_;my$headers=$response->{headers};my ($status,$method)=($response->{status},$request->{method});if (($status eq '303' or ($status =~ /^30[1278]/ && $method =~ /^GET|HEAD$/))and $headers->{location}and ++$args->{redirects}<= $self->{max_redirect}){my$location=($headers->{location}=~ /^\//)? 
"$request->{scheme}://$request->{host_port}$headers->{location}" : $headers->{location};return (($status eq '303' ? 'GET' : $method),$location)}return}sub _split_url {my$url=pop;my ($scheme,$host,$path_query)=$url =~ m<\A([^:/?#]+)://([^/?#]*)([^#]*)> or die(qq/Cannot parse URL: '$url'\n/);$scheme=lc$scheme;$path_query="/$path_query" unless$path_query =~ m<\A/>;my$auth='';if ((my$i=index$host,'@')!=-1){$auth=substr$host,0,$i,'';substr$host,0,1,'';$auth =~ s/%([0-9A-Fa-f]{2})/chr(hex($1))/eg}my$port=$host =~ s/:(\d*)\z// && length $1 ? $1 : $scheme eq 'http' ? 80 : $scheme eq 'https' ? 443 : undef;return ($scheme,(length$host ? lc$host : "localhost"),$port,$path_query,$auth)}my$DoW="Sun|Mon|Tue|Wed|Thu|Fri|Sat";my$MoY="Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec";sub _http_date {my ($sec,$min,$hour,$mday,$mon,$year,$wday)=gmtime($_[1]);return sprintf("%s, %02d %s %04d %02d:%02d:%02d GMT",substr($DoW,$wday*4,3),$mday,substr($MoY,$mon*4,3),$year+1900,$hour,$min,$sec)}sub _parse_http_date {my ($self,$str)=@_;require Time::Local;my@tl_parts;if ($str =~ /^[SMTWF][a-z]+, +(\d{1,2}) ($MoY) +(\d\d\d\d) +(\d\d):(\d\d):(\d\d) +GMT$/){@tl_parts=($6,$5,$4,$1,(index($MoY,$2)/4),$3)}elsif ($str =~ /^[SMTWF][a-z]+, +(\d\d)-($MoY)-(\d{2,4}) +(\d\d):(\d\d):(\d\d) +GMT$/){@tl_parts=($6,$5,$4,$1,(index($MoY,$2)/4),$3)}elsif ($str =~ /^[SMTWF][a-z]+ +($MoY) +(\d{1,2}) +(\d\d):(\d\d):(\d\d) +(?:[^0-9]+ +)?(\d\d\d\d)$/){@tl_parts=($5,$4,$3,$2,(index($MoY,$1)/4),$6)}return eval {my$t=@tl_parts ? Time::Local::timegm(@tl_parts): -1;$t < 0 ? undef : $t}}my%escapes=map {chr($_)=>sprintf("%%%02X",$_)}0..255;$escapes{' '}="+";my$unsafe_char=qr/[^A-Za-z0-9\-\._~]/;sub _uri_escape {my ($self,$str)=@_;if ($] ge '5.008'){utf8::encode($str)}else {$str=pack("U*",unpack("C*",$str))if (length$str==do {use bytes;length$str});$str=pack("C*",unpack("C*",$str))}$str =~ s/($unsafe_char)/$escapes{$1}/ge;return$str}package HTTP::Tiny::Handle;use strict;use warnings;use Errno qw[EINTR EPIPE];use IO::Socket qw[SOCK_STREAM];my$SOCKET_CLASS=$ENV{PERL_HTTP_TINY_IPV4_ONLY}? 'IO::Socket::INET' : eval {require IO::Socket::IP;IO::Socket::IP->VERSION(0.25)}? 'IO::Socket::IP' : 'IO::Socket::INET';sub BUFSIZE () {32768}my$Printable=sub {local $_=shift;s/\r/\\r/g;s/\n/\\n/g;s/\t/\\t/g;s/([^\x20-\x7E])/sprintf('\\x%.2X', ord($1))/ge;$_};my$Token=qr/[\x21\x23-\x27\x2A\x2B\x2D\x2E\x30-\x39\x41-\x5A\x5E-\x7A\x7C\x7E]/;sub new {my ($class,%args)=@_;return bless {rbuf=>'',timeout=>60,max_line_size=>16384,max_header_lines=>64,verify_SSL=>0,SSL_options=>{},%args },$class}sub connect {@_==4 || die(q/Usage: $handle->connect(scheme, host, port)/ ."\n");my ($self,$scheme,$host,$port)=@_;if ($scheme eq 'https'){$self->_assert_ssl}elsif ($scheme ne 'http'){die(qq/Unsupported URL scheme '$scheme'\n/)}$self->{fh}=$SOCKET_CLASS->new(PeerHost=>$host,PeerPort=>$port,$self->{local_address}? 
(LocalAddr=>$self->{local_address}): (),Proto=>'tcp',Type=>SOCK_STREAM,Timeout=>$self->{timeout},KeepAlive=>!!$self->{keep_alive})or die(qq/Could not connect to '$host:$port': $@\n/);binmode($self->{fh})or die(qq/Could not binmode() socket: '$!'\n/);$self->start_ssl($host)if$scheme eq 'https';$self->{scheme}=$scheme;$self->{host}=$host;$self->{port}=$port;$self->{pid}=$$;$self->{tid}=_get_tid();return$self}sub start_ssl {my ($self,$host)=@_;if (ref($self->{fh})eq 'IO::Socket::SSL'){unless ($self->{fh}->stop_SSL){my$ssl_err=IO::Socket::SSL->errstr;die(qq/Error halting prior SSL connection: $ssl_err/)}}my$ssl_args=$self->_ssl_args($host);IO::Socket::SSL->start_SSL($self->{fh},%$ssl_args,SSL_create_ctx_callback=>sub {my$ctx=shift;Net::SSLeay::CTX_set_mode($ctx,Net::SSLeay::MODE_AUTO_RETRY())},);unless (ref($self->{fh})eq 'IO::Socket::SSL'){my$ssl_err=IO::Socket::SSL->errstr;die(qq/SSL connection failed for $host: $ssl_err\n/)}}sub close {@_==1 || die(q/Usage: $handle->close()/ ."\n");my ($self)=@_;CORE::close($self->{fh})or die(qq/Could not close socket: '$!'\n/)}sub write {@_==2 || die(q/Usage: $handle->write(buf)/ ."\n");my ($self,$buf)=@_;if ($] ge '5.008'){utf8::downgrade($buf,1)or die(qq/Wide character in write()\n/)}my$len=length$buf;my$off=0;local$SIG{PIPE}='IGNORE';while (){$self->can_write or die(qq/Timed out while waiting for socket to become ready for writing\n/);my$r=syswrite($self->{fh},$buf,$len,$off);if (defined$r){$len -= $r;$off += $r;last unless$len > 0}elsif ($!==EPIPE){die(qq/Socket closed by remote server: $!\n/)}elsif ($!!=EINTR){if ($self->{fh}->can('errstr')){my$err=$self->{fh}->errstr();die (qq/Could not write to SSL socket: '$err'\n /)}else {die(qq/Could not write to socket: '$!'\n/)}}}return$off}sub read {@_==2 || @_==3 || die(q/Usage: $handle->read(len [, allow_partial])/ ."\n");my ($self,$len,$allow_partial)=@_;my$buf='';my$got=length$self->{rbuf};if ($got){my$take=($got < $len)? 
$got : $len;$buf=substr($self->{rbuf},0,$take,'');$len -= $take}while ($len > 0){$self->can_read or die(q/Timed out while waiting for socket to become ready for reading/ ."\n");my$r=sysread($self->{fh},$buf,$len,length$buf);if (defined$r){last unless$r;$len -= $r}elsif ($!!=EINTR){if ($self->{fh}->can('errstr')){my$err=$self->{fh}->errstr();die (qq/Could not read from SSL socket: '$err'\n /)}else {die(qq/Could not read from socket: '$!'\n/)}}}if ($len &&!$allow_partial){die(qq/Unexpected end of stream\n/)}return$buf}sub readline {@_==1 || die(q/Usage: $handle->readline()/ ."\n");my ($self)=@_;while (){if ($self->{rbuf}=~ s/\A ([^\x0D\x0A]* \x0D?\x0A)//x){return $1}if (length$self->{rbuf}>= $self->{max_line_size}){die(qq/Line size exceeds the maximum allowed size of $self->{max_line_size}\n/)}$self->can_read or die(qq/Timed out while waiting for socket to become ready for reading\n/);my$r=sysread($self->{fh},$self->{rbuf},BUFSIZE,length$self->{rbuf});if (defined$r){last unless$r}elsif ($!!=EINTR){if ($self->{fh}->can('errstr')){my$err=$self->{fh}->errstr();die (qq/Could not read from SSL socket: '$err'\n /)}else {die(qq/Could not read from socket: '$!'\n/)}}}die(qq/Unexpected end of stream while looking for line\n/)}sub read_header_lines {@_==1 || @_==2 || die(q/Usage: $handle->read_header_lines([headers])/ ."\n");my ($self,$headers)=@_;$headers ||= {};my$lines=0;my$val;while (){my$line=$self->readline;if (++$lines >= $self->{max_header_lines}){die(qq/Header lines exceeds maximum number allowed of $self->{max_header_lines}\n/)}elsif ($line =~ /\A ([^\x00-\x1F\x7F:]+) : [\x09\x20]* ([^\x0D\x0A]*)/x){my ($field_name)=lc $1;if (exists$headers->{$field_name}){for ($headers->{$field_name}){$_=[$_]unless ref $_ eq "ARRAY";push @$_,$2;$val=\$_->[-1]}}else {$val=\($headers->{$field_name}=$2)}}elsif ($line =~ /\A [\x09\x20]+ ([^\x0D\x0A]*)/x){$val or die(qq/Unexpected header continuation line\n/);next unless length $1;$$val .= ' ' if length $$val;$$val .= $1}elsif ($line =~ /\A \x0D?\x0A \z/x){last}else {die(q/Malformed header line: / .$Printable->($line)."\n")}}return$headers}sub write_request {@_==2 || die(q/Usage: $handle->write_request(request)/ ."\n");my($self,$request)=@_;$self->write_request_header(@{$request}{qw/method uri headers/});$self->write_body($request)if$request->{cb};return}my%HeaderCase=('content-md5'=>'Content-MD5','etag'=>'ETag','te'=>'TE','www-authenticate'=>'WWW-Authenticate','x-xss-protection'=>'X-XSS-Protection',);sub write_header_lines {(@_==2 || @_==3 && ref $_[1]eq 'HASH')|| die(q/Usage: $handle->write_header_lines(headers[,prefix])/ ."\n");my($self,$headers,$prefix_data)=@_;my$buf=(defined$prefix_data ? $prefix_data : '');while (my ($k,$v)=each %$headers){my$field_name=lc$k;if (exists$HeaderCase{$field_name}){$field_name=$HeaderCase{$field_name}}else {$field_name =~ /\A $Token+ \z/xo or die(q/Invalid HTTP header field name: / .$Printable->($field_name)."\n");$field_name =~ s/\b(\w)/\u$1/g;$HeaderCase{lc$field_name}=$field_name}for (ref$v eq 'ARRAY' ? @$v : $v){$_='' unless defined $_;$buf .= "$field_name: $_\x0D\x0A"}}$buf .= "\x0D\x0A";return$self->write($buf)}sub read_body {@_==3 || die(q/Usage: $handle->read_body(callback, response)/ ."\n");my ($self,$cb,$response)=@_;my$te=$response->{headers}{'transfer-encoding'}|| '';my$chunked=grep {/chunked/i}(ref$te eq 'ARRAY' ? @$te : $te);return$chunked ? 
$self->read_chunked_body($cb,$response): $self->read_content_body($cb,$response)}sub write_body {@_==2 || die(q/Usage: $handle->write_body(request)/ ."\n");my ($self,$request)=@_;if ($request->{headers}{'content-length'}){return$self->write_content_body($request)}else {return$self->write_chunked_body($request)}}sub read_content_body {@_==3 || @_==4 || die(q/Usage: $handle->read_content_body(callback, response, [read_length])/ ."\n");my ($self,$cb,$response,$content_length)=@_;$content_length ||= $response->{headers}{'content-length'};if (defined$content_length){my$len=$content_length;while ($len > 0){my$read=($len > BUFSIZE)? BUFSIZE : $len;$cb->($self->read($read,0),$response);$len -= $read}return length($self->{rbuf})==0}my$chunk;$cb->($chunk,$response)while length($chunk=$self->read(BUFSIZE,1));return}sub write_content_body {@_==2 || die(q/Usage: $handle->write_content_body(request)/ ."\n");my ($self,$request)=@_;my ($len,$content_length)=(0,$request->{headers}{'content-length'});while (){my$data=$request->{cb}->();defined$data && length$data or last;if ($] ge '5.008'){utf8::downgrade($data,1)or die(qq/Wide character in write_content()\n/)}$len += $self->write($data)}$len==$content_length or die(qq/Content-Length mismatch (got: $len expected: $content_length)\n/);return$len}sub read_chunked_body {@_==3 || die(q/Usage: $handle->read_chunked_body(callback, $response)/ ."\n");my ($self,$cb,$response)=@_;while (){my$head=$self->readline;$head =~ /\A ([A-Fa-f0-9]+)/x or die(q/Malformed chunk head: / .$Printable->($head)."\n");my$len=hex($1)or last;$self->read_content_body($cb,$response,$len);$self->read(2)eq "\x0D\x0A" or die(qq/Malformed chunk: missing CRLF after chunk data\n/)}$self->read_header_lines($response->{headers});return 1}sub write_chunked_body {@_==2 || die(q/Usage: $handle->write_chunked_body(request)/ ."\n");my ($self,$request)=@_;my$len=0;while (){my$data=$request->{cb}->();defined$data && length$data or last;if ($] ge '5.008'){utf8::downgrade($data,1)or die(qq/Wide character in write_chunked_body()\n/)}$len += length$data;my$chunk=sprintf '%X',length$data;$chunk .= "\x0D\x0A";$chunk .= $data;$chunk .= "\x0D\x0A";$self->write($chunk)}$self->write("0\x0D\x0A");$self->write_header_lines($request->{trailer_cb}->())if ref$request->{trailer_cb}eq 'CODE';return$len}sub read_response_header {@_==1 || die(q/Usage: $handle->read_response_header()/ ."\n");my ($self)=@_;my$line=$self->readline;$line =~ /\A (HTTP\/(0*\d+\.0*\d+)) [\x09\x20]+ ([0-9]{3}) [\x09\x20]+ ([^\x0D\x0A]*) \x0D?\x0A/x or die(q/Malformed Status-Line: / .$Printable->($line)."\n");my ($protocol,$version,$status,$reason)=($1,$2,$3,$4);die (qq/Unsupported HTTP protocol: $protocol\n/)unless$version =~ /0*1\.0*[01]/;return {status=>$status,reason=>$reason,headers=>$self->read_header_lines,protocol=>$protocol,}}sub write_request_header {@_==4 || die(q/Usage: $handle->write_request_header(method, request_uri, headers)/ ."\n");my ($self,$method,$request_uri,$headers)=@_;return$self->write_header_lines($headers,"$method $request_uri HTTP/1.1\x0D\x0A")}sub _do_timeout {my ($self,$type,$timeout)=@_;$timeout=$self->{timeout}unless defined$timeout && $timeout >= 0;my$fd=fileno$self->{fh};defined$fd && $fd >= 0 or die(qq/select(2): 'Bad file descriptor'\n/);my$initial=time;my$pending=$timeout;my$nfound;vec(my$fdset='',$fd,1)=1;while (){$nfound=($type eq 'read')? 
select($fdset,undef,undef,$pending): select(undef,$fdset,undef,$pending);if ($nfound==-1){$!==EINTR or die(qq/select(2): '$!'\n/);redo if!$timeout || ($pending=$timeout - (time - $initial))> 0;$nfound=0}last}$!=0;return$nfound}sub can_read {@_==1 || @_==2 || die(q/Usage: $handle->can_read([timeout])/ ."\n");my$self=shift;if (ref($self->{fh})eq 'IO::Socket::SSL'){return 1 if$self->{fh}->pending}return$self->_do_timeout('read',@_)}sub can_write {@_==1 || @_==2 || die(q/Usage: $handle->can_write([timeout])/ ."\n");my$self=shift;return$self->_do_timeout('write',@_)}sub _assert_ssl {my($ok,$reason)=HTTP::Tiny->can_ssl();die$reason unless$ok}sub can_reuse {my ($self,$scheme,$host,$port)=@_;return 0 if $self->{pid}!=$$ || $self->{tid}!=_get_tid()|| length($self->{rbuf})|| $scheme ne $self->{scheme}|| $host ne $self->{host}|| $port ne $self->{port}|| eval {$self->can_read(0)}|| $@ ;return 1}sub _find_CA_file {my$self=shift();if ($self->{SSL_options}->{SSL_ca_file}){unless (-r $self->{SSL_options}->{SSL_ca_file}){die qq/SSL_ca_file '$self->{SSL_options}->{SSL_ca_file}' not found or not readable\n/}return$self->{SSL_options}->{SSL_ca_file}}return Mozilla::CA::SSL_ca_file()if eval {require Mozilla::CA;1};for my$ca_bundle ("/etc/ssl/certs/ca-certificates.crt","/etc/pki/tls/certs/ca-bundle.crt","/etc/ssl/ca-bundle.pem","/etc/openssl/certs/ca-certificates.crt","/etc/ssl/cert.pem","/usr/local/share/certs/ca-root-nss.crt","/etc/pki/tls/cacert.pem","/etc/certs/ca-certificates.crt",){return$ca_bundle if -e $ca_bundle}die qq/Couldn't find a CA bundle with which to verify the SSL certificate.\n/ .qq/Try installing Mozilla::CA from CPAN\n/}sub _get_tid {no warnings 'reserved';return threads->can("tid")? threads->tid : 0}sub _ssl_args {my ($self,$host)=@_;my%ssl_args;if (Net::SSLeay::OPENSSL_VERSION_NUMBER()>= 0x01000000){$ssl_args{SSL_hostname}=$host,}if ($self->{verify_SSL}){$ssl_args{SSL_verifycn_scheme}='http';$ssl_args{SSL_verifycn_name}=$host;$ssl_args{SSL_verify_mode}=0x01;$ssl_args{SSL_ca_file}=$self->_find_CA_file}else {$ssl_args{SSL_verifycn_scheme}='none';$ssl_args{SSL_verify_mode}=0x00}for my$k (keys %{$self->{SSL_options}}){$ssl_args{$k}=$self->{SSL_options}{$k}if$k =~ m/^SSL_/}return \%ssl_args}1; + sub $sub_name { + my (\$self, \$url, \$args) = \@_; + \@_ == 2 || (\@_ == 3 && ref \$args eq 'HASH') + or Carp::croak(q/Usage: \$http->$sub_name(URL, [HASHREF])/ . "\n"); + return \$self->request('$req_method', \$url, \$args || {}); + } + HERE +HTTP_TINY + +$fatpacked{"JSON/PP.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'JSON_PP'; + package JSON::PP;use 5.005;use strict;use base qw(Exporter);use overload ();use Carp ();use B ();$JSON::PP::VERSION='2.27300';@JSON::PP::EXPORT=qw(encode_json decode_json from_json to_json);use constant P_ASCII=>0;use constant P_LATIN1=>1;use constant P_UTF8=>2;use constant P_INDENT=>3;use constant P_CANONICAL=>4;use constant P_SPACE_BEFORE=>5;use constant P_SPACE_AFTER=>6;use constant P_ALLOW_NONREF=>7;use constant P_SHRINK=>8;use constant P_ALLOW_BLESSED=>9;use constant P_CONVERT_BLESSED=>10;use constant P_RELAXED=>11;use constant P_LOOSE=>12;use constant P_ALLOW_BIGNUM=>13;use constant P_ALLOW_BAREKEY=>14;use constant P_ALLOW_SINGLEQUOTE=>15;use constant P_ESCAPE_SLASH=>16;use constant P_AS_NONBLESSED=>17;use constant P_ALLOW_UNKNOWN=>18;use constant OLD_PERL=>$] < 5.008 ? 
1 : 0;BEGIN {my@xs_compati_bit_properties=qw(latin1 ascii utf8 indent canonical space_before space_after allow_nonref shrink allow_blessed convert_blessed relaxed allow_unknown);my@pp_bit_properties=qw(allow_singlequote allow_bignum loose allow_barekey escape_slash as_nonblessed);if ($] < 5.008){my$helper=$] >= 5.006 ? 'JSON::PP::Compat5006' : 'JSON::PP::Compat5005';eval qq| require $helper |;if ($@){Carp::croak $@}}for my$name (@xs_compati_bit_properties,@pp_bit_properties){my$flag_name='P_' .uc($name);eval qq/ + sub $name { + my \$enable = defined \$_[1] ? \$_[1] : 1; + + if (\$enable) { + \$_[0]->{PROPS}->[$flag_name] = 1; + } + else { + \$_[0]->{PROPS}->[$flag_name] = 0; + } + + \$_[0]; + } + + sub get_$name { + \$_[0]->{PROPS}->[$flag_name] ? 1 : ''; + } + /}}my%encode_allow_method =map {($_=>1)}qw/utf8 pretty allow_nonref latin1 self_encode escape_slash allow_blessed convert_blessed indent indent_length allow_bignum as_nonblessed/;my%decode_allow_method =map {($_=>1)}qw/utf8 allow_nonref loose allow_singlequote allow_bignum allow_barekey max_size relaxed/;my$JSON;sub encode_json ($) {($JSON ||= __PACKAGE__->new->utf8)->encode(@_)}sub decode_json {($JSON ||= __PACKAGE__->new->utf8)->decode(@_)}sub to_json($) {Carp::croak ("JSON::PP::to_json has been renamed to encode_json.")}sub from_json($) {Carp::croak ("JSON::PP::from_json has been renamed to decode_json.")}sub new {my$class=shift;my$self={max_depth=>512,max_size=>0,indent=>0,FLAGS=>0,fallback=>sub {encode_error('Invalid value. JSON can only reference.')},indent_length=>3,};bless$self,$class}sub encode {return $_[0]->PP_encode_json($_[1])}sub decode {return $_[0]->PP_decode_json($_[1],0x00000000)}sub decode_prefix {return $_[0]->PP_decode_json($_[1],0x00000001)}sub pretty {my ($self,$v)=@_;my$enable=defined$v ? $v : 1;if ($enable){$self->indent(1)->indent_length(3)->space_before(1)->space_after(1)}else {$self->indent(0)->space_before(0)->space_after(0)}$self}sub max_depth {my$max=defined $_[1]? $_[1]: 0x80000000;$_[0]->{max_depth}=$max;$_[0]}sub get_max_depth {$_[0]->{max_depth}}sub max_size {my$max=defined $_[1]? $_[1]: 0;$_[0]->{max_size}=$max;$_[0]}sub get_max_size {$_[0]->{max_size}}sub filter_json_object {$_[0]->{cb_object}=defined $_[1]? $_[1]: 0;$_[0]->{F_HOOK}=($_[0]->{cb_object}or $_[0]->{cb_sk_object})? 1 : 0;$_[0]}sub filter_json_single_key_object {if (@_ > 1){$_[0]->{cb_sk_object}->{$_[1]}=$_[2]}$_[0]->{F_HOOK}=($_[0]->{cb_object}or $_[0]->{cb_sk_object})? 1 : 0;$_[0]}sub indent_length {if (!defined $_[1]or $_[1]> 15 or $_[1]< 0){Carp::carp "The acceptable range of indent_length() is 0 to 15."}else {$_[0]->{indent_length}=$_[1]}$_[0]}sub get_indent_length {$_[0]->{indent_length}}sub sort_by {$_[0]->{sort_by}=defined $_[1]? $_[1]: 1;$_[0]}sub allow_bigint {Carp::carp("allow_bigint() is obsoleted. use allow_bignum() insted.")}{my$max_depth;my$indent;my$ascii;my$latin1;my$utf8;my$space_before;my$space_after;my$canonical;my$allow_blessed;my$convert_blessed;my$indent_length;my$escape_slash;my$bignum;my$as_nonblessed;my$depth;my$indent_count;my$keysort;sub PP_encode_json {my$self=shift;my$obj=shift;$indent_count=0;$depth=0;my$idx=$self->{PROPS};($ascii,$latin1,$utf8,$indent,$canonical,$space_before,$space_after,$allow_blessed,$convert_blessed,$escape_slash,$bignum,$as_nonblessed)=@{$idx}[P_ASCII .. P_SPACE_AFTER,P_ALLOW_BLESSED,P_CONVERT_BLESSED,P_ESCAPE_SLASH,P_ALLOW_BIGNUM,P_AS_NONBLESSED];($max_depth,$indent_length)=@{$self}{qw/max_depth indent_length/};$keysort=$canonical ? 
sub {$a cmp $b}: undef;if ($self->{sort_by}){$keysort=ref($self->{sort_by})eq 'CODE' ? $self->{sort_by}: $self->{sort_by}=~ /\D+/ ? $self->{sort_by}: sub {$a cmp $b}}encode_error("hash- or arrayref expected (not a simple scalar, use allow_nonref to allow this)")if(!ref$obj and!$idx->[P_ALLOW_NONREF ]);my$str=$self->object_to_json($obj);$str .= "\n" if ($indent);unless ($ascii or $latin1 or $utf8){utf8::upgrade($str)}if ($idx->[P_SHRINK ]){utf8::downgrade($str,1)}return$str}sub object_to_json {my ($self,$obj)=@_;my$type=ref($obj);if($type eq 'HASH'){return$self->hash_to_json($obj)}elsif($type eq 'ARRAY'){return$self->array_to_json($obj)}elsif ($type){if (blessed($obj)){return$self->value_to_json($obj)if ($obj->isa('JSON::PP::Boolean'));if ($convert_blessed and $obj->can('TO_JSON')){my$result=$obj->TO_JSON();if (defined$result and ref($result)){if (refaddr($obj)eq refaddr($result)){encode_error(sprintf("%s::TO_JSON method returned same object as was passed instead of a new one",ref$obj))}}return$self->object_to_json($result)}return "$obj" if ($bignum and _is_bignum($obj));return$self->blessed_to_json($obj)if ($allow_blessed and $as_nonblessed);encode_error(sprintf("encountered object '%s', but neither allow_blessed " ."nor convert_blessed settings are enabled",$obj))unless ($allow_blessed);return 'null'}else {return$self->value_to_json($obj)}}else{return$self->value_to_json($obj)}}sub hash_to_json {my ($self,$obj)=@_;my@res;encode_error("json text or perl structure exceeds maximum nesting level (max_depth set too low?)")if (++$depth > $max_depth);my ($pre,$post)=$indent ? $self->_up_indent(): ('','');my$del=($space_before ? ' ' : '').':' .($space_after ? ' ' : '');for my$k (_sort($obj)){if (OLD_PERL){utf8::decode($k)}push@res,string_to_json($self,$k).$del .($self->object_to_json($obj->{$k})|| $self->value_to_json($obj->{$k}))}--$depth;$self->_down_indent()if ($indent);return '{' .(@res ? $pre : '').(@res ? join(",$pre",@res).$post : '').'}'}sub array_to_json {my ($self,$obj)=@_;my@res;encode_error("json text or perl structure exceeds maximum nesting level (max_depth set too low?)")if (++$depth > $max_depth);my ($pre,$post)=$indent ? $self->_up_indent(): ('','');for my$v (@$obj){push@res,$self->object_to_json($v)|| $self->value_to_json($v)}--$depth;$self->_down_indent()if ($indent);return '[' .(@res ? $pre : '').(@res ? join(",$pre",@res).$post : '').']'}sub value_to_json {my ($self,$value)=@_;return 'null' if(!defined$value);my$b_obj=B::svref_2object(\$value);my$flags=$b_obj->FLAGS;return$value if$flags & (B::SVp_IOK | B::SVp_NOK)and!($flags & B::SVp_POK);my$type=ref($value);if(!$type){return string_to_json($self,$value)}elsif(blessed($value)and $value->isa('JSON::PP::Boolean')){return $$value==1 ? 'true' : 'false'}elsif ($type){if ((overload::StrVal($value)=~ /=(\w+)/)[0]){return$self->value_to_json("$value")}if ($type eq 'SCALAR' and defined $$value){return $$value eq '1' ? 'true' : $$value eq '0' ? 'false' : $self->{PROPS}->[P_ALLOW_UNKNOWN ]? 
'null' : encode_error("cannot encode reference to scalar")}if ($self->{PROPS}->[P_ALLOW_UNKNOWN ]){return 'null'}else {if ($type eq 'SCALAR' or $type eq 'REF'){encode_error("cannot encode reference to scalar")}else {encode_error("encountered $value, but JSON can only represent references to arrays or hashes")}}}else {return$self->{fallback}->($value)if ($self->{fallback}and ref($self->{fallback})eq 'CODE');return 'null'}}my%esc=("\n"=>'\n',"\r"=>'\r',"\t"=>'\t',"\f"=>'\f',"\b"=>'\b',"\""=>'\"',"\\"=>'\\\\',"\'"=>'\\\'',);sub string_to_json {my ($self,$arg)=@_;$arg =~ s/([\x22\x5c\n\r\t\f\b])/$esc{$1}/g;$arg =~ s/\//\\\//g if ($escape_slash);$arg =~ s/([\x00-\x08\x0b\x0e-\x1f])/'\\u00' . unpack('H2', $1)/eg;if ($ascii){$arg=JSON_PP_encode_ascii($arg)}if ($latin1){$arg=JSON_PP_encode_latin1($arg)}if ($utf8){utf8::encode($arg)}return '"' .$arg .'"'}sub blessed_to_json {my$reftype=reftype($_[1])|| '';if ($reftype eq 'HASH'){return $_[0]->hash_to_json($_[1])}elsif ($reftype eq 'ARRAY'){return $_[0]->array_to_json($_[1])}else {return 'null'}}sub encode_error {my$error=shift;Carp::croak "$error"}sub _sort {defined$keysort ? (sort$keysort (keys %{$_[0]})): keys %{$_[0]}}sub _up_indent {my$self=shift;my$space=' ' x $indent_length;my ($pre,$post)=('','');$post="\n" .$space x $indent_count;$indent_count++;$pre="\n" .$space x $indent_count;return ($pre,$post)}sub _down_indent {$indent_count--}sub PP_encode_box {{depth=>$depth,indent_count=>$indent_count,}}}sub _encode_ascii {join('',map {$_ <= 127 ? chr($_): $_ <= 65535 ? sprintf('\u%04x',$_): sprintf('\u%x\u%x',_encode_surrogates($_))}unpack('U*',$_[0]))}sub _encode_latin1 {join('',map {$_ <= 255 ? chr($_): $_ <= 65535 ? sprintf('\u%04x',$_): sprintf('\u%x\u%x',_encode_surrogates($_))}unpack('U*',$_[0]))}sub _encode_surrogates {my$uni=$_[0]- 0x10000;return ($uni / 0x400 + 0xD800,$uni % 0x400 + 0xDC00)}sub _is_bignum {$_[0]->isa('Math::BigInt')or $_[0]->isa('Math::BigFloat')}my$max_intsize;BEGIN {my$checkint=1111;for my$d (5..64){$checkint .= 1;my$int=eval qq| $checkint |;if ($int =~ /[eE]/){$max_intsize=$d - 1;last}}}{my%escapes=(b=>"\x8",t=>"\x9",n=>"\xA",f=>"\xC",r=>"\xD",'\\'=>'\\','"'=>'"','/'=>'/',);my$text;my$at;my$ch;my$len;my$depth;my$encoding;my$is_valid_utf8;my$utf8_len;my$utf8;my$max_depth;my$max_size;my$relaxed;my$cb_object;my$cb_sk_object;my$F_HOOK;my$allow_bigint;my$singlequote;my$loose;my$allow_barekey;sub PP_decode_json {my ($self,$opt);($self,$text,$opt)=@_;($at,$ch,$depth)=(0,'',0);if (!defined$text or ref$text){decode_error("malformed JSON string, neither array, object, number, string or atom")}my$idx=$self->{PROPS};($utf8,$relaxed,$loose,$allow_bigint,$allow_barekey,$singlequote)=@{$idx}[P_UTF8,P_RELAXED,P_LOOSE .. P_ALLOW_SINGLEQUOTE];if ($utf8){utf8::downgrade($text,1)or Carp::croak("Wide character in subroutine entry")}else {utf8::upgrade($text);utf8::encode($text)}$len=length$text;($max_depth,$max_size,$cb_object,$cb_sk_object,$F_HOOK)=@{$self}{qw/max_depth max_size cb_object cb_sk_object F_HOOK/};if ($max_size > 1){use bytes;my$bytes=length$text;decode_error(sprintf("attempted decode of JSON text of %s bytes size, but max_size is set to %s" ,$bytes,$max_size),1)if ($bytes > $max_size)}my@octets=unpack('C4',$text);$encoding=($octets[0]and $octets[1])? 'UTF-8' : (!$octets[0]and $octets[1])? 'UTF-16BE' : (!$octets[0]and!$octets[1])? 'UTF-32BE' : ($octets[2])? 'UTF-16LE' : (!$octets[2])? 
'UTF-32LE' : 'unknown';white();my$valid_start=defined$ch;my$result=value();return undef if (!$result && ($opt & 0x10000000));decode_error("malformed JSON string, neither array, object, number, string or atom")unless$valid_start;if (!$idx->[P_ALLOW_NONREF ]and!ref$result){decode_error('JSON text must be an object or array (but found number, string, true, false or null,' .' use allow_nonref to allow this)',1)}Carp::croak('something wrong.')if$len < $at;my$consumed=defined$ch ? $at - 1 : $at;white();if ($ch){return ($result,$consumed)if ($opt & 0x00000001);decode_error("garbage after JSON object")}($opt & 0x00000001)? ($result,$consumed): $result}sub next_chr {return$ch=undef if($at >= $len);$ch=substr($text,$at++,1)}sub value {white();return if(!defined$ch);return object()if($ch eq '{');return array()if($ch eq '[');return string()if($ch eq '"' or ($singlequote and $ch eq "'"));return number()if($ch =~ /[0-9]/ or $ch eq '-');return word()}sub string {my ($i,$s,$t,$u);my$utf16;my$is_utf8;($is_valid_utf8,$utf8_len)=('',0);$s='';if($ch eq '"' or ($singlequote and $ch eq "'")){my$boundChar=$ch;OUTER: while(defined(next_chr())){if($ch eq $boundChar){next_chr();if ($utf16){decode_error("missing low surrogate character in surrogate pair")}utf8::decode($s)if($is_utf8);return$s}elsif($ch eq '\\'){next_chr();if(exists$escapes{$ch}){$s .= $escapes{$ch}}elsif($ch eq 'u'){my$u='';for(1..4){$ch=next_chr();last OUTER if($ch !~ /[0-9a-fA-F]/);$u .= $ch}if ($u =~ /^[dD][89abAB][0-9a-fA-F]{2}/){$utf16=$u}elsif ($u =~ /^[dD][c-fC-F][0-9a-fA-F]{2}/){unless (defined$utf16){decode_error("missing high surrogate character in surrogate pair")}$is_utf8=1;$s .= JSON_PP_decode_surrogates($utf16,$u)|| next;$utf16=undef}else {if (defined$utf16){decode_error("surrogate pair expected")}if ((my$hex=hex($u))> 127){$is_utf8=1;$s .= JSON_PP_decode_unicode($u)|| next}else {$s .= chr$hex}}}else{unless ($loose){$at -= 2;decode_error('illegal backslash escape sequence in string')}$s .= $ch}}else{if (ord$ch > 127){unless($ch=is_valid_utf8($ch)){$at -= 1;decode_error("malformed UTF-8 character in JSON string")}else {$at += $utf8_len - 1}$is_utf8=1}if (!$loose){if ($ch =~ /[\x00-\x1f\x22\x5c]/){$at--;decode_error('invalid character encountered while parsing JSON string')}}$s .= $ch}}}decode_error("unexpected end of string while parsing JSON string")}sub white {while(defined$ch){if($ch le ' '){next_chr()}elsif($ch eq '/'){next_chr();if(defined$ch and $ch eq '/'){1 while(defined(next_chr())and $ch ne "\n" and $ch ne "\r")}elsif(defined$ch and $ch eq '*'){next_chr();while(1){if(defined$ch){if($ch eq '*'){if(defined(next_chr())and $ch eq '/'){next_chr();last}}else{next_chr()}}else{decode_error("Unterminated comment")}}next}else{$at--;decode_error("malformed JSON string, neither array, object, number, string or atom")}}else{if ($relaxed and $ch eq '#'){pos($text)=$at;$text =~ /\G([^\n]*(?:\r\n|\r|\n|$))/g;$at=pos($text);next_chr;next}last}}}sub array {my$a=$_[0]|| [];decode_error('json text or perl structure exceeds maximum nesting level (max_depth set too low?)')if (++$depth > $max_depth);next_chr();white();if(defined$ch and $ch eq ']'){--$depth;next_chr();return$a}else {while(defined($ch)){push @$a,value();white();if (!defined$ch){last}if($ch eq ']'){--$depth;next_chr();return$a}if($ch ne ','){last}next_chr();white();if ($relaxed and $ch eq ']'){--$depth;next_chr();return$a}}}decode_error(", or ] expected while parsing array")}sub object {my$o=$_[0]|| {};my$k;decode_error('json text or perl structure exceeds maximum nesting level 
(max_depth set too low?)')if (++$depth > $max_depth);next_chr();white();if(defined$ch and $ch eq '}'){--$depth;next_chr();if ($F_HOOK){return _json_object_hook($o)}return$o}else {while (defined$ch){$k=($allow_barekey and $ch ne '"' and $ch ne "'")? bareKey(): string();white();if(!defined$ch or $ch ne ':'){$at--;decode_error("':' expected")}next_chr();$o->{$k}=value();white();last if (!defined$ch);if($ch eq '}'){--$depth;next_chr();if ($F_HOOK){return _json_object_hook($o)}return$o}if($ch ne ','){last}next_chr();white();if ($relaxed and $ch eq '}'){--$depth;next_chr();if ($F_HOOK){return _json_object_hook($o)}return$o}}}$at--;decode_error(", or } expected while parsing object/hash")}sub bareKey {my$key;while($ch =~ /[^\x00-\x23\x25-\x2F\x3A-\x40\x5B-\x5E\x60\x7B-\x7F]/){$key .= $ch;next_chr()}return$key}sub word {my$word=substr($text,$at-1,4);if($word eq 'true'){$at += 3;next_chr;return$JSON::PP::true}elsif($word eq 'null'){$at += 3;next_chr;return undef}elsif($word eq 'fals'){$at += 3;if(substr($text,$at,1)eq 'e'){$at++;next_chr;return$JSON::PP::false}}$at--;decode_error("'null' expected")if ($word =~ /^n/);decode_error("'true' expected")if ($word =~ /^t/);decode_error("'false' expected")if ($word =~ /^f/);decode_error("malformed JSON string, neither array, object, number, string or atom")}sub number {my$n='';my$v;if($ch eq '0'){my$peek=substr($text,$at,1);my$hex=$peek =~ /[xX]/;if($hex){decode_error("malformed number (leading zero must not be followed by another digit)");($n)=(substr($text,$at+1)=~ /^([0-9a-fA-F]+)/)}else{($n)=(substr($text,$at)=~ /^([0-7]+)/);if (defined$n and length$n > 1){decode_error("malformed number (leading zero must not be followed by another digit)")}}if(defined$n and length($n)){if (!$hex and length($n)==1){decode_error("malformed number (leading zero must not be followed by another digit)")}$at += length($n)+ $hex;next_chr;return$hex ? hex($n): oct($n)}}if($ch eq '-'){$n='-';next_chr;if (!defined$ch or $ch !~ /\d/){decode_error("malformed number (no digits after initial minus)")}}while(defined$ch and $ch =~ /\d/){$n .= $ch;next_chr}if(defined$ch and $ch eq '.'){$n .= '.';next_chr;if (!defined$ch or $ch !~ /\d/){decode_error("malformed number (no digits after decimal point)")}else {$n .= $ch}while(defined(next_chr)and $ch =~ /\d/){$n .= $ch}}if(defined$ch and ($ch eq 'e' or $ch eq 'E')){$n .= $ch;next_chr;if(defined($ch)and ($ch eq '+' or $ch eq '-')){$n .= $ch;next_chr;if (!defined$ch or $ch =~ /\D/){decode_error("malformed number (no digits after exp sign)")}$n .= $ch}elsif(defined($ch)and $ch =~ /\d/){$n .= $ch}else {decode_error("malformed number (no digits after exp sign)")}while(defined(next_chr)and $ch =~ /\d/){$n .= $ch}}$v .= $n;if ($v !~ /[.eE]/ and length$v > $max_intsize){if ($allow_bigint){require Math::BigInt;return Math::BigInt->new($v)}else {return "$v"}}elsif ($allow_bigint){require Math::BigFloat;return Math::BigFloat->new($v)}return 0+$v}sub is_valid_utf8 {$utf8_len=$_[0]=~ /[\x00-\x7F]/ ? 1 : $_[0]=~ /[\xC2-\xDF]/ ? 2 : $_[0]=~ /[\xE0-\xEF]/ ? 3 : $_[0]=~ /[\xF0-\xF4]/ ? 4 : 0 ;return unless$utf8_len;my$is_valid_utf8=substr($text,$at - 1,$utf8_len);return ($is_valid_utf8 =~ /^(?: + [\x00-\x7F] + |[\xC2-\xDF][\x80-\xBF] + |[\xE0][\xA0-\xBF][\x80-\xBF] + |[\xE1-\xEC][\x80-\xBF][\x80-\xBF] + |[\xED][\x80-\x9F][\x80-\xBF] + |[\xEE-\xEF][\x80-\xBF][\x80-\xBF] + |[\xF0][\x90-\xBF][\x80-\xBF][\x80-\xBF] + |[\xF1-\xF3][\x80-\xBF][\x80-\xBF][\x80-\xBF] + |[\xF4][\x80-\x8F][\x80-\xBF][\x80-\xBF] + )$/x)? 
$is_valid_utf8 : ''}sub decode_error {my$error=shift;my$no_rep=shift;my$str=defined$text ? substr($text,$at): '';my$mess='';my$type=$] >= 5.008 ? 'U*' : $] < 5.006 ? 'C*' : utf8::is_utf8($str)? 'U*' : 'C*' ;for my$c (unpack($type,$str)){$mess .= $c==0x07 ? '\a' : $c==0x09 ? '\t' : $c==0x0a ? '\n' : $c==0x0d ? '\r' : $c==0x0c ? '\f' : $c < 0x20 ? sprintf('\x{%x}',$c): $c==0x5c ? '\\\\' : $c < 0x80 ? chr($c): sprintf('\x{%x}',$c);if (length$mess >= 20){$mess .= '...';last}}unless (length$mess){$mess='(end of string)'}Carp::croak ($no_rep ? "$error" : "$error, at character offset $at (before \"$mess\")")}sub _json_object_hook {my$o=$_[0];my@ks=keys %{$o};if ($cb_sk_object and @ks==1 and exists$cb_sk_object->{$ks[0]}and ref$cb_sk_object->{$ks[0]}){my@val=$cb_sk_object->{$ks[0]}->($o->{$ks[0]});if (@val==1){return$val[0]}}my@val=$cb_object->($o)if ($cb_object);if (@val==0 or @val > 1){return$o}else {return$val[0]}}sub PP_decode_box {{text=>$text,at=>$at,ch=>$ch,len=>$len,depth=>$depth,encoding=>$encoding,is_valid_utf8=>$is_valid_utf8,}}}sub _decode_surrogates {my$uni=0x10000 + (hex($_[0])- 0xD800)* 0x400 + (hex($_[1])- 0xDC00);my$un=pack('U*',$uni);utf8::encode($un);return$un}sub _decode_unicode {my$un=pack('U',hex shift);utf8::encode($un);return$un}BEGIN {unless (defined&utf8::is_utf8){require Encode;*utf8::is_utf8=*Encode::is_utf8}if ($] >= 5.008){*JSON::PP::JSON_PP_encode_ascii=\&_encode_ascii;*JSON::PP::JSON_PP_encode_latin1=\&_encode_latin1;*JSON::PP::JSON_PP_decode_surrogates=\&_decode_surrogates;*JSON::PP::JSON_PP_decode_unicode=\&_decode_unicode}if ($] >= 5.008 and $] < 5.008003){package JSON::PP;require subs;subs->import('join');eval q| + sub join { + return '' if (@_ < 2); + my $j = shift; + my $str = shift; + for (@_) { $str .= $j . $_; } + return $str; + } + |}sub JSON::PP::incr_parse {local$Carp::CarpLevel=1;($_[0]->{_incr_parser}||= JSON::PP::IncrParser->new)->incr_parse(@_)}sub JSON::PP::incr_skip {($_[0]->{_incr_parser}||= JSON::PP::IncrParser->new)->incr_skip}sub JSON::PP::incr_reset {($_[0]->{_incr_parser}||= JSON::PP::IncrParser->new)->incr_reset}eval q{ + sub JSON::PP::incr_text : lvalue { + $_[0]->{_incr_parser} ||= JSON::PP::IncrParser->new; + + if ( $_[0]->{_incr_parser}->{incr_parsing} ) { + Carp::croak("incr_text can not be called when the incremental parser already started parsing"); + } + $_[0]->{_incr_parser}->{incr_text}; + } + } if ($] >= 5.006)}BEGIN {eval 'require Scalar::Util';unless($@){*JSON::PP::blessed=\&Scalar::Util::blessed;*JSON::PP::reftype=\&Scalar::Util::reftype;*JSON::PP::refaddr=\&Scalar::Util::refaddr}else{eval 'sub UNIVERSAL::a_sub_not_likely_to_be_here { ref($_[0]) }';*JSON::PP::blessed=sub {local($@,$SIG{__DIE__},$SIG{__WARN__});ref($_[0])? eval {$_[0]->a_sub_not_likely_to_be_here}: undef};my%tmap=qw(B::NULL SCALAR B::HV HASH B::AV ARRAY B::CV CODE B::IO IO B::GV GLOB B::REGEXP REGEXP);*JSON::PP::reftype=sub {my$r=shift;return undef unless length(ref($r));my$t=ref(B::svref_2object($r));return exists$tmap{$t}? $tmap{$t}: length(ref($$r))? 
'REF' : 'SCALAR'};*JSON::PP::refaddr=sub {return undef unless length(ref($_[0]));my$addr;if(defined(my$pkg=blessed($_[0]))){$addr .= bless $_[0],'Scalar::Util::Fake';bless $_[0],$pkg}else {$addr .= $_[0]}$addr =~ /0x(\w+)/;local $^W;hex($1)}}}$JSON::PP::true=do {bless \(my$dummy=1),"JSON::PP::Boolean"};$JSON::PP::false=do {bless \(my$dummy=0),"JSON::PP::Boolean"};sub is_bool {defined $_[0]and UNIVERSAL::isa($_[0],"JSON::PP::Boolean")}sub true {$JSON::PP::true}sub false {$JSON::PP::false}sub null {undef}package JSON::PP::Boolean;use overload ("0+"=>sub {${$_[0]}},"++"=>sub {$_[0]=${$_[0]}+ 1},"--"=>sub {$_[0]=${$_[0]}- 1},fallback=>1,);package JSON::PP::IncrParser;use strict;use constant INCR_M_WS=>0;use constant INCR_M_STR=>1;use constant INCR_M_BS=>2;use constant INCR_M_JSON=>3;use constant INCR_M_C0=>4;use constant INCR_M_C1=>5;$JSON::PP::IncrParser::VERSION='1.01';my$unpack_format=$] < 5.006 ? 'C*' : 'U*';sub new {my ($class)=@_;bless {incr_nest=>0,incr_text=>undef,incr_parsing=>0,incr_p=>0,},$class}sub incr_parse {my ($self,$coder,$text)=@_;$self->{incr_text}='' unless (defined$self->{incr_text});if (defined$text){if (utf8::is_utf8($text)and!utf8::is_utf8($self->{incr_text})){utf8::upgrade($self->{incr_text});utf8::decode($self->{incr_text})}$self->{incr_text}.= $text}my$max_size=$coder->get_max_size;if (defined wantarray){$self->{incr_mode}=INCR_M_WS unless defined$self->{incr_mode};if (wantarray){my@ret;$self->{incr_parsing}=1;do {push@ret,$self->_incr_parse($coder,$self->{incr_text});unless (!$self->{incr_nest}and $self->{incr_mode}==INCR_M_JSON){$self->{incr_mode}=INCR_M_WS if$self->{incr_mode}!=INCR_M_STR}}until (length$self->{incr_text}>= $self->{incr_p});$self->{incr_parsing}=0;return@ret}else {$self->{incr_parsing}=1;my$obj=$self->_incr_parse($coder,$self->{incr_text});$self->{incr_parsing}=0 if defined$obj;return$obj ? 
$obj : undef}}}sub _incr_parse {my ($self,$coder,$text,$skip)=@_;my$p=$self->{incr_p};my$restore=$p;my@obj;my$len=length$text;if ($self->{incr_mode}==INCR_M_WS){while ($len > $p){my$s=substr($text,$p,1);$p++ and next if (0x20 >= unpack($unpack_format,$s));$self->{incr_mode}=INCR_M_JSON;last}}while ($len > $p){my$s=substr($text,$p++,1);if ($s eq '"'){if (substr($text,$p - 2,1)eq '\\'){next}if ($self->{incr_mode}!=INCR_M_STR){$self->{incr_mode}=INCR_M_STR}else {$self->{incr_mode}=INCR_M_JSON;unless ($self->{incr_nest}){last}}}if ($self->{incr_mode}==INCR_M_JSON){if ($s eq '[' or $s eq '{'){if (++$self->{incr_nest}> $coder->get_max_depth){Carp::croak('json text or perl structure exceeds maximum nesting level (max_depth set too low?)')}}elsif ($s eq ']' or $s eq '}'){last if (--$self->{incr_nest}<= 0)}elsif ($s eq '#'){while ($len > $p){last if substr($text,$p++,1)eq "\n"}}}}$self->{incr_p}=$p;return if ($self->{incr_mode}==INCR_M_STR and not $self->{incr_nest});return if ($self->{incr_mode}==INCR_M_JSON and $self->{incr_nest}> 0);return '' unless (length substr($self->{incr_text},0,$p));local$Carp::CarpLevel=2;$self->{incr_p}=$restore;$self->{incr_c}=$p;my ($obj,$tail)=$coder->PP_decode_json(substr($self->{incr_text},0,$p),0x10000001);$self->{incr_text}=substr($self->{incr_text},$p);$self->{incr_p}=0;return$obj || ''}sub incr_text {if ($_[0]->{incr_parsing}){Carp::croak("incr_text can not be called when the incremental parser already started parsing")}$_[0]->{incr_text}}sub incr_skip {my$self=shift;$self->{incr_text}=substr($self->{incr_text},$self->{incr_c});$self->{incr_p}=0}sub incr_reset {my$self=shift;$self->{incr_text}=undef;$self->{incr_p}=0;$self->{incr_mode}=0;$self->{incr_nest}=0;$self->{incr_parsing}=0}1; +JSON_PP + +$fatpacked{"JSON/PP/Boolean.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'JSON_PP_BOOLEAN'; + use JSON::PP ();use strict;1; +JSON_PP_BOOLEAN + +$fatpacked{"Module/CPANfile.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'MODULE_CPANFILE'; + package Module::CPANfile;use strict;use warnings;use Cwd;use Carp ();use Module::CPANfile::Environment;use Module::CPANfile::Requirement;our$VERSION='1.1000';sub new {my($class,$file)=@_;bless {},$class}sub load {my($proto,$file)=@_;my$self=ref$proto ? 
$proto : $proto->new;$self->parse($file || Cwd::abs_path('cpanfile'));$self}sub save {my($self,$path)=@_;open my$out,">",$path or die "$path: $!";print {$out}$self->to_string}sub parse {my($self,$file)=@_;my$code=do {open my$fh,"<",$file or die "$file: $!";join '',<$fh>};my$env=Module::CPANfile::Environment->new($file);$env->parse($code)or die $@;$self->{_mirrors}=$env->mirrors;$self->{_prereqs}=$env->prereqs}sub from_prereqs {my($proto,$prereqs)=@_;my$self=$proto->new;$self->{_prereqs}=Module::CPANfile::Prereqs->from_cpan_meta($prereqs);$self}sub mirrors {my$self=shift;$self->{_mirrors}|| []}sub features {my$self=shift;map$self->feature($_),$self->{_prereqs}->identifiers}sub feature {my($self,$identifier)=@_;$self->{_prereqs}->feature($identifier)}sub prereq {shift->prereqs}sub prereqs {my$self=shift;$self->{_prereqs}->as_cpan_meta}sub merged_requirements {my$self=shift;$self->{_prereqs}->merged_requirements}sub effective_prereqs {my($self,$features)=@_;$self->prereqs_with(@{$features || []})}sub prereqs_with {my($self,@feature_identifiers)=@_;my$prereqs=$self->prereqs;my@others=map {$self->feature($_)->prereqs}@feature_identifiers;$prereqs->with_merged_prereqs(\@others)}sub prereq_specs {my$self=shift;$self->prereqs->as_string_hash}sub prereq_for_module {my($self,$module)=@_;$self->{_prereqs}->find($module)}sub options_for_module {my($self,$module)=@_;my$prereq=$self->prereq_for_module($module)or return;$prereq->requirement->options}sub merge_meta {my($self,$file,$version)=@_;require CPAN::Meta;$version ||= $file =~ /\.yml$/ ? '1.4' : '2';my$prereq=$self->prereqs;my$meta=CPAN::Meta->load_file($file);my$prereqs_hash=$prereq->with_merged_prereqs($meta->effective_prereqs)->as_string_hash;my$struct={%{$meta->as_struct},prereqs=>$prereqs_hash };CPAN::Meta->new($struct)->save($file,{version=>$version })}sub _dump {my$str=shift;require Data::Dumper;chomp(my$value=Data::Dumper->new([$str])->Terse(1)->Dump);$value}sub to_string {my($self,$include_empty)=@_;my$mirrors=$self->mirrors;my$prereqs=$self->prereq_specs;my$code='';$code .= $self->_dump_mirrors($mirrors);$code .= $self->_dump_prereqs($prereqs,$include_empty);for my$feature ($self->features){$code .= sprintf "feature %s, %s => sub {\n",_dump($feature->{identifier}),_dump($feature->{description});$code .= $self->_dump_prereqs($feature->{spec},$include_empty,4);$code .= "}\n\n"}$code =~ s/\n+$/\n/s;$code}sub _dump_mirrors {my($self,$mirrors)=@_;my$code="";for my$url (@$mirrors){$code .= "mirror '$url';\n"}$code =~ s/\n+$/\n/s;$code}sub _dump_prereqs {my($self,$prereqs,$include_empty,$base_indent)=@_;my$code='';for my$phase (qw(runtime configure build test develop)){my$indent=$phase eq 'runtime' ? '' : ' ';$indent=(' ' x ($base_indent || 0)).$indent;my($phase_code,$requirements);$phase_code .= "on $phase => sub {\n" unless$phase eq 'runtime';for my$type (qw(requires recommends suggests conflicts)){for my$mod (sort keys %{$prereqs->{$phase}{$type}}){my$ver=$prereqs->{$phase}{$type}{$mod};$phase_code .= $ver eq '0' ? "${indent}$type '$mod';\n" : "${indent}$type '$mod', '$ver';\n";$requirements++}}$phase_code .= "\n" unless$requirements;$phase_code .= "};\n" unless$phase eq 'runtime';$code .= $phase_code ."\n" if$requirements or $include_empty}$code =~ s/\n+$/\n/s;$code}1; +MODULE_CPANFILE + +$fatpacked{"Module/CPANfile/Environment.pm"} = '#line '.(1+__LINE__).' 
"'.__FILE__."\"\n".<<'MODULE_CPANFILE_ENVIRONMENT'; + package Module::CPANfile::Environment;use strict;use warnings;use Module::CPANfile::Prereqs;use Carp ();my@bindings=qw(on requires recommends suggests conflicts feature osname mirror configure_requires build_requires test_requires author_requires);my$file_id=1;sub new {my($class,$file)=@_;bless {file=>$file,phase=>'runtime',feature=>undef,features=>{},prereqs=>Module::CPANfile::Prereqs->new,mirrors=>[],},$class}sub bind {my$self=shift;my$pkg=caller;for my$binding (@bindings){no strict 'refs';*{"$pkg\::$binding"}=sub {$self->$binding(@_)}}}sub parse {my($self,$code)=@_;my$err;{local $@;$file_id++;$self->_evaluate(<{file} failed: $err"};return 1}sub _evaluate {my$_environment=$_[0];eval $_[1]}sub prereqs {$_[0]->{prereqs}}sub mirrors {$_[0]->{mirrors}}sub on {my($self,$phase,$code)=@_;local$self->{phase}=$phase;$code->()}sub feature {my($self,$identifier,$description,$code)=@_;if (@_==3 && ref($description)eq 'CODE'){$code=$description;$description=$identifier}unless (ref$description eq '' && ref$code eq 'CODE'){Carp::croak("Usage: feature 'identifier', 'Description' => sub { ... }")}local$self->{feature}=$identifier;$self->prereqs->add_feature($identifier,$description);$code->()}sub osname {die "TODO"}sub mirror {my($self,$url)=@_;push @{$self->{mirrors}},$url}sub requirement_for {my($self,$module,@args)=@_;my$requirement=0;$requirement=shift@args if@args % 2;return Module::CPANfile::Requirement->new(name=>$module,version=>$requirement,@args,)}sub requires {my$self=shift;$self->add_prereq(requires=>@_)}sub recommends {my$self=shift;$self->add_prereq(recommends=>@_)}sub suggests {my$self=shift;$self->add_prereq(suggests=>@_)}sub conflicts {my$self=shift;$self->add_prereq(conflicts=>@_)}sub add_prereq {my($self,$type,$module,@args)=@_;$self->prereqs->add_prereq(feature=>$self->{feature},phase=>$self->{phase},type=>$type,module=>$module,requirement=>$self->requirement_for($module,@args),)}sub configure_requires {my($self,@args)=@_;$self->on(configure=>sub {$self->requires(@args)})}sub build_requires {my($self,@args)=@_;$self->on(build=>sub {$self->requires(@args)})}sub test_requires {my($self,@args)=@_;$self->on(test=>sub {$self->requires(@args)})}sub author_requires {my($self,@args)=@_;$self->on(develop=>sub {$self->requires(@args)})}1; + package Module::CPANfile::Sandbox$file_id; + no warnings; + BEGIN { \$_environment->bind } + + # line 1 "$self->{file}" + $code; + EVAL +MODULE_CPANFILE_ENVIRONMENT + +$fatpacked{"Module/CPANfile/Prereq.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'MODULE_CPANFILE_PREREQ'; + package Module::CPANfile::Prereq;use strict;sub new {my($class,%options)=@_;bless \%options,$class}sub feature {$_[0]->{feature}}sub phase {$_[0]->{phase}}sub type {$_[0]->{type}}sub module {$_[0]->{module}}sub requirement {$_[0]->{requirement}}sub match_feature {my($self,$identifier)=@_;no warnings 'uninitialized';$self->feature eq $identifier}1; +MODULE_CPANFILE_PREREQ + +$fatpacked{"Module/CPANfile/Prereqs.pm"} = '#line '.(1+__LINE__).' 
"'.__FILE__."\"\n".<<'MODULE_CPANFILE_PREREQS'; + package Module::CPANfile::Prereqs;use strict;use Carp ();use CPAN::Meta::Feature;use Module::CPANfile::Prereq;sub from_cpan_meta {my($class,$prereqs)=@_;my$self=$class->new;for my$phase (keys %$prereqs){for my$type (keys %{$prereqs->{$phase}}){while (my($module,$requirement)=each %{$prereqs->{$phase}{$type}}){$self->add_prereq(phase=>$phase,type=>$type,module=>$module,requirement=>Module::CPANfile::Requirement->new(name=>$module,version=>$requirement),)}}}$self}sub new {my$class=shift;bless {prereqs=>[],features=>{},},$class}sub add_feature {my($self,$identifier,$description)=@_;$self->{features}{$identifier}={description=>$description }}sub add_prereq {my($self,%args)=@_;$self->add(Module::CPANfile::Prereq->new(%args))}sub add {my($self,$prereq)=@_;push @{$self->{prereqs}},$prereq}sub as_cpan_meta {my$self=shift;$self->{cpanmeta}||= $self->build_cpan_meta}sub build_cpan_meta {my($self,$identifier)=@_;my$prereq_spec={};$self->prereq_each($identifier,sub {my$prereq=shift;$prereq_spec->{$prereq->phase}{$prereq->type}{$prereq->module}=$prereq->requirement->version});CPAN::Meta::Prereqs->new($prereq_spec)}sub prereq_each {my($self,$identifier,$code)=@_;for my$prereq (@{$self->{prereqs}}){next unless$prereq->match_feature($identifier);$code->($prereq)}}sub merged_requirements {my$self=shift;my$reqs=CPAN::Meta::Requirements->new;for my$prereq (@{$self->{prereqs}}){$reqs->add_string_requirement($prereq->module,$prereq->requirement->version)}$reqs}sub find {my($self,$module)=@_;for my$prereq (@{$self->{prereqs}}){return$prereq if$prereq->module eq $module}return}sub identifiers {my$self=shift;keys %{$self->{features}}}sub feature {my($self,$identifier)=@_;my$data=$self->{features}{$identifier}or Carp::croak("Unknown feature '$identifier'");my$prereqs=$self->build_cpan_meta($identifier);CPAN::Meta::Feature->new($identifier,{description=>$data->{description},prereqs=>$prereqs->as_string_hash,})}1; +MODULE_CPANFILE_PREREQS + +$fatpacked{"Module/CPANfile/Requirement.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'MODULE_CPANFILE_REQUIREMENT'; + package Module::CPANfile::Requirement;use strict;sub new {my ($class,%args)=@_;$args{version}||= 0;bless +{name=>delete$args{name},version=>delete$args{version},options=>\%args,},$class}sub name {$_[0]->{name}}sub version {$_[0]->{version}}sub options {$_[0]->{options}}sub has_options {keys %{$_[0]->{options}}> 0}1; +MODULE_CPANFILE_REQUIREMENT + +$fatpacked{"Module/Metadata.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'MODULE_METADATA'; + package Module::Metadata;sub __clean_eval {eval $_[0]}use strict;use warnings;our$VERSION='1.000027';use Carp qw/croak/;use File::Spec;BEGIN {eval {require Fcntl;Fcntl->import('SEEK_SET');1}or *SEEK_SET=sub {0}}use version 0.87;BEGIN {if ($INC{'Log/Contextual.pm'}){require "Log/Contextual/WarnLogger.pm";Log::Contextual->import('log_info','-default_logger'=>Log::Contextual::WarnLogger->new({env_prefix=>'MODULE_METADATA',}),)}else {*log_info=sub (&) {warn $_[0]->()}}}use File::Find qw(find);my$V_NUM_REGEXP=qr{v?[0-9._]+};my$PKG_FIRST_WORD_REGEXP=qr{ # the FIRST word in a package name + [a-zA-Z_] # the first word CANNOT start with a digit + (?: + [\w']? # can contain letters, digits, _, or ticks + \w # But, NO multi-ticks or trailing ticks + )* + }x;my$PKG_ADDL_WORD_REGEXP=qr{ # the 2nd+ word in a package name + \w # the 2nd+ word CAN start with digits + (?: + [\w']? 
# and can contain letters or ticks + \w # But, NO multi-ticks or trailing ticks + )* + }x;my$PKG_NAME_REGEXP=qr{ # match a package name + (?: :: )? # a pkg name can start with arisdottle + $PKG_FIRST_WORD_REGEXP # a package word + (?: + (?: :: )+ ### arisdottle (allow one or many times) + $PKG_ADDL_WORD_REGEXP ### a package word + )* # ^ zero, one or many times + (?: + :: # allow trailing arisdottle + )? + }x;my$PKG_REGEXP=qr{ # match a package declaration + ^[\s\{;]* # intro chars on a line + package # the word 'package' + \s+ # whitespace + ($PKG_NAME_REGEXP) # a package name + \s* # optional whitespace + ($V_NUM_REGEXP)? # optional version number + \s* # optional whitesapce + [;\{] # semicolon line terminator or block start (since 5.16) + }x;my$VARNAME_REGEXP=qr{ # match fully-qualified VERSION name + ([\$*]) # sigil - $ or * + ( + ( # optional leading package name + (?:::|\')? # possibly starting like just :: (a la $::VERSION) + (?:\w+(?:::|\'))* # Foo::Bar:: ... + )? + VERSION + )\b + }x;my$VERS_REGEXP=qr{ # match a VERSION definition + (?: + \(\s*$VARNAME_REGEXP\s*\) # with parens + | + $VARNAME_REGEXP # without parens + ) + \s* + =[^=~>] # = but not ==, nor =~, nor => + }x;sub new_from_file {my$class=shift;my$filename=File::Spec->rel2abs(shift);return undef unless defined($filename)&& -f $filename;return$class->_init(undef,$filename,@_)}sub new_from_handle {my$class=shift;my$handle=shift;my$filename=shift;return undef unless defined($handle)&& defined($filename);$filename=File::Spec->rel2abs($filename);return$class->_init(undef,$filename,@_,handle=>$handle)}sub new_from_module {my$class=shift;my$module=shift;my%props=@_;$props{inc}||= \@INC;my$filename=$class->find_module_by_name($module,$props{inc});return undef unless defined($filename)&& -f $filename;return$class->_init($module,$filename,%props)}{my$compare_versions=sub {my ($v1,$op,$v2)=@_;$v1=version->new($v1)unless UNIVERSAL::isa($v1,'version');my$eval_str="\$v1 $op \$v2";my$result=eval$eval_str;log_info {"error comparing versions: '$eval_str' $@"}if $@;return$result};my$normalize_version=sub {my ($version)=@_;if ($version =~ /[=<>!,]/){}elsif (ref$version eq 'version'){$version=$version->is_qv ? 
$version->normal : $version->stringify}elsif ($version =~ /^[^v][^.]*\.[^.]+\./){$version="v$version"}else {}return$version};my$resolve_module_versions=sub {my$packages=shift;my($file,$version);my$err='';for my$p (@$packages){if (defined($p->{version})){if (defined($version)){if ($compare_versions->($version,'!=',$p->{version})){$err .= " $p->{file} ($p->{version})\n"}else {}}else {$file=$p->{file};$version=$p->{version}}}$file ||= $p->{file}if defined($p->{file})}if ($err){$err=" $file ($version)\n" .$err}my%result=(file=>$file,version=>$version,err=>$err);return \%result};sub provides {my$class=shift;croak "provides() requires key/value pairs \n" if @_ % 2;my%args=@_;croak "provides() takes only one of 'dir' or 'files'\n" if$args{dir}&& $args{files};croak "provides() requires a 'version' argument" unless defined$args{version};croak "provides() does not support version '$args{version}' metadata" unless grep {$args{version}eq $_}qw/1.4 2/;$args{prefix}='lib' unless defined$args{prefix};my$p;if ($args{dir}){$p=$class->package_versions_from_directory($args{dir})}else {croak "provides() requires 'files' to be an array reference\n" unless ref$args{files}eq 'ARRAY';$p=$class->package_versions_from_directory($args{files})}if (length$args{prefix}){$args{prefix}=~ s{/$}{};for my$v (values %$p){$v->{file}="$args{prefix}/$v->{file}"}}return$p}sub package_versions_from_directory {my ($class,$dir,$files)=@_;my@files;if ($files){@files=@$files}else {find({wanted=>sub {push@files,$_ if -f $_ && /\.pm$/},no_chdir=>1,},$dir)}my(%prime,%alt);for my$file (@files){my$mapped_filename=File::Spec::Unix->abs2rel($file,$dir);my@path=split(/\//,$mapped_filename);(my$prime_package=join('::',@path))=~ s/\.pm$//;my$pm_info=$class->new_from_file($file);for my$package ($pm_info->packages_inside){next if$package eq 'main';next if$package eq 'DB';next if grep /^_/,split(/::/,$package);my$version=$pm_info->version($package);$prime_package=$package if lc($prime_package)eq lc($package);if ($package eq $prime_package){if (exists($prime{$package})){croak "Unexpected conflict in '$package'; multiple versions found.\n"}else {$mapped_filename="$package.pm" if lc("$package.pm")eq lc($mapped_filename);$prime{$package}{file}=$mapped_filename;$prime{$package}{version}=$version if defined($version)}}else {push(@{$alt{$package}},{file=>$mapped_filename,version=>$version,})}}}for my$package (keys(%alt)){my$result=$resolve_module_versions->($alt{$package});if (exists($prime{$package})){if ($result->{err}){log_info {"Found conflicting versions for package '$package'\n" ." $prime{$package}{file} ($prime{$package}{version})\n" .$result->{err}}}elsif (defined($result->{version})){if (exists($prime{$package}{version})&& defined($prime{$package}{version})){if ($compare_versions->($prime{$package}{version},'!=',$result->{version})){log_info {"Found conflicting versions for package '$package'\n" ." $prime{$package}{file} ($prime{$package}{version})\n" ." 
$result->{file} ($result->{version})\n"}}}else {$prime{$package}{file}=$result->{file};$prime{$package}{version}=$result->{version}}}else {}}else {if ($result->{err}){log_info {"Found conflicting versions for package '$package'\n" .$result->{err}}}$prime{$package}{file}=$result->{file};$prime{$package}{version}=$result->{version}if defined($result->{version})}}for (grep defined $_->{version},values%prime){$_->{version}=$normalize_version->($_->{version})}return \%prime}}sub _init {my$class=shift;my$module=shift;my$filename=shift;my%props=@_;my$handle=delete$props{handle};my(%valid_props,@valid_props);@valid_props=qw(collect_pod inc);@valid_props{@valid_props}=delete(@props{@valid_props});warn "Unknown properties: @{[keys %props]}\n" if scalar(%props);my%data=(module=>$module,filename=>$filename,version=>undef,packages=>[],versions=>{},pod=>{},pod_headings=>[],collect_pod=>0,%valid_props,);my$self=bless(\%data,$class);if (not $handle){my$filename=$self->{filename};open$handle,'<',$filename or croak("Can't open '$filename': $!");$self->_handle_bom($handle,$filename)}$self->_parse_fh($handle);unless($self->{module}and length($self->{module})){my ($v,$d,$f)=File::Spec->splitpath($self->{filename});if($f =~ /\.pm$/){$f =~ s/\..+$//;my@candidates=grep /$f$/,@{$self->{packages}};$self->{module}=shift(@candidates)}else {if(grep /main/,@{$self->{packages}}){$self->{module}='main'}else {$self->{module}=$self->{packages}[0]|| ''}}}$self->{version}=$self->{versions}{$self->{module}}if defined($self->{module});return$self}sub _do_find_module {my$class=shift;my$module=shift || croak 'find_module_by_name() requires a package name';my$dirs=shift || \@INC;my$file=File::Spec->catfile(split(/::/,$module));for my$dir (@$dirs){my$testfile=File::Spec->catfile($dir,$file);return [File::Spec->rel2abs($testfile),$dir ]if -e $testfile and!-d _;$testfile .= '.pm';return [File::Spec->rel2abs($testfile),$dir ]if -e $testfile}return}sub find_module_by_name {my$found=shift()->_do_find_module(@_)or return;return$found->[0]}sub find_module_dir_by_name {my$found=shift()->_do_find_module(@_)or return;return$found->[1]}sub _parse_version_expression {my$self=shift;my$line=shift;my($sigil,$variable_name,$package);if ($line =~ /$VERS_REGEXP/o){($sigil,$variable_name,$package)=$2 ? ($1,$2,$3): ($4,$5,$6);if ($package){$package=($package eq '::')? 
'main' : $package;$package =~ s/::$//}}return ($sigil,$variable_name,$package)}sub _handle_bom {my ($self,$fh,$filename)=@_;my$pos=tell$fh;return unless defined$pos;my$buf=' ' x 2;my$count=read$fh,$buf,length$buf;return unless defined$count and $count >= 2;my$encoding;if ($buf eq "\x{FE}\x{FF}"){$encoding='UTF-16BE'}elsif ($buf eq "\x{FF}\x{FE}"){$encoding='UTF-16LE'}elsif ($buf eq "\x{EF}\x{BB}"){$buf=' ';$count=read$fh,$buf,length$buf;if (defined$count and $count >= 1 and $buf eq "\x{BF}"){$encoding='UTF-8'}}if (defined$encoding){if ("$]" >= 5.008){binmode($fh,":encoding($encoding)")}}else {seek$fh,$pos,SEEK_SET or croak(sprintf "Can't reset position to the top of '$filename'")}return$encoding}sub _parse_fh {my ($self,$fh)=@_;my($in_pod,$seen_end,$need_vers)=(0,0,0);my(@packages,%vers,%pod,@pod);my$package='main';my$pod_sect='';my$pod_data='';my$in_end=0;while (defined(my$line=<$fh>)){my$line_num=$.;chomp($line);my$is_cut;if ($line =~ /^=([a-zA-Z].*)/){my$cmd=$1;$is_cut=$cmd =~ /^cut(?:[^a-zA-Z]|$)/;$in_pod=!$is_cut}if ($in_pod){if ($line =~ /^=head[1-4]\s+(.+)\s*$/){push(@pod,$1);if ($self->{collect_pod}&& length($pod_data)){$pod{$pod_sect}=$pod_data;$pod_data=''}$pod_sect=$1}elsif ($self->{collect_pod}){$pod_data .= "$line\n"}}elsif ($is_cut){if ($self->{collect_pod}&& length($pod_data)){$pod{$pod_sect}=$pod_data;$pod_data=''}$pod_sect=''}else {next if$in_end;next if$line =~ /^\s*#/;if ($line eq '__END__'){$in_end++;next}last if$line eq '__DATA__';my($version_sigil,$version_fullname,$version_package)=index($line,'VERSION')>= 1 ? $self->_parse_version_expression($line): ();if ($line =~ /$PKG_REGEXP/o){$package=$1;my$version=$2;push(@packages,$package)unless grep($package eq $_,@packages);$need_vers=defined$version ? 0 : 1;if (not exists$vers{$package}and defined$version){my$dwim_version=eval {_dwim_version($version)};croak "Version '$version' from $self->{filename} does not appear to be valid:\n$line\n\nThe fatal error was: $@\n" unless defined$dwim_version;$vers{$package}=$dwim_version}}elsif ($version_fullname && $version_package){push(@packages,$version_package)unless grep($version_package eq $_,@packages);$need_vers=0 if$version_package eq $package;unless (defined$vers{$version_package}&& length$vers{$version_package}){$vers{$version_package}=$self->_evaluate_version_line($version_sigil,$version_fullname,$line)}}elsif ($package eq 'main' && $version_fullname &&!exists($vers{main})){$need_vers=0;my$v=$self->_evaluate_version_line($version_sigil,$version_fullname,$line);$vers{$package}=$v;push(@packages,'main')}elsif ($package eq 'main' &&!exists($vers{main})&& $line =~ /\w/){$need_vers=1;$vers{main}='';push(@packages,'main')}elsif ($version_fullname && $need_vers){$need_vers=0;my$v=$self->_evaluate_version_line($version_sigil,$version_fullname,$line);unless (defined$vers{$package}&& length$vers{$package}){$vers{$package}=$v}}}}if ($self->{collect_pod}&& length($pod_data)){$pod{$pod_sect}=$pod_data}$self->{versions}=\%vers;$self->{packages}=\@packages;$self->{pod}=\%pod;$self->{pod_headings}=\@pod}{my$pn=0;sub _evaluate_version_line {my$self=shift;my($sigil,$variable_name,$line)=@_;$pn++;my$eval=qq{ my \$dummy = q# Hide from _packages_inside() + #; package Module::Metadata::_version::p${pn}; + use version; + sub { + local $sigil$variable_name; + $line; + \$$variable_name + }; + };$eval=$1 if$eval =~ m{^(.+)}s;local $^W;my$vsub=__clean_eval($eval);if ($@ =~ /Can't locate/ && -d 'lib'){local@INC=('lib',@INC);$vsub=__clean_eval($eval)}warn "Error evaling version line '$eval' in 
$self->{filename}: $@\n" if $@;(ref($vsub)eq 'CODE')or croak "failed to build version sub for $self->{filename}";my$result=eval {$vsub->()};croak "Could not get version from $self->{filename} by executing:\n$eval\n\nThe fatal error was: $@\n" if $@;my$version=eval {_dwim_version($result)};croak "Version '$result' from $self->{filename} does not appear to be valid:\n$eval\n\nThe fatal error was: $@\n" unless defined$version;return$version}}{my@version_prep=(sub {return shift},sub {my$v=shift;$v =~ s{([0-9])[a-z-].*$}{$1}i;return$v},sub {my$v=shift;my$num_dots=()=$v =~ m{(\.)}g;my$num_unders=()=$v =~ m{(_)}g;my$leading_v=substr($v,0,1)eq 'v';if (!$leading_v && $num_dots < 2 && $num_unders > 1){$v =~ s{_}{}g;$num_unders=()=$v =~ m{(_)}g}return$v},sub {my$v=shift;no warnings 'numeric';return 0 + $v},);sub _dwim_version {my ($result)=shift;return$result if ref($result)eq 'version';my ($version,$error);for my$f (@version_prep){$result=$f->($result);$version=eval {version->new($result)};$error ||= $@ if $@;last if defined$version}croak$error unless defined$version;return$version}}sub name {$_[0]->{module}}sub filename {$_[0]->{filename}}sub packages_inside {@{$_[0]->{packages}}}sub pod_inside {@{$_[0]->{pod_headings}}}sub contains_pod {0+@{$_[0]->{pod_headings}}}sub version {my$self=shift;my$mod=shift || $self->{module};my$vers;if (defined($mod)&& length($mod)&& exists($self->{versions}{$mod})){return$self->{versions}{$mod}}else {return undef}}sub pod {my$self=shift;my$sect=shift;if (defined($sect)&& length($sect)&& exists($self->{pod}{$sect})){return$self->{pod}{$sect}}else {return undef}}sub is_indexable {my ($self,$package)=@_;my@indexable_packages=grep {$_ ne 'main'}$self->packages_inside;return!!grep {$_ eq $package}@indexable_packages if$package;return!!@indexable_packages}1; +MODULE_METADATA + +$fatpacked{"Parse/CPAN/Meta.pm"} = '#line '.(1+__LINE__).' 
"'.__FILE__."\"\n".<<'PARSE_CPAN_META'; + use 5.008001;use strict;package Parse::CPAN::Meta;our$VERSION='1.4414';use Exporter;use Carp 'croak';our@ISA=qw/Exporter/;our@EXPORT_OK=qw/Load LoadFile/;sub load_file {my ($class,$filename)=@_;my$meta=_slurp($filename);if ($filename =~ /\.ya?ml$/){return$class->load_yaml_string($meta)}elsif ($filename =~ /\.json$/){return$class->load_json_string($meta)}else {$class->load_string($meta)}}sub load_string {my ($class,$string)=@_;if ($string =~ /^---/){return$class->load_yaml_string($string)}elsif ($string =~ /^\s*\{/){return$class->load_json_string($string)}else {return$class->load_yaml_string($string)}}sub load_yaml_string {my ($class,$string)=@_;my$backend=$class->yaml_backend();my$data=eval {no strict 'refs';&{"$backend\::Load"}($string)};croak $@ if $@;return$data || {}}sub load_json_string {my ($class,$string)=@_;my$data=eval {$class->json_backend()->new->decode($string)};croak $@ if $@;return$data || {}}sub yaml_backend {if (!defined$ENV{PERL_YAML_BACKEND}){_can_load('CPAN::Meta::YAML',0.011)or croak "CPAN::Meta::YAML 0.011 is not available\n";return "CPAN::Meta::YAML"}else {my$backend=$ENV{PERL_YAML_BACKEND};_can_load($backend)or croak "Could not load PERL_YAML_BACKEND '$backend'\n";$backend->can("Load")or croak "PERL_YAML_BACKEND '$backend' does not implement Load()\n";return$backend}}sub json_backend {if (!$ENV{PERL_JSON_BACKEND}or $ENV{PERL_JSON_BACKEND}eq 'JSON::PP'){_can_load('JSON::PP'=>2.27103)or croak "JSON::PP 2.27103 is not available\n";return 'JSON::PP'}else {_can_load('JSON'=>2.5)or croak "JSON 2.5 is required for " ."\$ENV{PERL_JSON_BACKEND} = '$ENV{PERL_JSON_BACKEND}'\n";return "JSON"}}sub _slurp {require Encode;open my$fh,"<:raw","$_[0]" or die "can't open $_[0] for reading: $!";my$content=do {local $/;<$fh>};$content=Encode::decode('UTF-8',$content,Encode::PERLQQ());return$content}sub _can_load {my ($module,$version)=@_;(my$file=$module)=~ s{::}{/}g;$file .= ".pm";return 1 if$INC{$file};return 0 if exists$INC{$file};eval {require$file;1}or return 0;if (defined$version){eval {$module->VERSION($version);1}or return 0}return 1}sub LoadFile ($) {return Load(_slurp(shift))}sub Load ($) {require CPAN::Meta::YAML;my$object=eval {CPAN::Meta::YAML::Load(shift)};croak $@ if $@;return$object}1; +PARSE_CPAN_META + +$fatpacked{"Parse/PMFile.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'PARSE_PMFILE'; + package Parse::PMFile;sub __clean_eval {eval $_[0]}use strict;use warnings;use Safe;use JSON::PP ();use Dumpvalue;use version ();use File::Spec ();our$VERSION='0.36';our$VERBOSE=0;our$ALLOW_DEV_VERSION=0;our$FORK=0;our$UNSAFE=$] < 5.010000 ? 
1 : 0;sub new {my ($class,$meta,$opts)=@_;bless {%{$opts || {}},META_CONTENT=>$meta},$class}sub parse {my ($self,$pmfile)=@_;$pmfile =~ s|\\|/|g;my($filemtime)=(stat$pmfile)[9];$self->{MTIME}=$filemtime;$self->{PMFILE}=$pmfile;unless ($self->_version_from_meta_ok){my$version;unless (eval {$version=$self->_parse_version;1}){$self->_verbose(1,"error with version in $pmfile: $@");return}$self->{VERSION}=$version;if ($self->{VERSION}=~ /^\{.*\}$/){}elsif ($self->{VERSION}=~ /[_\s]/ &&!$self->{ALLOW_DEV_VERSION}&&!$ALLOW_DEV_VERSION){return}}my($ppp)=$self->_packages_per_pmfile;my@keys_ppp=$self->_filter_ppps(sort keys %$ppp);$self->_verbose(1,"Will check keys_ppp[@keys_ppp]\n");my ($package,%errors);my%checked_in;DBPACK: foreach$package (@keys_ppp){if ($package !~ /^\w[\w\:\']*\w?\z/ || $package !~ /\w\z/ || $package =~ /:/ && $package !~ /::/ || $package =~ /\w:\w/ || $package =~ /:::/){$self->_verbose(1,"Package[$package] did not pass the ultimate sanity check");delete$ppp->{$package};next}if ($self->{USERID}&& $self->{PERMISSIONS}&&!$self->_perm_check($package)){delete$ppp->{$package};next}{my (undef,$module)=split m{/lib/},$self->{PMFILE},2;if ($module){$module =~ s{\.pm\z}{};$module =~ s{/}{::}g;if (lc$module eq lc$package && $module ne $package){$errors{$package}={indexing_warning=>"Capitalization of package ($package) does not match filename!",infile=>$self->{PMFILE},}}}}my$pp=$ppp->{$package};if ($pp->{version}&& $pp->{version}=~ /^\{.*\}$/){my$err=JSON::PP::decode_json($pp->{version});if ($err->{x_normalize}){$errors{$package}={normalize=>$err->{version},infile=>$pp->{infile},};$pp->{version}="undef"}elsif ($err->{openerr}){$pp->{version}="undef";$self->_verbose(1,qq{Parse::PMFile was not able to + read the file. It issued the following error: C< $err->{r} >},);$errors{$package}={open=>$err->{r},infile=>$pp->{infile},}}else {$pp->{version}="undef";$self->_verbose(1,qq{Parse::PMFile was not able to + parse the following line in that file: C< $err->{line} > + + Note: the indexer is running in a Safe compartement and cannot + provide the full functionality of perl in the VERSION line. It + is trying hard, but sometime it fails. As a workaround, please + consider writing a META.yml that contains a 'provides' + attribute or contact the CPAN admins to investigate (yet + another) workaround against "Safe" limitations.)},);$errors{$package}={parse_version=>$err->{line},infile=>$err->{file},}}}for ($package,$pp->{version},){if (!defined || /^\s*$/ || /\s/){delete$ppp->{$package};next}}$checked_in{$package}=$ppp->{$package}}return (wantarray && %errors)? (\%checked_in,\%errors): \%checked_in}sub _perm_check {my ($self,$package)=@_;my$userid=$self->{USERID};my$module=$self->{PERMISSIONS}->module_permissions($package);return 1 if!$module;return 1 if defined$module->m && $module->m eq $userid;return 1 if defined$module->f && $module->f eq $userid;return 1 if defined$module->c && grep {$_ eq $userid}@{$module->c};return}sub _parse_version {my$self=shift;use strict;my$pmfile=$self->{PMFILE};my$tmpfile=File::Spec->catfile(File::Spec->tmpdir,"ParsePMFile$$" .rand(1000));my$pmcp=$pmfile;for ($pmcp){s/([^\\](\\\\)*)@/$1\\@/g}my($v);{package main;my$pid;if ($self->{FORK}|| $FORK){$pid=fork();die "Can't fork: $!" 
unless defined$pid}if ($pid){waitpid($pid,0);if (open my$fh,'<',$tmpfile){$v=<$fh>}}else {my($comp)=Safe->new;my$eval=qq{ + local(\$^W) = 0; + Parse::PMFile::_parse_version_safely("$pmcp"); + };$comp->permit("entereval");$comp->share("*Parse::PMFile::_parse_version_safely");$comp->share("*version::new");$comp->share("*version::numify");$comp->share_from('main',['*version::','*charstar::','*Exporter::','*DynaLoader::']);$comp->share_from('version',['&qv']);$comp->permit(":base_math");$comp->deny(qw/enteriter iter unstack goto/);version->import('qv')if$self->{UNSAFE}|| $UNSAFE;{no strict;$v=($self->{UNSAFE}|| $UNSAFE)? eval$eval : $comp->reval($eval)}if ($@){my$err=$@;if (ref$err){if ($err->{line}=~ /([\$*])([\w\:\']*)\bVERSION\b.*?\=(.*)/){local($^W)=0;my ($sigil,$vstr)=($1,$3);$self->_restore_overloaded_stuff(1)if$err->{line}=~ /use\s+version\b|version\->|qv\(/;$v=($self->{UNSAFE}|| $UNSAFE)? eval$vstr : $comp->reval($vstr);$v=$$v if$sigil eq '*' && ref$v}if ($@ or!$v){$self->_verbose(1,sprintf("reval failed: err[%s] for eval[%s]",JSON::PP::encode_json($err),$eval,));$v=JSON::PP::encode_json($err)}}else {$v=JSON::PP::encode_json({openerr=>$err })}}if (defined$v){$v=$v->numify if ref($v)=~ /^version(::vpp)?$/}else {$v=""}if ($self->{FORK}|| $FORK){open my$fh,'>:utf8',$tmpfile;print$fh $v;exit 0}else {utf8::encode($v);$v=undef if defined$v &&!length$v;$comp->erase;$self->_restore_overloaded_stuff}}}unlink$tmpfile if ($self->{FORK}|| $FORK)&& -e $tmpfile;return$self->_normalize_version($v)}sub _restore_overloaded_stuff {my ($self,$used_version_in_safe)=@_;return if$self->{UNSAFE}|| $UNSAFE;no strict 'refs';no warnings 'redefine';my$restored;if ($INC{'version/vxs.pm'}){*{'version::(""'}=\&version::vxs::stringify;*{'version::(0+'}=\&version::vxs::numify;*{'version::(cmp'}=\&version::vxs::VCMP;*{'version::(<=>'}=\&version::vxs::VCMP;*{'version::(bool'}=\&version::vxs::boolean;$restored=1}if ($INC{'version/vpp.pm'}){{package charstar;overload->import}if (!$used_version_in_safe){package version::vpp;overload->import}unless ($restored){*{'version::(""'}=\&version::vpp::stringify;*{'version::(0+'}=\&version::vpp::numify;*{'version::(cmp'}=\&version::vpp::vcmp;*{'version::(<=>'}=\&version::vpp::vcmp;*{'version::(bool'}=\&version::vpp::vbool}*{'version::vpp::(""'}=\&version::vpp::stringify;*{'version::vpp::(0+'}=\&version::vpp::numify;*{'version::vpp::(cmp'}=\&version::vpp::vcmp;*{'version::vpp::(<=>'}=\&version::vpp::vcmp;*{'version::vpp::(bool'}=\&version::vpp::vbool;*{'charstar::(""'}=\&charstar::thischar;*{'charstar::(0+'}=\&charstar::thischar;*{'charstar::(++'}=\&charstar::increment;*{'charstar::(--'}=\&charstar::decrement;*{'charstar::(+'}=\&charstar::plus;*{'charstar::(-'}=\&charstar::minus;*{'charstar::(*'}=\&charstar::multiply;*{'charstar::(cmp'}=\&charstar::cmp;*{'charstar::(<=>'}=\&charstar::spaceship;*{'charstar::(bool'}=\&charstar::thischar;*{'charstar::(='}=\&charstar::clone;$restored=1}if (!$restored){*{'version::(""'}=\&version::stringify;*{'version::(0+'}=\&version::numify;*{'version::(cmp'}=\&version::vcmp;*{'version::(<=>'}=\&version::vcmp;*{'version::(bool'}=\&version::boolean}}sub _packages_per_pmfile {my$self=shift;my$ppp={};my$pmfile=$self->{PMFILE};my$filemtime=$self->{MTIME};my$version=$self->{VERSION};open my$fh,"<","$pmfile" or return$ppp;local $/="\n";my$inpod=0;PLINE: while (<$fh>){chomp;my($pline)=$_;$inpod=$pline =~ /^=(?!cut)/ ? 1 : $pline =~ /^=cut/ ? 
0 : $inpod;next if$inpod;next if substr($pline,0,4)eq "=cut";$pline =~ s/\#.*//;next if$pline =~ /^\s*$/;if ($pline =~ /^__(?:END|DATA)__\b/ and $pmfile !~ /\.PL$/){last PLINE}my$pkg;my$strict_version;if ($pline =~ m{ + # (.*) # takes too much time if $pline is long + (? 128;$ppp->{$pkg}{parsed}++;$ppp->{$pkg}{infile}=$pmfile;if ($self->_simile($pmfile,$pkg)){$ppp->{$pkg}{simile}=$pmfile;if ($self->_version_from_meta_ok){my$provides=$self->{META_CONTENT}{provides};if (exists$provides->{$pkg}){if (defined$provides->{$pkg}{version}){my$v=$provides->{$pkg}{version};if ($v =~ /[_\s]/ &&!$self->{ALLOW_DEV_VERSION}&&!$ALLOW_DEV_VERSION){next PLINE}unless (eval {$version=$self->_normalize_version($v);1}){$self->_verbose(1,"error with version in $pmfile: $@");next}$ppp->{$pkg}{version}=$version}else {$ppp->{$pkg}{version}="undef"}}}else {if (defined$strict_version){$ppp->{$pkg}{version}=$strict_version }else {$ppp->{$pkg}{version}=defined$version ? $version : ""}no warnings;if ($version eq 'undef'){$ppp->{$pkg}{version}=$version unless defined$ppp->{$pkg}{version}}else {$ppp->{$pkg}{version}=$version if$version > $ppp->{$pkg}{version}|| $version gt $ppp->{$pkg}{version}}}}else {$ppp->{$pkg}{version}=$version unless defined$ppp->{$pkg}{version}&& length($ppp->{$pkg}{version})}$ppp->{$pkg}{filemtime}=$filemtime}else {}}close$fh;$ppp}{no strict;sub _parse_version_safely {my($parsefile)=@_;my$result;local*FH;local $/="\n";open(FH,$parsefile)or die "Could not open '$parsefile': $!";my$inpod=0;while (){$inpod=/^=(?!cut)/ ? 1 : /^=cut/ ? 0 : $inpod;next if$inpod || /^\s*#/;last if /^__(?:END|DATA)__\b/;chop;if (my ($ver)=/package \s+ \S+ \s+ (\S+) \s* [;{]/x){return$ver if version::is_lax($ver)}next unless /(?<=])\=(?![=>])/;my$current_parsed_line=$_;my$eval=qq{ + package # + ExtUtils::MakeMaker::_version; + + local $1$2; + \$$2=undef; do { + $_ + }; \$$2 + };local $^W=0;local$SIG{__WARN__}=sub {};$result=__clean_eval($eval);if ($@ or!defined$result){die +{eval=>$eval,line=>$current_parsed_line,file=>$parsefile,err=>$@,}}last}close FH;$result="undef" unless defined$result;if ((ref$result)=~ /^version(?:::vpp)?\b/){$result=$result->numify}return$result}}sub _filter_ppps {my($self,@ppps)=@_;my@res;MANI: for my$ppp (@ppps){if ($self->{META_CONTENT}){my$no_index=$self->{META_CONTENT}{no_index}|| $self->{META_CONTENT}{private};if (ref($no_index)eq 'HASH'){my%map=(package=>qr{\z},namespace=>qr{::},);for my$k (qw(package namespace)){next unless my$v=$no_index->{$k};my$rest=$map{$k};if (ref$v eq "ARRAY"){for my$ve (@$v){$ve =~ s|::$||;if ($ppp =~ /^$ve$rest/){$self->_verbose(1,"Skipping ppp[$ppp] due to ve[$ve]");next MANI}else {$self->_verbose(1,"NOT skipping ppp[$ppp] due to ve[$ve]")}}}else {$v =~ s|::$||;if ($ppp =~ /^$v$rest/){$self->_verbose(1,"Skipping ppp[$ppp] due to v[$v]");next MANI}else {$self->_verbose(1,"NOT skipping ppp[$ppp] due to v[$v]")}}}}else {$self->_verbose(1,"No keyword 'no_index' or 'private' in META_CONTENT")}}else {}push@res,$ppp}$self->_verbose(1,"Result of filter_ppps: res[@res]");@res}sub _simile {my($self,$file,$package)=@_;$file =~ s|.*/||;$file =~ s|\.pm(?:\.PL)?||;my$ret=$package =~ m/\b\Q$file\E$/;$ret ||= 0;unless ($ret){$ret=1 if lc$file eq 'version'}$self->_verbose(1,"Result of simile(): file[$file] package[$package] ret[$ret]\n");$ret}sub _normalize_version {my($self,$v)=@_;$v="undef" unless defined$v;my$dv=Dumpvalue->new;my$sdv=$dv->stringify($v,1);$self->_verbose(1,"Result of normalize_version: sdv[$sdv]\n");return$v if$v eq "undef";return$v if$v =~ /^\{.*\}$/;$v =~ 
s/^\s+//;$v =~ s/\s+\z//;if ($v =~ /_/){return$v }if (!version::is_lax($v)){return JSON::PP::encode_json({x_normalize=>'version::is_lax failed',version=>$v })}my$vv=eval {no warnings;version->new($v)->numify};if ($@){return JSON::PP::encode_json({x_normalize=>$@,version=>$v })}if ($vv eq $v){}else {my$forced=$self->_force_numeric($v);if ($forced eq $vv){}elsif ($forced =~ /^v(.+)/){$vv=version->new($1)->numify}else {if ($forced==$vv){$vv=$forced}}}return$vv}sub _force_numeric {my($self,$v)=@_;$v=$self->_readable($v);if ($v =~ /^(\+?)(\d*)(\.(\d*))?/ && (defined $2 && length $2 || defined $4 && length $4)){my$two=defined $2 ? $2 : "";my$three=defined $3 ? $3 : "";$v="$two$three"}$v}sub _version_from_meta_ok {my($self)=@_;return$self->{VERSION_FROM_META_OK}if exists$self->{VERSION_FROM_META_OK};my$c=$self->{META_CONTENT};return($self->{VERSION_FROM_META_OK}=0)unless$c->{provides};my ($mb_v)=(defined$c->{generated_by}? $c->{generated_by}: '')=~ /Module::Build version ([\d\.]+)/;return($self->{VERSION_FROM_META_OK}=1)unless$mb_v;return($self->{VERSION_FROM_META_OK}=1)if$mb_v eq '0.250.0';if ($mb_v >= 0.19 && $mb_v < 0.26 &&!keys %{$c->{provides}}){return($self->{VERSION_FROM_META_OK}=0)}return($self->{VERSION_FROM_META_OK}=1)}sub _verbose {my($self,$level,@what)=@_;warn@what if$level <= ((ref$self && $self->{VERBOSE})|| $VERBOSE)}sub _vcmp {my($self,$l,$r)=@_;local($^W)=0;$self->_verbose(9,"l[$l] r[$r]");return 0 if$l eq $r;for ($l,$r){s/_//g}$self->_verbose(9,"l[$l] r[$r]");for ($l,$r){next unless tr/.// > 1 || /^v/;s/^v?/v/;1 while s/\.0+(\d)/.$1/}$self->_verbose(9,"l[$l] r[$r]");if ($l=~/^v/ <=> $r=~/^v/){for ($l,$r){next if /^v/;$_=$self->_float2vv($_)}}$self->_verbose(9,"l[$l] r[$r]");my$lvstring="v0";my$rvstring="v0";if ($] >= 5.006 && $l =~ /^v/ && $r =~ /^v/){$lvstring=$self->_vstring($l);$rvstring=$self->_vstring($r);$self->_verbose(9,sprintf "lv[%vd] rv[%vd]",$lvstring,$rvstring)}return (($l ne "undef")<=> ($r ne "undef")|| $lvstring cmp $rvstring || $l <=> $r || $l cmp $r)}sub _vgt {my($self,$l,$r)=@_;$self->_vcmp($l,$r)> 0}sub _vlt {my($self,$l,$r)=@_;$self->_vcmp($l,$r)< 0}sub _vge {my($self,$l,$r)=@_;$self->_vcmp($l,$r)>= 0}sub _vle {my($self,$l,$r)=@_;$self->_vcmp($l,$r)<= 0}sub _vstring {my($self,$n)=@_;$n =~ s/^v// or die "Parse::PMFile::_vstring() called with invalid arg [$n]";pack "U*",split /\./,$n}sub _float2vv {my($self,$n)=@_;my($rev)=int($n);$rev ||= 0;my($mantissa)=$n =~ /\.(\d{1,12})/;$mantissa ||= 0;$mantissa .= "0" while length($mantissa)%3;my$ret="v" .$rev;while ($mantissa){$mantissa =~ s/(\d{1,3})// or die "Panic: length>0 but not a digit? mantissa[$mantissa]";$ret .= ".".int($1)}$ret =~ s/(\.0)+/.0/;$ret}sub _readable {my($self,$n)=@_;$n =~ /^([\w\-\+\.]+)/;return $1 if defined $1 && length($1)>0;if ($] < 5.006){$self->_verbose(9,"Suspicious version string seen [$n]\n");return$n}my$better=sprintf "v%vd",$n;$self->_verbose(9,"n[$n] better[$better]");return$better}1; +PARSE_PMFILE + +$fatpacked{"String/ShellQuote.pm"} = '#line '.(1+__LINE__).' 
"'.__FILE__."\"\n".<<'STRING_SHELLQUOTE'; + package String::ShellQuote;use strict;use vars qw($VERSION @ISA @EXPORT);require Exporter;$VERSION='1.04';@ISA=qw(Exporter);@EXPORT=qw(shell_quote shell_quote_best_effort shell_comment_quote);sub croak {require Carp;goto&Carp::croak}sub _shell_quote_backend {my@in=@_;my@err=();if (0){require RS::Handy;print RS::Handy::data_dump(\@in)}return \@err,'' unless@in;my$ret='';my$saw_non_equal=0;for (@in){if (!defined $_ or $_ eq ''){$_="''";next}if (s/\x00//g){push@err,"No way to quote string containing null (\\000) bytes"}my$escape=0;if (/=/){if (!$saw_non_equal){$escape=1}}else {$saw_non_equal=1}if (m|[^\w!%+,\-./:=@^]|){$escape=1}if ($escape || (!$saw_non_equal && /=/)){s/'/'\\''/g;s|((?:'\\''){2,})|q{'"} . (q{'} x (length($1) / 4)) . q{"'}|ge;$_="'$_'";s/^''//;s/''$//}}continue {$ret .= "$_ "}chop$ret;return \@err,$ret}sub shell_quote {my ($rerr,$s)=_shell_quote_backend @_;if (@$rerr){my%seen;@$rerr=grep {!$seen{$_}++}@$rerr;my$s=join '',map {"shell_quote(): $_\n"}@$rerr;chomp$s;croak$s}return$s}sub shell_quote_best_effort {my ($rerr,$s)=_shell_quote_backend @_;return$s}sub shell_comment_quote {return '' unless @_;unless (@_==1){croak "Too many arguments to shell_comment_quote " ."(got " .@_ ." expected 1)"}local $_=shift;s/\n/\n#/g;return $_}1; +STRING_SHELLQUOTE + +$fatpacked{"lib/core/only.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'LIB_CORE_ONLY'; + package lib::core::only;use strict;use warnings FATAL=>'all';use Config;sub import {@INC=@Config{qw(privlibexp archlibexp)};return}1; +LIB_CORE_ONLY + +$fatpacked{"local/lib.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'LOCAL_LIB'; + package local::lib;use 5.006;use strict;use warnings;use Config;our$VERSION='2.000015';$VERSION=eval$VERSION;BEGIN {*_WIN32=($^O eq 'MSWin32' || $^O eq 'NetWare' || $^O eq 'symbian')? sub(){1}: sub(){0};*_USE_FSPEC=($^O eq 'MacOS' || $^O eq 'VMS' || $INC{'File/Spec.pm'})? sub(){1}: sub(){0}}our$_DIR_JOIN=_WIN32 ? '\\' : '/';our$_DIR_SPLIT=(_WIN32 || $^O eq 'cygwin')? qr{[\\/]} : qr{/};our$_ROOT=_WIN32 ? do {my$UNC=qr{[\\/]{2}[^\\/]+[\\/][^\\/]+};qr{^(?:$UNC|[A-Za-z]:|)$_DIR_SPLIT}}: qr{^/};our$_PERL;sub _cwd {my$drive=shift;if (!$_PERL){($_PERL)=$^X =~ /(.+)/;if (_is_abs($_PERL)){}elsif (-x $Config{perlpath}){$_PERL=$Config{perlpath}}else {($_PERL)=map {/(.*)/}grep {-x $_}map {join($_DIR_JOIN,$_,$_PERL)}split /\Q$Config{path_sep}\E/,$ENV{PATH}}}local@ENV{qw(PATH IFS CDPATH ENV BASH_ENV)};my$cmd=$drive ? "eval { Cwd::getdcwd(q($drive)) }" : 'getcwd';my$cwd=`"$_PERL" -MCwd -le "print $cmd"`;chomp$cwd;if (!length$cwd && $drive){$cwd=$drive}$cwd =~ s/$_DIR_SPLIT?$/$_DIR_JOIN/;$cwd}sub _catdir {if (_USE_FSPEC){require File::Spec;File::Spec->catdir(@_)}else {my$dir=join($_DIR_JOIN,@_);$dir =~ s{($_DIR_SPLIT)(?:\.?$_DIR_SPLIT)+}{$1}g;$dir}}sub _is_abs {if (_USE_FSPEC){require File::Spec;File::Spec->file_name_is_absolute($_[0])}else {$_[0]=~ $_ROOT}}sub _rel2abs {my ($dir,$base)=@_;return$dir if _is_abs($dir);$base=_WIN32 && $dir =~ s/^([A-Za-z]:)// ? _cwd("$1"): $base ? $base : _cwd;return _catdir($base,$dir)}sub import {my ($class,@args)=@_;push@args,@ARGV if $0 eq '-';my@steps;my%opts;my$shelltype;while (@args){my$arg=shift@args;if ($arg =~ /\xE2\x88\x92/ or $arg =~ /−/){die <<'DEATH'}elsif ($arg eq '--self-contained'){die <<'DEATH'}elsif($arg =~ /^--deactivate(?:=(.*))?$/){my$path=defined $1 ? 
$1 : shift@args;push@steps,['deactivate',$path]}elsif ($arg eq '--deactivate-all'){push@steps,['deactivate_all']}elsif ($arg =~ /^--shelltype(?:=(.*))?$/){$shelltype=defined $1 ? $1 : shift@args}elsif ($arg eq '--no-create'){$opts{no_create}=1}elsif ($arg =~ /^--/){die "Unknown import argument: $arg"}else {push@steps,['activate',$arg]}}if (!@steps){push@steps,['activate',undef]}my$self=$class->new(%opts);for (@steps){my ($method,@args)=@$_;$self=$self->$method(@args)}if ($0 eq '-'){print$self->environment_vars_string($shelltype);exit 0}else {$self->setup_local_lib}}sub new {my$class=shift;bless {@_},$class}sub clone {my$self=shift;bless {%$self,@_},ref$self}sub inc {$_[0]->{inc}||= \@INC}sub libs {$_[0]->{libs}||= [\'PERL5LIB' ]}sub bins {$_[0]->{bins}||= [\'PATH' ]}sub roots {$_[0]->{roots}||= [\'PERL_LOCAL_LIB_ROOT' ]}sub extra {$_[0]->{extra}||= {}}sub no_create {$_[0]->{no_create}}my$_archname=$Config{archname};my$_version=$Config{version};my@_inc_version_list=reverse split / /,$Config{inc_version_list};my$_path_sep=$Config{path_sep};sub _as_list {my$list=shift;grep length,map {!(ref $_ && ref $_ eq 'SCALAR')? $_ : (defined$ENV{$$_}? split(/\Q$_path_sep/,$ENV{$$_}): ())}ref$list ? @$list : $list}sub _remove_from {my ($list,@remove)=@_;return @$list if!@remove;my%remove=map {$_=>1}@remove;grep!$remove{$_},_as_list($list)}my@_lib_subdirs=([$_version,$_archname],[$_version],[$_archname],(@_inc_version_list ? \@_inc_version_list : ()),[],);sub install_base_bin_path {my ($class,$path)=@_;return _catdir($path,'bin')}sub install_base_perl_path {my ($class,$path)=@_;return _catdir($path,'lib','perl5')}sub install_base_arch_path {my ($class,$path)=@_;_catdir($class->install_base_perl_path($path),$_archname)}sub lib_paths_for {my ($class,$path)=@_;my$base=$class->install_base_perl_path($path);return map {_catdir($base,@$_)}@_lib_subdirs}sub _mm_escape_path {my$path=shift;$path =~ s/\\/\\\\/g;if ($path =~ s/ /\\ /g){$path=qq{"$path"}}return$path}sub _mb_escape_path {my$path=shift;$path =~ s/\\/\\\\/g;return qq{"$path"}}sub installer_options_for {my ($class,$path)=@_;return (PERL_MM_OPT=>defined$path ? "INSTALL_BASE="._mm_escape_path($path): undef,PERL_MB_OPT=>defined$path ? "--install_base "._mb_escape_path($path): undef,)}sub active_paths {my ($self)=@_;$self=ref$self ? 
$self : $self->new;return grep {my$active_ll=$self->install_base_perl_path($_);grep {$_ eq $active_ll}@{$self->inc}}_as_list($self->roots)}sub deactivate {my ($self,$path)=@_;$self=$self->new unless ref$self;$path=$self->resolve_path($path);$path=$self->normalize_path($path);my@active_lls=$self->active_paths;if (!grep {$_ eq $path}@active_lls){warn "Tried to deactivate inactive local::lib '$path'\n";return$self}my%args=(bins=>[_remove_from($self->bins,$self->install_base_bin_path($path))],libs=>[_remove_from($self->libs,$self->install_base_perl_path($path))],inc=>[_remove_from($self->inc,$self->lib_paths_for($path))],roots=>[_remove_from($self->roots,$path)],);$args{extra}={$self->installer_options_for($args{roots}[0])};$self->clone(%args)}sub deactivate_all {my ($self)=@_;$self=$self->new unless ref$self;my@active_lls=$self->active_paths;my%args;if (@active_lls){%args=(bins=>[_remove_from($self->bins,map$self->install_base_bin_path($_),@active_lls)],libs=>[_remove_from($self->libs,map$self->install_base_perl_path($_),@active_lls)],inc=>[_remove_from($self->inc,map$self->lib_paths_for($_),@active_lls)],roots=>[_remove_from($self->roots,@active_lls)],)}$args{extra}={$self->installer_options_for(undef)};$self->clone(%args)}sub activate {my ($self,$path)=@_;$self=$self->new unless ref$self;$path=$self->resolve_path($path);$self->ensure_dir_structure_for($path)unless$self->no_create;$path=$self->normalize_path($path);my@active_lls=$self->active_paths;if (grep {$_ eq $path}@active_lls[1 .. $#active_lls]){$self=$self->deactivate($path)}my%args;if (!@active_lls || $active_lls[0]ne $path){%args=(bins=>[$self->install_base_bin_path($path),@{$self->bins}],libs=>[$self->install_base_perl_path($path),@{$self->libs}],inc=>[$self->lib_paths_for($path),@{$self->inc}],roots=>[$path,@{$self->roots}],)}$args{extra}={$self->installer_options_for($path)};$self->clone(%args)}sub normalize_path {my ($self,$path)=@_;$path=(Win32::GetShortPathName($path)|| $path)if $^O eq 'MSWin32';return$path}sub build_environment_vars_for {my$self=$_[0]->new->activate($_[1]);$self->build_environment_vars}sub build_activate_environment_vars_for {my$self=$_[0]->new->activate($_[1]);$self->build_environment_vars}sub build_deactivate_environment_vars_for {my$self=$_[0]->new->deactivate($_[1]);$self->build_environment_vars}sub build_deact_all_environment_vars_for {my$self=$_[0]->new->deactivate_all;$self->build_environment_vars}sub build_environment_vars {my$self=shift;(PATH=>join($_path_sep,_as_list($self->bins)),PERL5LIB=>join($_path_sep,_as_list($self->libs)),PERL_LOCAL_LIB_ROOT=>join($_path_sep,_as_list($self->roots)),%{$self->extra},)}sub setup_local_lib_for {my$self=$_[0]->new->activate($_[1]);$self->setup_local_lib}sub setup_local_lib {my$self=shift;require Carp::Heavy if$INC{'Carp.pm'};$self->setup_env_hash;@INC=@{$self->inc}}sub setup_env_hash_for {my$self=$_[0]->new->activate($_[1]);$self->setup_env_hash}sub setup_env_hash {my$self=shift;my%env=$self->build_environment_vars;for my$key (keys%env){if (defined$env{$key}){$ENV{$key}=$env{$key}}else {delete$ENV{$key}}}}sub print_environment_vars_for {print $_[0]->environment_vars_string_for(@_[1..$#_])}sub environment_vars_string_for {my$self=$_[0]->new->activate($_[1]);$self->environment_vars_string}sub environment_vars_string {my ($self,$shelltype)=@_;$shelltype ||= $self->guess_shelltype;my$extra=$self->extra;my@envs=(PATH=>$self->bins,PERL5LIB=>$self->libs,PERL_LOCAL_LIB_ROOT=>$self->roots,map {$_=>$extra->{$_}}sort keys 
%$extra,);$self->_build_env_string($shelltype,\@envs)}sub _build_env_string {my ($self,$shelltype,$envs)=@_;my@envs=@$envs;my$build_method="build_${shelltype}_env_declaration";my$out='';while (@envs){my ($name,$value)=(shift(@envs),shift(@envs));if (ref$value && @$value==1 && ref$value->[0]&& ref$value->[0]eq 'SCALAR' && ${$value->[0]}eq $name){next}$out .= $self->$build_method($name,$value)}my$wrap_method="wrap_${shelltype}_output";if ($self->can($wrap_method)){return$self->$wrap_method($out)}return$out}sub build_bourne_env_declaration {my ($class,$name,$args)=@_;my$value=$class->_interpolate($args,'${%s}',qr/["\\\$!`]/,'\\%s');if (!defined$value){return qq{unset $name;\n}}$value =~ s/(^|\G|$_path_sep)\$\{$name\}$_path_sep/$1\${$name}\${$name+$_path_sep}/g;$value =~ s/$_path_sep\$\{$name\}$/\${$name+$_path_sep}\${$name}/;qq{${name}="$value"; export ${name};\n}}sub build_csh_env_declaration {my ($class,$name,$args)=@_;my ($value,@vars)=$class->_interpolate($args,'${%s}','"','"\\%s"');if (!defined$value){return qq{unsetenv $name;\n}}my$out='';for my$var (@vars){$out .= qq{if ! \$?$name setenv $name '';\n}}my$value_without=$value;if ($value_without =~ s/(?:^|$_path_sep)\$\{$name\}(?:$_path_sep|$)//g){$out .= qq{if "\${$name}" != '' setenv $name "$value";\n};$out .= qq{if "\${$name}" == '' }}$out .= qq{setenv $name "$value_without";\n};return$out}sub build_cmd_env_declaration {my ($class,$name,$args)=@_;my$value=$class->_interpolate($args,'%%%s%%',qr(%),'%s');if (!$value){return qq{\@set $name=\n}}my$out='';my$value_without=$value;if ($value_without =~ s/(?:^|$_path_sep)%$name%(?:$_path_sep|$)//g){$out .= qq{\@if not "%$name%"=="" set "$name=$value"\n};$out .= qq{\@if "%$name%"=="" }}$out .= qq{\@set "$name=$value_without"\n};return$out}sub build_powershell_env_declaration {my ($class,$name,$args)=@_;my$value=$class->_interpolate($args,'$env:%s','"','`%s');if (!$value){return qq{Remove-Item -ErrorAction 0 Env:\\$name;\n}}my$maybe_path_sep=qq{\$(if("\$env:$name"-eq""){""}else{"$_path_sep"})};$value =~ s/(^|\G|$_path_sep)\$env:$name$_path_sep/$1\$env:$name"+$maybe_path_sep+"/g;$value =~ s/$_path_sep\$env:$name$/"+$maybe_path_sep+\$env:$name+"/;qq{\$env:$name = \$("$value");\n}}sub wrap_powershell_output {my ($class,$out)=@_;return$out || " \n"}sub build_fish_env_declaration {my ($class,$name,$args)=@_;my$value=$class->_interpolate($args,'$%s',qr/[\\"' ]/,'\\%s');if (!defined$value){return qq{set -e $name;\n}}$value =~ s/$_path_sep/ /g;qq{set -x $name $value;\n}}sub _interpolate {my ($class,$args,$var_pat,$escape,$escape_pat)=@_;return unless defined$args;my@args=ref$args ? @$args : $args;return unless@args;my@vars=map {$$_}grep {ref $_ eq 'SCALAR'}@args;my$string=join$_path_sep,map {ref $_ eq 'SCALAR' ? sprintf($var_pat,$$_): do {s/($escape)/sprintf($escape_pat, $1)/ge;$_}}@args;return wantarray ? 
($string,\@vars): $string}sub pipeline;sub pipeline {my@methods=@_;my$last=pop(@methods);if (@methods){\sub {my ($obj,@args)=@_;$obj->${pipeline@methods}($obj->$last(@args))}}else {\sub {shift->$last(@_)}}}sub resolve_path {my ($class,$path)=@_;$path=$class->${pipeline qw(resolve_relative_path resolve_home_path resolve_empty_path)}($path);$path}sub resolve_empty_path {my ($class,$path)=@_;if (defined$path){$path}else {'~/perl5'}}sub resolve_home_path {my ($class,$path)=@_;$path =~ /^~([^\/]*)/ or return$path;my$user=$1;my$homedir=do {if (!length($user)&& defined$ENV{HOME}){$ENV{HOME}}else {require File::Glob;File::Glob::bsd_glob("~$user",File::Glob::GLOB_TILDE())}};unless (defined$homedir){require Carp;require Carp::Heavy;Carp::croak("Couldn't resolve homedir for " .(defined$user ? $user : 'current user'))}$path =~ s/^~[^\/]*/$homedir/;$path}sub resolve_relative_path {my ($class,$path)=@_;_rel2abs($path)}sub ensure_dir_structure_for {my ($class,$path)=@_;unless (-d $path){warn "Attempting to create directory ${path}\n"}require File::Basename;my@dirs;while(!-d $path){push@dirs,$path;$path=File::Basename::dirname($path)}mkdir $_ for reverse@dirs;return}sub guess_shelltype {my$shellbin =defined$ENV{SHELL}? ($ENV{SHELL}=~ /([\w.]+)$/)[-1]: ($^O eq 'MSWin32' && exists$ENV{'!EXITCODE'})? 'bash' : ($^O eq 'MSWin32' && $ENV{PROMPT}&& $ENV{COMSPEC})? ($ENV{COMSPEC}=~ /([\w.]+)$/)[-1]: ($^O eq 'MSWin32' &&!$ENV{PROMPT})? 'powershell.exe' : 'sh';for ($shellbin){return /csh$/ ? 'csh' : /fish/ ? 'fish' : /command(?:\.com)?$/i ? 'cmd' : /cmd(?:\.exe)?$/i ? 'cmd' : /4nt(?:\.exe)?$/i ? 'cmd' : /powershell(?:\.exe)?$/i ? 'powershell' : 'bourne'}}1; + WHOA THERE! It looks like you've got some fancy dashes in your commandline! + These are *not* the traditional -- dashes that software recognizes. You + probably got these by copy-pasting from the perldoc for this module as + rendered by a UTF8-capable formatter. This most typically happens on an OS X + terminal, but can happen elsewhere too. Please try again after replacing the + dashes with normal minus signs. + DEATH + FATAL: The local::lib --self-contained flag has never worked reliably and the + original author, Mark Stosberg, was unable or unwilling to maintain it. As + such, this flag has been removed from the local::lib codebase in order to + prevent misunderstandings and potentially broken builds. The local::lib authors + recommend that you look at the lib::core::only module shipped with this + distribution in order to create a more robust environment that is equivalent to + what --self-contained provided (although quite possibly not what you originally + thought it provided due to the poor quality of the documentation, for which we + apologise). + DEATH +LOCAL_LIB + +$fatpacked{"parent.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'PARENT'; + package parent;use strict;use vars qw($VERSION);$VERSION='0.228';sub import {my$class=shift;my$inheritor=caller(0);if (@_ and $_[0]eq '-norequire'){shift @_}else {for (my@filename=@_){if ($_ eq $inheritor){warn "Class '$inheritor' tried to inherit from itself\n"};s{::|'}{/}g;require "$_.pm"}}{no strict 'refs';push @{"$inheritor\::ISA"},@_}};"All your base are belong to us" +PARENT + +$fatpacked{"version.pm"} = '#line '.(1+__LINE__).' 
"'.__FILE__."\"\n".<<'VERSION'; + package version;use 5.006002;use strict;use warnings::register;if ($] >= 5.015){warnings::register_categories(qw/version/)}use vars qw(@ISA $VERSION $CLASS $STRICT $LAX *declare *qv);$VERSION=0.9912;$CLASS='version';{local$SIG{'__DIE__'};if (1){eval "use version::vpp $VERSION";die "$@" if ($@);push@ISA,"version::vpp";local $^W;*version::qv=\&version::vpp::qv;*version::declare=\&version::vpp::declare;*version::_VERSION=\&version::vpp::_VERSION;*version::vcmp=\&version::vpp::vcmp;*version::new=\&version::vpp::new;*version::numify=\&version::vpp::numify;*version::normal=\&version::vpp::normal;if ($] >= 5.009000){no strict 'refs';*version::stringify=\&version::vpp::stringify;*{'version::(""'}=\&version::vpp::stringify;*{'version::(<=>'}=\&version::vpp::vcmp;*version::parse=\&version::vpp::parse}}else {push@ISA,"version::vxs";local $^W;*version::declare=\&version::vxs::declare;*version::qv=\&version::vxs::qv;*version::_VERSION=\&version::vxs::_VERSION;*version::vcmp=\&version::vxs::VCMP;*version::new=\&version::vxs::new;*version::numify=\&version::vxs::numify;*version::normal=\&version::vxs::normal;if ($] >= 5.009000){no strict 'refs';*version::stringify=\&version::vxs::stringify;*{'version::(""'}=\&version::vxs::stringify;*{'version::(<=>'}=\&version::vxs::VCMP;*version::parse=\&version::vxs::parse}}}require version::regex;*version::is_lax=\&version::regex::is_lax;*version::is_strict=\&version::regex::is_strict;*LAX=\$version::regex::LAX;*STRICT=\$version::regex::STRICT;sub import {no strict 'refs';my ($class)=shift;unless ($class eq $CLASS){local $^W;*{$class.'::declare'}=\&{$CLASS.'::declare'};*{$class.'::qv'}=\&{$CLASS.'::qv'}}my%args;if (@_){map {$args{$_}=1}@_}else {%args=(qv=>1,'UNIVERSAL::VERSION'=>1,)}my$callpkg=caller();if (exists($args{declare})){*{$callpkg.'::declare'}=sub {return$class->declare(shift)}unless defined(&{$callpkg.'::declare'})}if (exists($args{qv})){*{$callpkg.'::qv'}=sub {return$class->qv(shift)}unless defined(&{$callpkg.'::qv'})}if (exists($args{'UNIVERSAL::VERSION'})){local $^W;*UNIVERSAL::VERSION =\&{$CLASS.'::_VERSION'}}if (exists($args{'VERSION'})){*{$callpkg.'::VERSION'}=\&{$CLASS.'::_VERSION'}}if (exists($args{'is_strict'})){*{$callpkg.'::is_strict'}=\&{$CLASS.'::is_strict'}unless defined(&{$callpkg.'::is_strict'})}if (exists($args{'is_lax'})){*{$callpkg.'::is_lax'}=\&{$CLASS.'::is_lax'}unless defined(&{$callpkg.'::is_lax'})}}1; +VERSION + +$fatpacked{"version/regex.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'VERSION_REGEX'; + package version::regex;use strict;use vars qw($VERSION $CLASS $STRICT $LAX);$VERSION=0.9912;my$FRACTION_PART=qr/\.[0-9]+/;my$STRICT_INTEGER_PART=qr/0|[1-9][0-9]*/;my$LAX_INTEGER_PART=qr/[0-9]+/;my$STRICT_DOTTED_DECIMAL_PART=qr/\.[0-9]{1,3}/;my$LAX_DOTTED_DECIMAL_PART=qr/\.[0-9]+/;my$LAX_ALPHA_PART=qr/_[0-9]+/;my$STRICT_DECIMAL_VERSION=qr/ $STRICT_INTEGER_PART $FRACTION_PART? /x;my$STRICT_DOTTED_DECIMAL_VERSION=qr/ v $STRICT_INTEGER_PART $STRICT_DOTTED_DECIMAL_PART{2,} /x;$STRICT=qr/ $STRICT_DECIMAL_VERSION | $STRICT_DOTTED_DECIMAL_VERSION /x;my$LAX_DECIMAL_VERSION=qr/ $LAX_INTEGER_PART (?: \. | $FRACTION_PART $LAX_ALPHA_PART? )? + | + $FRACTION_PART $LAX_ALPHA_PART? + /x;my$LAX_DOTTED_DECIMAL_VERSION=qr/ + v $LAX_INTEGER_PART (?: $LAX_DOTTED_DECIMAL_PART+ $LAX_ALPHA_PART? )? + | + $LAX_INTEGER_PART? $LAX_DOTTED_DECIMAL_PART{2,} $LAX_ALPHA_PART? 
+ /x;$LAX=qr/ undef | $LAX_DECIMAL_VERSION | $LAX_DOTTED_DECIMAL_VERSION /x;sub is_strict {defined $_[0]&& $_[0]=~ qr/ \A $STRICT \z /x}sub is_lax {defined $_[0]&& $_[0]=~ qr/ \A $LAX \z /x}1; +VERSION_REGEX + +$fatpacked{"version/vpp.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'VERSION_VPP'; + package charstar;use overload ('""'=>\&thischar,'0+'=>\&thischar,'++'=>\&increment,'--'=>\&decrement,'+'=>\&plus,'-'=>\&minus,'*'=>\&multiply,'cmp'=>\&cmp,'<=>'=>\&spaceship,'bool'=>\&thischar,'='=>\&clone,);sub new {my ($self,$string)=@_;my$class=ref($self)|| $self;my$obj={string=>[split(//,$string)],current=>0,};return bless$obj,$class}sub thischar {my ($self)=@_;my$last=$#{$self->{string}};my$curr=$self->{current};if ($curr >= 0 && $curr <= $last){return$self->{string}->[$curr]}else {return ''}}sub increment {my ($self)=@_;$self->{current}++}sub decrement {my ($self)=@_;$self->{current}--}sub plus {my ($self,$offset)=@_;my$rself=$self->clone;$rself->{current}+= $offset;return$rself}sub minus {my ($self,$offset)=@_;my$rself=$self->clone;$rself->{current}-= $offset;return$rself}sub multiply {my ($left,$right,$swapped)=@_;my$char=$left->thischar();return$char * $right}sub spaceship {my ($left,$right,$swapped)=@_;unless (ref($right)){$right=$left->new($right)}return$left->{current}<=> $right->{current}}sub cmp {my ($left,$right,$swapped)=@_;unless (ref($right)){if (length($right)==1){return$left->thischar cmp $right}$right=$left->new($right)}return$left->currstr cmp $right->currstr}sub bool {my ($self)=@_;my$char=$self->thischar;return ($char ne '')}sub clone {my ($left,$right,$swapped)=@_;$right={string=>[@{$left->{string}}],current=>$left->{current},};return bless$right,ref($left)}sub currstr {my ($self,$s)=@_;my$curr=$self->{current};my$last=$#{$self->{string}};if (defined($s)&& $s->{current}< $last){$last=$s->{current}}my$string=join('',@{$self->{string}}[$curr..$last]);return$string}package version::vpp;use 5.006002;use strict;use warnings::register;use Config;use vars qw($VERSION $CLASS @ISA $LAX $STRICT $WARN_CATEGORY);$VERSION=0.9912;$CLASS='version::vpp';if ($] > 5.015){warnings::register_categories(qw/version/);$WARN_CATEGORY='version'}else {$WARN_CATEGORY='numeric'}require version::regex;*version::vpp::is_strict=\&version::regex::is_strict;*version::vpp::is_lax=\&version::regex::is_lax;*LAX=\$version::regex::LAX;*STRICT=\$version::regex::STRICT;use overload ('""'=>\&stringify,'0+'=>\&numify,'cmp'=>\&vcmp,'<=>'=>\&vcmp,'bool'=>\&vbool,'+'=>\&vnoop,'-'=>\&vnoop,'*'=>\&vnoop,'/'=>\&vnoop,'+='=>\&vnoop,'-='=>\&vnoop,'*='=>\&vnoop,'/='=>\&vnoop,'abs'=>\&vnoop,);sub import {no strict 'refs';my ($class)=shift;unless ($class eq $CLASS){local $^W;*{$class.'::declare'}=\&{$CLASS.'::declare'};*{$class.'::qv'}=\&{$CLASS.'::qv'}}my%args;if (@_){map {$args{$_}=1}@_}else {%args=(qv=>1,'UNIVERSAL::VERSION'=>1,)}my$callpkg=caller();if (exists($args{declare})){*{$callpkg.'::declare'}=sub {return$class->declare(shift)}unless defined(&{$callpkg.'::declare'})}if (exists($args{qv})){*{$callpkg.'::qv'}=sub {return$class->qv(shift)}unless defined(&{$callpkg.'::qv'})}if (exists($args{'UNIVERSAL::VERSION'})){no warnings qw/redefine/;*UNIVERSAL::VERSION =\&{$CLASS.'::_VERSION'}}if (exists($args{'VERSION'})){*{$callpkg.'::VERSION'}=\&{$CLASS.'::_VERSION'}}if (exists($args{'is_strict'})){*{$callpkg.'::is_strict'}=\&{$CLASS.'::is_strict'}unless defined(&{$callpkg.'::is_strict'})}if (exists($args{'is_lax'})){*{$callpkg.'::is_lax'}=\&{$CLASS.'::is_lax'}unless 
defined(&{$callpkg.'::is_lax'})}}my$VERSION_MAX=0x7FFFFFFF;use constant TRUE=>1;use constant FALSE=>0;sub isDIGIT {my ($char)=shift->thischar();return ($char =~ /\d/)}sub isALPHA {my ($char)=shift->thischar();return ($char =~ /[a-zA-Z]/)}sub isSPACE {my ($char)=shift->thischar();return ($char =~ /\s/)}sub BADVERSION {my ($s,$errstr,$error)=@_;if ($errstr){$$errstr=$error}return$s}sub prescan_version {my ($s,$strict,$errstr,$sqv,$ssaw_decimal,$swidth,$salpha)=@_;my$qv=defined$sqv ? $$sqv : FALSE;my$saw_decimal=defined$ssaw_decimal ? $$ssaw_decimal : 0;my$width=defined$swidth ? $$swidth : 3;my$alpha=defined$salpha ? $$salpha : FALSE;my$d=$s;if ($qv && isDIGIT($d)){goto dotted_decimal_version}if ($d eq 'v'){$d++;if (isDIGIT($d)){$qv=TRUE}else {return BADVERSION($s,$errstr,"Invalid version format (dotted-decimal versions require at least three parts)")}dotted_decimal_version: if ($strict && $d eq '0' && isDIGIT($d+1)){return BADVERSION($s,$errstr,"Invalid version format (no leading zeros)")}while (isDIGIT($d)){$d++}if ($d eq '.'){$saw_decimal++;$d++}else {if ($strict){return BADVERSION($s,$errstr,"Invalid version format (dotted-decimal versions require at least three parts)")}else {goto version_prescan_finish}}{my$i=0;my$j=0;while (isDIGIT($d)){$i++;while (isDIGIT($d)){$d++;$j++;if ($strict && $j > 3){return BADVERSION($s,$errstr,"Invalid version format (maximum 3 digits between decimals)")}}if ($d eq '_'){if ($strict){return BADVERSION($s,$errstr,"Invalid version format (no underscores)")}if ($alpha){return BADVERSION($s,$errstr,"Invalid version format (multiple underscores)")}$d++;$alpha=TRUE}elsif ($d eq '.'){if ($alpha){return BADVERSION($s,$errstr,"Invalid version format (underscores before decimal)")}$saw_decimal++;$d++}elsif (!isDIGIT($d)){last}$j=0}if ($strict && $i < 2){return BADVERSION($s,$errstr,"Invalid version format (dotted-decimal versions require at least three parts)")}}}else {my$j=0;if ($strict){if ($d eq '.'){return BADVERSION($s,$errstr,"Invalid version format (0 before decimal required)")}if ($d eq '0' && isDIGIT($d+1)){return BADVERSION($s,$errstr,"Invalid version format (no leading zeros)")}}if ($d eq '-'){return BADVERSION($s,$errstr,"Invalid version format (negative version number)")}while (isDIGIT($d)){$d++}if ($d eq '.'){$saw_decimal++;$d++}elsif (!$d || $d eq ';' || isSPACE($d)|| $d eq '}'){if ($d==$s){return BADVERSION($s,$errstr,"Invalid version format (version required)")}goto version_prescan_finish}elsif ($d==$s){return BADVERSION($s,$errstr,"Invalid version format (non-numeric data)")}elsif ($d eq '_'){if ($strict){return BADVERSION($s,$errstr,"Invalid version format (no underscores)")}elsif (isDIGIT($d+1)){return BADVERSION($s,$errstr,"Invalid version format (alpha without decimal)")}else {return BADVERSION($s,$errstr,"Invalid version format (misplaced underscore)")}}elsif ($d){return BADVERSION($s,$errstr,"Invalid version format (non-numeric data)")}if ($d &&!isDIGIT($d)&& ($strict ||!($d eq ';' || isSPACE($d)|| $d eq '}'))){return BADVERSION($s,$errstr,"Invalid version format (fractional part required)")}while (isDIGIT($d)){$d++;$j++;if ($d eq '.' 
&& isDIGIT($d-1)){if ($alpha){return BADVERSION($s,$errstr,"Invalid version format (underscores before decimal)")}if ($strict){return BADVERSION($s,$errstr,"Invalid version format (dotted-decimal versions must begin with 'v')")}$d=$s;$qv=TRUE;goto dotted_decimal_version}if ($d eq '_'){if ($strict){return BADVERSION($s,$errstr,"Invalid version format (no underscores)")}if ($alpha){return BADVERSION($s,$errstr,"Invalid version format (multiple underscores)")}if (!isDIGIT($d+1)){return BADVERSION($s,$errstr,"Invalid version format (misplaced underscore)")}$width=$j;$d++;$alpha=TRUE}}}version_prescan_finish: while (isSPACE($d)){$d++}if ($d &&!isDIGIT($d)&& (!($d eq ';' || $d eq '}'))){return BADVERSION($s,$errstr,"Invalid version format (non-numeric data)")}if ($saw_decimal > 1 && ($d-1)eq '.'){return BADVERSION($s,$errstr,"Invalid version format (trailing decimal)")}if (defined$sqv){$$sqv=$qv}if (defined$swidth){$$swidth=$width}if (defined$ssaw_decimal){$$ssaw_decimal=$saw_decimal}if (defined$salpha){$$salpha=$alpha}return$d}sub scan_version {my ($s,$rv,$qv)=@_;my$start;my$pos;my$last;my$errstr;my$saw_decimal=0;my$width=3;my$alpha=FALSE;my$vinf=FALSE;my@av;$s=new charstar$s;while (isSPACE($s)){$s++}$last=prescan_version($s,FALSE,\$errstr,\$qv,\$saw_decimal,\$width,\$alpha);if ($errstr){if ($s ne 'undef'){require Carp;Carp::croak($errstr)}}$start=$s;if ($s eq 'v'){$s++}$pos=$s;if ($qv){$$rv->{qv}=$qv}if ($alpha){$$rv->{alpha}=$alpha}if (!$qv && $width < 3){$$rv->{width}=$width}while (isDIGIT($pos)){$pos++}if (!isALPHA($pos)){my$rev;for (;;){$rev=0;{my$end=$pos;my$mult=1;my$orev;if (!$qv && $s > $start && $saw_decimal==1){$mult *= 100;while ($s < $end){$orev=$rev;$rev += $s * $mult;$mult /= 10;if ((abs($orev)> abs($rev))|| (abs($rev)> $VERSION_MAX)){warn("Integer overflow in version %d",$VERSION_MAX);$s=$end - 1;$rev=$VERSION_MAX;$vinf=1}$s++;if ($s eq '_'){$s++}}}else {while (--$end >= $s){$orev=$rev;$rev += $end * $mult;$mult *= 10;if ((abs($orev)> abs($rev))|| (abs($rev)> $VERSION_MAX)){warn("Integer overflow in version");$end=$s - 1;$rev=$VERSION_MAX;$vinf=1}}}}push@av,$rev;if ($vinf){$s=$last;last}elsif ($pos eq '.'){$pos++;if ($qv){while ($pos eq '0'){$pos++}}$s=$pos}elsif ($pos eq '_' && isDIGIT($pos+1)){$s=++$pos}elsif ($pos eq ',' && isDIGIT($pos+1)){$s=++$pos}elsif (isDIGIT($pos)){$s=$pos}else {$s=$pos;last}if ($qv){while (isDIGIT($pos)){$pos++}}else {my$digits=0;while ((isDIGIT($pos)|| $pos eq '_')&& $digits < 3){if ($pos ne '_'){$digits++}$pos++}}}}if ($qv){my$len=$#av;$len=2 - $len;while ($len-- > 0){push@av,0}}if ($vinf){$$rv->{original}="v.Inf";$$rv->{vinf}=1}elsif ($s > $start){$$rv->{original}=$start->currstr($s);if ($qv && $saw_decimal==1 && $start ne 'v'){$$rv->{original}='v' .$$rv->{original}}}else {$$rv->{original}='0';push(@av,0)}$$rv->{version}=\@av;if ($s eq 'undef'){$s += 5}return$s}sub new {my$class=shift;unless (defined$class or $#_ > 1){require Carp;Carp::croak('Usage: version::new(class, version)')}my$self=bless ({},ref ($class)|| $class);my$qv=FALSE;if ($#_==1){$qv=TRUE}my$value=pop;if (ref($value)&& eval('$value->isa("version")')){$self->{version}=[@{$value->{version}}];$self->{qv}=1 if$value->{qv};$self->{alpha}=1 if$value->{alpha};$self->{original}=''.$value->{original};return$self}if (not defined$value or $value =~ /^undef$/){push @{$self->{version}},0;$self->{original}="0";return ($self)}if (ref($value)=~ m/ARRAY|HASH/){require Carp;Carp::croak("Invalid version format (non-numeric data)")}$value=_un_vstring($value);if ($Config{d_setlocale}){use POSIX 
qw/locale_h/;use if$Config{d_setlocale},'locale';my$currlocale=setlocale(LC_ALL);if (localeconv()->{decimal_point}eq ','){$value =~ tr/,/./}}if ($value =~ /\d+.?\d*e[-+]?\d+/){$value=sprintf("%.9f",$value);$value =~ s/(0+)$//}my$s=scan_version($value,\$self,$qv);if ($s){warn("Version string '%s' contains invalid data; " ."ignoring: '%s'",$value,$s)}return ($self)}*parse=\&new;sub numify {my ($self)=@_;unless (_verify($self)){require Carp;Carp::croak("Invalid version object")}my$width=$self->{width}|| 3;my$alpha=$self->{alpha}|| "";my$len=$#{$self->{version}};my$digit=$self->{version}[0];my$string=sprintf("%d.",$digit);if ($alpha and warnings::enabled()){warnings::warn($WARN_CATEGORY,'alpha->numify() is lossy')}for (my$i=1 ;$i < $len ;$i++ ){$digit=$self->{version}[$i];if ($width < 3){my$denom=10**(3-$width);my$quot=int($digit/$denom);my$rem=$digit - ($quot * $denom);$string .= sprintf("%0".$width."d_%d",$quot,$rem)}else {$string .= sprintf("%03d",$digit)}}if ($len > 0){$digit=$self->{version}[$len];if ($alpha && $width==3){$string .= "_"}$string .= sprintf("%0".$width."d",$digit)}else {$string .= sprintf("000")}return$string}sub normal {my ($self)=@_;unless (_verify($self)){require Carp;Carp::croak("Invalid version object")}my$alpha=$self->{alpha}|| "";my$qv=$self->{qv}|| "";my$len=$#{$self->{version}};my$digit=$self->{version}[0];my$string=sprintf("v%d",$digit);for (my$i=1 ;$i < $len ;$i++ ){$digit=$self->{version}[$i];$string .= sprintf(".%d",$digit)}if ($len > 0){$digit=$self->{version}[$len];if ($alpha){$string .= sprintf("_%0d",$digit)}else {$string .= sprintf(".%0d",$digit)}}if ($len <= 2){for ($len=2 - $len;$len!=0;$len-- ){$string .= sprintf(".%0d",0)}}return$string}sub stringify {my ($self)=@_;unless (_verify($self)){require Carp;Carp::croak("Invalid version object")}return exists$self->{original}? $self->{original}: exists$self->{qv}? $self->normal : $self->numify}sub vcmp {require UNIVERSAL;my ($left,$right,$swap)=@_;my$class=ref($left);unless (UNIVERSAL::isa($right,$class)){$right=$class->new($right)}if ($swap){($left,$right)=($right,$left)}unless (_verify($left)){require Carp;Carp::croak("Invalid version object")}unless (_verify($right)){require Carp;Carp::croak("Invalid version format")}my$l=$#{$left->{version}};my$r=$#{$right->{version}};my$m=$l < $r ? 
$l : $r;my$lalpha=$left->is_alpha;my$ralpha=$right->is_alpha;my$retval=0;my$i=0;while ($i <= $m && $retval==0){$retval=$left->{version}[$i]<=> $right->{version}[$i];$i++}if ($retval==0 && $l==$r && $left->{version}[$m]==$right->{version}[$m]&& ($lalpha || $ralpha)){if ($lalpha &&!$ralpha){$retval=-1}elsif ($ralpha &&!$lalpha){$retval=+1}}if ($retval==0 && $l!=$r){if ($l < $r){while ($i <= $r && $retval==0){if ($right->{version}[$i]!=0){$retval=-1}$i++}}else {while ($i <= $l && $retval==0){if ($left->{version}[$i]!=0){$retval=+1}$i++}}}return$retval}sub vbool {my ($self)=@_;return vcmp($self,$self->new("0"),1)}sub vnoop {require Carp;Carp::croak("operation not supported with version object")}sub is_alpha {my ($self)=@_;return (exists$self->{alpha})}sub qv {my$value=shift;my$class=$CLASS;if (@_){$class=ref($value)|| $value;$value=shift}$value=_un_vstring($value);$value='v'.$value unless$value =~ /(^v|\d+\.\d+\.\d)/;my$obj=$CLASS->new($value);return bless$obj,$class}*declare=\&qv;sub is_qv {my ($self)=@_;return (exists$self->{qv})}sub _verify {my ($self)=@_;if (ref($self)&& eval {exists$self->{version}}&& ref($self->{version})eq 'ARRAY'){return 1}else {return 0}}sub _is_non_alphanumeric {my$s=shift;$s=new charstar$s;while ($s){return 0 if isSPACE($s);return 1 unless (isALPHA($s)|| isDIGIT($s)|| $s =~ /[.-]/);$s++}return 0}sub _un_vstring {my$value=shift;if (length($value)>= 1 && $value !~ /[,._]/ && _is_non_alphanumeric($value)){my$tvalue;if ($] >= 5.008_001){$tvalue=_find_magic_vstring($value);$value=$tvalue if length$tvalue}elsif ($] >= 5.006_000){$tvalue=sprintf("v%vd",$value);if ($tvalue =~ /^v\d+(\.\d+)*$/){$value=$tvalue}}}return$value}sub _find_magic_vstring {my$value=shift;my$tvalue='';require B;my$sv=B::svref_2object(\$value);my$magic=ref($sv)eq 'B::PVMG' ? $sv->MAGIC : undef;while ($magic){if ($magic->TYPE eq 'V'){$tvalue=$magic->PTR;$tvalue =~ s/^v?(.+)$/v$1/;last}else {$magic=$magic->MOREMAGIC}}return$tvalue}sub _VERSION {my ($obj,$req)=@_;my$class=ref($obj)|| $obj;no strict 'refs';if (exists$INC{"$class.pm"}and not %{"$class\::"}and $] >= 5.008){require Carp;Carp::croak("$class defines neither package nor VERSION" ."--version check failed")}my$version=eval "\$$class\::VERSION";if (defined$version){local $^W if $] <= 5.008;$version=version::vpp->new($version)}if (defined$req){unless (defined$version){require Carp;my$msg=$] < 5.006 ? "$class version $req required--this is only version " : "$class does not define \$$class\::VERSION" ."--version check failed";if ($ENV{VERSION_DEBUG}){Carp::confess($msg)}else {Carp::croak($msg)}}$req=version::vpp->new($req);if ($req > $version){require Carp;if ($req->is_qv){Carp::croak(sprintf ("%s version %s required--"."this is only version %s",$class,$req->normal,$version->normal))}else {Carp::croak(sprintf ("%s version %s required--"."this is only version %s",$class,$req->stringify,$version->stringify))}}}return defined$version ? 
$version->stringify : undef}1; +VERSION_VPP + +s/^ //mg for values %fatpacked; + +my $class = 'FatPacked::'.(0+\%fatpacked); +no strict 'refs'; +*{"${class}::files"} = sub { keys %{$_[0]} }; + +if ($] < 5.008) { + *{"${class}::INC"} = sub { + if (my $fat = $_[0]{$_[1]}) { + my $pos = 0; + my $last = length $fat; + return (sub { + return 0 if $pos == $last; + my $next = (1 + index $fat, "\n", $pos) || $last; + $_ .= substr $fat, $pos, $next - $pos; + $pos = $next; + return 1; + }); + } + }; +} + +else { + *{"${class}::INC"} = sub { + if (my $fat = $_[0]{$_[1]}) { + open my $fh, '<', \$fat + or die "FatPacker error loading $_[1] (could be a perl installation issue?)"; + return $fh; + } + return; + }; +} + +unshift @INC, bless \%fatpacked, $class; + } # END OF FATPACK CODE + + + +use strict; +use App::cpanminus::script; + + +unless (caller) { + my $app = App::cpanminus::script->new; + $app->parse_options(@ARGV); + exit $app->doit; +} + +__END__ + +=head1 NAME + +cpanm - get, unpack build and install modules from CPAN + +=head1 SYNOPSIS + + cpanm Test::More # install Test::More + cpanm MIYAGAWA/Plack-0.99_05.tar.gz # full distribution path + cpanm http://example.org/LDS/CGI.pm-3.20.tar.gz # install from URL + cpanm ~/dists/MyCompany-Enterprise-1.00.tar.gz # install from a local file + cpanm --interactive Task::Kensho # Configure interactively + cpanm . # install from local directory + cpanm --installdeps . # install all the deps for the current directory + cpanm -L extlib Plack # install Plack and all non-core deps into extlib + cpanm --mirror http://cpan.cpantesters.org/ DBI # use the fast-syncing mirror + cpanm --from https://cpan.metacpan.org/ Plack # use only the HTTPS mirror + +=head1 COMMANDS + +=over 4 + +=item (arguments) + +Command line arguments can be either a module name, distribution file, +local file path, HTTP URL or git repository URL. Following commands +will all work as you expect. + + cpanm Plack + cpanm Plack/Request.pm + cpanm MIYAGAWA/Plack-1.0000.tar.gz + cpanm /path/to/Plack-1.0000.tar.gz + cpanm http://cpan.metacpan.org/authors/id/M/MI/MIYAGAWA/Plack-0.9990.tar.gz + cpanm git://github.com/plack/Plack.git + +Additionally, you can use the notation using C<~> and C<@> to specify +version for a given module. C<~> specifies the version requirement in +the L format, while C<@> pins the exact version, and +is a shortcut for C<~"== VERSION">. + + cpanm Plack~1.0000 # 1.0000 or later + cpanm Plack~">= 1.0000, < 2.0000" # latest of 1.xxxx + cpanm Plack@0.9990 # specific version. same as Plack~"== 0.9990" + +The version query including specific version or range will be sent to +L to search for previous releases. The query will search for +BackPAN archives by default, unless you specify C<--dev> option, in +which case, archived versions will be filtered out. + +For a git repository, you can specify a branch, tag, or commit SHA to +build. The default is C + + cpanm git://github.com/plack/Plack.git@1.0000 # tag + cpanm git://github.com/plack/Plack.git@devel # branch + +=item -i, --install + +Installs the modules. This is a default behavior and this is just a +compatibility option to make it work like L or L. + +=item --self-upgrade + +Upgrades itself. It's just an alias for: + + cpanm App::cpanminus + +=item --info + +Displays the distribution information in +C format in the standard out. + +=item --installdeps + +Installs the dependencies of the target distribution but won't build +itself. Handy if you want to try the application from a version +controlled repository such as git. 
+ + cpanm --installdeps . + +=item --look + +Download and unpack the distribution and then open the directory with +your shell. Handy to poke around the source code or do manual +testing. + +=item -h, --help + +Displays the help message. + +=item -V, --version + +Displays the version number. + +=back + +=head1 OPTIONS + +You can specify the default options in C environment variable. + +=over 4 + +=item -f, --force + +Force install modules even when testing failed. + +=item -n, --notest + +Skip the testing of modules. Use this only when you just want to save +time for installing hundreds of distributions to the same perl and +architecture you've already tested to make sure it builds fine. + +Defaults to false, and you can say C<--no-notest> to override when it +is set in the default options in C. + +=item --test-only + +Run the tests only, and do not install the specified module or +distributions. Handy if you want to verify the new (or even old) +releases pass its unit tests without installing the module. + +Note that if you specify this option with a module or distribution +that has dependencies, these dependencies will be installed if you +don't currently have them. + +=item -S, --sudo + +Switch to the root user with C when installing modules. Use this +if you want to install modules to the system perl include path. + +Defaults to false, and you can say C<--no-sudo> to override when it is +set in the default options in C. + +=item -v, --verbose + +Makes the output verbose. It also enables the interactive +configuration. (See --interactive) + +=item -q, --quiet + +Makes the output even more quiet than the default. It only shows the +successful/failed dependencies to the output. + +=item -l, --local-lib + +Sets the L compatible path to install modules to. You +don't need to set this if you already configure the shell environment +variables using L, but this can be used to override that +as well. + +=item -L, --local-lib-contained + +Same with C<--local-lib> but with L<--self-contained> set. All +non-core dependencies will be installed even if they're already +installed. + +For instance, + + cpanm -L extlib Plack + +would install Plack and all of its non-core dependencies into the +directory C, which can be loaded from your application with: + + use local::lib '/path/to/extlib'; + +Note that this option does B reliably work with perl installations +supplied by operating system vendors that strips standard modules from perl, +such as RHEL, Fedora and CentOS, B you also install packages supplying +all the modules that have been stripped. For these systems you will probably +want to install the C meta-package which does just that. + +=item --self-contained + +When examining the dependencies, assume no non-core modules are +installed on the system. Handy if you want to bundle application +dependencies in one directory so you can distribute to other machines. + +=item --exclude-vendor + +Don't include modules installed under the 'vendor' paths when searching for +core modules when the C<--self-contained> flag is in effect. This restores +the behaviour from before version 1.7023 + +=item --mirror + +Specifies the base URL for the CPAN mirror to use, such as +C (you can omit the trailing slash). You +can specify multiple mirror URLs by repeating the command line option. + +You can use a local directory that has a CPAN mirror structure +(created by tools such as L or L) by using a special +URL scheme C. 
If the given URL begins with `/` (without any +scheme), it is considered as a file scheme as well. + + cpanm --mirror file:///path/to/mirror + cpanm --mirror ~/minicpan # Because shell expands ~ to /home/user + +Defaults to C. + +=item --mirror-only + +Download the mirror's 02packages.details.txt.gz index file instead of +querying the CPAN Meta DB. This will also effectively opt out sending +your local perl versions to backend database servers such as CPAN Meta +DB and MetaCPAN. + +Select this option if you are using a local mirror of CPAN, such as +minicpan when you're offline, or your own CPAN index (a.k.a darkpan). + +=item --from, -M + + cpanm -M https://cpan.metacpan.org/ + cpanm --from https://cpan.metacpan.org/ + +Use the given mirror URL and its index as the I source to search +and download modules from. + +It works similar to C<--mirror> and C<--mirror-only> combined, with a +small difference: unlike C<--mirror> which I the URL to the +list of mirrors, C<--from> (or C<-M> for short) uses the specified URL +as its I source to download index and modules from. This makes +the option always override the default mirror, which might have been +set via global options such as the one set by C +environment variable. + +B It might be useful if you name these options with your shell +aliases, like: + + alias minicpanm='cpanm --from ~/minicpan' + alias darkpan='cpanm --from http://mycompany.example.com/DPAN' + +=item --mirror-index + +B: Specifies the file path to C<02packages.details.txt> +for module search index. + +=item --cpanmetadb + +B: Specifies an alternate URI for CPAN MetaDB index lookups. + +=item --metacpan + +Prefers MetaCPAN API over CPAN MetaDB. + +=item --cpanfile + +B: Specified an alternate path for cpanfile to search for, +when C<--installdeps> command is in use. Defaults to C. + +=item --prompt + +Prompts when a test fails so that you can skip, force install, retry +or look in the shell to see what's going wrong. It also prompts when +one of the dependency failed if you want to proceed the installation. + +Defaults to false, and you can say C<--no-prompt> to override if it's +set in the default options in C. + +=item --dev + +B: search for a newer developer release as well. Defaults to false. + +=item --reinstall + +cpanm, when given a module name in the command line (i.e. C), checks the locally installed version first and skips if it is +already installed. This option makes it skip the check, so: + + cpanm --reinstall Plack + +would reinstall L even if your locally installed version is +latest, or even newer (which would happen if you install a developer +release from version control repositories). + +Defaults to false. + +=item --interactive + +Makes the configuration (such as C and C) +interactive, so you can answer questions in the distribution that +requires custom configuration or Task:: distributions. + +Defaults to false, and you can say C<--no-interactive> to override +when it's set in the default options in C. + +=item --pp, --pureperl + +Prefer Pure perl build of modules by setting C for +MakeMaker and C<--pureperl-only> for Build.PL based +distributions. Note that not all of the CPAN modules support this +convention yet. + +=item --with-recommends, --with-suggests + +B: Installs dependencies declared as C and +C respectively, per META spec. When these dependencies fail +to install, cpanm continues the installation, since they're just +recommendation/suggestion. 
+ +Enabling this could potentially make a circular dependency for a few +modules on CPAN, when C adds a module that C +back the module in return. + +There's also C<--without-recommend> and C<--without-suggests> to +override the default decision made earlier in C. + +Defaults to false for both. + +=item --with-develop + +B: Installs develop phase dependencies in META files or +C when used with C<--installdeps>. Defaults to false. + +=item --with-configure + +B: Installs configure phase dependencies in C +when used with C<--installdeps>. Defaults to false. + +=item --with-feature, --without-feature, --with-all-features + +B: Specifies the feature to enable, if a module supports +optional features per META spec 2.0. + + cpanm --with-feature=opt_csv Spreadsheet::Read + +the features can also be interactively chosen when C<--interactive> +option is enabled. + +C<--with-all-features> enables all the optional features, and +C<--without-feature> can select a feature to disable. + +=item --configure-timeout, --build-timeout, --test-timeout + +Specify the timeout length (in seconds) to wait for the configure, +build and test process. Current default values are: 60 for configure, +3600 for build and 1800 for test. + +=item --configure-args, --build-args, --test-args, --install-args + +B: Pass arguments for configure/build/test/install +commands respectively, for a given module to install. + + cpanm DBD::mysql --configure-args="--cflags=... --libs=..." + +The argument is only enabled for the module passed as a command line +argument, not dependencies. + +=item --scandeps + +B: Scans the depencencies of given modules and output the +tree in a text format. (See C<--format> below for more options) + +Because this command doesn't actually install any distributions, it +will be useful that by typing: + + cpanm --scandeps Catalyst::Runtime + +you can make sure what modules will be installed. + +This command takes into account which modules you already have +installed in your system. If you want to see what modules will be +installed against a vanilla perl installation, you might want to +combine it with C<-L> option. + +=item --format + +B: Determines what format to display the scanned +dependency tree. Available options are C, C, C and +C. + +=over 8 + +=item tree + +Displays the tree in a plain text format. This is the default value. + +=item json, yaml + +Outputs the tree in a JSON or YAML format. L and L modules +need to be installed respectively. The output tree is represented as a +recursive tuple of: + + [ distribution, dependencies ] + +and the container is an array containing the root elements. Note that +there may be multiple root nodes, since you can give multiple modules +to the C<--scandeps> command. + +=item dists + +C is a special output format, where it prints the distribution +filename in the I after the dependency resolution, +like: + + GAAS/MIME-Base64-3.13.tar.gz + GAAS/URI-1.58.tar.gz + PETDANCE/HTML-Tagset-3.20.tar.gz + GAAS/HTML-Parser-3.68.tar.gz + GAAS/libwww-perl-5.837.tar.gz + +which means you can install these distributions in this order without +extra dependencies. When combined with C<-L> option, it will be useful +to replay installations on other machines. + +=back + +=item --save-dists + +Specifies the optional directory path to copy downloaded tarballs in +the CPAN mirror compatible directory structure +i.e. I + +If the distro tarball did not come from CPAN, for example from a local +file or from GitHub, then it will be saved under +I. 
+ +=item --uninst-shadows + +Uninstalls the shadow files of the distribution that you're +installing. This eliminates the confusion if you're trying to install +core (dual-life) modules from CPAN against perl 5.10 or older, or +modules that used to be XS-based but switched to pure perl at some +version. + +If you run cpanm as root and use C or equivalent to +specify custom installation path, you SHOULD disable this option so +you won't accidentally uninstall dual-life modules from the core +include path. + +Defaults to true if your perl version is smaller than 5.12, and you +can disable that with C<--no-uninst-shadows>. + +B: Since version 1.3000 this flag is turned off by default for +perl newer than 5.12, since with 5.12 @INC contains site_perl directory +I the perl core library path, and uninstalling shadows is not +necessary anymore and does more harm by deleting files from the core +library path. + +=item --uninstall, -U + +Uninstalls a module from the library path. It finds a packlist for +given modules, and removes all the files included in the same +distribution. + +If you enable local::lib, it only removes files from the local::lib +directory. + +If you try to uninstall a module in C directory (i.e. core +module), an error will be thrown. + +A dialog will be prompted to confirm the files to be deleted. If you pass +C<-f> option as well, the dialog will be skipped and uninstallation +will be forced. + +=item --cascade-search + +B: Specifies whether to cascade search when you specify +multiple mirrors and a mirror doesn't have a module or has a lower +version of the module than requested. Defaults to false. + +=item --skip-installed + +Specifies whether a module given in the command line is skipped if its latest +version is already installed. Defaults to true. + +B: The C environment variable have to be correctly set +for this to work with modules installed using L, unless +you always use the C<-l> option. + +=item --skip-satisfied + +B: Specifies whether a module (and version) given in the +command line is skipped if it's already installed. + +If you run: + + cpanm --skip-satisfied CGI DBI~1.2 + +cpanm won't install them if you already have CGI (for whatever +versions) or have DBI with version higher than 1.2. It is similar to +C<--skip-installed> but while C<--skip-installed> checks if the +I version of CPAN is installed, C<--skip-satisfied> checks if +a requested version (or not, which means any version) is installed. + +Defaults to false. + +=item --verify + +Verify the integrity of distribution files retrieved from PAUSE using +CHECKSUMS and SIGNATURES (if found). Defaults to false. + +=item --report-perl-version + +Whether it reports the locally installed perl version to the various +web server as part of User-Agent. Defaults to true unless CI related +environment variables such as C, C or C +is enabled. You can disable it by using C<--no-report-perl-version>. + +=item --auto-cleanup + +Specifies the number of days in which cpanm's work directories +expire. Defaults to 7, which means old work directories will be +cleaned up in one week. + +You can set the value to C<0> to make cpan never cleanup those +directories. + +=item --man-pages + +Generates man pages for executables (man1) and libraries (man3). + +Defaults to true (man pages generated) unless C<-L|--local-lib-contained> +option is supplied in which case it's set to false. You can disable +it with C<--no-man-pages>. + +=item --lwp + +Uses L module to download stuff over HTTP. 
Defaults to true, and +you can say C<--no-lwp> to disable using LWP, when you want to upgrade +LWP from CPAN on some broken perl systems. + +=item --wget + +Uses GNU Wget (if available) to download stuff. Defaults to true, and +you can say C<--no-wget> to disable using Wget (versions of Wget older +than 1.9 don't support the C<--retry-connrefused> option used by cpanm). + +=item --curl + +Uses cURL (if available) to download stuff. Defaults to true, and +you can say C<--no-curl> to disable using cURL. + +Normally with C<--lwp>, C<--wget> and C<--curl> options set to true +(which is the default) cpanm tries L, Wget, cURL and L +(in that order) and uses the first one available. + +=back + +=head1 ENVIRONMENT VARIABLES + +=over 4 + +=item PERL_CPANM_HOME + +The directory cpanm should use to store downloads and build and test +modules. Defaults to the C<.cpanm> directory in your user's home +directory. + +=item PERL_CPANM_OPT + +If set, adds a set of default options to every cpanm command. These +options come first, and so are overridden by command-line options. + +=back + +=head1 SEE ALSO + +L + +=head1 COPYRIGHT + +Copyright 2010- Tatsuhiko Miyagawa. + +=head1 AUTHOR + +Tatsuhiko Miyagawa + +=cut diff --git a/var/spack/repos/builtin/packages/perl/package.py b/var/spack/repos/builtin/packages/perl/package.py new file mode 100644 index 00000000000..936b6caa810 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl/package.py @@ -0,0 +1,42 @@ +# +# Author: George Hartzell +# Date: July 21, 2016 +# Author: Justin Too +# Date: September 6, 2015 +# +from spack import * + + +class Perl(Package): + """Perl 5 is a highly capable, feature-rich programming language with over + 27 years of development.""" + homepage = "http://www.perl.org" + url = "http://www.cpan.org/src/5.0/perl-5.22.2.tar.gz" + + version('5.24.0', 'c5bf7f3285439a2d3b6a488e14503701') + version('5.22.2', '5767e2a10dd62a46d7b57f74a90d952b') + version('5.20.3', 'd647d0ea5a7a8194c34759ab9f2610cd') + # 5.18.4 fails with gcc-5 + # https://rt.perl.org/Public/Bug/Display.html?id=123784 + # version('5.18.4' , '1f9334ff730adc05acd3dd7130d295db') + + # Installing cpanm alongside the core makes it safe and simple for + # people/projects to install their own sets of perl modules. Not + # having it in core increases the "energy of activation" for doing + # things cleanly. + variant('cpanm', default=True, + description='Having cpanm in core simplifies adding modules.') + + def install(self, spec, prefix): + configure = Executable('./Configure') + configure("-des", "-Dprefix=" + prefix) + make() + make("test") + make("install") + + if '+cpanm' in spec: + perl_exe = join_path(prefix, 'bin', 'perl') + perl = Executable(perl_exe) + cpanm_installer = join_path(self.package_dir, 'cpanm-installer.pl') + cpanm_package_spec = 'App::cpanminus' + '@' + '1.7042' + perl(cpanm_installer, cpanm_package_spec) From 4d72e0fb9db7716f40899ff4df24e0487a9bf826 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Fri, 22 Jul 2016 17:55:57 -0400 Subject: [PATCH 188/284] Make the cpanm version a variant Rather than hard-coding the verison of `cpanm` that's [optionally] installed into the core, make it a variant with a default value of '1.7042'. Also discovered that `prefix + 'bin'` is the same as `prefix.bin`, so embetter that bit of code. 
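For reference, the package-side pattern this change leans on, a variant whose default is a plain string that `install()` reads back through `spec.variants[...].value`, looks roughly like the sketch below. It is an illustration only, written against the same Package API the diff below uses; the class name, URL, checksum and helper/installer names are placeholders rather than anything taken from Spack or this patch.

    from spack import *


    class Example(Package):
        """Hypothetical package showing a string-valued variant."""

        homepage = "http://example.org"
        url = "http://example.org/example-1.0.tar.gz"

        version('1.0', '00000000000000000000000000000000')

        variant('helper', default=True,
                description='Also install the optional helper tool.')
        variant('helper_version', default='1.7042',
                description='Version of the helper tool to install if +helper.')

        def install(self, spec, prefix):
            configure('--prefix=%s' % prefix)
            make()
            make('install')

            if '+helper' in spec:
                # .value hands back the raw string (the default, or whatever
                # the user asked for); in this package it becomes the version
                # part of an 'App::cpanminus@<version>' request.
                helper_version = spec.variants['helper_version'].value
                request = 'Some::Module' + '@' + helper_version
                # prefix.bin is shorthand for join_path(prefix, 'bin')
                runner = Executable(join_path(prefix.bin, 'runner'))
                runner('install', request)  # 'runner' and its arguments are made up

The sketch only covers reading the value inside `install()`; how (or whether) a non-boolean variant can be overridden on the spack command line is a separate question and is not shown here.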
--- var/spack/repos/builtin/packages/perl/package.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/perl/package.py b/var/spack/repos/builtin/packages/perl/package.py index 936b6caa810..97b28436c6a 100644 --- a/var/spack/repos/builtin/packages/perl/package.py +++ b/var/spack/repos/builtin/packages/perl/package.py @@ -26,6 +26,8 @@ class Perl(Package): # things cleanly. variant('cpanm', default=True, description='Having cpanm in core simplifies adding modules.') + variant('cpanm_version', default='1.7042', + description='Version of cpanm to install into core if +cpanm.') def install(self, spec, prefix): configure = Executable('./Configure') @@ -35,8 +37,9 @@ def install(self, spec, prefix): make("install") if '+cpanm' in spec: - perl_exe = join_path(prefix, 'bin', 'perl') + perl_exe = join_path(prefix.bin, 'perl') perl = Executable(perl_exe) cpanm_installer = join_path(self.package_dir, 'cpanm-installer.pl') - cpanm_package_spec = 'App::cpanminus' + '@' + '1.7042' + cpanm_version = spec.variants['cpanm_version'].value + cpanm_package_spec = 'App::cpanminus' + '@' + cpanm_version perl(cpanm_installer, cpanm_package_spec) From 08ff7b65afdeaaa0535094843940c0881905b27c Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Fri, 22 Jul 2016 19:16:42 -0400 Subject: [PATCH 189/284] Make running perl's tests conditional Make running perl's tests conditional, one must now specify the `--run-tests` flag to the `spack install` command in order to run the tests. On one system (8 core, 16GB Digital Ocean Droplet), installing without tests takes 3 minutes, with tests takes 16 minutes. --- var/spack/repos/builtin/packages/perl/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/perl/package.py b/var/spack/repos/builtin/packages/perl/package.py index 97b28436c6a..b390ce3bfa4 100644 --- a/var/spack/repos/builtin/packages/perl/package.py +++ b/var/spack/repos/builtin/packages/perl/package.py @@ -33,7 +33,8 @@ def install(self, spec, prefix): configure = Executable('./Configure') configure("-des", "-Dprefix=" + prefix) make() - make("test") + if self.run_tests: + make("test") make("install") if '+cpanm' in spec: From 1b9becc54162ed9239a03cf613f50ae7996625c3 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Mon, 25 Jul 2016 10:27:35 -0400 Subject: [PATCH 190/284] Add the std spack header to perl/package.py Stole the example header from antlr/package.py, included it at the top of perl/package.py. --- .../repos/builtin/packages/perl/package.py | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/var/spack/repos/builtin/packages/perl/package.py b/var/spack/repos/builtin/packages/perl/package.py index b390ce3bfa4..47623e87c1b 100644 --- a/var/spack/repos/builtin/packages/perl/package.py +++ b/var/spack/repos/builtin/packages/perl/package.py @@ -1,3 +1,27 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## # # Author: George Hartzell # Date: July 21, 2016 From c994565c621f4283736ac70f7792c20e2d9e1b11 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Mon, 1 Aug 2016 23:56:58 -0400 Subject: [PATCH 191/284] Use "resource" machinery to manage cpanm tarball Use the resource machinery to fetch/cache/unpack/... the App::cpanminus tarball. - this hardcodes the version, I can't figure out how to use a variant to hold/set the value and access it in the resource(). - change up the install to use the `with working_dir()` meme. --- .../repos/builtin/packages/perl/package.py | 21 ++++++++++++------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/var/spack/repos/builtin/packages/perl/package.py b/var/spack/repos/builtin/packages/perl/package.py index 47623e87c1b..2c90b788eca 100644 --- a/var/spack/repos/builtin/packages/perl/package.py +++ b/var/spack/repos/builtin/packages/perl/package.py @@ -50,8 +50,14 @@ class Perl(Package): # things cleanly. variant('cpanm', default=True, description='Having cpanm in core simplifies adding modules.') - variant('cpanm_version', default='1.7042', - description='Version of cpanm to install into core if +cpanm.') + + resource( + name="cpanm", + url="http://search.cpan.org/CPAN/authors/id/M/MI/MIYAGAWA/App-cpanminus-1.7042.tar.gz", + md5="e87f55fbcb3c13a4754500c18e89219f", + destination="cpanm", + placement="cpanm" + ) def install(self, spec, prefix): configure = Executable('./Configure') @@ -62,9 +68,8 @@ def install(self, spec, prefix): make("install") if '+cpanm' in spec: - perl_exe = join_path(prefix.bin, 'perl') - perl = Executable(perl_exe) - cpanm_installer = join_path(self.package_dir, 'cpanm-installer.pl') - cpanm_version = spec.variants['cpanm_version'].value - cpanm_package_spec = 'App::cpanminus' + '@' + cpanm_version - perl(cpanm_installer, cpanm_package_spec) + with working_dir(join_path('cpanm', 'cpanm')): + perl = Executable(join_path(prefix.bin, 'perl')) + perl('Makefile.PL') + make() + make('install') From f2f5f6c27989c701274cbbb9bfcd7aa1e0f0561a Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Tue, 2 Aug 2016 11:18:50 -0400 Subject: [PATCH 192/284] Touch up variant comment --- var/spack/repos/builtin/packages/perl/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/perl/package.py b/var/spack/repos/builtin/packages/perl/package.py index 2c90b788eca..d71a7492bad 100644 --- a/var/spack/repos/builtin/packages/perl/package.py +++ b/var/spack/repos/builtin/packages/perl/package.py @@ -49,7 +49,7 @@ class Perl(Package): # having it in core increases the "energy of activation" for doing # things cleanly. 
variant('cpanm', default=True, - description='Having cpanm in core simplifies adding modules.') + description='Optionally install cpanm with the core packages.') resource( name="cpanm", From a6605d842b2a7160c254ddb54ca4e72c3cf3fe92 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Tue, 2 Aug 2016 11:25:03 -0400 Subject: [PATCH 193/284] Flake8 cleanup. --- var/spack/repos/builtin/packages/texlive/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/texlive/package.py b/var/spack/repos/builtin/packages/texlive/package.py index f4a4acf3ba3..d44a6e311e8 100644 --- a/var/spack/repos/builtin/packages/texlive/package.py +++ b/var/spack/repos/builtin/packages/texlive/package.py @@ -25,13 +25,14 @@ from spack import * import os + class Texlive(Package): """TeX Live is a free software distribution for the TeX typesetting system""" homepage = "http://www.tug.org/texlive" - version('live', 'e671eea7f142c438959493cc42a2a59b', url = "http://mirror.ctan.org/systems/texlive/tlnet/install-tl-unx.tar.gz") + version('live', 'e671eea7f142c438959493cc42a2a59b', url="http://mirror.ctan.org/systems/texlive/tlnet/install-tl-unx.tar.gz") # There does not seem to be a complete list of schemes. # Examples include: From c77425bb2258ccb9df73044230ebca192e4b209e Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Tue, 2 Aug 2016 11:32:19 -0400 Subject: [PATCH 194/284] Remove useless `def unpack` Before I learned that I was stumbling over a real bug (#1308), I thought I needed to arrange for the fetcher to skip the unpack step. This commit removes the useless `def unpack`. --- var/spack/repos/builtin/packages/nextflow/package.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/nextflow/package.py b/var/spack/repos/builtin/packages/nextflow/package.py index 865534c2f80..54f3dbf9151 100644 --- a/var/spack/repos/builtin/packages/nextflow/package.py +++ b/var/spack/repos/builtin/packages/nextflow/package.py @@ -36,9 +36,6 @@ class Nextflow(Package): depends_on('jdk') - def unpack(self): - pass - def install(self, spec, prefix): mkdirp(prefix.bin) install("nextflow", join_path(prefix.bin, "nextflow")) From 4c8672ed1ffb8f2ef55bb0d5f7905baf00b1668b Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Tue, 2 Aug 2016 11:45:16 -0400 Subject: [PATCH 195/284] Add previous versions back into package Following the advice of @glennpj, added the old versions back into the packages. I can install the current packages and the previous packages. 
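To make the intent concrete, the pattern being restored looks roughly like the sketch below: superseded version() directives stay in the package file so that an older release can still be pinned with @ in a spec. The package name, URL and checksums here are placeholders, not values from the diff that follows.

    from spack import *


    class Example(Package):
        """Hypothetical package that keeps an older release installable."""

        homepage = "http://example.org"
        url = "http://example.org/example-1.1.tar.gz"

        # Newest first; the older entry is kept rather than replaced, so a
        # pinned spec such as example@1.0 can still concretize and fetch.
        version('1.1', '11111111111111111111111111111111')
        version('1.0', '00000000000000000000000000000000')

        def install(self, spec, prefix):
            configure('--prefix=%s' % prefix)
            make()
            make('install')

With both checksums present, a command along the lines of `spack install r-rcpp@0.12.5` should work again, while a plain `spack install r-rcpp` keeps picking the newest listed release, since Spack normally prefers the highest available version when no @ constraint is given.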
--- var/spack/repos/builtin/packages/r-jsonlite/package.py | 1 + var/spack/repos/builtin/packages/r-mime/package.py | 1 + var/spack/repos/builtin/packages/r-rcpp/package.py | 1 + 3 files changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/r-jsonlite/package.py b/var/spack/repos/builtin/packages/r-jsonlite/package.py index b47cb5a71aa..d1cb4b52c14 100644 --- a/var/spack/repos/builtin/packages/r-jsonlite/package.py +++ b/var/spack/repos/builtin/packages/r-jsonlite/package.py @@ -42,6 +42,7 @@ class RJsonlite(Package): list_url = "https://cran.r-project.org/src/contrib/Archive/jsonlite" version('1.0', 'c8524e086de22ab39b8ac8000220cc87') + version('0.9.21', '4fc382747f88a79ff0718a0d06bed45d') extends('R') diff --git a/var/spack/repos/builtin/packages/r-mime/package.py b/var/spack/repos/builtin/packages/r-mime/package.py index 6831fc3b606..5e78889a76b 100644 --- a/var/spack/repos/builtin/packages/r-mime/package.py +++ b/var/spack/repos/builtin/packages/r-mime/package.py @@ -34,6 +34,7 @@ class RMime(Package): list_url = "https://cran.r-project.org/src/contrib/Archive/mime" version('0.5', '87e00b6d57b581465c19ae869a723c4d') + version('0.4', '789cb33e41db2206c6fc7c3e9fbc2c02') extends('R') diff --git a/var/spack/repos/builtin/packages/r-rcpp/package.py b/var/spack/repos/builtin/packages/r-rcpp/package.py index 0e84f8829b7..94580a87008 100644 --- a/var/spack/repos/builtin/packages/r-rcpp/package.py +++ b/var/spack/repos/builtin/packages/r-rcpp/package.py @@ -41,6 +41,7 @@ class RRcpp(Package): list_url = "https://cran.r-project.org/src/contrib/Archive/Rcpp" version('0.12.6', 'db4280fb0a79cd19be73a662c33b0a8b') + version('0.12.5', 'f03ec05b4e391cc46e7ce330e82ff5e2') extends('R') From 74a181febce23123c7e028d31931fac464735ac5 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Tue, 2 Aug 2016 12:13:24 -0400 Subject: [PATCH 196/284] Remove local copy of cpanm installer This is handled as a resource in the package now. --- .../packages/cabal-install/bootstrap.patch | 11 + .../builtin/packages/cabal-install/package.py | 51 + .../builtin/packages/perl/cpanm-installer.pl | 1075 ----------------- 3 files changed, 62 insertions(+), 1075 deletions(-) create mode 100644 var/spack/repos/builtin/packages/cabal-install/bootstrap.patch create mode 100644 var/spack/repos/builtin/packages/cabal-install/package.py delete mode 100644 var/spack/repos/builtin/packages/perl/cpanm-installer.pl diff --git a/var/spack/repos/builtin/packages/cabal-install/bootstrap.patch b/var/spack/repos/builtin/packages/cabal-install/bootstrap.patch new file mode 100644 index 00000000000..374706ccf46 --- /dev/null +++ b/var/spack/repos/builtin/packages/cabal-install/bootstrap.patch @@ -0,0 +1,11 @@ +--- a/bootstrap.sh 2016-05-02 14:15:09.000000000 +0300 ++++ b/bootstrap.sh 2016-07-03 22:35:15.000000000 +0300 +@@ -69,7 +69,7 @@ + # Find the correct linker/linker-wrapper. + LINK="$(for link in collect2 ld; do + [ $($CC -print-prog-name=$link) = $link ] && continue || +- $CC -print-prog-name=$link ++ $CC -print-prog-name=$link && break + done)" + + # Fall back to "ld"... might work. diff --git a/var/spack/repos/builtin/packages/cabal-install/package.py b/var/spack/repos/builtin/packages/cabal-install/package.py new file mode 100644 index 00000000000..53009704524 --- /dev/null +++ b/var/spack/repos/builtin/packages/cabal-install/package.py @@ -0,0 +1,51 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. 
+# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class CabalInstall(Package): + """The 'cabal' command-line program simplifies the process of managing + Haskell software by automating the fetching, configuration, + compilation and installation of Haskell libraries and programs.""" + + homepage = "http://www.haskell.org/cabal/" + url = "http://hackage.haskell.org/package/cabal-install-1.24.0.0/cabal-install-1.24.0.0.tar.gz" + + version('1.24.0.0', 'beb998cdc385523935620381abe393f4') + + depends_on('zlib') + depends_on('ghc') + + # @mvkorpel's fix from: + # https://github.com/haskell/cabal/issues/3440 + # It works around problem deciding whether to use collect2 or ld. + # The symptom is complaint about "Setup: Unrecognized flags:..." + patch('bootstrap.patch') + + def install(self, spec, prefix): + bash=which("bash") + bash("bootstrap.sh", "--sandbox", prefix) + #bin.install ".cabal-sandbox/bin/cabal" + #bash_completion.install "bash-completion/cabal" diff --git a/var/spack/repos/builtin/packages/perl/cpanm-installer.pl b/var/spack/repos/builtin/packages/perl/cpanm-installer.pl deleted file mode 100644 index 97f56162ffa..00000000000 --- a/var/spack/repos/builtin/packages/perl/cpanm-installer.pl +++ /dev/null @@ -1,1075 +0,0 @@ -#!/usr/bin/env perl -# -# This is a pre-compiled source code for the cpanm (cpanminus) program. -# For more details about how to install cpanm, go to the following URL: -# -# https://github.com/miyagawa/cpanminus -# -# Quickstart: Run the following command and it will install itself for -# you. You might want to run it as a root with sudo if you want to install -# to places like /usr/local/bin. -# -# % curl -L https://cpanmin.us | perl - App::cpanminus -# -# If you don't have curl but wget, replace `curl -L` with `wget -O -`. - -# DO NOT EDIT -- this is an auto generated file - -# This chunk of stuff was generated by App::FatPacker. To find the original -# file's code, look for the end of this BEGIN block or the string 'FATPACK' -BEGIN { -my %fatpacked; - -$fatpacked{"App/cpanminus.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'APP_CPANMINUS'; - package App::cpanminus;our$VERSION="1.7042";1; -APP_CPANMINUS - -$fatpacked{"App/cpanminus/Dependency.pm"} = '#line '.(1+__LINE__).' 
"'.__FILE__."\"\n".<<'APP_CPANMINUS_DEPENDENCY'; - package App::cpanminus::Dependency;use strict;use CPAN::Meta::Requirements;sub from_prereqs {my($class,$prereqs,$phases,$types)=@_;my@deps;for my$type (@$types){push@deps,$class->from_versions($prereqs->merged_requirements($phases,[$type])->as_string_hash,$type,)}return@deps}sub from_versions {my($class,$versions,$type)=@_;my@deps;while (my($module,$version)=each %$versions){push@deps,$class->new($module,$version,$type)}@deps}sub merge_with {my($self,$requirements)=@_;$self->{original_version}=$self->version;eval {$requirements->add_string_requirement($self->module,$self->version)};if ($@ =~ /illegal requirements/){warn sprintf("Can't merge requirements for %s: '%s' and '%s'",$self->module,$self->version,$requirements->requirements_for_module($self->module))}$self->{version}=$requirements->requirements_for_module($self->module)}sub new {my($class,$module,$version,$type)=@_;bless {module=>$module,version=>$version,type=>$type || 'requires',},$class}sub module {$_[0]->{module}}sub version {$_[0]->{version}}sub type {$_[0]->{type}}sub requires_version {my$self=shift;if (defined$self->{original_version}){return$self->{original_version}}$self->version}sub is_requirement {$_[0]->{type}eq 'requires'}1; -APP_CPANMINUS_DEPENDENCY - -$fatpacked{"App/cpanminus/script.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'APP_CPANMINUS_SCRIPT'; - package App::cpanminus::script;use strict;use Config;use Cwd ();use App::cpanminus;use App::cpanminus::Dependency;use File::Basename ();use File::Find ();use File::Path ();use File::Spec ();use File::Copy ();use File::Temp ();use Getopt::Long ();use Symbol ();use String::ShellQuote ();use version ();use constant WIN32=>$^O eq 'MSWin32';use constant BAD_TAR=>($^O eq 'solaris' || $^O eq 'hpux');use constant CAN_SYMLINK=>eval {symlink("","");1};our$VERSION=$App::cpanminus::VERSION;if ($INC{"App/FatPacker/Trace.pm"}){require version::vpp}my$quote=WIN32 ? 
q/"/ : q/'/;sub agent {my$self=shift;my$agent="cpanminus/$VERSION";$agent .= " perl/$]" if$self->{report_perl_version};$agent}sub determine_home {my$class=shift;my$homedir=$ENV{HOME}|| eval {require File::HomeDir;File::HomeDir->my_home}|| join('',@ENV{qw(HOMEDRIVE HOMEPATH)});if (WIN32){require Win32;$homedir=Win32::GetShortPathName($homedir)}return "$homedir/.cpanm"}sub new {my$class=shift;bless {home=>$class->determine_home,cmd=>'install',seen=>{},notest=>undef,test_only=>undef,installdeps=>undef,force=>undef,sudo=>undef,make=>undef,verbose=>undef,quiet=>undef,interactive=>undef,log=>undef,mirrors=>[],mirror_only=>undef,mirror_index=>undef,cpanmetadb=>"http://cpanmetadb.plackperl.org/v1.0/",perl=>$^X,argv=>[],local_lib=>undef,self_contained=>undef,exclude_vendor=>undef,prompt_timeout=>0,prompt=>undef,configure_timeout=>60,build_timeout=>3600,test_timeout=>1800,try_lwp=>1,try_wget=>1,try_curl=>1,uninstall_shadows=>($] < 5.012),skip_installed=>1,skip_satisfied=>0,auto_cleanup=>7,pod2man=>1,installed_dists=>0,install_types=>['requires'],with_develop=>0,with_configure=>0,showdeps=>0,scandeps=>0,scandeps_tree=>[],format=>'tree',save_dists=>undef,skip_configure=>0,verify=>0,report_perl_version=>!$class->maybe_ci,build_args=>{},features=>{},pure_perl=>0,cpanfile_path=>'cpanfile',@_,},$class}sub env {my($self,$key)=@_;$ENV{"PERL_CPANM_" .$key}}sub maybe_ci {my$class=shift;grep$ENV{$_},qw(TRAVIS CI AUTOMATED_TESTING AUTHOR_TESTING)}sub install_type_handlers {my$self=shift;my@handlers;for my$type (qw(recommends suggests)){push@handlers,"with-$type"=>sub {my%uniq;$self->{install_types}=[grep!$uniq{$_}++,@{$self->{install_types}},$type ]};push@handlers,"without-$type"=>sub {$self->{install_types}=[grep $_ ne $type,@{$self->{install_types}}]}}@handlers}sub build_args_handlers {my$self=shift;my@handlers;for my$phase (qw(configure build test install)){push@handlers,"$phase-args=s"=>\($self->{build_args}{$phase})}@handlers}sub parse_options {my$self=shift;local@ARGV=@{$self->{argv}};push@ARGV,grep length,split /\s+/,$self->env('OPT');push@ARGV,@_;Getopt::Long::Configure("bundling");Getopt::Long::GetOptions('f|force'=>sub {$self->{skip_installed}=0;$self->{force}=1},'n|notest!'=>\$self->{notest},'test-only'=>sub {$self->{notest}=0;$self->{skip_installed}=0;$self->{test_only}=1},'S|sudo!'=>\$self->{sudo},'v|verbose'=>\$self->{verbose},'verify!'=>\$self->{verify},'q|quiet!'=>\$self->{quiet},'h|help'=>sub {$self->{action}='show_help'},'V|version'=>sub {$self->{action}='show_version'},'perl=s'=>sub {$self->diag("--perl is deprecated since it's known to be fragile in figuring out dependencies. 
Run `$_[1] -S cpanm` instead.\n",1);$self->{perl}=$_[1]},'l|local-lib=s'=>sub {$self->{local_lib}=$self->maybe_abs($_[1])},'L|local-lib-contained=s'=>sub {$self->{local_lib}=$self->maybe_abs($_[1]);$self->{self_contained}=1;$self->{pod2man}=undef},'self-contained!'=>\$self->{self_contained},'exclude-vendor!'=>\$self->{exclude_vendor},'mirror=s@'=>$self->{mirrors},'mirror-only!'=>\$self->{mirror_only},'mirror-index=s'=>sub {$self->{mirror_index}=$self->maybe_abs($_[1])},'M|from=s'=>sub {$self->{mirrors}=[$_[1]];$self->{mirror_only}=1},'cpanmetadb=s'=>\$self->{cpanmetadb},'cascade-search!'=>\$self->{cascade_search},'prompt!'=>\$self->{prompt},'installdeps'=>\$self->{installdeps},'skip-installed!'=>\$self->{skip_installed},'skip-satisfied!'=>\$self->{skip_satisfied},'reinstall'=>sub {$self->{skip_installed}=0},'interactive!'=>\$self->{interactive},'i|install'=>sub {$self->{cmd}='install'},'info'=>sub {$self->{cmd}='info'},'look'=>sub {$self->{cmd}='look';$self->{skip_installed}=0},'U|uninstall'=>sub {$self->{cmd}='uninstall'},'self-upgrade'=>sub {$self->{action}='self_upgrade'},'uninst-shadows!'=>\$self->{uninstall_shadows},'lwp!'=>\$self->{try_lwp},'wget!'=>\$self->{try_wget},'curl!'=>\$self->{try_curl},'auto-cleanup=s'=>\$self->{auto_cleanup},'man-pages!'=>\$self->{pod2man},'scandeps'=>\$self->{scandeps},'showdeps'=>sub {$self->{showdeps}=1;$self->{skip_installed}=0},'format=s'=>\$self->{format},'save-dists=s'=>sub {$self->{save_dists}=$self->maybe_abs($_[1])},'skip-configure!'=>\$self->{skip_configure},'dev!'=>\$self->{dev_release},'metacpan!'=>\$self->{metacpan},'report-perl-version!'=>\$self->{report_perl_version},'configure-timeout=i'=>\$self->{configure_timeout},'build-timeout=i'=>\$self->{build_timeout},'test-timeout=i'=>\$self->{test_timeout},'with-develop'=>\$self->{with_develop},'without-develop'=>sub {$self->{with_develop}=0},'with-configure'=>\$self->{with_configure},'without-configure'=>sub {$self->{with_configure}=0},'with-feature=s'=>sub {$self->{features}{$_[1]}=1},'without-feature=s'=>sub {$self->{features}{$_[1]}=0},'with-all-features'=>sub {$self->{features}{__all}=1},'pp|pureperl!'=>\$self->{pure_perl},"cpanfile=s"=>\$self->{cpanfile_path},$self->install_type_handlers,$self->build_args_handlers,);if (!@ARGV && $0 ne '-' &&!-t STDIN){push@ARGV,$self->load_argv_from_fh(\*STDIN);$self->{load_from_stdin}=1}$self->{argv}=\@ARGV}sub check_upgrade {my$self=shift;my$install_base=$ENV{PERL_LOCAL_LIB_ROOT}? 
$self->local_lib_target($ENV{PERL_LOCAL_LIB_ROOT}): $Config{installsitebin};if ($0 eq '-'){return}elsif ($0 !~ /^$install_base/){if ($0 =~ m!perlbrew/bin!){die <{_checked}++;$self->bootstrap_local_lib}sub setup_verify {my$self=shift;my$has_modules=eval {require Module::Signature;require Digest::SHA;1};$self->{cpansign}=$self->which('cpansign');unless ($has_modules && $self->{cpansign}){warn "WARNING: Module::Signature and Digest::SHA is required for distribution verifications.\n";$self->{verify}=0}}sub parse_module_args {my($self,$module)=@_;$module =~ s/^([A-Za-z0-9_:]+)@([v\d\._]+)$/$1~== $2/;if ($module =~ /\~[v\d\._,\!<>= ]+$/){return split /\~/,$module,2}else {return$module,undef}}sub doit {my$self=shift;my$code;eval {$code=($self->_doit==0)};if (my$e=$@){warn$e;$code=1}return$code}sub _doit {my$self=shift;$self->setup_home;$self->init_tools;$self->setup_verify if$self->{verify};if (my$action=$self->{action}){$self->$action()and return 1}return$self->show_help(1)unless @{$self->{argv}}or $self->{load_from_stdin};$self->configure_mirrors;my$cwd=Cwd::cwd;my@fail;for my$module (@{$self->{argv}}){if ($module =~ s/\.pm$//i){my ($volume,$dirs,$file)=File::Spec->splitpath($module);$module=join '::',grep {$_}File::Spec->splitdir($dirs),$file}($module,my$version)=$self->parse_module_args($module);$self->chdir($cwd);if ($self->{cmd}eq 'uninstall'){$self->uninstall_module($module)or push@fail,$module}else {$self->install_module($module,0,$version)or push@fail,$module}}if ($self->{base}&& $self->{auto_cleanup}){$self->cleanup_workdirs}if ($self->{installed_dists}){my$dists=$self->{installed_dists}> 1 ? "distributions" : "distribution";$self->diag("$self->{installed_dists} $dists installed\n",1)}if ($self->{scandeps}){$self->dump_scandeps()}$self->chdir($cwd);return!@fail}sub setup_home {my$self=shift;$self->{home}=$self->env('HOME')if$self->env('HOME');unless (_writable($self->{home})){die "Can't write to cpanm home '$self->{home}': You should fix it with chown/chmod first.\n"}$self->{base}="$self->{home}/work/" .time .".$$";File::Path::mkpath([$self->{base}],0,0777);$self->{log}=File::Spec->catfile($self->{base},"build.log");my$final_log="$self->{home}/build.log";{open my$out,">$self->{log}" or die "$self->{log}: $!"}if (CAN_SYMLINK){my$build_link="$self->{home}/latest-build";unlink$build_link;symlink$self->{base},$build_link;unlink$final_log;symlink$self->{log},$final_log}else {my$log=$self->{log};my$home=$self->{home};$self->{at_exit}=sub {my$self=shift;my$temp_log="$home/build.log." .time .".$$";File::Copy::copy($log,$temp_log)&& unlink($final_log);rename($temp_log,$final_log)}}$self->chat("cpanm (App::cpanminus) $VERSION on perl $] built for $Config{archname}\n" ."Work directory is $self->{base}\n")}sub package_index_for {my ($self,$mirror)=@_;return$self->source_for($mirror)."/02packages.details.txt"}sub generate_mirror_index {my ($self,$mirror)=@_;my$file=$self->package_index_for($mirror);my$gz_file=$file .'.gz';my$index_mtime=(stat$gz_file)[9];unless (-e $file && (stat$file)[9]>= $index_mtime){$self->chat("Uncompressing index file...\n");if (eval {require Compress::Zlib}){my$gz=Compress::Zlib::gzopen($gz_file,"rb")or do {$self->diag_fail("$Compress::Zlib::gzerrno opening compressed index");return};open my$fh,'>',$file or do {$self->diag_fail("$! opening uncompressed index for write");return};my$buffer;while (my$status=$gz->gzread($buffer)){if ($status < 0){$self->diag_fail($gz->gzerror ." 
reading compressed index");return}print$fh $buffer}}else {if (system("gunzip -c $gz_file > $file")){$self->diag_fail("Cannot uncompress -- please install gunzip or Compress::Zlib");return}}utime$index_mtime,$index_mtime,$file}return 1}sub search_mirror_index {my ($self,$mirror,$module,$version)=@_;$self->search_mirror_index_file($self->package_index_for($mirror),$module,$version)}sub search_mirror_index_file {my($self,$file,$module,$version)=@_;open my$fh,'<',$file or return;my$found;while (<$fh>){if (m!^\Q$module\E\s+([\w\.]+)\s+(\S*)!m){$found=$self->cpan_module($module,$2,$1);last}}return$found unless$self->{cascade_search};if ($found){if ($self->satisfy_version($module,$found->{module_version},$version)){return$found}else {$self->chat("Found $module $found->{module_version} which doesn't satisfy $version.\n")}}return}sub with_version_range {my($self,$version)=@_;defined($version)&& $version =~ /(?:<|!=|==)/}sub encode_json {my($self,$data)=@_;require JSON::PP;my$json=JSON::PP::encode_json($data);$json =~ s/([^a-zA-Z0-9_\-.])/uc sprintf("%%%02x",ord($1))/eg;$json}sub version_to_query {my($self,$module,$version)=@_;require CPAN::Meta::Requirements;my$requirements=CPAN::Meta::Requirements->new;$requirements->add_string_requirement($module,$version || '0');my$req=$requirements->requirements_for_module($module);if ($req =~ s/^==\s*//){return {term=>{'module.version'=>$req },}}elsif ($req !~ /\s/){return {range=>{'module.version_numified'=>{'gte'=>$self->numify_ver_metacpan($req)}},}}else {my%ops=qw(< lt <= lte > gt >= gte);my(%range,@exclusion);my@requirements=split /,\s*/,$req;for my$r (@requirements){if ($r =~ s/^([<>]=?)\s*//){$range{$ops{$1}}=$self->numify_ver_metacpan($r)}elsif ($r =~ s/\!=\s*//){push@exclusion,$self->numify_ver_metacpan($r)}}my@filters=({range=>{'module.version_numified'=>\%range }},);if (@exclusion){push@filters,{not=>{or=>[map {+{term=>{'module.version_numified'=>$self->numify_ver_metacpan($_)}}}@exclusion ]},}}return@filters}}sub numify_ver_metacpan {my($self,$ver)=@_;$ver =~ s/_//g;version->new($ver)->numify}sub numify_ver {my($self,$ver)=@_;eval version->new($ver)->numify}sub maturity_filter {my($self,$module,$version)=@_;if ($version =~ /==/){return}elsif ($self->{dev_release}){return +{not=>{term=>{status=>'backpan' }}}}else {return ({not=>{term=>{status=>'backpan' }}},{term=>{maturity=>'released' }},)}}sub by_version {my%s=qw(latest 3 cpan 2 backpan 1);$b->{_score}<=> $a->{_score}|| $s{$b->{fields}{status}}<=> $s{$a->{fields}{status}}}sub by_first_come {$a->{fields}{date}cmp $b->{fields}{date}}sub by_date {$b->{fields}{date}cmp $a->{fields}{date}}sub find_best_match {my($self,$match,$version)=@_;return unless$match && @{$match->{hits}{hits}|| []};my@hits=$self->{dev_release}? sort {by_version || by_date}@{$match->{hits}{hits}}: sort {by_version || by_first_come}@{$match->{hits}{hits}};$hits[0]->{fields}}sub search_metacpan {my($self,$module,$version)=@_;require JSON::PP;$self->chat("Searching $module ($version) on metacpan ...\n");my$metacpan_uri='http://api.metacpan.org/v0';my@filter=$self->maturity_filter($module,$version);my$query={filtered=>{(@filter ? 
(filter=>{and=>\@filter }): ()),query=>{nested=>{score_mode=>'max',path=>'module',query=>{custom_score=>{metacpan_script=>"score_version_numified",query=>{constant_score=>{filter=>{and=>[{term=>{'module.authorized'=>JSON::PP::true()}},{term=>{'module.indexed'=>JSON::PP::true()}},{term=>{'module.name'=>$module }},$self->version_to_query($module,$version),]}}},}},}},}};my$module_uri="$metacpan_uri/file/_search?source=";$module_uri .= $self->encode_json({query=>$query,fields=>['date','release','author','module','status' ],});my($release,$author,$module_version);my$module_json=$self->get($module_uri);my$module_meta=eval {JSON::PP::decode_json($module_json)};my$match=$self->find_best_match($module_meta);if ($match){$release=$match->{release};$author=$match->{author};my$module_matched=(grep {$_->{name}eq $module}@{$match->{module}})[0];$module_version=$module_matched->{version}}unless ($release){$self->chat("! Could not find a release matching $module ($version) on MetaCPAN.\n");return}my$dist_uri="$metacpan_uri/release/_search?source=";$dist_uri .= $self->encode_json({filter=>{and=>[{term=>{'release.name'=>$release }},{term=>{'release.author'=>$author }},]},fields=>['download_url','stat','status' ],});my$dist_json=$self->get($dist_uri);my$dist_meta=eval {JSON::PP::decode_json($dist_json)};if ($dist_meta){$dist_meta=$dist_meta->{hits}{hits}[0]{fields}}if ($dist_meta && $dist_meta->{download_url}){(my$distfile=$dist_meta->{download_url})=~ s!.+/authors/id/!!;local$self->{mirrors}=$self->{mirrors};if ($dist_meta->{status}eq 'backpan'){$self->{mirrors}=['http://backpan.perl.org' ]}elsif ($dist_meta->{stat}{mtime}> time()-24*60*60){$self->{mirrors}=['http://cpan.metacpan.org' ]}return$self->cpan_module($module,$distfile,$module_version)}$self->diag_fail("Finding $module on metacpan failed.");return}sub search_database {my($self,$module,$version)=@_;my$found;if ($self->{dev_release}or $self->{metacpan}){$found=$self->search_metacpan($module,$version)and return$found;$found=$self->search_cpanmetadb($module,$version)and return$found}else {$found=$self->search_cpanmetadb($module,$version)and return$found;$found=$self->search_metacpan($module,$version)and return$found}}sub search_cpanmetadb {my($self,$module,$version)=@_;$self->chat("Searching $module ($version) on cpanmetadb ...\n");if ($self->with_version_range($version)){return$self->search_cpanmetadb_history($module,$version)}else {return$self->search_cpanmetadb_package($module,$version)}}sub search_cpanmetadb_package {my($self,$module,$version)=@_;require CPAN::Meta::YAML;(my$uri=$self->{cpanmetadb})=~ s{/?$}{/package/$module};my$yaml=$self->get($uri);my$meta=eval {CPAN::Meta::YAML::Load($yaml)};if ($meta && $meta->{distfile}){return$self->cpan_module($module,$meta->{distfile},$meta->{version})}$self->diag_fail("Finding $module on cpanmetadb failed.");return}sub search_cpanmetadb_history {my($self,$module,$version)=@_;(my$uri=$self->{cpanmetadb})=~ s{/?$}{/history/$module};my$content=$self->get($uri)or return;my@found;for my$line (split /\r?\n/,$content){if ($line =~ /^$module\s+(\S+)\s+(\S+)$/){push@found,{version=>$1,version_obj=>version::->parse($1),distfile=>$2,}}}return unless@found;$found[-1]->{latest}=1;my$match;for my$try (sort {$b->{version_obj}cmp $a->{version_obj}}@found){if ($self->satisfy_version($module,$try->{version_obj},$version)){local$self->{mirrors}=$self->{mirrors};unshift @{$self->{mirrors}},'http://backpan.perl.org' 
unless$try->{latest};return$self->cpan_module($module,$try->{distfile},$try->{version})}}$self->diag_fail("Finding $module ($version) on cpanmetadb failed.");return}sub search_module {my($self,$module,$version)=@_;if ($self->{mirror_index}){$self->mask_output(chat=>"Searching $module on mirror index $self->{mirror_index} ...\n");my$pkg=$self->search_mirror_index_file($self->{mirror_index},$module,$version);return$pkg if$pkg;unless ($self->{cascade_search}){$self->mask_output(diag_fail=>"Finding $module ($version) on mirror index $self->{mirror_index} failed.");return}}unless ($self->{mirror_only}){my$found=$self->search_database($module,$version);return$found if$found}MIRROR: for my$mirror (@{$self->{mirrors}}){$self->mask_output(chat=>"Searching $module on mirror $mirror ...\n");my$name='02packages.details.txt.gz';my$uri="$mirror/modules/$name";my$gz_file=$self->package_index_for($mirror).'.gz';unless ($self->{pkgs}{$uri}){$self->mask_output(chat=>"Downloading index file $uri ...\n");$self->mirror($uri,$gz_file);$self->generate_mirror_index($mirror)or next MIRROR;$self->{pkgs}{$uri}="!!retrieved!!"}my$pkg=$self->search_mirror_index($mirror,$module,$version);return$pkg if$pkg;$self->mask_output(diag_fail=>"Finding $module ($version) on mirror $mirror failed.")}return}sub source_for {my($self,$mirror)=@_;$mirror =~ s/[^\w\.\-]+/%/g;my$dir="$self->{home}/sources/$mirror";File::Path::mkpath([$dir ],0,0777);return$dir}sub load_argv_from_fh {my($self,$fh)=@_;my@argv;while(defined(my$line=<$fh>)){chomp$line;$line =~ s/#.+$//;$line =~ s/^\s+//;$line =~ s/\s+$//;push@argv,split ' ',$line if$line}return@argv}sub show_version {my$self=shift;print "cpanm (App::cpanminus) version $VERSION ($0)\n";print "perl version $] ($^X)\n\n";print " \%Config:\n";for my$key (qw(archname installsitelib installsitebin installman1dir installman3dir sitearchexp sitelibexp vendorarch vendorlibexp archlibexp privlibexp)){print " $key=$Config{$key}\n" if$Config{$key}}print " \%ENV:\n";for my$key (grep /^PERL/,sort keys%ENV){print " $key=$ENV{$key}\n"}print " \@INC:\n";for my$inc (@INC){print " $inc\n" unless ref($inc)eq 'CODE'}return 1}sub show_help {my$self=shift;if ($_[0]){print <splitdir($dir);while (@dir){$dir=File::Spec->catdir(@dir);if (-e $dir){return -w _}pop@dir}return}sub maybe_abs {my($self,$lib)=@_;if ($lib eq '_' or $lib =~ /^~/ or File::Spec->file_name_is_absolute($lib)){return$lib}else {return File::Spec->canonpath(File::Spec->catdir(Cwd::cwd(),$lib))}}sub local_lib_target {my($self,$root)=@_;(grep {$_ ne ''}split /\Q$Config{path_sep}/,$root)[0]}sub bootstrap_local_lib {my$self=shift;if ($self->{local_lib}){return$self->setup_local_lib($self->{local_lib})}if ($ENV{PERL_LOCAL_LIB_ROOT}&& $ENV{PERL_MM_OPT}){return$self->setup_local_lib($self->local_lib_target($ENV{PERL_LOCAL_LIB_ROOT}),1)}return if$self->{sudo}or (_writable($Config{installsitelib})and _writable($Config{installsitebin}));if ($ENV{PERL_MM_OPT}and ($ENV{MODULEBUILDRC}or $ENV{PERL_MB_OPT})){return}$self->setup_local_lib;$self->diag(<module=>$_}@$config_deps;my$reqs=CPAN::Meta::Requirements->from_string_hash({'Module::Build'=>'0.38','ExtUtils::MakeMaker'=>'6.58','ExtUtils::Install'=>'1.46',});if ($deps{"ExtUtils::MakeMaker"}){$deps{"ExtUtils::MakeMaker"}->merge_with($reqs)}elsif ($deps{"Module::Build"}){$deps{"Module::Build"}->merge_with($reqs);$deps{"ExtUtils::Install"}||= App::cpanminus::Dependency->new("ExtUtils::Install",0,'configure');$deps{"ExtUtils::Install"}->merge_with($reqs)}@$config_deps=values%deps}sub _core_only_inc 
{my($self,$base)=@_;require local::lib;(local::lib->resolve_path(local::lib->install_base_arch_path($base)),local::lib->resolve_path(local::lib->install_base_perl_path($base)),(!$self->{exclude_vendor}? grep {$_}@Config{qw(vendorarch vendorlibexp)}: ()),@Config{qw(archlibexp privlibexp)},)}sub _diff {my($self,$old,$new)=@_;my@diff;my%old=map {$_=>1}@$old;for my$n (@$new){push@diff,$n unless exists$old{$n}}@diff}sub _setup_local_lib_env {my($self,$base)=@_;$self->diag(<setup_env_hash_for($base,0)}sub setup_local_lib {my($self,$base,$no_env)=@_;$base=undef if$base eq '_';require local::lib;{local $0='cpanm';$base ||= "~/perl5";$base=local::lib->resolve_path($base);if ($self->{self_contained}){my@inc=$self->_core_only_inc($base);$self->{search_inc}=[@inc ]}else {$self->{search_inc}=[local::lib->install_base_arch_path($base),local::lib->install_base_perl_path($base),@INC,]}$self->_setup_local_lib_env($base)unless$no_env;$self->{local_lib}=$base}}sub prompt_bool {my($self,$mess,$def)=@_;my$val=$self->prompt($mess,$def);return lc$val eq 'y'}sub prompt {my($self,$mess,$def)=@_;my$isa_tty=-t STDIN && (-t STDOUT ||!(-f STDOUT || -c STDOUT));my$dispdef=defined$def ? "[$def] " : " ";$def=defined$def ? $def : "";if (!$self->{prompt}|| (!$isa_tty && eof STDIN)){return$def}local $|=1;local $\;my$ans;eval {local$SIG{ALRM}=sub {undef$ans;die "alarm\n"};print STDOUT "$mess $dispdef";alarm$self->{prompt_timeout}if$self->{prompt_timeout};$ans=;alarm 0};if (defined$ans){chomp$ans}else {print STDOUT "\n"}return (!defined$ans || $ans eq '')? $def : $ans}sub diag_ok {my($self,$msg)=@_;chomp$msg;$msg ||= "OK";if ($self->{in_progress}){$self->_diag("$msg\n");$self->{in_progress}=0}$self->log("-> $msg\n")}sub diag_fail {my($self,$msg,$always)=@_;chomp$msg;if ($self->{in_progress}){$self->_diag("FAIL\n");$self->{in_progress}=0}if ($msg){$self->_diag("! $msg\n",$always,1);$self->log("-> FAIL $msg\n")}}sub diag_progress {my($self,$msg)=@_;chomp$msg;$self->{in_progress}=1;$self->_diag("$msg ... ");$self->log("$msg\n")}sub _diag {my($self,$msg,$always,$error)=@_;my$fh=$error ? *STDERR : *STDOUT;print {$fh}$msg if$always or $self->{verbose}or!$self->{quiet}}sub diag {my($self,$msg,$always)=@_;$self->_diag($msg,$always);$self->log($msg)}sub chat {my$self=shift;print STDERR @_ if$self->{verbose};$self->log(@_)}sub mask_output {my$self=shift;my$method=shift;$self->$method($self->mask_uri_passwords(@_))}sub log {my$self=shift;open my$out,">>$self->{log}";print$out @_}sub run {my($self,$cmd)=@_;if (WIN32){$cmd=$self->shell_quote(@$cmd)if ref$cmd eq 'ARRAY';unless ($self->{verbose}){$cmd .= " >> " .$self->shell_quote($self->{log})." 2>&1"}!system$cmd}else {my$pid=fork;if ($pid){waitpid$pid,0;return!$?}else {$self->run_exec($cmd)}}}sub run_exec {my($self,$cmd)=@_;if (ref$cmd eq 'ARRAY'){unless ($self->{verbose}){open my$logfh,">>",$self->{log};open STDERR,'>&',$logfh;open STDOUT,'>&',$logfh;close$logfh}exec @$cmd}else {unless ($self->{verbose}){$cmd .= " >> " .$self->shell_quote($self->{log})." 2>&1"}exec$cmd}}sub run_timeout {my($self,$cmd,$timeout)=@_;return$self->run($cmd)if WIN32 || $self->{verbose}||!$timeout;my$pid=fork;if ($pid){eval {local$SIG{ALRM}=sub {die "alarm\n"};alarm$timeout;waitpid$pid,0;alarm 0};if ($@ && $@ eq "alarm\n"){$self->diag_fail("Timed out (> ${timeout}s). Use --verbose to retry.");local$SIG{TERM}='IGNORE';kill TERM=>0;waitpid$pid,0;return}return!$?}elsif ($pid==0){$self->run_exec($cmd)}else {$self->chat("! 
fork failed: falling back to system()\n");$self->run($cmd)}}sub append_args {my($self,$cmd,$phase)=@_;if (my$args=$self->{build_args}{$phase}){$cmd=join ' ',$self->shell_quote(@$cmd),$args}$cmd}sub configure {my($self,$cmd,$depth)=@_;local$ENV{PERL5_CPAN_IS_RUNNING}=local$ENV{PERL5_CPANPLUS_IS_RUNNING}=$$;local$ENV{PERL5_CPANM_IS_RUNNING}=$$;my$use_default=!$self->{interactive};local$ENV{PERL_MM_USE_DEFAULT}=$use_default;local$ENV{PERL_MM_OPT}=$ENV{PERL_MM_OPT};local$ENV{PERL_MB_OPT}=$ENV{PERL_MB_OPT};unless ($self->{pod2man}){$ENV{PERL_MM_OPT}.= " INSTALLMAN1DIR=none INSTALLMAN3DIR=none";$ENV{PERL_MB_OPT}.= " --config installman1dir= --config installsiteman1dir= --config installman3dir= --config installsiteman3dir="}if ($self->{pure_perl}){$ENV{PERL_MM_OPT}.= " PUREPERL_ONLY=1";$ENV{PERL_MB_OPT}.= " --pureperl-only"}$cmd=$self->append_args($cmd,'configure')if$depth==0;local$self->{verbose}=$self->{verbose}|| $self->{interactive};$self->run_timeout($cmd,$self->{configure_timeout})}sub build {my($self,$cmd,$distname,$depth)=@_;local$ENV{PERL_MM_USE_DEFAULT}=!$self->{interactive};$cmd=$self->append_args($cmd,'build')if$depth==0;return 1 if$self->run_timeout($cmd,$self->{build_timeout});while (1){my$ans=lc$self->prompt("Building $distname failed.\nYou can s)kip, r)etry, e)xamine build log, or l)ook ?","s");return if$ans eq 's';return$self->build($cmd,$distname,$depth)if$ans eq 'r';$self->show_build_log if$ans eq 'e';$self->look if$ans eq 'l'}}sub test {my($self,$cmd,$distname,$depth)=@_;return 1 if$self->{notest};local$ENV{PERL_MM_USE_DEFAULT}=!$self->{interactive};local$ENV{NONINTERACTIVE_TESTING}=!$self->{interactive};$cmd=$self->append_args($cmd,'test')if$depth==0;return 1 if$self->run_timeout($cmd,$self->{test_timeout});if ($self->{force}){$self->diag_fail("Testing $distname failed but installing it anyway.");return 1}else {$self->diag_fail;while (1){my$ans=lc$self->prompt("Testing $distname failed.\nYou can s)kip, r)etry, f)orce install, e)xamine build log, or l)ook ?","s");return if$ans eq 's';return$self->test($cmd,$distname,$depth)if$ans eq 'r';return 1 if$ans eq 'f';$self->show_build_log if$ans eq 'e';$self->look if$ans eq 'l'}}}sub install {my($self,$cmd,$uninst_opts,$depth)=@_;if ($depth==0 && $self->{test_only}){return 1}if ($self->{sudo}){unshift @$cmd,"sudo"}if ($self->{uninstall_shadows}&&!$ENV{PERL_MM_OPT}){push @$cmd,@$uninst_opts}$cmd=$self->append_args($cmd,'install')if$depth==0;$self->run($cmd)}sub look {my$self=shift;my$shell=$ENV{SHELL};$shell ||= $ENV{COMSPEC}if WIN32;if ($shell){my$cwd=Cwd::cwd;$self->diag("Entering $cwd with $shell\n");system$shell}else {$self->diag_fail("You don't seem to have a SHELL :/")}}sub show_build_log {my$self=shift;my@pagers=($ENV{PAGER},(WIN32 ? (): ('less')),'more');my$pager;while (@pagers){$pager=shift@pagers;next unless$pager;$pager=$self->which($pager);next unless$pager;last}if ($pager){system("$pager < $self->{log}")}else {$self->diag_fail("You don't seem to have a PAGER :/")}}sub chdir {my$self=shift;Cwd::chdir(File::Spec->canonpath($_[0]))or die "$_[0]: $!"}sub configure_mirrors {my$self=shift;unless (@{$self->{mirrors}}){$self->{mirrors}=['http://www.cpan.org' ]}for (@{$self->{mirrors}}){s!^/!file:///!;s!/$!!}}sub self_upgrade {my$self=shift;$self->check_upgrade;$self->{argv}=['App::cpanminus' ];return}sub install_module {my($self,$module,$depth,$version)=@_;$self->check_libs;if ($self->{seen}{$module}++){$self->chat("Already tried $module. 
Skipping.\n");return 1}if ($self->{skip_satisfied}){my($ok,$local)=$self->check_module($module,$version || 0);if ($ok){$self->diag("You have $module ($local)\n",1);return 1}}my$dist=$self->resolve_name($module,$version);unless ($dist){my$what=$module .($version ? " ($version)" : "");$self->diag_fail("Couldn't find module or a distribution $what",1);return}if ($dist->{distvname}&& $self->{seen}{$dist->{distvname}}++){$self->chat("Already tried $dist->{distvname}. Skipping.\n");return 1}if ($self->{cmd}eq 'info'){print$self->format_dist($dist),"\n";return 1}$dist->{depth}=$depth;if ($dist->{module}){unless ($self->satisfy_version($dist->{module},$dist->{module_version},$version)){$self->diag("Found $dist->{module} $dist->{module_version} which doesn't satisfy $version.\n",1);return}my$cmp=$version ? "==" : "";my$requirement=$dist->{module_version}? "$cmp$dist->{module_version}" : 0;my($ok,$local)=$self->check_module($dist->{module},$requirement);if ($self->{skip_installed}&& $ok){$self->diag("$dist->{module} is up to date. ($local)\n",1);return 1}}if ($dist->{dist}eq 'perl'){$self->diag("skipping $dist->{pathname}\n");return 1}$self->diag("--> Working on $module\n");$dist->{dir}||= $self->fetch_module($dist);unless ($dist->{dir}){$self->diag_fail("Failed to fetch distribution $dist->{distvname}",1);return}$self->chat("Entering $dist->{dir}\n");$self->chdir($self->{base});$self->chdir($dist->{dir});if ($self->{cmd}eq 'look'){$self->look;return 1}return$self->build_stuff($module,$dist,$depth)}sub uninstall_search_path {my$self=shift;$self->{local_lib}? (local::lib->install_base_arch_path($self->{local_lib}),local::lib->install_base_perl_path($self->{local_lib})): @Config{qw(installsitearch installsitelib)}}sub uninstall_module {my ($self,$module)=@_;$self->check_libs;my@inc=$self->uninstall_search_path;my($metadata,$packlist)=$self->packlists_containing($module,\@inc);unless ($packlist){$self->diag_fail(<uninstall_target($metadata,$packlist);$self->ask_permission($module,\@uninst_files)or return;$self->uninstall_files(@uninst_files,$packlist);$self->diag("Successfully uninstalled $module\n",1);return 1}sub packlists_containing {my($self,$module,$inc)=@_;require Module::Metadata;my$metadata=Module::Metadata->new_from_module($module,inc=>$inc)or return;my$packlist;my$wanted=sub {return unless $_ eq '.packlist' && -f $_;for my$file ($self->unpack_packlist($File::Find::name)){$packlist ||= $File::Find::name if$file eq $metadata->filename}};{require File::pushd;my$pushd=File::pushd::pushd();my@search=grep -d $_,map File::Spec->catdir($_,'auto'),@$inc;File::Find::find($wanted,@search)}return$metadata,$packlist}sub uninstall_target {my($self,$metadata,$packlist)=@_;if ($self->has_shadow_install($metadata)or $self->{local_lib}){grep$self->should_unlink($_),$self->unpack_packlist($packlist)}else {$self->unpack_packlist($packlist)}}sub has_shadow_install {my($self,$metadata)=@_;my@shadow=grep defined,map Module::Metadata->new_from_module($metadata->name,inc=>[$_]),@INC;@shadow >= 2}sub should_unlink {my($self,$file)=@_;if ($self->{local_lib}){$file =~ /^\Q$self->{local_lib}\E/}else {!(grep$file =~ /^\Q$_\E/,@Config{qw(installbin installscript installman1dir installman3dir)})}}sub ask_permission {my ($self,$module,$files)=@_;$self->diag("$module contains the following files:\n\n");for my$file (@$files){$self->diag(" $file\n")}$self->diag("\n");return 'force uninstall' if$self->{force};local$self->{prompt}=1;return$self->prompt_bool("Are you sure you want to uninstall $module?",'y')}sub unpack_packlist 
{my ($self,$packlist)=@_;open my$fh,'<',$packlist or die "$packlist: $!";map {chomp;$_}<$fh>}sub uninstall_files {my ($self,@files)=@_;$self->diag("\n");for my$file (@files){$self->diag("Unlink: $file\n");unlink$file or $self->diag_fail("$!: $file")}$self->diag("\n");return 1}sub format_dist {my($self,$dist)=@_;return "$dist->{cpanid}/$dist->{filename}"}sub trim {local $_=shift;tr/\n/ /d;s/^\s*|\s*$//g;$_}sub fetch_module {my($self,$dist)=@_;$self->chdir($self->{base});for my$uri (@{$dist->{uris}}){$self->mask_output(diag_progress=>"Fetching $uri");my$filename=$dist->{filename}|| $uri;my$name=File::Basename::basename($filename);my$cancelled;my$fetch=sub {my$file;eval {local$SIG{INT}=sub {$cancelled=1;die "SIGINT\n"};$self->mirror($uri,$name);$file=$name if -e $name};$self->diag("ERROR: " .trim("$@")."\n",1)if $@ && $@ ne "SIGINT\n";return$file};my($try,$file);while ($try++ < 3){$file=$fetch->();last if$cancelled or $file;$self->mask_output(diag_fail=>"Download $uri failed. Retrying ... ")}if ($cancelled){$self->diag_fail("Download cancelled.");return}unless ($file){$self->mask_output(diag_fail=>"Failed to download $uri");next}$self->diag_ok;$dist->{local_path}=File::Spec->rel2abs($name);my$dir=$self->unpack($file,$uri,$dist);next unless$dir;if (my$save=$self->{save_dists}){my$path=$dist->{pathname}? "$save/authors/id/$dist->{pathname}" : "$save/vendor/$file";$self->chat("Copying $name to $path\n");File::Path::mkpath([File::Basename::dirname($path)],0,0777);File::Copy::copy($file,$path)or warn $!}return$dist,$dir}}sub unpack {my($self,$file,$uri,$dist)=@_;if ($self->{verify}){$self->verify_archive($file,$uri,$dist)or return}$self->chat("Unpacking $file\n");my$dir=$file =~ /\.zip/i ? $self->unzip($file): $self->untar($file);unless ($dir){$self->diag_fail("Failed to unpack $file: no directory")}return$dir}sub verify_checksums_signature {my($self,$chk_file)=@_;require Module::Signature;$self->chat("Verifying the signature of CHECKSUMS\n");my$rv=eval {local$SIG{__WARN__}=sub {};my$v=Module::Signature::_verify($chk_file);$v==Module::Signature::SIGNATURE_OK()};if ($rv){$self->chat("Verified OK!\n")}else {$self->diag_fail("Verifying CHECKSUMS signature failed: $rv\n");return}return 1}sub verify_archive {my($self,$file,$uri,$dist)=@_;unless ($dist->{cpanid}){$self->chat("Archive '$file' does not seem to be from PAUSE. Skip verification.\n");return 1}(my$mirror=$uri)=~ s!/authors/id.*$!!;(my$chksum_uri=$uri)=~ s!/[^/]*$!/CHECKSUMS!;my$chk_file=$self->source_for($mirror)."/$dist->{cpanid}.CHECKSUMS";$self->mask_output(diag_progress=>"Fetching $chksum_uri");$self->mirror($chksum_uri,$chk_file);unless (-e $chk_file){$self->diag_fail("Fetching $chksum_uri failed.\n");return}$self->diag_ok;$self->verify_checksums_signature($chk_file)or return;$self->verify_checksum($file,$chk_file)}sub verify_checksum {my($self,$file,$chk_file)=@_;$self->chat("Verifying the SHA1 for $file\n");open my$fh,"<$chk_file" or die "$chk_file: $!";my$data=join '',<$fh>;$data =~ s/\015?\012/\n/g;require Safe;my$chksum=Safe->new->reval($data);if (!ref$chksum or ref$chksum ne 'HASH'){$self->diag_fail("! 
Checksum file downloaded from $chk_file is broken.\n");return}if (my$sha=$chksum->{$file}{sha256}){my$hex=$self->sha1_for($file);if ($hex eq $sha){$self->chat("Checksum for $file: Verified!\n")}else {$self->diag_fail("Checksum mismatch for $file\n");return}}else {$self->chat("Checksum for $file not found in CHECKSUMS.\n");return}}sub sha1_for {my($self,$file)=@_;require Digest::SHA;open my$fh,"<",$file or die "$file: $!";my$dg=Digest::SHA->new(256);my($data);while (read($fh,$data,4096)){$dg->add($data)}return$dg->hexdigest}sub verify_signature {my($self,$dist)=@_;$self->diag_progress("Verifying the SIGNATURE file");my$out=`$self->{cpansign} -v --skip 2>&1`;$self->log($out);if ($out =~ /Signature verified OK/){$self->diag_ok("Verified OK");return 1}else {$self->diag_fail("SIGNATURE verificaion for $dist->{filename} failed\n");return}}sub resolve_name {my($self,$module,$version)=@_;if ($module =~ /(?:^git:|\.git(?:@.+)?$)/){return$self->git_uri($module)}if ($module =~ /^(ftp|https?|file):/){if ($module =~ m!authors/id/(.*)!){return$self->cpan_dist($1,$module)}else {return {uris=>[$module ]}}}if ($module =~ m!^[\./]! && -d $module){return {source=>'local',dir=>Cwd::abs_path($module),}}if (-f $module){return {source=>'local',uris=>["file://" .Cwd::abs_path($module)],}}if ($module =~ s!^cpan:///distfile/!!){return$self->cpan_dist($module)}if ($module =~ m!^(?:[A-Z]/[A-Z]{2}/)?([A-Z]{2}[\-A-Z0-9]*/.*)$!){return$self->cpan_dist($1)}return$self->search_module($module,$version)}sub cpan_module {my($self,$module,$dist,$version)=@_;my$dist=$self->cpan_dist($dist);$dist->{module}=$module;$dist->{module_version}=$version if$version && $version ne 'undef';return$dist}sub cpan_dist {my($self,$dist,$url)=@_;$dist =~ s!^([A-Z]{2})!substr($1,0,1)."/".substr($1,0,2)."/".$1!e;require CPAN::DistnameInfo;my$d=CPAN::DistnameInfo->new($dist);if ($url){$url=[$url ]unless ref$url eq 'ARRAY'}else {my$id=$d->cpanid;my$fn=substr($id,0,1)."/" .substr($id,0,2)."/" .$id ."/" .$d->filename;my@mirrors=@{$self->{mirrors}};my@urls=map "$_/authors/id/$fn",@mirrors;$url=\@urls,}return {$d->properties,source=>'cpan',uris=>$url,}}sub git_uri {my ($self,$uri)=@_;($uri,my$commitish)=split /(?<=\.git)@/i,$uri,2;my$dir=File::Temp::tempdir(CLEANUP=>1);$self->mask_output(diag_progress=>"Cloning $uri");$self->run(['git','clone',$uri,$dir ]);unless (-e "$dir/.git"){$self->diag_fail("Failed cloning git repository $uri",1);return}if ($commitish){require File::pushd;my$dir=File::pushd::pushd($dir);unless ($self->run(['git','checkout',$commitish ])){$self->diag_fail("Failed to checkout '$commitish' in git repository $uri\n");return}}$self->diag_ok;return {source=>'local',dir=>$dir,}}sub setup_module_build_patch {my$self=shift;open my$out,">$self->{base}/ModuleBuildSkipMan.pm" or die $!;print$out <{search_inc}||= do {if (defined$::Bin){[grep!/^\Q$::Bin\E\/..\/(?:fat)?lib$/,@INC]}else {[@INC]}}}sub check_module {my($self,$mod,$want_ver)=@_;require Module::Metadata;my$meta=Module::Metadata->new_from_module($mod,inc=>$self->search_inc)or return 0,undef;my$version=$meta->version;if ($self->{self_contained}&& $self->loaded_from_perl_lib($meta)){$version=$self->core_version_for($mod);return 0,undef if$version && $version==-1}$self->{local_versions}{$mod}=$version;if ($self->is_deprecated($meta)){return 0,$version}elsif ($self->satisfy_version($mod,$version,$want_ver)){return 1,($version || 'undef')}else {return 0,$version}}sub satisfy_version {my($self,$mod,$version,$want_ver)=@_;$want_ver='0' unless defined($want_ver)&& length($want_ver);require 
CPAN::Meta::Requirements;my$requirements=CPAN::Meta::Requirements->new;$requirements->add_string_requirement($mod,$want_ver);$requirements->accepts_module($mod,$version)}sub unsatisfy_how {my($self,$ver,$want_ver)=@_;if ($want_ver =~ /^[v0-9\.\_]+$/){return "$ver < $want_ver"}else {return "$ver doesn't satisfy $want_ver"}}sub is_deprecated {my($self,$meta)=@_;my$deprecated=eval {require Module::CoreList;Module::CoreList::is_deprecated($meta->{module})};return$deprecated && $self->loaded_from_perl_lib($meta)}sub loaded_from_perl_lib {my($self,$meta)=@_;require Config;my@dirs=qw(archlibexp privlibexp);if ($self->{self_contained}&&!$self->{exclude_vendor}&& $Config{vendorarch}){unshift@dirs,qw(vendorarch vendorlibexp)}for my$dir (@dirs){my$confdir=$Config{$dir};if ($confdir eq substr($meta->filename,0,length($confdir))){return 1}}return}sub should_install {my($self,$mod,$ver)=@_;$self->chat("Checking if you have $mod $ver ... ");my($ok,$local)=$self->check_module($mod,$ver);if ($ok){$self->chat("Yes ($local)\n")}elsif ($local){$self->chat("No (" .$self->unsatisfy_how($local,$ver).")\n")}else {$self->chat("No\n")}return$mod unless$ok;return}sub check_perl_version {my($self,$version)=@_;require CPAN::Meta::Requirements;my$req=CPAN::Meta::Requirements->from_string_hash({perl=>$version });$req->accepts_module(perl=>$])}sub install_deps {my($self,$dir,$depth,@deps)=@_;my(@install,%seen,@fail);for my$dep (@deps){next if$seen{$dep->module};if ($dep->module eq 'perl'){if ($dep->is_requirement &&!$self->check_perl_version($dep->version)){$self->diag("Needs perl @{[$dep->version]}, you have $]\n");push@fail,'perl'}}elsif ($self->should_install($dep->module,$dep->version)){push@install,$dep;$seen{$dep->module}=1}}if (@install){$self->diag("==> Found dependencies: " .join(", ",map $_->module,@install)."\n")}for my$dep (@install){$self->install_module($dep->module,$depth + 1,$dep->version)}$self->chdir($self->{base});$self->chdir($dir)if$dir;if ($self->{scandeps}){return 1}my@not_ok=$self->unsatisfied_deps(@deps);if (@not_ok){return 0,\@not_ok}else {return 1}}sub unsatisfied_deps {my($self,@deps)=@_;require CPAN::Meta::Check;require CPAN::Meta::Requirements;my$reqs=CPAN::Meta::Requirements->new;for my$dep (grep $_->is_requirement,@deps){$reqs->add_string_requirement($dep->module=>$dep->requires_version || '0')}my$ret=CPAN::Meta::Check::check_requirements($reqs,'requires',$self->{search_inc});grep defined,values %$ret}sub install_deps_bailout {my($self,$target,$dir,$depth,@deps)=@_;my($ok,$fail)=$self->install_deps($dir,$depth,@deps);if (!$ok){$self->diag_fail("Installing the dependencies failed: " .join(", ",@$fail),1);unless ($self->prompt_bool("Do you want to continue building $target anyway?","n")){$self->diag_fail("Bailing out the installation for $target.",1);return}}return 1}sub build_stuff {my($self,$stuff,$dist,$depth)=@_;if ($self->{verify}&& -e 'SIGNATURE'){$self->verify_signature($dist)or return}require CPAN::Meta;my($meta_file)=grep -f,qw(META.json META.yml);if ($meta_file){$self->chat("Checking configure dependencies from $meta_file\n");$dist->{cpanmeta}=eval {CPAN::Meta->load_file($meta_file)}}elsif ($dist->{dist}&& $dist->{version}){$self->chat("META.yml/json not found. Creating skeleton for it.\n");$dist->{cpanmeta}=CPAN::Meta->new({name=>$dist->{dist},version=>$dist->{version}})}$dist->{meta}=$dist->{cpanmeta}? 
$dist->{cpanmeta}->as_struct : {};my@config_deps;if ($dist->{cpanmeta}){push@config_deps,App::cpanminus::Dependency->from_prereqs($dist->{cpanmeta}->effective_prereqs,['configure'],$self->{install_types},)}if (-e 'Build.PL' &&!$self->should_use_mm($dist->{dist})&&!@config_deps){push@config_deps,App::cpanminus::Dependency->from_versions({'Module::Build'=>'0.38' },'configure',)}$self->merge_with_cpanfile($dist,\@config_deps);$self->upgrade_toolchain(\@config_deps);my$target=$dist->{meta}{name}? "$dist->{meta}{name}-$dist->{meta}{version}" : $dist->{dir};{$self->install_deps_bailout($target,$dist->{dir},$depth,@config_deps)or return}$self->diag_progress("Configuring $target");my$configure_state=$self->configure_this($dist,$depth);$self->diag_ok($configure_state->{configured_ok}? "OK" : "N/A");if ($dist->{cpanmeta}&& $dist->{source}eq 'cpan'){$dist->{provides}=$dist->{cpanmeta}{provides}|| $self->extract_packages($dist->{cpanmeta},".")}my$root_target=(($self->{installdeps}or $self->{showdeps})and $depth==0);$dist->{want_phases}=$self->{notest}&&!$root_target ? [qw(build runtime)]: [qw(build test runtime)];push @{$dist->{want_phases}},'develop' if$self->{with_develop}&& $depth==0;push @{$dist->{want_phases}},'configure' if$self->{with_configure}&& $depth==0;my@deps=$self->find_prereqs($dist);my$module_name=$self->find_module_name($configure_state)|| $dist->{meta}{name};$module_name =~ s/-/::/g;if ($self->{showdeps}){for my$dep (@config_deps,@deps){print$dep->module,($dep->version ? ("~".$dep->version): ""),"\n"}return 1}my$distname=$dist->{meta}{name}? "$dist->{meta}{name}-$dist->{meta}{version}" : $stuff;my$walkup;if ($self->{scandeps}){$walkup=$self->scandeps_append_child($dist)}$self->install_deps_bailout($distname,$dist->{dir},$depth,@deps)or return;if ($self->{scandeps}){unless ($configure_state->{configured_ok}){my$diag=<{scandeps_tree}};$diag .= "!\n" .join("",map "! * $_->[0]{module}\n",@tree[0..$#tree-1])if@tree}$self->diag("!\n$diag!\n",1)}$walkup->();return 1}if ($self->{installdeps}&& $depth==0){if ($configure_state->{configured_ok}){$self->diag("<== Installed dependencies for $stuff. Finishing.\n");return 1}else {$self->diag("! Configuring $distname failed. See $self->{log} for details.\n",1);return}}my$installed;if ($configure_state->{use_module_build}&& -e 'Build' && -f _){$self->diag_progress("Building " .($self->{notest}? "" : "and testing ").$distname);$self->build([$self->{perl},"./Build" ],$distname,$depth)&& $self->test([$self->{perl},"./Build","test" ],$distname,$depth)&& $self->install([$self->{perl},"./Build","install" ],["--uninst",1 ],$depth)&& $installed++}elsif ($self->{make}&& -e 'Makefile'){$self->diag_progress("Building " .($self->{notest}? "" : "and testing ").$distname);$self->build([$self->{make}],$distname,$depth)&& $self->test([$self->{make},"test" ],$distname,$depth)&& $self->install([$self->{make},"install" ],["UNINST=1" ],$depth)&& $installed++}else {my$why;my$configure_failed=$configure_state->{configured}&&!$configure_state->{configured_ok};if ($configure_failed){$why="Configure failed for $distname."}elsif ($self->{make}){$why="The distribution doesn't have a proper Makefile.PL/Build.PL"}else {$why="Can't configure the distribution. 
You probably need to have 'make'."}$self->diag_fail("$why See $self->{log} for details.",1);return}if ($installed && $self->{test_only}){$self->diag_ok;$self->diag("Successfully tested $distname\n",1)}elsif ($installed){my$local=$self->{local_versions}{$dist->{module}|| ''};my$version=$dist->{module_version}|| $dist->{meta}{version}|| $dist->{version};my$reinstall=$local && ($local eq $version);my$action=$local &&!$reinstall ? $self->numify_ver($version)< $self->numify_ver($local)? "downgraded" : "upgraded" : undef;my$how=$reinstall ? "reinstalled $distname" : $local ? "installed $distname ($action from $local)" : "installed $distname" ;my$msg="Successfully $how";$self->diag_ok;$self->diag("$msg\n",1);$self->{installed_dists}++;$self->save_meta($stuff,$dist,$module_name,\@config_deps,\@deps);return 1}else {my$what=$self->{test_only}? "Testing" : "Installing";$self->diag_fail("$what $stuff failed. See $self->{log} for details. Retry with --force to force install it.",1);return}}sub perl_requirements {my($self,@requires)=@_;my@perl;for my$requires (grep defined,@requires){if (exists$requires->{perl}){push@perl,App::cpanminus::Dependency->new(perl=>$requires->{perl})}}return@perl}sub should_use_mm {my($self,$dist)=@_;my%should_use_mm=map {$_=>1}qw(version ExtUtils-ParseXS ExtUtils-Install ExtUtils-Manifest);$should_use_mm{$dist}}sub configure_this {my($self,$dist,$depth)=@_;if (-e $self->{cpanfile_path}&& $self->{installdeps}&& $depth==0){require Module::CPANfile;$dist->{cpanfile}=eval {Module::CPANfile->load($self->{cpanfile_path})};$self->diag_fail($@,1)if $@;return {configured=>1,configured_ok=>!!$dist->{cpanfile},use_module_build=>0,}}if ($self->{skip_configure}){my$eumm=-e 'Makefile';my$mb=-e 'Build' && -f _;return {configured=>1,configured_ok=>$eumm || $mb,use_module_build=>$mb,}}my$state={};my$try_eumm=sub {if (-e 'Makefile.PL'){$self->chat("Running Makefile.PL\n");if ($self->configure([$self->{perl},"Makefile.PL" ],$depth)){$state->{configured_ok}=-e 'Makefile'}$state->{configured}++}};my$try_mb=sub {if (-e 'Build.PL'){$self->chat("Running Build.PL\n");if ($self->configure([$self->{perl},"Build.PL" ],$depth)){$state->{configured_ok}=-e 'Build' && -f _}$state->{use_module_build}++;$state->{configured}++}};my@try;if ($dist->{dist}&& $self->should_use_mm($dist->{dist})){@try=($try_eumm,$try_mb)}else {@try=($try_mb,$try_eumm)}for my$try (@try){$try->();last if$state->{configured_ok}}unless ($state->{configured_ok}){while (1){my$ans=lc$self->prompt("Configuring $dist->{dist} failed.\nYou can s)kip, r)etry, e)xamine build log, or l)ook ?","s");last if$ans eq 's';return$self->configure_this($dist,$depth)if$ans eq 'r';$self->show_build_log if$ans eq 'e';$self->look if$ans eq 'l'}}return$state}sub find_module_name {my($self,$state)=@_;return unless$state->{configured_ok};if ($state->{use_module_build}&& -e "_build/build_params"){my$params=do {open my$in,"_build/build_params";$self->safe_eval(join "",<$in>)};return eval {$params->[2]{module_name}}|| undef}elsif (-e "Makefile"){open my$mf,"Makefile";while (<$mf>){if (/^\#\s+NAME\s+=>\s+(.*)/){return$self->safe_eval($1)}}}return}sub list_files {my$self=shift;if (-e 'MANIFEST'){require ExtUtils::Manifest;my$manifest=eval {ExtUtils::Manifest::manifind()}|| {};return sort {lc$a cmp lc$b}keys %$manifest}else {require File::Find;my@files;my$finder=sub {my$name=$File::Find::name;$name =~ s!\.[/\\]!!;push@files,$name};File::Find::find($finder,".");return sort {lc$a cmp lc$b}@files}}sub extract_packages {my($self,$meta,$dir)=@_;my$try=sub 
{my$file=shift;return 0 if$file =~ m!^(?:x?t|inc|local|perl5|fatlib|_build)/!;return 1 unless$meta->{no_index};return 0 if grep {$file =~ m!^$_/!}@{$meta->{no_index}{directory}|| []};return 0 if grep {$file eq $_}@{$meta->{no_index}{file}|| []};return 1};require Parse::PMFile;my@files=grep {/\.pm(?:\.PL)?$/ && $try->($_)}$self->list_files;my$provides={};for my$file (@files){my$parser=Parse::PMFile->new($meta,{UNSAFE=>1,ALLOW_DEV_VERSION=>1 });my$packages=$parser->parse($file);while (my($package,$meta)=each %$packages){$provides->{$package}||= {file=>$meta->{infile},($meta->{version}eq 'undef')? (): (version=>$meta->{version}),}}}return$provides}sub save_meta {my($self,$module,$dist,$module_name,$config_deps,$build_deps)=@_;return unless$dist->{distvname}&& $dist->{source}eq 'cpan';my$base=($ENV{PERL_MM_OPT}|| '')=~ /INSTALL_BASE=/ ? ($self->install_base($ENV{PERL_MM_OPT})."/lib/perl5"): $Config{sitelibexp};my$provides=$dist->{provides};File::Path::mkpath("blib/meta",0,0777);my$local={name=>$module_name,target=>$module,version=>exists$provides->{$module_name}? ($provides->{$module_name}{version}|| $dist->{version}): $dist->{version},dist=>$dist->{distvname},pathname=>$dist->{pathname},provides=>$provides,};require JSON::PP;open my$fh,">","blib/meta/install.json" or die $!;print$fh JSON::PP::encode_json($local);if (-e "MYMETA.json"){File::Copy::copy("MYMETA.json","blib/meta/MYMETA.json")}my@cmd=(($self->{sudo}? 'sudo' : ()),$^X,'-MExtUtils::Install=install','-e',qq[install({ 'blib/meta' => '$base/$Config{archname}/.meta/$dist->{distvname}' })],);$self->run(\@cmd)}sub _merge_hashref {my($self,@hashrefs)=@_;my%hash;for my$h (@hashrefs){%hash=(%hash,%$h)}return \%hash}sub install_base {my($self,$mm_opt)=@_;$mm_opt =~ /INSTALL_BASE=(\S+)/ and return $1;die "Your PERL_MM_OPT doesn't contain INSTALL_BASE"}sub safe_eval {my($self,$code)=@_;eval$code}sub configure_features {my($self,$dist,@features)=@_;map $_->identifier,grep {$self->effective_feature($dist,$_)}@features}sub effective_feature {my($self,$dist,$feature)=@_;if ($dist->{depth}==0){my$value=$self->{features}{$feature->identifier};return$value if defined$value;return 1 if$self->{features}{__all}}if ($self->{interactive}){require CPAN::Meta::Requirements;$self->diag("[@{[ $feature->description ]}]\n",1);my$req=CPAN::Meta::Requirements->new;for my$phase (@{$dist->{want_phases}}){for my$type (@{$self->{install_types}}){$req->add_requirements($feature->prereqs->requirements_for($phase,$type))}}my$reqs=$req->as_string_hash;my@missing;for my$module (keys %$reqs){if ($self->should_install($module,$req->{$module})){push@missing,$module}}if (@missing){my$howmany=@missing;$self->diag("==> Found missing dependencies: " .join(", ",@missing)."\n",1);local$self->{prompt}=1;return$self->prompt_bool("Install the $howmany optional module(s)?","y")}}return}sub find_prereqs {my($self,$dist)=@_;my@deps=$self->extract_meta_prereqs($dist);if ($dist->{module}=~ /^Bundle::/i){push@deps,$self->bundle_deps($dist)}$self->merge_with_cpanfile($dist,\@deps);return@deps}sub merge_with_cpanfile {my($self,$dist,$deps)=@_;if ($self->{cpanfile_requirements}&&!$dist->{cpanfile}){for my$dep (@$deps){$dep->merge_with($self->{cpanfile_requirements})}}}sub extract_meta_prereqs {my($self,$dist)=@_;if ($dist->{cpanfile}){my@features=$self->configure_features($dist,$dist->{cpanfile}->features);my$prereqs=$dist->{cpanfile}->prereqs_with(@features);$self->{cpanfile_requirements}=$prereqs->merged_requirements($dist->{want_phases},['requires']);return 
App::cpanminus::Dependency->from_prereqs($prereqs,$dist->{want_phases},$self->{install_types})}require CPAN::Meta;my@deps;my($meta_file)=grep -f,qw(MYMETA.json MYMETA.yml);if ($meta_file){$self->chat("Checking dependencies from $meta_file ...\n");my$mymeta=eval {CPAN::Meta->load_file($meta_file,{lazy_validation=>1 })};if ($mymeta){$dist->{meta}{name}=$mymeta->name;$dist->{meta}{version}=$mymeta->version;return$self->extract_prereqs($mymeta,$dist)}}if (-e '_build/prereqs'){$self->chat("Checking dependencies from _build/prereqs ...\n");my$prereqs=do {open my$in,"_build/prereqs";$self->safe_eval(join "",<$in>)};my$meta=CPAN::Meta->new({name=>$dist->{meta}{name},version=>$dist->{meta}{version},%$prereqs },{lazy_validation=>1 },);@deps=$self->extract_prereqs($meta,$dist)}elsif (-e 'Makefile'){$self->chat("Finding PREREQ from Makefile ...\n");open my$mf,"Makefile";while (<$mf>){if (/^\#\s+PREREQ_PM => \{\s*(.*?)\s*\}/){my@all;my@pairs=split ', ',$1;for (@pairs){my ($pkg,$v)=split '=>',$_;push@all,[$pkg,$v ]}my$list=join ", ",map {"'$_->[0]' => $_->[1]"}@all;my$prereq=$self->safe_eval("no strict; +{ $list }");push@deps,App::cpanminus::Dependency->from_versions($prereq)if$prereq;last}}}return@deps}sub bundle_deps {my($self,$dist)=@_;my@files;File::Find::find({wanted=>sub {push@files,File::Spec->rel2abs($_)if /\.pm/i},no_chdir=>1,},'.');my@deps;for my$file (@files){open my$pod,"<",$file or next;my$in_contents;while (<$pod>){if (/^=head\d\s+CONTENTS/){$in_contents=1}elsif (/^=/){$in_contents=0}elsif ($in_contents){/^(\S+)\s*(\S+)?/ and push@deps,App::cpanminus::Dependency->new($1,$self->maybe_version($2))}}}return@deps}sub maybe_version {my($self,$string)=@_;return$string && $string =~ /^\.?\d/ ? $string : undef}sub extract_prereqs {my($self,$meta,$dist)=@_;my@features=$self->configure_features($dist,$meta->features);my$prereqs=$self->soften_makemaker_prereqs($meta->effective_prereqs(\@features)->clone);return App::cpanminus::Dependency->from_prereqs($prereqs,$dist->{want_phases},$self->{install_types})}sub soften_makemaker_prereqs {my($self,$prereqs)=@_;return$prereqs unless -e "inc/Module/Install.pm";for my$phase (qw(build test runtime)){my$reqs=$prereqs->requirements_for($phase,'requires');if ($reqs->requirements_for_module('ExtUtils::MakeMaker')){$reqs->clear_requirement('ExtUtils::MakeMaker');$reqs->add_minimum('ExtUtils::MakeMaker'=>0)}}$prereqs}sub cleanup_workdirs {my$self=shift;my$expire=time - 24 * 60 * 60 * $self->{auto_cleanup};my@targets;opendir my$dh,"$self->{home}/work";while (my$e=readdir$dh){next if$e !~ /^(\d+)\.\d+$/;my$time=$1;if ($time < $expire){push@targets,"$self->{home}/work/$e"}}if (@targets){if (@targets >= 64){$self->diag("Expiring " .scalar(@targets)." work directories. This might take a while...\n")}else {$self->chat("Expiring " .scalar(@targets)." 
work directories.\n")}File::Path::rmtree(\@targets,0,0)}}sub scandeps_append_child {my($self,$dist)=@_;my$new_node=[$dist,[]];my$curr_node=$self->{scandeps_current}|| [undef,$self->{scandeps_tree}];push @{$curr_node->[1]},$new_node;$self->{scandeps_current}=$new_node;return sub {$self->{scandeps_current}=$curr_node}}sub dump_scandeps {my$self=shift;if ($self->{format}eq 'tree'){$self->walk_down(sub {my($dist,$depth)=@_;if ($depth==0){print "$dist->{distvname}\n"}else {print " " x ($depth - 1);print "\\_ $dist->{distvname}\n"}},1)}elsif ($self->{format}=~ /^dists?$/){$self->walk_down(sub {my($dist,$depth)=@_;print$self->format_dist($dist),"\n"},0)}elsif ($self->{format}eq 'json'){require JSON::PP;print JSON::PP::encode_json($self->{scandeps_tree})}elsif ($self->{format}eq 'yaml'){require YAML;print YAML::Dump($self->{scandeps_tree})}else {$self->diag("Unknown format: $self->{format}\n")}}sub walk_down {my($self,$cb,$pre)=@_;$self->_do_walk_down($self->{scandeps_tree},$cb,0,$pre)}sub _do_walk_down {my($self,$children,$cb,$depth,$pre)=@_;for my$node (@$children){$cb->($node->[0],$depth)if$pre;$self->_do_walk_down($node->[1],$cb,$depth + 1,$pre);$cb->($node->[0],$depth)unless$pre}}sub DESTROY {my$self=shift;$self->{at_exit}->($self)if$self->{at_exit}}sub shell_quote {my($self,@stuff)=@_;if (WIN32){join ' ',map {/^${quote}.+${quote}$/ ? $_ : ($quote .$_ .$quote)}@stuff}else {String::ShellQuote::shell_quote_best_effort(@stuff)}}sub which {my($self,$name)=@_;if (File::Spec->file_name_is_absolute($name)){if (-x $name &&!-d _){return$name}}my$exe_ext=$Config{_exe};for my$dir (File::Spec->path){my$fullpath=File::Spec->catfile($dir,$name);if ((-x $fullpath || -x ($fullpath .= $exe_ext))&&!-d _){if ($fullpath =~ /\s/){$fullpath=$self->shell_quote($fullpath)}return$fullpath}}return}sub get {my($self,$uri)=@_;if ($uri =~ /^file:/){$self->file_get($uri)}else {$self->{_backends}{get}->(@_)}}sub mirror {my($self,$uri,$local)=@_;if ($uri =~ /^file:/){$self->file_mirror($uri,$local)}else {$self->{_backends}{mirror}->(@_)}}sub untar {$_[0]->{_backends}{untar}->(@_)};sub unzip {$_[0]->{_backends}{unzip}->(@_)};sub uri_to_file {my($self,$uri)=@_;if ($uri =~ s!file:/+!!){$uri="/$uri" unless$uri =~ m![a-zA-Z]:!}return$uri}sub file_get {my($self,$uri)=@_;my$file=$self->uri_to_file($uri);open my$fh,"<$file" or return;join '',<$fh>}sub file_mirror {my($self,$uri,$path)=@_;my$file=$self->uri_to_file($uri);my$source_mtime=(stat$file)[9];return if -e $path && (stat$path)[9]>= $source_mtime;File::Copy::copy($file,$path);utime$source_mtime,$source_mtime,$path}sub has_working_lwp {my($self,$mirrors)=@_;my$https=grep /^https:/,@$mirrors;eval {require LWP::UserAgent;LWP::UserAgent->VERSION(5.802);require LWP::Protocol::https if$https;1}}sub init_tools {my$self=shift;return if$self->{initialized}++;if ($self->{make}=$self->which($Config{make})){$self->chat("You have make $self->{make}\n")}if ($self->{try_lwp}&& $self->has_working_lwp($self->{mirrors})){$self->chat("You have LWP $LWP::VERSION\n");my$ua=sub {LWP::UserAgent->new(parse_head=>0,env_proxy=>1,agent=>$self->agent,timeout=>30,@_,)};$self->{_backends}{get}=sub {my$self=shift;my$res=$ua->()->request(HTTP::Request->new(GET=>$_[0]));return unless$res->is_success;return$res->decoded_content};$self->{_backends}{mirror}=sub {my$self=shift;my$res=$ua->()->mirror(@_);die$res->content if$res->code==501;$res->code}}elsif ($self->{try_wget}and my$wget=$self->which('wget')){$self->chat("You have 
$wget\n");my@common=('--user-agent',$self->agent,'--retry-connrefused',($self->{verbose}? (): ('-q')),);$self->{_backends}{get}=sub {my($self,$uri)=@_;$self->safeexec(my$fh,$wget,$uri,@common,'-O','-')or die "wget $uri: $!";local $/;<$fh>};$self->{_backends}{mirror}=sub {my($self,$uri,$path)=@_;$self->safeexec(my$fh,$wget,$uri,@common,'-O',$path)or die "wget $uri: $!";local $/;<$fh>}}elsif ($self->{try_curl}and my$curl=$self->which('curl')){$self->chat("You have $curl\n");my@common=('--location','--user-agent',$self->agent,($self->{verbose}? (): '-s'),);$self->{_backends}{get}=sub {my($self,$uri)=@_;$self->safeexec(my$fh,$curl,@common,$uri)or die "curl $uri: $!";local $/;<$fh>};$self->{_backends}{mirror}=sub {my($self,$uri,$path)=@_;$self->safeexec(my$fh,$curl,@common,$uri,'-#','-o',$path)or die "curl $uri: $!";local $/;<$fh>}}else {require HTTP::Tiny;$self->chat("Falling back to HTTP::Tiny $HTTP::Tiny::VERSION\n");my%common=(agent=>$self->agent,);$self->{_backends}{get}=sub {my$self=shift;my$res=HTTP::Tiny->new(%common)->get($_[0]);return unless$res->{success};return$res->{content}};$self->{_backends}{mirror}=sub {my$self=shift;my$res=HTTP::Tiny->new(%common)->mirror(@_);return$res->{status}}}my$tar=$self->which('tar');my$tar_ver;my$maybe_bad_tar=sub {WIN32 || BAD_TAR || (($tar_ver=`$tar --version 2>/dev/null`)=~ /GNU.*1\.13/i)};if ($tar &&!$maybe_bad_tar->()){chomp$tar_ver;$self->chat("You have $tar: $tar_ver\n");$self->{_backends}{untar}=sub {my($self,$tarfile)=@_;my$xf=($self->{verbose}? 'v' : '')."xf";my$ar=$tarfile =~ /bz2$/ ? 'j' : 'z';my($root,@others)=`$tar ${ar}tf $tarfile` or return undef;FILE: {chomp$root;$root =~ s!^\./!!;$root =~ s{^(.+?)/.*$}{$1};if (!length($root)){$root=shift(@others);redo FILE if$root}}system "$tar $ar$xf $tarfile";return$root if -d $root;$self->diag_fail("Bad archive: $tarfile");return undef}}elsif ($tar and my$gzip=$self->which('gzip')and my$bzip2=$self->which('bzip2')){$self->chat("You have $tar, $gzip and $bzip2\n");$self->{_backends}{untar}=sub {my($self,$tarfile)=@_;my$x="x" .($self->{verbose}? 'v' : '')."f -";my$ar=$tarfile =~ /bz2$/ ? $bzip2 : $gzip;my($root,@others)=`$ar -dc $tarfile | $tar tf -` or return undef;FILE: {chomp$root;$root =~ s!^\./!!;$root =~ s{^(.+?)/.*$}{$1};if (!length($root)){$root=shift(@others);redo FILE if$root}}system "$ar -dc $tarfile | $tar $x";return$root if -d $root;$self->diag_fail("Bad archive: $tarfile");return undef}}elsif (eval {require Archive::Tar}){$self->chat("Falling back to Archive::Tar $Archive::Tar::VERSION\n");$self->{_backends}{untar}=sub {my$self=shift;my$t=Archive::Tar->new($_[0]);my($root,@others)=$t->list_files;FILE: {$root =~ s!^\./!!;$root =~ s{^(.+?)/.*$}{$1};if (!length($root)){$root=shift(@others);redo FILE if$root}}$t->extract;return -d $root ? $root : undef}}else {$self->{_backends}{untar}=sub {die "Failed to extract $_[1] - You need to have tar or Archive::Tar installed.\n"}}if (my$unzip=$self->which('unzip')){$self->chat("You have $unzip\n");$self->{_backends}{unzip}=sub {my($self,$zipfile)=@_;my$opt=$self->{verbose}? 
'' : '-q';my(undef,$root,@others)=`$unzip -t $zipfile` or return undef;chomp$root;$root =~ s{^\s+testing:\s+([^/]+)/.*?\s+OK$}{$1};system "$unzip $opt $zipfile";return$root if -d $root;$self->diag_fail("Bad archive: [$root] $zipfile");return undef}}else {$self->{_backends}{unzip}=sub {eval {require Archive::Zip}or die "Failed to extract $_[1] - You need to have unzip or Archive::Zip installed.\n";my($self,$file)=@_;my$zip=Archive::Zip->new();my$status;$status=$zip->read($file);$self->diag_fail("Read of file[$file] failed")if$status!=Archive::Zip::AZ_OK();my@members=$zip->members();for my$member (@members){my$af=$member->fileName();next if ($af =~ m!^(/|\.\./)!);$status=$member->extractToFileNamed($af);$self->diag_fail("Extracting of file[$af] from zipfile[$file failed")if$status!=Archive::Zip::AZ_OK()}my ($root)=$zip->membersMatching(qr<^[^/]+/$>);$root &&= $root->fileName;return -d $root ? $root : undef}}}sub safeexec {my$self=shift;my$rdr=$_[0]||= Symbol::gensym();if (WIN32){my$cmd=$self->shell_quote(@_[1..$#_]);return open($rdr,"$cmd |")}if (my$pid=open($rdr,'-|')){return$pid}elsif (defined$pid){exec(@_[1 .. $#_ ]);exit 1}else {return}}sub mask_uri_passwords {my($self,@strings)=@_;s{ (https?://) ([^:/]+) : [^@/]+ @ }{$1$2:********@}gx for@strings;return@strings}1; - It appears your cpanm executable was installed via `perlbrew install-cpanm`. - cpanm --self-upgrade won't upgrade the version of cpanm you're running. - - Run the following command to get it upgraded. - - perlbrew install-cpanm - - DIE - You are running cpanm from the path where your current perl won't install executables to. - Because of that, cpanm --self-upgrade won't upgrade the version of cpanm you're running. - - cpanm path : $0 - Install path : $Config{installsitebin} - - It means you either installed cpanm globally with system perl, or use distro packages such - as rpm or apt-get, and you have to use them again to upgrade cpanm. - DIE - Usage: cpanm [options] Module [...] - - Try `cpanm --help` or `man cpanm` for more options. - USAGE - Usage: cpanm [options] Module [...] - - Options: - -v,--verbose Turns on chatty output - -q,--quiet Turns off the most output - --interactive Turns on interactive configure (required for Task:: modules) - -f,--force force install - -n,--notest Do not run unit tests - --test-only Run tests only, do not install - -S,--sudo sudo to run install commands - --installdeps Only install dependencies - --showdeps Only display direct dependencies - --reinstall Reinstall the distribution even if you already have the latest version installed - --mirror Specify the base URL for the mirror (e.g. http://cpan.cpantesters.org/) - --mirror-only Use the mirror's index file instead of the CPAN Meta DB - -M,--from Use only this mirror base URL and its index file - --prompt Prompt when configure/build/test fails - -l,--local-lib Specify the install base to install modules - -L,--local-lib-contained Specify the install base to install all non-core modules - --self-contained Install all non-core modules, even if they're already installed. - --auto-cleanup Number of days that cpanm's work directories expire in. 
Defaults to 7 - - Commands: - --self-upgrade upgrades itself - --info Displays distribution info on CPAN - --look Opens the distribution with your SHELL - -U,--uninstall Uninstalls the modules (EXPERIMENTAL) - -V,--version Displays software version - - Examples: - - cpanm Test::More # install Test::More - cpanm MIYAGAWA/Plack-0.99_05.tar.gz # full distribution path - cpanm http://example.org/LDS/CGI.pm-3.20.tar.gz # install from URL - cpanm ~/dists/MyCompany-Enterprise-1.00.tar.gz # install from a local file - cpanm --interactive Task::Kensho # Configure interactively - cpanm . # install from local directory - cpanm --installdeps . # install all the deps for the current directory - cpanm -L extlib Plack # install Plack and all non-core deps into extlib - cpanm --mirror http://cpan.cpantesters.org/ DBI # use the fast-syncing mirror - cpanm -M https://cpan.metacpan.org App::perlbrew # use only this secure mirror and its index - - You can also specify the default options in PERL_CPANM_OPT environment variable in the shell rc: - - export PERL_CPANM_OPT="--prompt --reinstall -l ~/perl --mirror http://cpan.cpantesters.org" - - Type `man cpanm` or `perldoc cpanm` for the more detailed explanation of the options. - - HELP - ! - ! Can't write to $Config{installsitelib} and $Config{installsitebin}: Installing modules to $ENV{HOME}/perl5 - ! To turn off this warning, you have to do one of the following: - ! - run me as a root or with --sudo option (to install to $Config{installsitelib} and $Config{installsitebin}) - ! - Configure local::lib in your existing shell to set PERL_MM_OPT etc. - ! - Install local::lib by running the following commands - ! - ! cpanm --local-lib=~/perl5 local::lib && eval \$(perl -I ~/perl5/lib/perl5/ -Mlocal::lib) - ! - DIAG - WARNING: Your lib directory name ($base) contains a space in it. It's known to cause issues with perl builder tools such as local::lib and MakeMaker. You're recommended to rename your directory. - WARN - $module is not found in the following directories and can't be uninstalled. - - @{[ join(" \n", map " $_", @inc) ]} - - DIAG - package ModuleBuildSkipMan; - CHECK { - if (%Module::Build::) { - no warnings 'redefine'; - *Module::Build::Base::ACTION_manpages = sub {}; - *Module::Build::Base::ACTION_docs = sub {}; - } - } - 1; - EOF - ! Configuring $distname failed. See $self->{log} for details. - ! You might have to install the following modules first to get --scandeps working correctly. - DIAG -APP_CPANMINUS_SCRIPT - -$fatpacked{"CPAN/DistnameInfo.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'CPAN_DISTNAMEINFO'; - package CPAN::DistnameInfo;$VERSION="0.12";use strict;sub distname_info {my$file=shift or return;my ($dist,$version)=$file =~ /^ - ((?:[-+.]*(?:[A-Za-z0-9]+|(?<=\D)_|_(?=\D))* - (?: - [A-Za-z](?=[^A-Za-z]|$) - | - \d(?=-) - )(? 6 and $1 & 1)or ($2 and $2 >= 50))or $3}elsif ($version =~ /\d\D\d+_\d/ or $version =~ /-TRIAL/){$dev=1}}else {$version=undef}($dist,$version,$dev)}sub new {my$class=shift;my$distfile=shift;$distfile =~ s,//+,/,g;my%info=(pathname=>$distfile);($info{filename}=$distfile)=~ s,^(((.*?/)?authors/)?id/)?([A-Z])/(\4[A-Z])/(\5[-A-Z0-9]*)/,, and $info{cpanid}=$6;if ($distfile =~ m,([^/]+)\.(tar\.(?:g?z|bz2)|zip|tgz)$,i){$info{distvname}=$1;$info{extension}=$2}@info{qw(dist version beta)}=distname_info($info{distvname});$info{maturity}=delete$info{beta}? 
'developer' : 'released';return bless \%info,$class}sub dist {shift->{dist}}sub version {shift->{version}}sub maturity {shift->{maturity}}sub filename {shift->{filename}}sub cpanid {shift->{cpanid}}sub distvname {shift->{distvname}}sub extension {shift->{extension}}sub pathname {shift->{pathname}}sub properties {%{$_[0]}}1; -CPAN_DISTNAMEINFO - -$fatpacked{"CPAN/Meta.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'CPAN_META'; - use 5.006;use strict;use warnings;package CPAN::Meta;our$VERSION='2.150005';use Carp qw(carp croak);use CPAN::Meta::Feature;use CPAN::Meta::Prereqs;use CPAN::Meta::Converter;use CPAN::Meta::Validator;use Parse::CPAN::Meta 1.4414 ();BEGIN {*_dclone=\&CPAN::Meta::Converter::_dclone}BEGIN {my@STRING_READERS=qw(abstract description dynamic_config generated_by name release_status version);no strict 'refs';for my$attr (@STRING_READERS){*$attr=sub {$_[0]{$attr }}}}BEGIN {my@LIST_READERS=qw(author keywords license);no strict 'refs';for my$attr (@LIST_READERS){*$attr=sub {my$value=$_[0]{$attr };croak "$attr must be called in list context" unless wantarray;return @{_dclone($value)}if ref$value;return$value}}}sub authors {$_[0]->author}sub licenses {$_[0]->license}BEGIN {my@MAP_READERS=qw(meta-spec resources provides no_index prereqs optional_features);no strict 'refs';for my$attr (@MAP_READERS){(my$subname=$attr)=~ s/-/_/;*$subname=sub {my$value=$_[0]{$attr };return _dclone($value)if$value;return {}}}}sub custom_keys {return grep {/^x_/i}keys %{$_[0]}}sub custom {my ($self,$attr)=@_;my$value=$self->{$attr};return _dclone($value)if ref$value;return$value}sub _new {my ($class,$struct,$options)=@_;my$self;if ($options->{lazy_validation}){my$cmc=CPAN::Meta::Converter->new($struct);$self=$cmc->convert(version=>2);return bless$self,$class}else {my$cmv=CPAN::Meta::Validator->new($struct);unless ($cmv->is_valid){die "Invalid metadata structure. Errors: " .join(", ",$cmv->errors)."\n"}}my$version=$struct->{'meta-spec'}{version}|| '1.0';if ($version==2){$self=$struct}else {my$cmc=CPAN::Meta::Converter->new($struct);$self=$cmc->convert(version=>2)}return bless$self,$class}sub new {my ($class,$struct,$options)=@_;my$self=eval {$class->_new($struct,$options)};croak($@)if $@;return$self}sub create {my ($class,$struct,$options)=@_;my$version=__PACKAGE__->VERSION || 2;$struct->{generated_by}||= __PACKAGE__ ." 
version $version" ;$struct->{'meta-spec'}{version}||= int($version);my$self=eval {$class->_new($struct,$options)};croak ($@)if $@;return$self}sub load_file {my ($class,$file,$options)=@_;$options->{lazy_validation}=1 unless exists$options->{lazy_validation};croak "load_file() requires a valid, readable filename" unless -r $file;my$self;eval {my$struct=Parse::CPAN::Meta->load_file($file);$self=$class->_new($struct,$options)};croak($@)if $@;return$self}sub load_yaml_string {my ($class,$yaml,$options)=@_;$options->{lazy_validation}=1 unless exists$options->{lazy_validation};my$self;eval {my ($struct)=Parse::CPAN::Meta->load_yaml_string($yaml);$self=$class->_new($struct,$options)};croak($@)if $@;return$self}sub load_json_string {my ($class,$json,$options)=@_;$options->{lazy_validation}=1 unless exists$options->{lazy_validation};my$self;eval {my$struct=Parse::CPAN::Meta->load_json_string($json);$self=$class->_new($struct,$options)};croak($@)if $@;return$self}sub load_string {my ($class,$string,$options)=@_;$options->{lazy_validation}=1 unless exists$options->{lazy_validation};my$self;eval {my$struct=Parse::CPAN::Meta->load_string($string);$self=$class->_new($struct,$options)};croak($@)if $@;return$self}sub save {my ($self,$file,$options)=@_;my$version=$options->{version}|| '2';my$layer=$] ge '5.008001' ? ':utf8' : '';if ($version ge '2'){carp "'$file' should end in '.json'" unless$file =~ m{\.json$}}else {carp "'$file' should end in '.yml'" unless$file =~ m{\.yml$}}my$data=$self->as_string($options);open my$fh,">$layer",$file or die "Error opening '$file' for writing: $!\n";print {$fh}$data;close$fh or die "Error closing '$file': $!\n";return 1}sub meta_spec_version {my ($self)=@_;return$self->meta_spec->{version}}sub effective_prereqs {my ($self,$features)=@_;$features ||= [];my$prereq=CPAN::Meta::Prereqs->new($self->prereqs);return$prereq unless @$features;my@other=map {;$self->feature($_)->prereqs}@$features;return$prereq->with_merged_prereqs(\@other)}sub should_index_file {my ($self,$filename)=@_;for my$no_index_file (@{$self->no_index->{file}|| []}){return if$filename eq $no_index_file}for my$no_index_dir (@{$self->no_index->{directory}}){$no_index_dir =~ s{$}{/} unless$no_index_dir =~ m{/\z};return if index($filename,$no_index_dir)==0}return 1}sub should_index_package {my ($self,$package)=@_;for my$no_index_pkg (@{$self->no_index->{package}|| []}){return if$package eq $no_index_pkg}for my$no_index_ns (@{$self->no_index->{namespace}}){return if index($package,"${no_index_ns}::")==0}return 1}sub features {my ($self)=@_;my$opt_f=$self->optional_features;my@features=map {;CPAN::Meta::Feature->new($_=>$opt_f->{$_ })}keys %$opt_f;return@features}sub feature {my ($self,$ident)=@_;croak "no feature named $ident" unless my$f=$self->optional_features->{$ident };return CPAN::Meta::Feature->new($ident,$f)}sub as_struct {my ($self,$options)=@_;my$struct=_dclone($self);if ($options->{version}){my$cmc=CPAN::Meta::Converter->new($struct);$struct=$cmc->convert(version=>$options->{version})}return$struct}sub as_string {my ($self,$options)=@_;my$version=$options->{version}|| '2';my$struct;if ($self->meta_spec_version ne $version){my$cmc=CPAN::Meta::Converter->new($self->as_struct);$struct=$cmc->convert(version=>$version)}else {$struct=$self->as_struct}my ($data,$backend);if ($version ge '2'){$backend=Parse::CPAN::Meta->json_backend();local$struct->{x_serialization_backend}=sprintf '%s version %s',$backend,$backend->VERSION;$data=$backend->new->pretty->canonical->encode($struct)}else 
{$backend=Parse::CPAN::Meta->yaml_backend();local$struct->{x_serialization_backend}=sprintf '%s version %s',$backend,$backend->VERSION;$data=eval {no strict 'refs';&{"$backend\::Dump"}($struct)};if ($@){croak$backend->can('errstr')? $backend->errstr : $@}}return$data}sub TO_JSON {return {%{$_[0]}}}1; -CPAN_META - -$fatpacked{"CPAN/Meta/Check.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'CPAN_META_CHECK'; - package CPAN::Meta::Check;$CPAN::Meta::Check::VERSION='0.012';use strict;use warnings;use base 'Exporter';our@EXPORT=qw//;our@EXPORT_OK=qw/check_requirements requirements_for verify_dependencies/;our%EXPORT_TAGS=(all=>[@EXPORT,@EXPORT_OK ]);use CPAN::Meta::Prereqs '2.132830';use CPAN::Meta::Requirements 2.121;use Module::Metadata 1.000023;sub _check_dep {my ($reqs,$module,$dirs)=@_;$module eq 'perl' and return ($reqs->accepts_module($module,$])? (): sprintf "Your Perl (%s) is not in the range '%s'",$],$reqs->requirements_for_module($module));my$metadata=Module::Metadata->new_from_module($module,inc=>$dirs);return "Module '$module' is not installed" if not defined$metadata;my$version=eval {$metadata->version};return "Missing version info for module '$module'" if$reqs->requirements_for_module($module)and not $version;return sprintf 'Installed version (%s) of %s is not in range \'%s\'',$version,$module,$reqs->requirements_for_module($module)if not $reqs->accepts_module($module,$version || 0);return}sub _check_conflict {my ($reqs,$module,$dirs)=@_;my$metadata=Module::Metadata->new_from_module($module,inc=>$dirs);return if not defined$metadata;my$version=eval {$metadata->version};return "Missing version info for module '$module'" if not $version;return sprintf 'Installed version (%s) of %s is in range \'%s\'',$version,$module,$reqs->requirements_for_module($module)if$reqs->accepts_module($module,$version);return}sub requirements_for {my ($meta,$phases,$type)=@_;my$prereqs=ref($meta)eq 'CPAN::Meta' ? $meta->effective_prereqs : $meta;return$prereqs->merged_requirements(ref($phases)? $phases : [$phases ],[$type ])}sub check_requirements {my ($reqs,$type,$dirs)=@_;return +{map {$_=>$type ne 'conflicts' ? scalar _check_dep($reqs,$_,$dirs): scalar _check_conflict($reqs,$_,$dirs)}$reqs->required_modules }}sub verify_dependencies {my ($meta,$phases,$type,$dirs)=@_;my$reqs=requirements_for($meta,$phases,$type);my$issues=check_requirements($reqs,$type,$dirs);return grep {defined}values %{$issues}}1; -CPAN_META_CHECK - -$fatpacked{"CPAN/Meta/Converter.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'CPAN_META_CONVERTER'; - use 5.006;use strict;use warnings;package CPAN::Meta::Converter;our$VERSION='2.150005';use CPAN::Meta::Validator;use CPAN::Meta::Requirements;use Parse::CPAN::Meta 1.4400 ();BEGIN {eval "use version ()";if (my$err=$@){eval "use ExtUtils::MakeMaker::version" or die$err}}*_is_qv=version->can('is_qv')? 
sub {$_[0]->is_qv}: sub {exists $_[0]->{qv}};sub _dclone {my$ref=shift;no warnings 'once';no warnings 'redefine';local*UNIVERSAL::TO_JSON=sub {"$_[0]"};my$json=Parse::CPAN::Meta->json_backend()->new ->utf8 ->allow_blessed ->convert_blessed;$json->decode($json->encode($ref))}my%known_specs=('2'=>'http://search.cpan.org/perldoc?CPAN::Meta::Spec','1.4'=>'http://module-build.sourceforge.net/META-spec-v1.4.html','1.3'=>'http://module-build.sourceforge.net/META-spec-v1.3.html','1.2'=>'http://module-build.sourceforge.net/META-spec-v1.2.html','1.1'=>'http://module-build.sourceforge.net/META-spec-v1.1.html','1.0'=>'http://module-build.sourceforge.net/META-spec-v1.0.html');my@spec_list=sort {$a <=> $b}keys%known_specs;my ($LOWEST,$HIGHEST)=@spec_list[0,-1];sub _keep {$_[0]}sub _keep_or_one {defined($_[0])? $_[0]: 1}sub _keep_or_zero {defined($_[0])? $_[0]: 0}sub _keep_or_unknown {defined($_[0])&& length($_[0])? $_[0]: "unknown"}sub _generated_by {my$gen=shift;my$sig=__PACKAGE__ ." version " .(__PACKAGE__->VERSION || "");return$sig unless defined$gen and length$gen;return$gen if$gen =~ /\Q$sig/;return "$gen, $sig"}sub _listify {!defined $_[0]? undef : ref $_[0]eq 'ARRAY' ? $_[0]: [$_[0]]}sub _prefix_custom {my$key=shift;$key =~ s/^(?!x_) # Unless it already starts with x_ - (?:x-?)? # Remove leading x- or x (if present) - /x_/ix;return$key}sub _ucfirst_custom {my$key=shift;$key=ucfirst$key unless$key =~ /[A-Z]/;return$key}sub _no_prefix_ucfirst_custom {my$key=shift;$key =~ s/^x_//;return _ucfirst_custom($key)}sub _change_meta_spec {my ($element,undef,undef,$version)=@_;return {version=>$version,url=>$known_specs{$version},}}my@open_source=('perl','gpl','apache','artistic','artistic_2','lgpl','bsd','gpl','mit','mozilla','open_source',);my%is_open_source=map {;$_=>1}@open_source;my@valid_licenses_1=(@open_source,'unrestricted','restrictive','unknown',);my%license_map_1=((map {$_=>$_}@valid_licenses_1),artistic2=>'artistic_2',);sub _license_1 {my ($element)=@_;return 'unknown' unless defined$element;if ($license_map_1{lc$element}){return$license_map_1{lc$element}}else {return 'unknown'}}my@valid_licenses_2=qw(agpl_3 apache_1_1 apache_2_0 artistic_1 artistic_2 bsd freebsd gfdl_1_2 gfdl_1_3 gpl_1 gpl_2 gpl_3 lgpl_2_1 lgpl_3_0 mit mozilla_1_0 mozilla_1_1 openssl perl_5 qpl_1_0 ssleay sun zlib open_source restricted unrestricted unknown);my%license_map_2=((map {$_=>$_}@valid_licenses_2),apache=>'apache_2_0',artistic=>'artistic_1',artistic2=>'artistic_2',gpl=>'open_source',lgpl=>'open_source',mozilla=>'open_source',perl=>'perl_5',restrictive=>'restricted',);sub _license_2 {my ($element)=@_;return ['unknown' ]unless defined$element;$element=[$element ]unless ref$element eq 'ARRAY';my@new_list;for my$lic (@$element){next unless defined$lic;if (my$new=$license_map_2{lc$lic}){push@new_list,$new}}return@new_list ? 
\@new_list : ['unknown' ]}my%license_downgrade_map=qw(agpl_3 open_source apache_1_1 apache apache_2_0 apache artistic_1 artistic artistic_2 artistic_2 bsd bsd freebsd open_source gfdl_1_2 open_source gfdl_1_3 open_source gpl_1 gpl gpl_2 gpl gpl_3 gpl lgpl_2_1 lgpl lgpl_3_0 lgpl mit mit mozilla_1_0 mozilla mozilla_1_1 mozilla openssl open_source perl_5 perl qpl_1_0 open_source ssleay open_source sun open_source zlib open_source open_source open_source restricted restrictive unrestricted unrestricted unknown unknown);sub _downgrade_license {my ($element)=@_;if (!defined$element){return "unknown"}elsif(ref$element eq 'ARRAY'){if (@$element > 1){if (grep {!$is_open_source{$license_downgrade_map{lc $_}|| 'unknown' }}@$element){return 'unknown'}else {return 'open_source'}}elsif (@$element==1){return$license_downgrade_map{lc$element->[0]}|| "unknown"}}elsif (!ref$element){return$license_downgrade_map{lc$element}|| "unknown"}return "unknown"}my$no_index_spec_1_2={'file'=>\&_listify,'dir'=>\&_listify,'package'=>\&_listify,'namespace'=>\&_listify,};my$no_index_spec_1_3={'file'=>\&_listify,'directory'=>\&_listify,'package'=>\&_listify,'namespace'=>\&_listify,};my$no_index_spec_2={'file'=>\&_listify,'directory'=>\&_listify,'package'=>\&_listify,'namespace'=>\&_listify,':custom'=>\&_prefix_custom,};sub _no_index_1_2 {my (undef,undef,$meta)=@_;my$no_index=$meta->{no_index}|| $meta->{private};return unless$no_index;if (!ref$no_index){my$item=$no_index;$no_index={dir=>[$item ],file=>[$item ]}}elsif (ref$no_index eq 'ARRAY'){my$list=$no_index;$no_index={dir=>[@$list ],file=>[@$list ]}}if (exists$no_index->{files}){$no_index->{file}=delete$no_index->{files}}if (exists$no_index->{modules}){$no_index->{module}=delete$no_index->{modules}}return _convert($no_index,$no_index_spec_1_2)}sub _no_index_directory {my ($element,$key,$meta,$version)=@_;return unless$element;if (!ref$element){my$item=$element;$element={directory=>[$item ],file=>[$item ]}}elsif (ref$element eq 'ARRAY'){my$list=$element;$element={directory=>[@$list ],file=>[@$list ]}}if (exists$element->{dir}){$element->{directory}=delete$element->{dir}}if (exists$element->{files}){$element->{file}=delete$element->{files}}if (exists$element->{modules}){$element->{module}=delete$element->{modules}}my$spec=$version==2 ? $no_index_spec_2 : $no_index_spec_1_3;return _convert($element,$spec)}sub _is_module_name {my$mod=shift;return unless defined$mod && length$mod;return$mod =~ m{^[A-Za-z][A-Za-z0-9_]*(?:::[A-Za-z0-9_]+)*$}}sub _clean_version {my ($element)=@_;return 0 if!defined$element;$element =~ s{^\s*}{};$element =~ s{\s*$}{};$element =~ s{^\.}{0.};return 0 if!length$element;return 0 if ($element eq 'undef' || $element eq '');my$v=eval {version->new($element)};if (defined$v){return _is_qv($v)? $v->normal : $element}else {return 0}}sub _bad_version_hook {my ($v)=@_;$v =~ s{^\s*}{};$v =~ s{\s*$}{};$v =~ s{[a-z]+$}{};my$vobj=eval {version->new($v)};return defined($vobj)? 
$vobj : version->new(0)}sub _version_map {my ($element)=@_;return unless defined$element;if (ref$element eq 'HASH'){my$new_map=CPAN::Meta::Requirements->new({bad_version_hook=>\&_bad_version_hook });while (my ($k,$v)=each %$element){next unless _is_module_name($k);if (!defined($v)||!length($v)|| $v eq 'undef' || $v eq ''){$v=0}if (_is_module_name($v)&&!version::is_lax($v)){$new_map->add_minimum($k=>0);$new_map->add_minimum($v=>0)}$new_map->add_string_requirement($k=>$v)}return$new_map->as_string_hash}elsif (ref$element eq 'ARRAY'){my$hashref={map {$_=>0}@$element };return _version_map($hashref)}elsif (ref$element eq '' && length$element){return {$element=>0 }}return}sub _prereqs_from_1 {my (undef,undef,$meta)=@_;my$prereqs={};for my$phase (qw/build configure/){my$key="${phase}_requires";$prereqs->{$phase}{requires}=_version_map($meta->{$key})if$meta->{$key}}for my$rel (qw/requires recommends conflicts/){$prereqs->{runtime}{$rel}=_version_map($meta->{$rel})if$meta->{$rel}}return$prereqs}my$prereqs_spec={configure=>\&_prereqs_rel,build=>\&_prereqs_rel,test=>\&_prereqs_rel,runtime=>\&_prereqs_rel,develop=>\&_prereqs_rel,':custom'=>\&_prefix_custom,};my$relation_spec={requires=>\&_version_map,recommends=>\&_version_map,suggests=>\&_version_map,conflicts=>\&_version_map,':custom'=>\&_prefix_custom,};sub _cleanup_prereqs {my ($prereqs,$key,$meta,$to_version)=@_;return unless$prereqs && ref$prereqs eq 'HASH';return _convert($prereqs,$prereqs_spec,$to_version)}sub _prereqs_rel {my ($relation,$key,$meta,$to_version)=@_;return unless$relation && ref$relation eq 'HASH';return _convert($relation,$relation_spec,$to_version)}BEGIN {my@old_prereqs=qw(requires configure_requires recommends conflicts);for (@old_prereqs){my$sub="_get_$_";my ($phase,$type)=split qr/_/,$_;if (!defined$type){$type=$phase;$phase='runtime'}no strict 'refs';*{$sub}=sub {_extract_prereqs($_[2]->{prereqs},$phase,$type)}}}sub _get_build_requires {my ($data,$key,$meta)=@_;my$test_h=_extract_prereqs($_[2]->{prereqs},qw(test requires))|| {};my$build_h=_extract_prereqs($_[2]->{prereqs},qw(build requires))|| {};my$test_req=CPAN::Meta::Requirements->from_string_hash($test_h);my$build_req=CPAN::Meta::Requirements->from_string_hash($build_h);$test_req->add_requirements($build_req)->as_string_hash}sub _extract_prereqs {my ($prereqs,$phase,$type)=@_;return unless ref$prereqs eq 'HASH';return scalar _version_map($prereqs->{$phase}{$type})}sub _downgrade_optional_features {my (undef,undef,$meta)=@_;return unless exists$meta->{optional_features};my$origin=$meta->{optional_features};my$features={};for my$name (keys %$origin){$features->{$name}={description=>$origin->{$name}{description},requires=>_extract_prereqs($origin->{$name}{prereqs},'runtime','requires'),configure_requires=>_extract_prereqs($origin->{$name}{prereqs},'runtime','configure_requires'),build_requires=>_extract_prereqs($origin->{$name}{prereqs},'runtime','build_requires'),recommends=>_extract_prereqs($origin->{$name}{prereqs},'runtime','recommends'),conflicts=>_extract_prereqs($origin->{$name}{prereqs},'runtime','conflicts'),};for my$k (keys %{$features->{$name}}){delete$features->{$name}{$k}unless defined$features->{$name}{$k}}}return$features}sub _upgrade_optional_features {my (undef,undef,$meta)=@_;return unless exists$meta->{optional_features};my$origin=$meta->{optional_features};my$features={};for my$name (keys 
%$origin){$features->{$name}={description=>$origin->{$name}{description},prereqs=>_prereqs_from_1(undef,undef,$origin->{$name}),};delete$features->{$name}{prereqs}{configure}}return$features}my$optional_features_2_spec={description=>\&_keep,prereqs=>\&_cleanup_prereqs,':custom'=>\&_prefix_custom,};sub _feature_2 {my ($element,$key,$meta,$to_version)=@_;return unless$element && ref$element eq 'HASH';_convert($element,$optional_features_2_spec,$to_version)}sub _cleanup_optional_features_2 {my ($element,$key,$meta,$to_version)=@_;return unless$element && ref$element eq 'HASH';my$new_data={};for my$k (keys %$element){$new_data->{$k}=_feature_2($element->{$k},$k,$meta,$to_version)}return unless keys %$new_data;return$new_data}sub _optional_features_1_4 {my ($element)=@_;return unless$element;$element=_optional_features_as_map($element);for my$name (keys %$element){for my$drop (qw/requires_packages requires_os excluded_os/){delete$element->{$name}{$drop}}}return$element}sub _optional_features_as_map {my ($element)=@_;return unless$element;if (ref$element eq 'ARRAY'){my%map;for my$feature (@$element){my (@parts)=%$feature;$map{$parts[0]}=$parts[1]}$element=\%map}return$element}sub _is_urlish {defined $_[0]&& $_[0]=~ m{\A[-+.a-z0-9]+:.+}i}sub _url_or_drop {my ($element)=@_;return$element if _is_urlish($element);return}sub _url_list {my ($element)=@_;return unless$element;$element=_listify($element);$element=[grep {_is_urlish($_)}@$element ];return unless @$element;return$element}sub _author_list {my ($element)=@_;return ['unknown' ]unless$element;$element=_listify($element);$element=[map {defined $_ && length $_ ? $_ : 'unknown'}@$element ];return ['unknown' ]unless @$element;return$element}my$resource2_upgrade={license=>sub {return _is_urlish($_[0])? _listify($_[0]): undef},homepage=>\&_url_or_drop,bugtracker=>sub {my ($item)=@_;return unless$item;if ($item =~ m{^mailto:(.*)$}){return {mailto=>$1 }}elsif(_is_urlish($item)){return {web=>$item }}else {return}},repository=>sub {return _is_urlish($_[0])? {url=>$_[0]}: undef},':custom'=>\&_prefix_custom,};sub _upgrade_resources_2 {my (undef,undef,$meta,$version)=@_;return unless exists$meta->{resources};return _convert($meta->{resources},$resource2_upgrade)}my$bugtracker2_spec={web=>\&_url_or_drop,mailto=>\&_keep,':custom'=>\&_prefix_custom,};sub _repo_type {my ($element,$key,$meta,$to_version)=@_;return$element if defined$element;return unless exists$meta->{url};my$repo_url=$meta->{url};for my$type (qw/git svn/){return$type if$repo_url =~ m{\A$type}}return}my$repository2_spec={web=>\&_url_or_drop,url=>\&_url_or_drop,type=>\&_repo_type,':custom'=>\&_prefix_custom,};my$resources2_cleanup={license=>\&_url_list,homepage=>\&_url_or_drop,bugtracker=>sub {ref $_[0]? _convert($_[0],$bugtracker2_spec): undef},repository=>sub {my$data=shift;ref$data ? 
_convert($data,$repository2_spec): undef},':custom'=>\&_prefix_custom,};sub _cleanup_resources_2 {my ($resources,$key,$meta,$to_version)=@_;return unless$resources && ref$resources eq 'HASH';return _convert($resources,$resources2_cleanup,$to_version)}my$resource1_spec={license=>\&_url_or_drop,homepage=>\&_url_or_drop,bugtracker=>\&_url_or_drop,repository=>\&_url_or_drop,':custom'=>\&_keep,};sub _resources_1_3 {my (undef,undef,$meta,$version)=@_;return unless exists$meta->{resources};return _convert($meta->{resources},$resource1_spec)}*_resources_1_4=*_resources_1_3;sub _resources_1_2 {my (undef,undef,$meta)=@_;my$resources=$meta->{resources}|| {};if ($meta->{license_url}&&!$resources->{license}){$resources->{license}=$meta->{license_url}if _is_urlish($meta->{license_url})}return unless keys %$resources;return _convert($resources,$resource1_spec)}my$resource_downgrade_spec={license=>sub {return ref $_[0]? $_[0]->[0]: $_[0]},homepage=>\&_url_or_drop,bugtracker=>sub {return $_[0]->{web}},repository=>sub {return $_[0]->{url}|| $_[0]->{web}},':custom'=>\&_no_prefix_ucfirst_custom,};sub _downgrade_resources {my (undef,undef,$meta,$version)=@_;return unless exists$meta->{resources};return _convert($meta->{resources},$resource_downgrade_spec)}sub _release_status {my ($element,undef,$meta)=@_;return$element if$element && $element =~ m{\A(?:stable|testing|unstable)\z};return _release_status_from_version(undef,undef,$meta)}sub _release_status_from_version {my (undef,undef,$meta)=@_;my$version=$meta->{version}|| '';return ($version =~ /_/)? 'testing' : 'stable'}my$provides_spec={file=>\&_keep,version=>\&_keep,};my$provides_spec_2={file=>\&_keep,version=>\&_keep,':custom'=>\&_prefix_custom,};sub _provides {my ($element,$key,$meta,$to_version)=@_;return unless defined$element && ref$element eq 'HASH';my$spec=$to_version==2 ? 
$provides_spec_2 : $provides_spec;my$new_data={};for my$k (keys %$element){$new_data->{$k}=_convert($element->{$k},$spec,$to_version);$new_data->{$k}{version}=_clean_version($element->{$k}{version})if exists$element->{$k}{version}}return$new_data}sub _convert {my ($data,$spec,$to_version,$is_fragment)=@_;my$new_data={};for my$key (keys %$spec){next if$key eq ':custom' || $key eq ':drop';next unless my$fcn=$spec->{$key};if ($is_fragment && $key eq 'generated_by'){$fcn=\&_keep}die "spec for '$key' is not a coderef" unless ref$fcn && ref$fcn eq 'CODE';my$new_value=$fcn->($data->{$key},$key,$data,$to_version);$new_data->{$key}=$new_value if defined$new_value}my$drop_list=$spec->{':drop'};my$customizer=$spec->{':custom'}|| \&_keep;for my$key (keys %$data){next if$drop_list && grep {$key eq $_}@$drop_list;next if exists$spec->{$key};$new_data->{$customizer->($key)}=$data->{$key}}return$new_data}my%up_convert=('2-from-1.4'=>{'abstract'=>\&_keep_or_unknown,'author'=>\&_author_list,'generated_by'=>\&_generated_by,'license'=>\&_license_2,'meta-spec'=>\&_change_meta_spec,'name'=>\&_keep,'version'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'release_status'=>\&_release_status,'keywords'=>\&_keep,'no_index'=>\&_no_index_directory,'optional_features'=>\&_upgrade_optional_features,'provides'=>\&_provides,'resources'=>\&_upgrade_resources_2,'description'=>\&_keep,'prereqs'=>\&_prereqs_from_1,':drop'=>[qw(build_requires configure_requires conflicts distribution_type license_url private recommends requires) ],':custom'=>\&_prefix_custom,},'1.4-from-1.3'=>{'abstract'=>\&_keep_or_unknown,'author'=>\&_author_list,'generated_by'=>\&_generated_by,'license'=>\&_license_1,'meta-spec'=>\&_change_meta_spec,'name'=>\&_keep,'version'=>\&_keep,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'keywords'=>\&_keep,'no_index'=>\&_no_index_directory,'optional_features'=>\&_optional_features_1_4,'provides'=>\&_provides,'recommends'=>\&_version_map,'requires'=>\&_version_map,'resources'=>\&_resources_1_4,'configure_requires'=>\&_keep,':drop'=>[qw(license_url private)],':custom'=>\&_keep },'1.3-from-1.2'=>{'abstract'=>\&_keep_or_unknown,'author'=>\&_author_list,'generated_by'=>\&_generated_by,'license'=>\&_license_1,'meta-spec'=>\&_change_meta_spec,'name'=>\&_keep,'version'=>\&_keep,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'keywords'=>\&_keep,'no_index'=>\&_no_index_directory,'optional_features'=>\&_optional_features_as_map,'provides'=>\&_provides,'recommends'=>\&_version_map,'requires'=>\&_version_map,'resources'=>\&_resources_1_3,':drop'=>[qw(license_url private)],':custom'=>\&_keep },'1.2-from-1.1'=>{'version'=>\&_keep,'license'=>\&_license_1,'name'=>\&_keep,'generated_by'=>\&_generated_by,'abstract'=>\&_keep_or_unknown,'author'=>\&_author_list,'meta-spec'=>\&_change_meta_spec,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'recommends'=>\&_version_map,'requires'=>\&_version_map,'keywords'=>\&_keep,'no_index'=>\&_no_index_1_2,'optional_features'=>\&_optional_features_as_map,'provides'=>\&_provides,'resources'=>\&_resources_1_2,':drop'=>[qw(license_url private)],':custom'=>\&_keep 
},'1.1-from-1.0'=>{'version'=>\&_keep,'name'=>\&_keep,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'generated_by'=>\&_generated_by,'license'=>\&_license_1,'recommends'=>\&_version_map,'requires'=>\&_version_map,'license_url'=>\&_url_or_drop,'private'=>\&_keep,':custom'=>\&_keep },);my%down_convert=('1.4-from-2'=>{'abstract'=>\&_keep_or_unknown,'author'=>\&_author_list,'generated_by'=>\&_generated_by,'license'=>\&_downgrade_license,'meta-spec'=>\&_change_meta_spec,'name'=>\&_keep,'version'=>\&_keep,'build_requires'=>\&_get_build_requires,'configure_requires'=>\&_get_configure_requires,'conflicts'=>\&_get_conflicts,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'keywords'=>\&_keep,'no_index'=>\&_no_index_directory,'optional_features'=>\&_downgrade_optional_features,'provides'=>\&_provides,'recommends'=>\&_get_recommends,'requires'=>\&_get_requires,'resources'=>\&_downgrade_resources,':drop'=>[qw(description prereqs release_status)],':custom'=>\&_keep },'1.3-from-1.4'=>{'abstract'=>\&_keep_or_unknown,'author'=>\&_author_list,'generated_by'=>\&_generated_by,'license'=>\&_license_1,'meta-spec'=>\&_change_meta_spec,'name'=>\&_keep,'version'=>\&_keep,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'keywords'=>\&_keep,'no_index'=>\&_no_index_directory,'optional_features'=>\&_optional_features_as_map,'provides'=>\&_provides,'recommends'=>\&_version_map,'requires'=>\&_version_map,'resources'=>\&_resources_1_3,':drop'=>[qw(configure_requires)],':custom'=>\&_keep,},'1.2-from-1.3'=>{'abstract'=>\&_keep_or_unknown,'author'=>\&_author_list,'generated_by'=>\&_generated_by,'license'=>\&_license_1,'meta-spec'=>\&_change_meta_spec,'name'=>\&_keep,'version'=>\&_keep,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'keywords'=>\&_keep,'no_index'=>\&_no_index_1_2,'optional_features'=>\&_optional_features_as_map,'provides'=>\&_provides,'recommends'=>\&_version_map,'requires'=>\&_version_map,'resources'=>\&_resources_1_3,':custom'=>\&_keep,},'1.1-from-1.2'=>{'version'=>\&_keep,'name'=>\&_keep,'meta-spec'=>\&_change_meta_spec,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'generated_by'=>\&_generated_by,'license'=>\&_license_1,'private'=>\&_keep,'recommends'=>\&_version_map,'requires'=>\&_version_map,':drop'=>[qw(abstract author provides no_index keywords resources)],':custom'=>\&_keep,},'1.0-from-1.1'=>{'name'=>\&_keep,'meta-spec'=>\&_change_meta_spec,'version'=>\&_keep,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'generated_by'=>\&_generated_by,'license'=>\&_license_1,'recommends'=>\&_version_map,'requires'=>\&_version_map,':custom'=>\&_keep,},);my%cleanup=('2'=>{'abstract'=>\&_keep_or_unknown,'author'=>\&_author_list,'generated_by'=>\&_generated_by,'license'=>\&_license_2,'meta-spec'=>\&_change_meta_spec,'name'=>\&_keep,'version'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'release_status'=>\&_release_status,'keywords'=>\&_keep,'no_index'=>\&_no_index_directory,'optional_features'=>\&_cleanup_optional_features_2,'provides'=>\&_provides,'resources'=>\&_cleanup_resources_2,'description'=>\&_keep,'prereqs'=>\&_cleanup_prereqs,':drop'=>[qw(build_requires configure_requires conflicts distribution_type license_url private 
recommends requires) ],':custom'=>\&_prefix_custom,},'1.4'=>{'abstract'=>\&_keep_or_unknown,'author'=>\&_author_list,'generated_by'=>\&_generated_by,'license'=>\&_license_1,'meta-spec'=>\&_change_meta_spec,'name'=>\&_keep,'version'=>\&_keep,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'keywords'=>\&_keep,'no_index'=>\&_no_index_directory,'optional_features'=>\&_optional_features_1_4,'provides'=>\&_provides,'recommends'=>\&_version_map,'requires'=>\&_version_map,'resources'=>\&_resources_1_4,'configure_requires'=>\&_keep,':custom'=>\&_keep },'1.3'=>{'abstract'=>\&_keep_or_unknown,'author'=>\&_author_list,'generated_by'=>\&_generated_by,'license'=>\&_license_1,'meta-spec'=>\&_change_meta_spec,'name'=>\&_keep,'version'=>\&_keep,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'keywords'=>\&_keep,'no_index'=>\&_no_index_directory,'optional_features'=>\&_optional_features_as_map,'provides'=>\&_provides,'recommends'=>\&_version_map,'requires'=>\&_version_map,'resources'=>\&_resources_1_3,':custom'=>\&_keep },'1.2'=>{'version'=>\&_keep,'license'=>\&_license_1,'name'=>\&_keep,'generated_by'=>\&_generated_by,'abstract'=>\&_keep_or_unknown,'author'=>\&_author_list,'meta-spec'=>\&_change_meta_spec,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'recommends'=>\&_version_map,'requires'=>\&_version_map,'keywords'=>\&_keep,'no_index'=>\&_no_index_1_2,'optional_features'=>\&_optional_features_as_map,'provides'=>\&_provides,'resources'=>\&_resources_1_2,':custom'=>\&_keep },'1.1'=>{'version'=>\&_keep,'name'=>\&_keep,'meta-spec'=>\&_change_meta_spec,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'generated_by'=>\&_generated_by,'license'=>\&_license_1,'recommends'=>\&_version_map,'requires'=>\&_version_map,'license_url'=>\&_url_or_drop,'private'=>\&_keep,':custom'=>\&_keep },'1.0'=>{'name'=>\&_keep,'meta-spec'=>\&_change_meta_spec,'version'=>\&_keep,'build_requires'=>\&_version_map,'conflicts'=>\&_version_map,'distribution_type'=>\&_keep,'dynamic_config'=>\&_keep_or_one,'generated_by'=>\&_generated_by,'license'=>\&_license_1,'recommends'=>\&_version_map,'requires'=>\&_version_map,':custom'=>\&_keep,},);my%fragments_generate=('2'=>{'abstract'=>'abstract','author'=>'author','generated_by'=>'generated_by','license'=>'license','name'=>'name','version'=>'version','dynamic_config'=>'dynamic_config','release_status'=>'release_status','keywords'=>'keywords','no_index'=>'no_index','optional_features'=>'optional_features','provides'=>'provides','resources'=>'resources','description'=>'description','prereqs'=>'prereqs',},'1.4'=>{'abstract'=>'abstract','author'=>'author','generated_by'=>'generated_by','license'=>'license','name'=>'name','version'=>'version','build_requires'=>'prereqs','conflicts'=>'prereqs','distribution_type'=>'distribution_type','dynamic_config'=>'dynamic_config','keywords'=>'keywords','no_index'=>'no_index','optional_features'=>'optional_features','provides'=>'provides','recommends'=>'prereqs','requires'=>'prereqs','resources'=>'resources','configure_requires'=>'prereqs',},);$fragments_generate{$_}=$fragments_generate{'1.4'}for qw/1.3 1.2 1.1 1.0/;sub new {my ($class,$data,%args)=@_;my$self={'data'=>$data,'spec'=>_extract_spec_version($data,$args{default_version}),};return bless$self,$class}sub 
_extract_spec_version {my ($data,$default)=@_;my$spec=$data->{'meta-spec'};return($default || "1.0")unless defined$spec && ref$spec eq 'HASH';my$v=$spec->{version};if (defined$v && $v =~ /^\d+(?:\.\d+)?$/){return$v if defined$v && grep {$v eq $_}keys%known_specs;return$v+0 if defined$v && grep {$v==$_}keys%known_specs}return "2" if exists$data->{prereqs};return "1.4" if exists$data->{configure_requires};return($default || "1.2")}sub convert {my ($self,%args)=@_;my$args={%args };my$new_version=$args->{version}|| $HIGHEST;my$is_fragment=$args->{is_fragment};my ($old_version)=$self->{spec};my$converted=_dclone($self->{data});if ($old_version==$new_version){$converted=_convert($converted,$cleanup{$old_version},$old_version,$is_fragment);unless ($args->{is_fragment}){my$cmv=CPAN::Meta::Validator->new($converted);unless ($cmv->is_valid){my$errs=join("\n",$cmv->errors);die "Failed to clean-up $old_version metadata. Errors:\n$errs\n"}}return$converted}elsif ($old_version > $new_version){my@vers=sort {$b <=> $a}keys%known_specs;for my$i (0 .. $#vers-1){next if$vers[$i]> $old_version;last if$vers[$i+1]< $new_version;my$spec_string="$vers[$i+1]-from-$vers[$i]";$converted=_convert($converted,$down_convert{$spec_string},$vers[$i+1],$is_fragment);unless ($args->{is_fragment}){my$cmv=CPAN::Meta::Validator->new($converted);unless ($cmv->is_valid){my$errs=join("\n",$cmv->errors);die "Failed to downconvert metadata to $vers[$i+1]. Errors:\n$errs\n"}}}return$converted}else {my@vers=sort {$a <=> $b}keys%known_specs;for my$i (0 .. $#vers-1){next if$vers[$i]< $old_version;last if$vers[$i+1]> $new_version;my$spec_string="$vers[$i+1]-from-$vers[$i]";$converted=_convert($converted,$up_convert{$spec_string},$vers[$i+1],$is_fragment);unless ($args->{is_fragment}){my$cmv=CPAN::Meta::Validator->new($converted);unless ($cmv->is_valid){my$errs=join("\n",$cmv->errors);die "Failed to upconvert metadata to $vers[$i+1]. Errors:\n$errs\n"}}}return$converted}}sub upgrade_fragment {my ($self)=@_;my ($old_version)=$self->{spec};my%expected=map {;$_=>1}grep {defined}map {$fragments_generate{$old_version}{$_}}keys %{$self->{data}};my$converted=$self->convert(version=>$HIGHEST,is_fragment=>1);for my$key (keys %$converted){next if$key =~ /^x_/i || $key eq 'meta-spec';delete$converted->{$key}unless$expected{$key}}return$converted}1; -CPAN_META_CONVERTER - -$fatpacked{"CPAN/Meta/Feature.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'CPAN_META_FEATURE'; - use 5.006;use strict;use warnings;package CPAN::Meta::Feature;our$VERSION='2.150005';use CPAN::Meta::Prereqs;sub new {my ($class,$identifier,$spec)=@_;my%guts=(identifier=>$identifier,description=>$spec->{description},prereqs=>CPAN::Meta::Prereqs->new($spec->{prereqs}),);bless \%guts=>$class}sub identifier {$_[0]{identifier}}sub description {$_[0]{description}}sub prereqs {$_[0]{prereqs}}1; -CPAN_META_FEATURE - -$fatpacked{"CPAN/Meta/History.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'CPAN_META_HISTORY'; - use 5.006;use strict;use warnings;package CPAN::Meta::History;our$VERSION='2.150005';1; -CPAN_META_HISTORY - -$fatpacked{"CPAN/Meta/Merge.pm"} = '#line '.(1+__LINE__).' 
"'.__FILE__."\"\n".<<'CPAN_META_MERGE'; - use strict;use warnings;package CPAN::Meta::Merge;our$VERSION='2.150005';use Carp qw/croak/;use Scalar::Util qw/blessed/;use CPAN::Meta::Converter 2.141170;sub _is_identical {my ($left,$right)=@_;return (not defined$left and not defined$right)|| (defined$left and defined$right and $left eq $right)}sub _identical {my ($left,$right,$path)=@_;croak sprintf "Can't merge attribute %s: '%s' does not equal '%s'",join('.',@{$path}),$left,$right unless _is_identical($left,$right);return$left}sub _merge {my ($current,$next,$mergers,$path)=@_;for my$key (keys %{$next}){if (not exists$current->{$key}){$current->{$key}=$next->{$key}}elsif (my$merger=$mergers->{$key}){$current->{$key}=$merger->($current->{$key},$next->{$key},[@{$path},$key ])}elsif ($merger=$mergers->{':default'}){$current->{$key}=$merger->($current->{$key},$next->{$key},[@{$path},$key ])}else {croak sprintf "Can't merge unknown attribute '%s'",join '.',@{$path},$key}}return$current}sub _uniq {my%seen=();return grep {not $seen{$_}++}@_}sub _set_addition {my ($left,$right)=@_;return [+_uniq(@{$left},@{$right})]}sub _uniq_map {my ($left,$right,$path)=@_;for my$key (keys %{$right}){if (not exists$left->{$key}){$left->{$key}=$right->{$key}}elsif (_is_identical($left->{$key},$right->{$key})){1}elsif (ref$left->{$key}eq 'HASH' and ref$right->{$key}eq 'HASH'){$left->{$key}=_uniq_map($left->{$key},$right->{$key},[@{$path},$key ])}else {croak 'Duplication of element ' .join '.',@{$path},$key}}return$left}sub _improvize {my ($left,$right,$path)=@_;my ($name)=reverse @{$path};if ($name =~ /^x_/){if (ref($left)eq 'ARRAY'){return _set_addition($left,$right,$path)}elsif (ref($left)eq 'HASH'){return _uniq_map($left,$right,$path)}else {return _identical($left,$right,$path)}}croak sprintf "Can't merge '%s'",join '.',@{$path}}sub _optional_features {my ($left,$right,$path)=@_;for my$key (keys %{$right}){if (not exists$left->{$key}){$left->{$key}=$right->{$key}}else {for my$subkey (keys %{$right->{$key}}){next if$subkey eq 'prereqs';if (not exists$left->{$key}{$subkey}){$left->{$key}{$subkey}=$right->{$key}{$subkey}}else {Carp::croak "Cannot merge two optional_features named '$key' with different '$subkey' values" if do {no warnings 'uninitialized';$left->{$key}{$subkey}ne $right->{$key}{$subkey}}}}require CPAN::Meta::Prereqs;$left->{$key}{prereqs}=CPAN::Meta::Prereqs->new($left->{$key}{prereqs})->with_merged_prereqs(CPAN::Meta::Prereqs->new($right->{$key}{prereqs}))->as_string_hash}}return$left}my%default=(abstract=>\&_identical,author=>\&_set_addition,dynamic_config=>sub {my ($left,$right)=@_;return$left || $right},generated_by=>sub {my ($left,$right)=@_;return join ', ',_uniq(split(/, /,$left),split(/, /,$right))},license=>\&_set_addition,'meta-spec'=>{version=>\&_identical,url=>\&_identical },name=>\&_identical,release_status=>\&_identical,version=>\&_identical,description=>\&_identical,keywords=>\&_set_addition,no_index=>{map {($_=>\&_set_addition)}qw/file directory package namespace/ },optional_features=>\&_optional_features,prereqs=>sub {require CPAN::Meta::Prereqs;my ($left,$right)=map {CPAN::Meta::Prereqs->new($_)}@_[0,1];return$left->with_merged_prereqs($right)->as_string_hash},provides=>\&_uniq_map,resources=>{license=>\&_set_addition,homepage=>\&_identical,bugtracker=>\&_uniq_map,repository=>\&_uniq_map,':default'=>\&_improvize,},':default'=>\&_improvize,);sub new {my ($class,%arguments)=@_;croak 'default version required' if not 
exists$arguments{default_version};my%mapping=%default;my%extra=%{$arguments{extra_mappings}|| {}};for my$key (keys%extra){if (ref($mapping{$key})eq 'HASH'){$mapping{$key}={%{$mapping{$key}},%{$extra{$key}}}}else {$mapping{$key}=$extra{$key}}}return bless {default_version=>$arguments{default_version},mapping=>_coerce_mapping(\%mapping,[]),},$class}my%coderef_for=(set_addition=>\&_set_addition,uniq_map=>\&_uniq_map,identical=>\&_identical,improvize=>\&_improvize,);sub _coerce_mapping {my ($orig,$map_path)=@_;my%ret;for my$key (keys %{$orig}){my$value=$orig->{$key};if (ref($orig->{$key})eq 'CODE'){$ret{$key}=$value}elsif (ref($value)eq 'HASH'){my$mapping=_coerce_mapping($value,[@{$map_path},$key ]);$ret{$key}=sub {my ($left,$right,$path)=@_;return _merge($left,$right,$mapping,[@{$path}])}}elsif ($coderef_for{$value}){$ret{$key}=$coderef_for{$value}}else {croak "Don't know what to do with " .join '.',@{$map_path},$key}}return \%ret}sub merge {my ($self,@items)=@_;my$current={};for my$next (@items){if (blessed($next)&& $next->isa('CPAN::Meta')){$next=$next->as_struct}elsif (ref($next)eq 'HASH'){my$cmc=CPAN::Meta::Converter->new($next,default_version=>$self->{default_version});$next=$cmc->upgrade_fragment}else {croak "Don't know how to merge '$next'"}$current=_merge($current,$next,$self->{mapping},[])}return$current}1; -CPAN_META_MERGE - -$fatpacked{"CPAN/Meta/Prereqs.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'CPAN_META_PREREQS'; - use 5.006;use strict;use warnings;package CPAN::Meta::Prereqs;our$VERSION='2.150005';use Carp qw(confess);use Scalar::Util qw(blessed);use CPAN::Meta::Requirements 2.121;sub __legal_phases {qw(configure build test runtime develop)}sub __legal_types {qw(requires recommends suggests conflicts)}sub new {my ($class,$prereq_spec)=@_;$prereq_spec ||= {};my%is_legal_phase=map {;$_=>1}$class->__legal_phases;my%is_legal_type=map {;$_=>1}$class->__legal_types;my%guts;PHASE: for my$phase (keys %$prereq_spec){next PHASE unless$phase =~ /\Ax_/i or $is_legal_phase{$phase};my$phase_spec=$prereq_spec->{$phase };next PHASE unless keys %$phase_spec;TYPE: for my$type (keys %$phase_spec){next TYPE unless$type =~ /\Ax_/i or $is_legal_type{$type};my$spec=$phase_spec->{$type };next TYPE unless keys %$spec;$guts{prereqs}{$phase}{$type}=CPAN::Meta::Requirements->from_string_hash($spec)}}return bless \%guts=>$class}sub requirements_for {my ($self,$phase,$type)=@_;confess "requirements_for called without phase" unless defined$phase;confess "requirements_for called without type" unless defined$type;unless ($phase =~ /\Ax_/i or grep {$phase eq $_}$self->__legal_phases){confess "requested requirements for unknown phase: $phase"}unless ($type =~ /\Ax_/i or grep {$type eq $_}$self->__legal_types){confess "requested requirements for unknown type: $type"}my$req=($self->{prereqs}{$phase}{$type}||= CPAN::Meta::Requirements->new);$req->finalize if$self->is_finalized;return$req}sub with_merged_prereqs {my ($self,$other)=@_;my@other=blessed($other)? 
$other : @$other;my@prereq_objs=($self,@other);my%new_arg;for my$phase ($self->__legal_phases){for my$type ($self->__legal_types){my$req=CPAN::Meta::Requirements->new;for my$prereq (@prereq_objs){my$this_req=$prereq->requirements_for($phase,$type);next unless$this_req->required_modules;$req->add_requirements($this_req)}next unless$req->required_modules;$new_arg{$phase }{$type }=$req->as_string_hash}}return (ref$self)->new(\%new_arg)}sub merged_requirements {my ($self,$phases,$types)=@_;$phases=[qw/runtime build test/]unless defined$phases;$types=[qw/requires recommends/]unless defined$types;confess "merged_requirements phases argument must be an arrayref" unless ref$phases eq 'ARRAY';confess "merged_requirements types argument must be an arrayref" unless ref$types eq 'ARRAY';my$req=CPAN::Meta::Requirements->new;for my$phase (@$phases){unless ($phase =~ /\Ax_/i or grep {$phase eq $_}$self->__legal_phases){confess "requested requirements for unknown phase: $phase"}for my$type (@$types){unless ($type =~ /\Ax_/i or grep {$type eq $_}$self->__legal_types){confess "requested requirements for unknown type: $type"}$req->add_requirements($self->requirements_for($phase,$type))}}$req->finalize if$self->is_finalized;return$req}sub as_string_hash {my ($self)=@_;my%hash;for my$phase ($self->__legal_phases){for my$type ($self->__legal_types){my$req=$self->requirements_for($phase,$type);next unless$req->required_modules;$hash{$phase }{$type }=$req->as_string_hash}}return \%hash}sub is_finalized {$_[0]{finalized}}sub finalize {my ($self)=@_;$self->{finalized}=1;for my$phase (keys %{$self->{prereqs}}){$_->finalize for values %{$self->{prereqs}{$phase}}}}sub clone {my ($self)=@_;my$clone=(ref$self)->new($self->as_string_hash)}1; -CPAN_META_PREREQS - -$fatpacked{"CPAN/Meta/Requirements.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'CPAN_META_REQUIREMENTS'; - use strict;use warnings;package CPAN::Meta::Requirements;our$VERSION='2.133';use Carp ();BEGIN {eval "use version ()";if (my$err=$@){eval "use ExtUtils::MakeMaker::version" or die$err}}*_is_qv=version->can('is_qv')? sub {$_[0]->is_qv}: sub {exists $_[0]->{qv}};my$V0=version->new(0);my@valid_options=qw(bad_version_hook);sub new {my ($class,$options)=@_;$options ||= {};Carp::croak "Argument to $class\->new() must be a hash reference" unless ref$options eq 'HASH';my%self=map {;$_=>$options->{$_}}@valid_options;return bless \%self=>$class}sub _find_magic_vstring {my$value=shift;my$tvalue='';require B;my$sv=B::svref_2object(\$value);my$magic=ref($sv)eq 'B::PVMG' ? 
$sv->MAGIC : undef;while ($magic){if ($magic->TYPE eq 'V'){$tvalue=$magic->PTR;$tvalue =~ s/^v?(.+)$/v$1/;last}else {$magic=$magic->MOREMAGIC}}return$tvalue}sub _isa_version {UNIVERSAL::isa($_[0],'UNIVERSAL')&& $_[0]->isa('version')}sub _version_object {my ($self,$module,$version)=@_;my ($vobj,$err);if (not defined$version or (!ref($version)&& $version eq '0')){return$V0}elsif (ref($version)eq 'version' || _isa_version($version)){$vobj=$version}else {if ($INC{'version/vpp.pm'}|| $INC{'ExtUtils/MakeMaker/version/vpp.pm'}){my$magic=_find_magic_vstring($version);$version=$magic if length$magic}eval {local$SIG{__WARN__}=sub {die "Invalid version: $_[0]"};$vobj=version->new($version)};if (my$err=$@){my$hook=$self->{bad_version_hook};$vobj=eval {$hook->($version,$module)}if ref$hook eq 'CODE';unless (eval {$vobj->isa("version")}){$err =~ s{ at .* line \d+.*$}{};die "Can't convert '$version': $err"}}}if ($vobj =~ m{\A\.}){$vobj=version->new("0$vobj")}if (_is_qv($vobj)){$vobj=version->new($vobj->normal)}return$vobj}BEGIN {for my$type (qw(maximum exclusion exact_version)){my$method="with_$type";my$to_add=$type eq 'exact_version' ? $type : "add_$type";my$code=sub {my ($self,$name,$version)=@_;$version=$self->_version_object($name,$version);$self->__modify_entry_for($name,$method,$version);return$self};no strict 'refs';*$to_add=$code}}sub add_minimum {my ($self,$name,$version)=@_;if (not defined$version or "$version" eq '0'){return$self if$self->__entry_for($name);Carp::confess("can't add new requirements to finalized requirements")if$self->is_finalized;$self->{requirements}{$name }=CPAN::Meta::Requirements::_Range::Range->with_minimum($V0)}else {$version=$self->_version_object($name,$version);$self->__modify_entry_for($name,'with_minimum',$version)}return$self}sub add_requirements {my ($self,$req)=@_;for my$module ($req->required_modules){my$modifiers=$req->__entry_for($module)->as_modifiers;for my$modifier (@$modifiers){my ($method,@args)=@$modifier;$self->$method($module=>@args)}}return$self}sub accepts_module {my ($self,$module,$version)=@_;$version=$self->_version_object($module,$version);return 1 unless my$range=$self->__entry_for($module);return$range->_accepts($version)}sub clear_requirement {my ($self,$module)=@_;return$self unless$self->__entry_for($module);Carp::confess("can't clear requirements on finalized requirements")if$self->is_finalized;delete$self->{requirements}{$module };return$self}sub requirements_for_module {my ($self,$module)=@_;my$entry=$self->__entry_for($module);return unless$entry;return$entry->as_string}sub required_modules {keys %{$_[0]{requirements}}}sub clone {my ($self)=@_;my$new=(ref$self)->new;return$new->add_requirements($self)}sub __entry_for {$_[0]{requirements}{$_[1]}}sub __modify_entry_for {my ($self,$name,$method,$version)=@_;my$fin=$self->is_finalized;my$old=$self->__entry_for($name);Carp::confess("can't add new requirements to finalized requirements")if$fin and not $old;my$new=($old || 'CPAN::Meta::Requirements::_Range::Range')->$method($version);Carp::confess("can't modify finalized requirements")if$fin and $old->as_string ne $new->as_string;$self->{requirements}{$name }=$new}sub is_simple {my ($self)=@_;for my$module ($self->required_modules){return if$self->__entry_for($module)->as_string =~ /\s/}return 1}sub is_finalized {$_[0]{finalized}}sub finalize {$_[0]{finalized}=1}sub as_string_hash {my ($self)=@_;my%hash=map {;$_=>$self->{requirements}{$_}->as_string}$self->required_modules;return \%hash}my%methods_for_op=('=='=>[qw(exact_version) 
],'!='=>[qw(add_exclusion) ],'>='=>[qw(add_minimum) ],'<='=>[qw(add_maximum) ],'>'=>[qw(add_minimum add_exclusion) ],'<'=>[qw(add_maximum add_exclusion) ],);sub add_string_requirement {my ($self,$module,$req)=@_;unless (defined$req && length$req){$req=0;$self->_blank_carp($module)}my$magic=_find_magic_vstring($req);if (length$magic){$self->add_minimum($module=>$magic);return}my@parts=split qr{\s*,\s*},$req;for my$part (@parts){my ($op,$ver)=$part =~ m{\A\s*(==|>=|>|<=|<|!=)\s*(.*)\z};if (!defined$op){$self->add_minimum($module=>$part)}else {Carp::confess("illegal requirement string: $req")unless my$methods=$methods_for_op{$op };$self->$_($module=>$ver)for @$methods}}}sub _blank_carp {my ($self,$module)=@_;Carp::carp("Undefined requirement for $module treated as '0'")}sub from_string_hash {my ($class,$hash,$options)=@_;my$self=$class->new($options);for my$module (keys %$hash){my$req=$hash->{$module};unless (defined$req && length$req){$req=0;$class->_blank_carp($module)}$self->add_string_requirement($module,$req)}return$self}{package CPAN::Meta::Requirements::_Range::Exact;sub _new {bless {version=>$_[1]}=>$_[0]}sub _accepts {return $_[0]{version}==$_[1]}sub as_string {return "== $_[0]{version}"}sub as_modifiers {return [[exact_version=>$_[0]{version}]]}sub _clone {(ref $_[0])->_new(version->new($_[0]{version}))}sub with_exact_version {my ($self,$version)=@_;return$self->_clone if$self->_accepts($version);Carp::confess("illegal requirements: unequal exact version specified")}sub with_minimum {my ($self,$minimum)=@_;return$self->_clone if$self->{version}>= $minimum;Carp::confess("illegal requirements: minimum above exact specification")}sub with_maximum {my ($self,$maximum)=@_;return$self->_clone if$self->{version}<= $maximum;Carp::confess("illegal requirements: maximum below exact specification")}sub with_exclusion {my ($self,$exclusion)=@_;return$self->_clone unless$exclusion==$self->{version};Carp::confess("illegal requirements: excluded exact specification")}}{package CPAN::Meta::Requirements::_Range::Range;sub _self {ref($_[0])? $_[0]: (bless {}=>$_[0])}sub _clone {return (bless {}=>$_[0])unless ref $_[0];my ($s)=@_;my%guts=((exists$s->{minimum}? (minimum=>version->new($s->{minimum})): ()),(exists$s->{maximum}? (maximum=>version->new($s->{maximum})): ()),(exists$s->{exclusions}? 
(exclusions=>[map {version->new($_)}@{$s->{exclusions}}]): ()),);bless \%guts=>ref($s)}sub as_modifiers {my ($self)=@_;my@mods;push@mods,[add_minimum=>$self->{minimum}]if exists$self->{minimum};push@mods,[add_maximum=>$self->{maximum}]if exists$self->{maximum};push@mods,map {;[add_exclusion=>$_ ]}@{$self->{exclusions}|| []};return \@mods}sub as_string {my ($self)=@_;return 0 if!keys %$self;return "$self->{minimum}" if (keys %$self)==1 and exists$self->{minimum};my@exclusions=@{$self->{exclusions}|| []};my@parts;for my$pair ([qw(>= > minimum) ],[qw(<= < maximum) ],){my ($op,$e_op,$k)=@$pair;if (exists$self->{$k}){my@new_exclusions=grep {$_!=$self->{$k }}@exclusions;if (@new_exclusions==@exclusions){push@parts,"$op $self->{ $k }"}else {push@parts,"$e_op $self->{ $k }";@exclusions=@new_exclusions}}}push@parts,map {;"!= $_"}@exclusions;return join q{, },@parts}sub with_exact_version {my ($self,$version)=@_;$self=$self->_clone;Carp::confess("illegal requirements: exact specification outside of range")unless$self->_accepts($version);return CPAN::Meta::Requirements::_Range::Exact->_new($version)}sub _simplify {my ($self)=@_;if (defined$self->{minimum}and defined$self->{maximum}){if ($self->{minimum}==$self->{maximum}){Carp::confess("illegal requirements: excluded all values")if grep {$_==$self->{minimum}}@{$self->{exclusions}|| []};return CPAN::Meta::Requirements::_Range::Exact->_new($self->{minimum})}Carp::confess("illegal requirements: minimum exceeds maximum")if$self->{minimum}> $self->{maximum}}if ($self->{exclusions}){my%seen;@{$self->{exclusions}}=grep {(!defined$self->{minimum}or $_ >= $self->{minimum})and (!defined$self->{maximum}or $_ <= $self->{maximum})and !$seen{$_}++}@{$self->{exclusions}}}return$self}sub with_minimum {my ($self,$minimum)=@_;$self=$self->_clone;if (defined (my$old_min=$self->{minimum})){$self->{minimum}=(sort {$b cmp $a}($minimum,$old_min))[0]}else {$self->{minimum}=$minimum}return$self->_simplify}sub with_maximum {my ($self,$maximum)=@_;$self=$self->_clone;if (defined (my$old_max=$self->{maximum})){$self->{maximum}=(sort {$a cmp $b}($maximum,$old_max))[0]}else {$self->{maximum}=$maximum}return$self->_simplify}sub with_exclusion {my ($self,$exclusion)=@_;$self=$self->_clone;push @{$self->{exclusions}||= []},$exclusion;return$self->_simplify}sub _accepts {my ($self,$version)=@_;return if defined$self->{minimum}and $version < $self->{minimum};return if defined$self->{maximum}and $version > $self->{maximum};return if defined$self->{exclusions}and grep {$version==$_}@{$self->{exclusions}};return 1}}1; -CPAN_META_REQUIREMENTS - -$fatpacked{"CPAN/Meta/Spec.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'CPAN_META_SPEC'; - use 5.006;use strict;use warnings;package CPAN::Meta::Spec;our$VERSION='2.150005';1; -CPAN_META_SPEC - -$fatpacked{"CPAN/Meta/Validator.pm"} = '#line '.(1+__LINE__).' 
"'.__FILE__."\"\n".<<'CPAN_META_VALIDATOR'; - use 5.006;use strict;use warnings;package CPAN::Meta::Validator;our$VERSION='2.150005';my%known_specs=('1.4'=>'http://module-build.sourceforge.net/META-spec-v1.4.html','1.3'=>'http://module-build.sourceforge.net/META-spec-v1.3.html','1.2'=>'http://module-build.sourceforge.net/META-spec-v1.2.html','1.1'=>'http://module-build.sourceforge.net/META-spec-v1.1.html','1.0'=>'http://module-build.sourceforge.net/META-spec-v1.0.html');my%known_urls=map {$known_specs{$_}=>$_}keys%known_specs;my$module_map1={'map'=>{':key'=>{name=>\&module,value=>\&exversion }}};my$module_map2={'map'=>{':key'=>{name=>\&module,value=>\&version }}};my$no_index_2={'map'=>{file=>{list=>{value=>\&string }},directory=>{list=>{value=>\&string }},'package'=>{list=>{value=>\&string }},namespace=>{list=>{value=>\&string }},':key'=>{name=>\&custom_2,value=>\&anything },}};my$no_index_1_3={'map'=>{file=>{list=>{value=>\&string }},directory=>{list=>{value=>\&string }},'package'=>{list=>{value=>\&string }},namespace=>{list=>{value=>\&string }},':key'=>{name=>\&string,value=>\&anything },}};my$no_index_1_2={'map'=>{file=>{list=>{value=>\&string }},dir=>{list=>{value=>\&string }},'package'=>{list=>{value=>\&string }},namespace=>{list=>{value=>\&string }},':key'=>{name=>\&string,value=>\&anything },}};my$no_index_1_1={'map'=>{':key'=>{name=>\&string,list=>{value=>\&string }},}};my$prereq_map={map=>{':key'=>{name=>\&phase,'map'=>{':key'=>{name=>\&relation,%$module_map1,},},}},};my%definitions=('2'=>{'abstract'=>{mandatory=>1,value=>\&string },'author'=>{mandatory=>1,list=>{value=>\&string }},'dynamic_config'=>{mandatory=>1,value=>\&boolean },'generated_by'=>{mandatory=>1,value=>\&string },'license'=>{mandatory=>1,list=>{value=>\&license }},'meta-spec'=>{mandatory=>1,'map'=>{version=>{mandatory=>1,value=>\&version},url=>{value=>\&url },':key'=>{name=>\&custom_2,value=>\&anything },}},'name'=>{mandatory=>1,value=>\&string },'release_status'=>{mandatory=>1,value=>\&release_status },'version'=>{mandatory=>1,value=>\&version },'description'=>{value=>\&string },'keywords'=>{list=>{value=>\&string }},'no_index'=>$no_index_2,'optional_features'=>{'map'=>{':key'=>{name=>\&string,'map'=>{description=>{value=>\&string },prereqs=>$prereq_map,':key'=>{name=>\&custom_2,value=>\&anything },}}}},'prereqs'=>$prereq_map,'provides'=>{'map'=>{':key'=>{name=>\&module,'map'=>{file=>{mandatory=>1,value=>\&file },version=>{value=>\&version },':key'=>{name=>\&custom_2,value=>\&anything },}}}},'resources'=>{'map'=>{license=>{list=>{value=>\&url }},homepage=>{value=>\&url },bugtracker=>{'map'=>{web=>{value=>\&url },mailto=>{value=>\&string},':key'=>{name=>\&custom_2,value=>\&anything },}},repository=>{'map'=>{web=>{value=>\&url },url=>{value=>\&url },type=>{value=>\&string },':key'=>{name=>\&custom_2,value=>\&anything },}},':key'=>{value=>\&string,name=>\&custom_2 },}},':key'=>{name=>\&custom_2,value=>\&anything },},'1.4'=>{'meta-spec'=>{mandatory=>1,'map'=>{version=>{mandatory=>1,value=>\&version},url=>{mandatory=>1,value=>\&urlspec },':key'=>{name=>\&string,value=>\&anything },},},'name'=>{mandatory=>1,value=>\&string },'version'=>{mandatory=>1,value=>\&version },'abstract'=>{mandatory=>1,value=>\&string },'author'=>{mandatory=>1,list=>{value=>\&string }},'license'=>{mandatory=>1,value=>\&license },'generated_by'=>{mandatory=>1,value=>\&string },'distribution_type'=>{value=>\&string },'dynamic_config'=>{value=>\&boolean 
},'requires'=>$module_map1,'recommends'=>$module_map1,'build_requires'=>$module_map1,'configure_requires'=>$module_map1,'conflicts'=>$module_map2,'optional_features'=>{'map'=>{':key'=>{name=>\&string,'map'=>{description=>{value=>\&string },requires=>$module_map1,recommends=>$module_map1,build_requires=>$module_map1,conflicts=>$module_map2,':key'=>{name=>\&string,value=>\&anything },}}}},'provides'=>{'map'=>{':key'=>{name=>\&module,'map'=>{file=>{mandatory=>1,value=>\&file },version=>{value=>\&version },':key'=>{name=>\&string,value=>\&anything },}}}},'no_index'=>$no_index_1_3,'private'=>$no_index_1_3,'keywords'=>{list=>{value=>\&string }},'resources'=>{'map'=>{license=>{value=>\&url },homepage=>{value=>\&url },bugtracker=>{value=>\&url },repository=>{value=>\&url },':key'=>{value=>\&string,name=>\&custom_1 },}},':key'=>{name=>\&string,value=>\&anything },},'1.3'=>{'meta-spec'=>{mandatory=>1,'map'=>{version=>{mandatory=>1,value=>\&version},url=>{mandatory=>1,value=>\&urlspec },':key'=>{name=>\&string,value=>\&anything },},},'name'=>{mandatory=>1,value=>\&string },'version'=>{mandatory=>1,value=>\&version },'abstract'=>{mandatory=>1,value=>\&string },'author'=>{mandatory=>1,list=>{value=>\&string }},'license'=>{mandatory=>1,value=>\&license },'generated_by'=>{mandatory=>1,value=>\&string },'distribution_type'=>{value=>\&string },'dynamic_config'=>{value=>\&boolean },'requires'=>$module_map1,'recommends'=>$module_map1,'build_requires'=>$module_map1,'conflicts'=>$module_map2,'optional_features'=>{'map'=>{':key'=>{name=>\&string,'map'=>{description=>{value=>\&string },requires=>$module_map1,recommends=>$module_map1,build_requires=>$module_map1,conflicts=>$module_map2,':key'=>{name=>\&string,value=>\&anything },}}}},'provides'=>{'map'=>{':key'=>{name=>\&module,'map'=>{file=>{mandatory=>1,value=>\&file },version=>{value=>\&version },':key'=>{name=>\&string,value=>\&anything },}}}},'no_index'=>$no_index_1_3,'private'=>$no_index_1_3,'keywords'=>{list=>{value=>\&string }},'resources'=>{'map'=>{license=>{value=>\&url },homepage=>{value=>\&url },bugtracker=>{value=>\&url },repository=>{value=>\&url },':key'=>{value=>\&string,name=>\&custom_1 },}},':key'=>{name=>\&string,value=>\&anything },},'1.2'=>{'meta-spec'=>{mandatory=>1,'map'=>{version=>{mandatory=>1,value=>\&version},url=>{mandatory=>1,value=>\&urlspec },':key'=>{name=>\&string,value=>\&anything },},},'name'=>{mandatory=>1,value=>\&string },'version'=>{mandatory=>1,value=>\&version },'license'=>{mandatory=>1,value=>\&license },'generated_by'=>{mandatory=>1,value=>\&string },'author'=>{mandatory=>1,list=>{value=>\&string }},'abstract'=>{mandatory=>1,value=>\&string },'distribution_type'=>{value=>\&string },'dynamic_config'=>{value=>\&boolean },'keywords'=>{list=>{value=>\&string }},'private'=>$no_index_1_2,'$no_index'=>$no_index_1_2,'requires'=>$module_map1,'recommends'=>$module_map1,'build_requires'=>$module_map1,'conflicts'=>$module_map2,'optional_features'=>{'map'=>{':key'=>{name=>\&string,'map'=>{description=>{value=>\&string },requires=>$module_map1,recommends=>$module_map1,build_requires=>$module_map1,conflicts=>$module_map2,':key'=>{name=>\&string,value=>\&anything },}}}},'provides'=>{'map'=>{':key'=>{name=>\&module,'map'=>{file=>{mandatory=>1,value=>\&file },version=>{value=>\&version },':key'=>{name=>\&string,value=>\&anything },}}}},'resources'=>{'map'=>{license=>{value=>\&url },homepage=>{value=>\&url },bugtracker=>{value=>\&url },repository=>{value=>\&url },':key'=>{value=>\&string,name=>\&custom_1 
},}},':key'=>{name=>\&string,value=>\&anything },},'1.1'=>{'name'=>{value=>\&string },'version'=>{mandatory=>1,value=>\&version },'license'=>{value=>\&license },'generated_by'=>{value=>\&string },'license_uri'=>{value=>\&url },'distribution_type'=>{value=>\&string },'dynamic_config'=>{value=>\&boolean },'private'=>$no_index_1_1,'requires'=>$module_map1,'recommends'=>$module_map1,'build_requires'=>$module_map1,'conflicts'=>$module_map2,':key'=>{name=>\&string,value=>\&anything },},'1.0'=>{'name'=>{value=>\&string },'version'=>{mandatory=>1,value=>\&version },'license'=>{value=>\&license },'generated_by'=>{value=>\&string },'license_uri'=>{value=>\&url },'distribution_type'=>{value=>\&string },'dynamic_config'=>{value=>\&boolean },'requires'=>$module_map1,'recommends'=>$module_map1,'build_requires'=>$module_map1,'conflicts'=>$module_map2,':key'=>{name=>\&string,value=>\&anything },},);sub new {my ($class,$data)=@_;my$self={'data'=>$data,'spec'=>eval {$data->{'meta-spec'}{'version'}}|| "1.0",'errors'=>undef,};return bless$self,$class}sub is_valid {my$self=shift;my$data=$self->{data};my$spec_version=$self->{spec};$self->check_map($definitions{$spec_version},$data);return!$self->errors}sub errors {my$self=shift;return ()unless(defined$self->{errors});return @{$self->{errors}}}my$spec_error="Missing validation action in specification. " ."Must be one of 'map', 'list', or 'value'";sub check_map {my ($self,$spec,$data)=@_;if(ref($spec)ne 'HASH'){$self->_error("Unknown META specification, cannot validate.");return}if(ref($data)ne 'HASH'){$self->_error("Expected a map structure from string or file.");return}for my$key (keys %$spec){next unless($spec->{$key}->{mandatory});next if(defined$data->{$key});push @{$self->{stack}},$key;$self->_error("Missing mandatory field, '$key'");pop @{$self->{stack}}}for my$key (keys %$data){push @{$self->{stack}},$key;if($spec->{$key}){if($spec->{$key}{value}){$spec->{$key}{value}->($self,$key,$data->{$key})}elsif($spec->{$key}{'map'}){$self->check_map($spec->{$key}{'map'},$data->{$key})}elsif($spec->{$key}{'list'}){$self->check_list($spec->{$key}{'list'},$data->{$key})}else {$self->_error("$spec_error for '$key'")}}elsif ($spec->{':key'}){$spec->{':key'}{name}->($self,$key,$key);if($spec->{':key'}{value}){$spec->{':key'}{value}->($self,$key,$data->{$key})}elsif($spec->{':key'}{'map'}){$self->check_map($spec->{':key'}{'map'},$data->{$key})}elsif($spec->{':key'}{'list'}){$self->check_list($spec->{':key'}{'list'},$data->{$key})}else {$self->_error("$spec_error for ':key'")}}else {$self->_error("Unknown key, '$key', found in map structure")}pop @{$self->{stack}}}}sub check_list {my ($self,$spec,$data)=@_;if(ref($data)ne 'ARRAY'){$self->_error("Expected a list structure");return}if(defined$spec->{mandatory}){if(!defined$data->[0]){$self->_error("Missing entries from mandatory list")}}for my$value (@$data){push @{$self->{stack}},$value || "";if(defined$spec->{value}){$spec->{value}->($self,'list',$value)}elsif(defined$spec->{'map'}){$self->check_map($spec->{'map'},$value)}elsif(defined$spec->{'list'}){$self->check_list($spec->{'list'},$value)}elsif ($spec->{':key'}){$self->check_map($spec,$value)}else {$self->_error("$spec_error associated with '$self->{stack}[-2]'")}pop @{$self->{stack}}}}sub header {my ($self,$key,$value)=@_;if(defined$value){return 1 if($value && $value =~ /^--- #YAML:1.0/)}$self->_error("file does not have a valid YAML header.");return 0}sub release_status {my ($self,$key,$value)=@_;if(defined$value){my$version=$self->{data}{version}|| '';if ($version 
=~ /_/){return 1 if ($value =~ /\A(?:testing|unstable)\z/);$self->_error("'$value' for '$key' is invalid for version '$version'")}else {return 1 if ($value =~ /\A(?:stable|testing|unstable)\z/);$self->_error("'$value' for '$key' is invalid")}}else {$self->_error("'$key' is not defined")}return 0}sub _uri_split {return $_[0]=~ m,(?:([^:/?#]+):)?(?://([^/?#]*))?([^?#]*)(?:\?([^#]*))?(?:#(.*))?,}sub url {my ($self,$key,$value)=@_;if(defined$value){my ($scheme,$auth,$path,$query,$frag)=_uri_split($value);unless (defined$scheme && length$scheme){$self->_error("'$value' for '$key' does not have a URL scheme");return 0}unless (defined$auth && length$auth){$self->_error("'$value' for '$key' does not have a URL authority");return 0}return 1}$value ||= '';$self->_error("'$value' for '$key' is not a valid URL.");return 0}sub urlspec {my ($self,$key,$value)=@_;if(defined$value){return 1 if($value && $known_specs{$self->{spec}}eq $value);if($value && $known_urls{$value}){$self->_error('META specification URL does not match version');return 0}}$self->_error('Unknown META specification');return 0}sub anything {return 1}sub string {my ($self,$key,$value)=@_;if(defined$value){return 1 if($value || $value =~ /^0$/)}$self->_error("value is an undefined string");return 0}sub string_or_undef {my ($self,$key,$value)=@_;return 1 unless(defined$value);return 1 if($value || $value =~ /^0$/);$self->_error("No string defined for '$key'");return 0}sub file {my ($self,$key,$value)=@_;return 1 if(defined$value);$self->_error("No file defined for '$key'");return 0}sub exversion {my ($self,$key,$value)=@_;if(defined$value && ($value || $value =~ /0/)){my$pass=1;for(split(",",$value)){$self->version($key,$_)or ($pass=0)}return$pass}$value='' unless(defined$value);$self->_error("'$value' for '$key' is not a valid version.");return 0}sub version {my ($self,$key,$value)=@_;if(defined$value){return 0 unless($value || $value =~ /0/);return 1 if($value =~ /^\s*((<|<=|>=|>|!=|==)\s*)?v?\d+((\.\d+((_|\.)\d+)?)?)/)}else {$value=''}$self->_error("'$value' for '$key' is not a valid version.");return 0}sub boolean {my ($self,$key,$value)=@_;if(defined$value){return 1 if($value =~ /^(0|1|true|false)$/)}else {$value=''}$self->_error("'$value' for '$key' is not a boolean value.");return 0}my%v1_licenses=('perl'=>'http://dev.perl.org/licenses/','gpl'=>'http://www.opensource.org/licenses/gpl-license.php','apache'=>'http://apache.org/licenses/LICENSE-2.0','artistic'=>'http://opensource.org/licenses/artistic-license.php','artistic_2'=>'http://opensource.org/licenses/artistic-license-2.0.php','lgpl'=>'http://www.opensource.org/licenses/lgpl-license.php','bsd'=>'http://www.opensource.org/licenses/bsd-license.php','gpl'=>'http://www.opensource.org/licenses/gpl-license.php','mit'=>'http://opensource.org/licenses/mit-license.php','mozilla'=>'http://opensource.org/licenses/mozilla1.1.php','open_source'=>undef,'unrestricted'=>undef,'restrictive'=>undef,'unknown'=>undef,);my%v2_licenses=map {$_=>1}qw(agpl_3 apache_1_1 apache_2_0 artistic_1 artistic_2 bsd freebsd gfdl_1_2 gfdl_1_3 gpl_1 gpl_2 gpl_3 lgpl_2_1 lgpl_3_0 mit mozilla_1_0 mozilla_1_1 openssl perl_5 qpl_1_0 ssleay sun zlib open_source restricted unrestricted unknown);sub license {my ($self,$key,$value)=@_;my$licenses=$self->{spec}< 2 ? 
\%v1_licenses : \%v2_licenses;if(defined$value){return 1 if($value && exists$licenses->{$value})}else {$value=''}$self->_error("License '$value' is invalid");return 0}sub custom_1 {my ($self,$key)=@_;if(defined$key){return 1 if($key && $key =~ /^[_a-z]+$/i && $key =~ /[A-Z]/)}else {$key=''}$self->_error("Custom resource '$key' must be in CamelCase.");return 0}sub custom_2 {my ($self,$key)=@_;if(defined$key){return 1 if($key && $key =~ /^x_/i)}else {$key=''}$self->_error("Custom key '$key' must begin with 'x_' or 'X_'.");return 0}sub identifier {my ($self,$key)=@_;if(defined$key){return 1 if($key && $key =~ /^([a-z][_a-z]+)$/i)}else {$key=''}$self->_error("Key '$key' is not a legal identifier.");return 0}sub module {my ($self,$key)=@_;if(defined$key){return 1 if($key && $key =~ /^[A-Za-z0-9_]+(::[A-Za-z0-9_]+)*$/)}else {$key=''}$self->_error("Key '$key' is not a legal module name.");return 0}my@valid_phases=qw/configure build test runtime develop/;sub phase {my ($self,$key)=@_;if(defined$key){return 1 if(length$key && grep {$key eq $_}@valid_phases);return 1 if$key =~ /x_/i}else {$key=''}$self->_error("Key '$key' is not a legal phase.");return 0}my@valid_relations=qw/requires recommends suggests conflicts/;sub relation {my ($self,$key)=@_;if(defined$key){return 1 if(length$key && grep {$key eq $_}@valid_relations);return 1 if$key =~ /x_/i}else {$key=''}$self->_error("Key '$key' is not a legal prereq relationship.");return 0}sub _error {my$self=shift;my$mess=shift;$mess .= ' ('.join(' -> ',@{$self->{stack}}).')' if($self->{stack});$mess .= " [Validation: $self->{spec}]";push @{$self->{errors}},$mess}1; -CPAN_META_VALIDATOR - -$fatpacked{"CPAN/Meta/YAML.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'CPAN_META_YAML'; - use 5.008001;use strict;use warnings;package CPAN::Meta::YAML;$CPAN::Meta::YAML::VERSION='0.016';;use Exporter;our@ISA=qw{Exporter};our@EXPORT=qw{Load Dump};our@EXPORT_OK=qw{LoadFile DumpFile freeze thaw};sub Dump {return CPAN::Meta::YAML->new(@_)->_dump_string}sub Load {my$self=CPAN::Meta::YAML->_load_string(@_);if (wantarray){return @$self}else {return$self->[-1]}}BEGIN {*freeze=\&Dump;*thaw=\&Load}sub DumpFile {my$file=shift;return CPAN::Meta::YAML->new(@_)->_dump_file($file)}sub LoadFile {my$file=shift;my$self=CPAN::Meta::YAML->_load_file($file);if (wantarray){return @$self}else {return$self->[-1]}}sub new {my$class=shift;bless [@_ ],$class}sub read_string {my$self=shift;$self->_load_string(@_)}sub write_string {my$self=shift;$self->_dump_string(@_)}sub read {my$self=shift;$self->_load_file(@_)}sub write {my$self=shift;$self->_dump_file(@_)}my@UNPRINTABLE=qw(0 x01 x02 x03 x04 x05 x06 a b t n v f r x0E x0F x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x1A e x1C x1D x1E x1F);my%UNESCAPES=(0=>"\x00",z=>"\x00",N=>"\x85",a=>"\x07",b=>"\x08",t=>"\x09",n=>"\x0a",v=>"\x0b",f=>"\x0c",r=>"\x0d",e=>"\x1b",'\\'=>'\\',);my%QUOTE=map {$_=>1}qw{null true false};my$re_capture_double_quoted=qr/\"([^\\"]*(?:\\.[^\\"]*)*)\"/;my$re_capture_single_quoted=qr/\'([^\']*(?:\'\'[^\']*)*)\'/;my$re_capture_unquoted_key=qr/([^:]+(?::+\S(?:[^:]*|.*?(?=:)))*)(?=\s*\:(?:\s+|$))/;my$re_trailing_comment=qr/(?:\s+\#.*)?/;my$re_key_value_separator=qr/\s*:(?:\s+(?:\#.*)?|$)/;sub _load_file {my$class=ref $_[0]? 
ref shift : shift;my$file=shift or $class->_error('You did not specify a file name');$class->_error("File '$file' does not exist")unless -e $file;$class->_error("'$file' is a directory, not a file")unless -f _;$class->_error("Insufficient permissions to read '$file'")unless -r _;open(my$fh,"<:unix:encoding(UTF-8)",$file);unless ($fh){$class->_error("Failed to open file '$file': $!")}if (_can_flock()){flock($fh,Fcntl::LOCK_SH())or warn "Couldn't lock '$file' for reading: $!"}my$contents=eval {use warnings FATAL=>'utf8';local $/;<$fh>};if (my$err=$@){$class->_error("Error reading from file '$file': $err")}unless (close$fh){$class->_error("Failed to close file '$file': $!")}$class->_load_string($contents)}sub _load_string {my$class=ref $_[0]? ref shift : shift;my$self=bless [],$class;my$string=$_[0];eval {unless (defined$string){die \"Did not provide a string to load"}if (utf8::is_utf8($string)&&!utf8::valid($string)){die \<<'...'}utf8::upgrade($string);$string =~ s/^\x{FEFF}//;return$self unless length$string;my@lines=grep {!/^\s*(?:\#.*)?\z/}split /(?:\015{1,2}\012|\015|\012)/,$string;@lines and $lines[0]=~ /^\%YAML[: ][\d\.]+.*\z/ and shift@lines;my$in_document=0;while (@lines){if ($lines[0]=~ /^---\s*(?:(.+)\s*)?\z/){shift@lines;if (defined $1 and $1 !~ /^(?:\#.+|\%YAML[: ][\d\.]+)\z/){push @$self,$self->_load_scalar("$1",[undef ],\@lines);next}$in_document=1}if (!@lines or $lines[0]=~ /^(?:---|\.\.\.)/){push @$self,undef;while (@lines and $lines[0]!~ /^---/){shift@lines}$in_document=0}elsif (!$in_document && @$self){die \"CPAN::Meta::YAML failed to classify the line '$lines[0]'"}elsif ($lines[0]=~ /^\s*\-(?:\s|$|-+$)/){my$document=[];push @$self,$document;$self->_load_array($document,[0 ],\@lines)}elsif ($lines[0]=~ /^(\s*)\S/){my$document={};push @$self,$document;$self->_load_hash($document,[length($1)],\@lines)}else {die \"CPAN::Meta::YAML failed to classify the line '$lines[0]'"}}};my$err=$@;if (ref$err eq 'SCALAR'){$self->_error(${$err})}elsif ($err){$self->_error($err)}return$self}sub _unquote_single {my ($self,$string)=@_;return '' unless length$string;$string =~ s/\'\'/\'/g;return$string}sub _unquote_double {my ($self,$string)=@_;return '' unless length$string;$string =~ s/\\"/"/g;$string =~ s{\\([Nnever\\fartz0b]|x([0-9a-fA-F]{2}))} - Read an invalid UTF-8 string (maybe mixed UTF-8 and 8-bit character set). - Did you decode with lax ":utf8" instead of strict ":encoding(UTF-8)"? - ... 
- {(length($1)>1)?pack("H2",$2):$UNESCAPES{$1}}gex;return$string}sub _load_scalar {my ($self,$string,$indent,$lines)=@_;$string =~ s/\s*\z//;return undef if$string eq '~';if ($string =~ /^$re_capture_single_quoted$re_trailing_comment\z/){return$self->_unquote_single($1)}if ($string =~ /^$re_capture_double_quoted$re_trailing_comment\z/){return$self->_unquote_double($1)}if ($string =~ /^[\'\"!&]/){die \"CPAN::Meta::YAML does not support a feature in line '$string'"}return {}if$string =~ /^{}(?:\s+\#.*)?\z/;return []if$string =~ /^\[\](?:\s+\#.*)?\z/;if ($string !~ /^[>|]/){die \"CPAN::Meta::YAML found illegal characters in plain scalar: '$string'" if$string =~ /^(?:-(?:\s|$)|[\@\%\`])/ or $string =~ /:(?:\s|$)/;$string =~ s/\s+#.*\z//;return$string}die \"CPAN::Meta::YAML failed to find multi-line scalar content" unless @$lines;$lines->[0]=~ /^(\s*)/;$indent->[-1]=length("$1");if (defined$indent->[-2]and $indent->[-1]<= $indent->[-2]){die \"CPAN::Meta::YAML found bad indenting in line '$lines->[0]'"}my@multiline=();while (@$lines){$lines->[0]=~ /^(\s*)/;last unless length($1)>= $indent->[-1];push@multiline,substr(shift(@$lines),length($1))}my$j=(substr($string,0,1)eq '>')? ' ' : "\n";my$t=(substr($string,1,1)eq '-')? '' : "\n";return join($j,@multiline).$t}sub _load_array {my ($self,$array,$indent,$lines)=@_;while (@$lines){if ($lines->[0]=~ /^(?:---|\.\.\.)/){while (@$lines and $lines->[0]!~ /^---/){shift @$lines}return 1}$lines->[0]=~ /^(\s*)/;if (length($1)< $indent->[-1]){return 1}elsif (length($1)> $indent->[-1]){die \"CPAN::Meta::YAML found bad indenting in line '$lines->[0]'"}if ($lines->[0]=~ /^(\s*\-\s+)[^\'\"]\S*\s*:(?:\s+|$)/){my$indent2=length("$1");$lines->[0]=~ s/-/ /;push @$array,{};$self->_load_hash($array->[-1],[@$indent,$indent2 ],$lines)}elsif ($lines->[0]=~ /^\s*\-\s*\z/){shift @$lines;unless (@$lines){push @$array,undef;return 1}if ($lines->[0]=~ /^(\s*)\-/){my$indent2=length("$1");if ($indent->[-1]==$indent2){push @$array,undef}else {push @$array,[];$self->_load_array($array->[-1],[@$indent,$indent2 ],$lines)}}elsif ($lines->[0]=~ /^(\s*)\S/){push @$array,{};$self->_load_hash($array->[-1],[@$indent,length("$1")],$lines)}else {die \"CPAN::Meta::YAML failed to classify line '$lines->[0]'"}}elsif ($lines->[0]=~ /^\s*\-(\s*)(.+?)\s*\z/){shift @$lines;push @$array,$self->_load_scalar("$2",[@$indent,undef ],$lines)}elsif (defined$indent->[-2]and $indent->[-1]==$indent->[-2]){return 1}else {die \"CPAN::Meta::YAML failed to classify line '$lines->[0]'"}}return 1}sub _load_hash {my ($self,$hash,$indent,$lines)=@_;while (@$lines){if ($lines->[0]=~ /^(?:---|\.\.\.)/){while (@$lines and $lines->[0]!~ /^---/){shift @$lines}return 1}$lines->[0]=~ /^(\s*)/;if (length($1)< $indent->[-1]){return 1}elsif (length($1)> $indent->[-1]){die \"CPAN::Meta::YAML found bad indenting in line '$lines->[0]'"}my$key;if ($lines->[0]=~ s/^\s*$re_capture_single_quoted$re_key_value_separator//){$key=$self->_unquote_single($1)}elsif ($lines->[0]=~ s/^\s*$re_capture_double_quoted$re_key_value_separator//){$key=$self->_unquote_double($1)}elsif ($lines->[0]=~ s/^\s*$re_capture_unquoted_key$re_key_value_separator//){$key=$1;$key =~ s/\s+$//}elsif ($lines->[0]=~ /^\s*\?/){die \"CPAN::Meta::YAML does not support a feature in line '$lines->[0]'"}else {die \"CPAN::Meta::YAML failed to classify line '$lines->[0]'"}if (exists$hash->{$key}){warn "CPAN::Meta::YAML found a duplicate key '$key' in line '$lines->[0]'"}if (length$lines->[0]){$hash->{$key}=$self->_load_scalar(shift(@$lines),[@$indent,undef ],$lines)}else 
{shift @$lines;unless (@$lines){$hash->{$key}=undef;return 1}if ($lines->[0]=~ /^(\s*)-/){$hash->{$key}=[];$self->_load_array($hash->{$key},[@$indent,length($1)],$lines)}elsif ($lines->[0]=~ /^(\s*)./){my$indent2=length("$1");if ($indent->[-1]>= $indent2){$hash->{$key}=undef}else {$hash->{$key}={};$self->_load_hash($hash->{$key},[@$indent,length($1)],$lines)}}}}return 1}sub _dump_file {my$self=shift;require Fcntl;my$file=shift or $self->_error('You did not specify a file name');my$fh;if (_can_flock()){my$flags=Fcntl::O_WRONLY()|Fcntl::O_CREAT();sysopen($fh,$file,$flags);unless ($fh){$self->_error("Failed to open file '$file' for writing: $!")}binmode($fh,":raw:encoding(UTF-8)");flock($fh,Fcntl::LOCK_EX())or warn "Couldn't lock '$file' for reading: $!";truncate$fh,0;seek$fh,0,0}else {open$fh,">:unix:encoding(UTF-8)",$file}print {$fh}$self->_dump_string;unless (close$fh){$self->_error("Failed to close file '$file': $!")}return 1}sub _dump_string {my$self=shift;return '' unless ref$self && @$self;my$indent=0;my@lines=();eval {for my$cursor (@$self){push@lines,'---';if (!defined$cursor){}elsif (!ref$cursor){$lines[-1].= ' ' .$self->_dump_scalar($cursor)}elsif (ref$cursor eq 'ARRAY'){unless (@$cursor){$lines[-1].= ' []';next}push@lines,$self->_dump_array($cursor,$indent,{})}elsif (ref$cursor eq 'HASH'){unless (%$cursor){$lines[-1].= ' {}';next}push@lines,$self->_dump_hash($cursor,$indent,{})}else {die \("Cannot serialize " .ref($cursor))}}};if (ref $@ eq 'SCALAR'){$self->_error(${$@})}elsif ($@){$self->_error($@)}join '',map {"$_\n"}@lines}sub _has_internal_string_value {my$value=shift;my$b_obj=B::svref_2object(\$value);return$b_obj->FLAGS & B::SVf_POK()}sub _dump_scalar {my$string=$_[1];my$is_key=$_[2];my$has_string_flag=_has_internal_string_value($string);return '~' unless defined$string;return "''" unless length$string;if (Scalar::Util::looks_like_number($string)){if ($is_key || $has_string_flag){return qq['$string']}else {return$string}}if ($string =~ /[\x00-\x09\x0b-\x0d\x0e-\x1f\x7f-\x9f\'\n]/){$string =~ s/\\/\\\\/g;$string =~ s/"/\\"/g;$string =~ s/\n/\\n/g;$string =~ s/[\x85]/\\N/g;$string =~ s/([\x00-\x1f])/\\$UNPRINTABLE[ord($1)]/g;$string =~ s/([\x7f-\x9f])/'\x' . 
sprintf("%X",ord($1))/ge;return qq|"$string"|}if ($string =~ /(?:^[~!@#%&*|>?:,'"`{}\[\]]|^-+$|\s|:\z)/ or $QUOTE{$string}){return "'$string'"}return$string}sub _dump_array {my ($self,$array,$indent,$seen)=@_;if ($seen->{refaddr($array)}++){die \"CPAN::Meta::YAML does not support circular references"}my@lines=();for my$el (@$array){my$line=(' ' x $indent).'-';my$type=ref$el;if (!$type){$line .= ' ' .$self->_dump_scalar($el);push@lines,$line}elsif ($type eq 'ARRAY'){if (@$el){push@lines,$line;push@lines,$self->_dump_array($el,$indent + 1,$seen)}else {$line .= ' []';push@lines,$line}}elsif ($type eq 'HASH'){if (keys %$el){push@lines,$line;push@lines,$self->_dump_hash($el,$indent + 1,$seen)}else {$line .= ' {}';push@lines,$line}}else {die \"CPAN::Meta::YAML does not support $type references"}}@lines}sub _dump_hash {my ($self,$hash,$indent,$seen)=@_;if ($seen->{refaddr($hash)}++){die \"CPAN::Meta::YAML does not support circular references"}my@lines=();for my$name (sort keys %$hash){my$el=$hash->{$name};my$line=(' ' x $indent).$self->_dump_scalar($name,1).":";my$type=ref$el;if (!$type){$line .= ' ' .$self->_dump_scalar($el);push@lines,$line}elsif ($type eq 'ARRAY'){if (@$el){push@lines,$line;push@lines,$self->_dump_array($el,$indent + 1,$seen)}else {$line .= ' []';push@lines,$line}}elsif ($type eq 'HASH'){if (keys %$el){push@lines,$line;push@lines,$self->_dump_hash($el,$indent + 1,$seen)}else {$line .= ' {}';push@lines,$line}}else {die \"CPAN::Meta::YAML does not support $type references"}}@lines}our$errstr='';sub _error {require Carp;$errstr=$_[1];$errstr =~ s/ at \S+ line \d+.*//;Carp::croak($errstr)}my$errstr_warned;sub errstr {require Carp;Carp::carp("CPAN::Meta::YAML->errstr and \$CPAN::Meta::YAML::errstr is deprecated")unless$errstr_warned++;$errstr}use B;my$HAS_FLOCK;sub _can_flock {if (defined$HAS_FLOCK){return$HAS_FLOCK}else {require Config;my$c=\%Config::Config;$HAS_FLOCK=grep {$c->{$_}}qw/d_flock d_fcntl_can_lock d_lockf/;require Fcntl if$HAS_FLOCK;return$HAS_FLOCK}}use Scalar::Util ();BEGIN {local $@;if (eval {Scalar::Util->VERSION(1.18)}){*refaddr=*Scalar::Util::refaddr}else {eval <<'END_PERL'}}delete$CPAN::Meta::YAML::{refaddr};1; - # Scalar::Util failed to load or too old - sub refaddr { - my $pkg = ref($_[0]) or return undef; - if ( !! UNIVERSAL::can($_[0], 'can') ) { - bless $_[0], 'Scalar::Util::Fake'; - } else { - $pkg = undef; - } - "$_[0]" =~ /0x(\w+)/; - my $i = do { no warnings 'portable'; hex $1 }; - bless $_[0], $pkg if defined $pkg; - $i; - } - END_PERL -CPAN_META_YAML - -$fatpacked{"Exporter.pm"} = '#line '.(1+__LINE__).' 
"'.__FILE__."\"\n".<<'EXPORTER'; - package Exporter;require 5.006;our$Debug=0;our$ExportLevel=0;our$Verbose ||=0;our$VERSION='5.70';our (%Cache);sub as_heavy {require Exporter::Heavy;my$c=(caller(1))[3];$c =~ s/.*:://;\&{"Exporter::Heavy::heavy_$c"}}sub export {goto &{as_heavy()}}sub import {my$pkg=shift;my$callpkg=caller($ExportLevel);if ($pkg eq "Exporter" and @_ and $_[0]eq "import"){*{$callpkg."::import"}=\&import;return}my$exports=\@{"$pkg\::EXPORT"};my$fail=${$pkg .'::'}{EXPORT_FAIL}&& \@{"$pkg\::EXPORT_FAIL"};return export$pkg,$callpkg,@_ if$Verbose or $Debug or $fail && @$fail > 1;my$export_cache=($Cache{$pkg}||={});my$args=@_ or @_=@$exports;if ($args and not %$export_cache){s/^&//,$export_cache->{$_}=1 foreach (@$exports,@{"$pkg\::EXPORT_OK"})}my$heavy;if ($args or $fail){($heavy=(/\W/ or $args and not exists$export_cache->{$_}or $fail and @$fail and $_ eq $fail->[0]))and last foreach (@_)}else {($heavy=/\W/)and last foreach (@_)}return export$pkg,$callpkg,($args ? @_ : ())if$heavy;local$SIG{__WARN__}=sub {require Carp;&Carp::carp}if not $SIG{__WARN__};*{"$callpkg\::$_"}=\&{"$pkg\::$_"}foreach @_}sub export_fail {my$self=shift;@_}sub export_to_level {goto &{as_heavy()}}sub export_tags {goto &{as_heavy()}}sub export_ok_tags {goto &{as_heavy()}}sub require_version {goto &{as_heavy()}}1; -EXPORTER - -$fatpacked{"Exporter/Heavy.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'EXPORTER_HEAVY'; - package Exporter::Heavy;use strict;no strict 'refs';require Exporter;our$VERSION=$Exporter::VERSION;sub _rebuild_cache {my ($pkg,$exports,$cache)=@_;s/^&// foreach @$exports;@{$cache}{@$exports}=(1)x @$exports;my$ok=\@{"${pkg}::EXPORT_OK"};if (@$ok){s/^&// foreach @$ok;@{$cache}{@$ok}=(1)x @$ok}}sub heavy_export {my$oldwarn=$SIG{__WARN__};local$SIG{__WARN__}=sub {local$SIG{__WARN__}=$oldwarn;my$text=shift;if ($text =~ s/ at \S*Exporter\S*.pm line \d+.*\n//){require Carp;local$Carp::CarpLevel=1;Carp::carp($text)}else {warn$text}};local$SIG{__DIE__}=sub {require Carp;local$Carp::CarpLevel=1;Carp::croak("$_[0]Illegal null symbol in \@${1}::EXPORT")if $_[0]=~ /^Unable to create sub named "(.*?)::"/};my($pkg,$callpkg,@imports)=@_;my($type,$sym,$cache_is_current,$oops);my($exports,$export_cache)=(\@{"${pkg}::EXPORT"},$Exporter::Cache{$pkg}||={});if (@imports){if (!%$export_cache){_rebuild_cache ($pkg,$exports,$export_cache);$cache_is_current=1}if (grep m{^[/!:]},@imports){my$tagsref=\%{"${pkg}::EXPORT_TAGS"};my$tagdata;my%imports;my($remove,$spec,@names,@allexports);unshift@imports,':DEFAULT' if$imports[0]=~ m/^!/;for$spec (@imports){$remove=$spec =~ s/^!//;if ($spec =~ s/^://){if ($spec eq 'DEFAULT'){@names=@$exports}elsif ($tagdata=$tagsref->{$spec}){@names=@$tagdata}else {warn qq["$spec" is not defined in %${pkg}::EXPORT_TAGS];++$oops;next}}elsif ($spec =~ m:^/(.*)/$:){my$patn=$1;@allexports=keys %$export_cache unless@allexports;@names=grep(/$patn/,@allexports)}else {@names=($spec)}warn "Import ".($remove ? 
"del":"add").": @names " if$Exporter::Verbose;if ($remove){for$sym (@names){delete$imports{$sym}}}else {@imports{@names}=(1)x @names}}@imports=keys%imports}my@carp;for$sym (@imports){if (!$export_cache->{$sym}){if ($sym =~ m/^\d/){$pkg->VERSION($sym);if (@imports==1){@imports=@$exports;last}if (@imports==2 and!$imports[1]){@imports=();last}}elsif ($sym !~ s/^&// ||!$export_cache->{$sym}){unless ($cache_is_current){%$export_cache=();_rebuild_cache ($pkg,$exports,$export_cache);$cache_is_current=1}if (!$export_cache->{$sym}){push@carp,qq["$sym" is not exported by the $pkg module\n];$oops++}}}}if ($oops){require Carp;Carp::croak("@{carp}Can't continue after import errors")}}else {@imports=@$exports}my($fail,$fail_cache)=(\@{"${pkg}::EXPORT_FAIL"},$Exporter::FailCache{$pkg}||={});if (@$fail){if (!%$fail_cache){my@expanded=map {/^\w/ ? ($_,'&'.$_): $_}@$fail;warn "${pkg}::EXPORT_FAIL cached: @expanded" if$Exporter::Verbose;@{$fail_cache}{@expanded}=(1)x @expanded}my@failed;for$sym (@imports){push(@failed,$sym)if$fail_cache->{$sym}}if (@failed){@failed=$pkg->export_fail(@failed);for$sym (@failed){require Carp;Carp::carp(qq["$sym" is not implemented by the $pkg module ],"on this architecture")}if (@failed){require Carp;Carp::croak("Can't continue after import errors")}}}warn "Importing into $callpkg from $pkg: ",join(", ",sort@imports)if$Exporter::Verbose;for$sym (@imports){(*{"${callpkg}::$sym"}=\&{"${pkg}::$sym"},next)unless$sym =~ s/^(\W)//;$type=$1;no warnings 'once';*{"${callpkg}::$sym"}=$type eq '&' ? \&{"${pkg}::$sym"}: $type eq '$' ? \${"${pkg}::$sym"}: $type eq '@' ? \@{"${pkg}::$sym"}: $type eq '%' ? \%{"${pkg}::$sym"}: $type eq '*' ? *{"${pkg}::$sym"}: do {require Carp;Carp::croak("Can't export symbol: $type$sym")}}}sub heavy_export_to_level {my$pkg=shift;my$level=shift;(undef)=shift;my$callpkg=caller($level);$pkg->export($callpkg,@_)}sub _push_tags {my($pkg,$var,$syms)=@_;my@nontag=();my$export_tags=\%{"${pkg}::EXPORT_TAGS"};push(@{"${pkg}::$var"},map {$export_tags->{$_}? @{$export_tags->{$_}}: scalar(push(@nontag,$_),$_)}(@$syms)? @$syms : keys %$export_tags);if (@nontag and $^W){require Carp;Carp::carp(join(", ",@nontag)." are not tags of $pkg")}}sub heavy_require_version {my($self,$wanted)=@_;my$pkg=ref$self || $self;return ${pkg}->VERSION($wanted)}sub heavy_export_tags {_push_tags((caller)[0],"EXPORT",\@_)}sub heavy_export_ok_tags {_push_tags((caller)[0],"EXPORT_OK",\@_)}1; -EXPORTER_HEAVY - -$fatpacked{"File/pushd.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'FILE_PUSHD'; - use strict;use warnings;package File::pushd;our$VERSION='1.009';our@EXPORT=qw(pushd tempd);our@ISA=qw(Exporter);use Exporter;use Carp;use Cwd qw(getcwd abs_path);use File::Path qw(rmtree);use File::Temp qw();use File::Spec;use overload q{""}=>sub {File::Spec->canonpath($_[0]->{_pushd})},fallback=>1;sub pushd {my ($target_dir,$options)=@_;$options->{untaint_pattern}||= qr{^([-+@\w./]+)$};$target_dir="." unless defined$target_dir;croak "Can't locate directory $target_dir" unless -d $target_dir;my$tainted_orig=getcwd;my$orig;if ($tainted_orig =~ $options->{untaint_pattern}){$orig=$1}else {$orig=$tainted_orig}my$tainted_dest;eval {$tainted_dest=$target_dir ? 
abs_path($target_dir): $orig};croak "Can't locate absolute path for $target_dir: $@" if $@;my$dest;if ($tainted_dest =~ $options->{untaint_pattern}){$dest=$1}else {$dest=$tainted_dest}if ($dest ne $orig){chdir$dest or croak "Can't chdir to $dest\: $!"}my$self=bless {_pushd=>$dest,_original=>$orig },__PACKAGE__;return$self}sub tempd {my ($options)=@_;my$dir;eval {$dir=pushd(File::Temp::tempdir(CLEANUP=>0),$options)};croak $@ if $@;$dir->{_tempd}=1;return$dir}sub preserve {my$self=shift;return 1 if!$self->{"_tempd"};if (@_==0){return$self->{_preserve}=1}else {return$self->{_preserve}=$_[0]? 1 : 0}}sub DESTROY {my ($self)=@_;my$orig=$self->{_original};chdir$orig if$orig;if ($self->{_tempd}&&!$self->{_preserve}){my$err=do {local $@;eval {rmtree($self->{_pushd})};$@};carp$err if$err}}1; -FILE_PUSHD - -$fatpacked{"HTTP/Tiny.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'HTTP_TINY'; - package HTTP::Tiny;use strict;use warnings;our$VERSION='0.056';use Carp ();my@attributes;BEGIN {@attributes=qw(cookie_jar default_headers http_proxy https_proxy keep_alive local_address max_redirect max_size proxy no_proxy timeout SSL_options verify_SSL);my%persist_ok=map {;$_=>1}qw(cookie_jar default_headers max_redirect max_size);no strict 'refs';no warnings 'uninitialized';for my$accessor (@attributes){*{$accessor}=sub {@_ > 1 ? do {delete $_[0]->{handle}if!$persist_ok{$accessor}&& $_[1]ne $_[0]->{$accessor};$_[0]->{$accessor}=$_[1]}: $_[0]->{$accessor}}}}sub agent {my($self,$agent)=@_;if(@_ > 1){$self->{agent}=(defined$agent && $agent =~ / $/)? $agent .$self->_agent : $agent}return$self->{agent}}sub new {my($class,%args)=@_;my$self={max_redirect=>5,timeout=>60,keep_alive=>1,verify_SSL=>$args{verify_SSL}|| $args{verify_ssl}|| 0,no_proxy=>$ENV{no_proxy},};bless$self,$class;$class->_validate_cookie_jar($args{cookie_jar})if$args{cookie_jar};for my$key (@attributes){$self->{$key}=$args{$key}if exists$args{$key}}$self->agent(exists$args{agent}? $args{agent}: $class->_agent);$self->_set_proxies;return$self}sub _set_proxies {my ($self)=@_;if (!exists$self->{proxy}){$self->{proxy}=$ENV{all_proxy}|| $ENV{ALL_PROXY}}if (defined$self->{proxy}){$self->_split_proxy('generic proxy'=>$self->{proxy})}else {delete$self->{proxy}}if (!exists$self->{http_proxy}){local$ENV{HTTP_PROXY}if$ENV{REQUEST_METHOD};$self->{http_proxy}=$ENV{http_proxy}|| $ENV{HTTP_PROXY}|| $self->{proxy}}if (defined$self->{http_proxy}){$self->_split_proxy(http_proxy=>$self->{http_proxy});$self->{_has_proxy}{http}=1}else {delete$self->{http_proxy}}if (!exists$self->{https_proxy}){$self->{https_proxy}=$ENV{https_proxy}|| $ENV{HTTPS_PROXY}|| $self->{proxy}}if ($self->{https_proxy}){$self->_split_proxy(https_proxy=>$self->{https_proxy});$self->{_has_proxy}{https}=1}else {delete$self->{https_proxy}}unless (ref$self->{no_proxy}eq 'ARRAY'){$self->{no_proxy}=(defined$self->{no_proxy})? 
[split /\s*,\s*/,$self->{no_proxy}]: []}return}for my$sub_name (qw/get head put post delete/){my$req_method=uc$sub_name;no strict 'refs';eval <<"HERE"}sub post_form {my ($self,$url,$data,$args)=@_;(@_==3 || @_==4 && ref$args eq 'HASH')or Carp::croak(q/Usage: $http->post_form(URL, DATAREF, [HASHREF])/ ."\n");my$headers={};while (my ($key,$value)=each %{$args->{headers}|| {}}){$headers->{lc$key}=$value}delete$args->{headers};return$self->request('POST',$url,{%$args,content=>$self->www_form_urlencode($data),headers=>{%$headers,'content-type'=>'application/x-www-form-urlencoded' },})}sub mirror {my ($self,$url,$file,$args)=@_;@_==3 || (@_==4 && ref$args eq 'HASH')or Carp::croak(q/Usage: $http->mirror(URL, FILE, [HASHREF])/ ."\n");if (-e $file and my$mtime=(stat($file))[9]){$args->{headers}{'if-modified-since'}||= $self->_http_date($mtime)}my$tempfile=$file .int(rand(2**31));require Fcntl;sysopen my$fh,$tempfile,Fcntl::O_CREAT()|Fcntl::O_EXCL()|Fcntl::O_WRONLY()or Carp::croak(qq/Error: Could not create temporary file $tempfile for downloading: $!\n/);binmode$fh;$args->{data_callback}=sub {print {$fh}$_[0]};my$response=$self->request('GET',$url,$args);close$fh or Carp::croak(qq/Error: Caught error closing temporary file $tempfile: $!\n/);if ($response->{success}){rename$tempfile,$file or Carp::croak(qq/Error replacing $file with $tempfile: $!\n/);my$lm=$response->{headers}{'last-modified'};if ($lm and my$mtime=$self->_parse_http_date($lm)){utime$mtime,$mtime,$file}}$response->{success}||= $response->{status}eq '304';unlink$tempfile;return$response}my%idempotent=map {$_=>1}qw/GET HEAD PUT DELETE OPTIONS TRACE/;sub request {my ($self,$method,$url,$args)=@_;@_==3 || (@_==4 && ref$args eq 'HASH')or Carp::croak(q/Usage: $http->request(METHOD, URL, [HASHREF])/ ."\n");$args ||= {};my$response;for (0 .. 1){$response=eval {$self->_request($method,$url,$args)};last unless $@ && $idempotent{$method}&& $@ =~ m{^(?:Socket closed|Unexpected end)}}if (my$e=$@){if (ref$e eq 'HASH' && exists$e->{status}){return$e}$e="$e";$response={url=>$url,success=>q{},status=>599,reason=>'Internal Exception',content=>$e,headers=>{'content-type'=>'text/plain','content-length'=>length$e,}}}return$response}sub www_form_urlencode {my ($self,$data)=@_;(@_==2 && ref$data)or Carp::croak(q/Usage: $http->www_form_urlencode(DATAREF)/ ."\n");(ref$data eq 'HASH' || ref$data eq 'ARRAY')or Carp::croak("form data must be a hash or array reference\n");my@params=ref$data eq 'HASH' ? %$data : @$data;@params % 2==0 or Carp::croak("form data reference must have an even number of terms\n");my@terms;while(@params){my ($key,$value)=splice(@params,0,2);if (ref$value eq 'ARRAY'){unshift@params,map {$key=>$_}@$value}else {push@terms,join("=",map {$self->_uri_escape($_)}$key,$value)}}return join("&",(ref$data eq 'ARRAY')? (@terms): (sort@terms))}sub can_ssl {my ($self)=@_;my($ok,$reason)=(1,'');unless (eval {require IO::Socket::SSL;IO::Socket::SSL->VERSION(1.42)}){$ok=0;$reason .= qq/IO::Socket::SSL 1.42 must be installed for https support\n/}unless (eval {require Net::SSLeay;Net::SSLeay->VERSION(1.49)}){$ok=0;$reason .= qq/Net::SSLeay 1.49 must be installed for https support\n/}if (ref($self)&& ($self->{verify_SSL}|| $self->{SSL_options}{SSL_verify_mode})){my$handle=HTTP::Tiny::Handle->new(SSL_options=>$self->{SSL_options},verify_SSL=>$self->{verify_SSL},);unless (eval {$handle->_find_CA_file;1}){$ok=0;$reason .= "$@"}}wantarray ? 
($ok,$reason): $ok}my%DefaultPort=(http=>80,https=>443,);sub _agent {my$class=ref($_[0])|| $_[0];(my$default_agent=$class)=~ s{::}{-}g;return$default_agent ."/" .$class->VERSION}sub _request {my ($self,$method,$url,$args)=@_;my ($scheme,$host,$port,$path_query,$auth)=$self->_split_url($url);my$request={method=>$method,scheme=>$scheme,host=>$host,port=>$port,host_port=>($port==$DefaultPort{$scheme}? $host : "$host:$port"),uri=>$path_query,headers=>{},};my$handle=delete$self->{handle};if ($handle){unless ($handle->can_reuse($scheme,$host,$port)){$handle->close;undef$handle}}$handle ||= $self->_open_handle($request,$scheme,$host,$port);$self->_prepare_headers_and_cb($request,$args,$url,$auth);$handle->write_request($request);my$response;do {$response=$handle->read_response_header}until (substr($response->{status},0,1)ne '1');$self->_update_cookie_jar($url,$response)if$self->{cookie_jar};if (my@redir_args=$self->_maybe_redirect($request,$response,$args)){$handle->close;return$self->_request(@redir_args,$args)}my$known_message_length;if ($method eq 'HEAD' || $response->{status}=~ /^[23]04/){$known_message_length=1}else {my$data_cb=$self->_prepare_data_cb($response,$args);$known_message_length=$handle->read_body($data_cb,$response)}if ($self->{keep_alive}&& $known_message_length && $response->{protocol}eq 'HTTP/1.1' && ($response->{headers}{connection}|| '')ne 'close'){$self->{handle}=$handle}else {$handle->close}$response->{success}=substr($response->{status},0,1)eq '2';$response->{url}=$url;return$response}sub _open_handle {my ($self,$request,$scheme,$host,$port)=@_;my$handle=HTTP::Tiny::Handle->new(timeout=>$self->{timeout},SSL_options=>$self->{SSL_options},verify_SSL=>$self->{verify_SSL},local_address=>$self->{local_address},keep_alive=>$self->{keep_alive});if ($self->{_has_proxy}{$scheme}&&!grep {$host =~ /\Q$_\E$/}@{$self->{no_proxy}}){return$self->_proxy_connect($request,$handle)}else {return$handle->connect($scheme,$host,$port)}}sub _proxy_connect {my ($self,$request,$handle)=@_;my@proxy_vars;if ($request->{scheme}eq 'https'){Carp::croak(qq{No https_proxy defined})unless$self->{https_proxy};@proxy_vars=$self->_split_proxy(https_proxy=>$self->{https_proxy});if ($proxy_vars[0]eq 'https'){Carp::croak(qq{Can't proxy https over https: $request->{uri} via $self->{https_proxy}})}}else {Carp::croak(qq{No http_proxy defined})unless$self->{http_proxy};@proxy_vars=$self->_split_proxy(http_proxy=>$self->{http_proxy})}my ($p_scheme,$p_host,$p_port,$p_auth)=@proxy_vars;if (length$p_auth &&!defined$request->{headers}{'proxy-authorization'}){$self->_add_basic_auth_header($request,'proxy-authorization'=>$p_auth)}$handle->connect($p_scheme,$p_host,$p_port);if ($request->{scheme}eq 'https'){$self->_create_proxy_tunnel($request,$handle)}else {$request->{uri}="$request->{scheme}://$request->{host_port}$request->{uri}"}return$handle}sub _split_proxy {my ($self,$type,$proxy)=@_;my ($scheme,$host,$port,$path_query,$auth)=eval {$self->_split_url($proxy)};unless(defined($scheme)&& length($scheme)&& length($host)&& length($port)&& $path_query eq '/'){Carp::croak(qq{$type URL must be in format http[s]://[auth@]<host>:<port>/\n})}return ($scheme,$host,$port,$auth)}sub _create_proxy_tunnel {my ($self,$request,$handle)=@_;$handle->_assert_ssl;my$agent=exists($request->{headers}{'user-agent'})? 
$request->{headers}{'user-agent'}: $self->{agent};my$connect_request={method=>'CONNECT',uri=>"$request->{host}:$request->{port}",headers=>{host=>"$request->{host}:$request->{port}",'user-agent'=>$agent,}};if ($request->{headers}{'proxy-authorization'}){$connect_request->{headers}{'proxy-authorization'}=delete$request->{headers}{'proxy-authorization'}}$handle->write_request($connect_request);my$response;do {$response=$handle->read_response_header}until (substr($response->{status},0,1)ne '1');unless (substr($response->{status},0,1)eq '2'){die$response}$handle->start_ssl($request->{host});return}sub _prepare_headers_and_cb {my ($self,$request,$args,$url,$auth)=@_;for ($self->{default_headers},$args->{headers}){next unless defined;while (my ($k,$v)=each %$_){$request->{headers}{lc$k}=$v}}if (exists$request->{headers}{'host'}){die(qq/The 'Host' header must not be provided as header option\n/)}$request->{headers}{'host'}=$request->{host_port};$request->{headers}{'user-agent'}||= $self->{agent};$request->{headers}{'connection'}="close" unless$self->{keep_alive};if (defined$args->{content}){if (ref$args->{content}eq 'CODE'){$request->{headers}{'content-type'}||= "application/octet-stream";$request->{headers}{'transfer-encoding'}='chunked' unless$request->{headers}{'content-length'}|| $request->{headers}{'transfer-encoding'};$request->{cb}=$args->{content}}elsif (length$args->{content}){my$content=$args->{content};if ($] ge '5.008'){utf8::downgrade($content,1)or die(qq/Wide character in request message body\n/)}$request->{headers}{'content-type'}||= "application/octet-stream";$request->{headers}{'content-length'}=length$content unless$request->{headers}{'content-length'}|| $request->{headers}{'transfer-encoding'};$request->{cb}=sub {substr$content,0,length$content,''}}$request->{trailer_cb}=$args->{trailer_callback}if ref$args->{trailer_callback}eq 'CODE'}if ($self->{cookie_jar}){my$cookies=$self->cookie_jar->cookie_header($url);$request->{headers}{cookie}=$cookies if length$cookies}if (length$auth &&!defined$request->{headers}{authorization}){$self->_add_basic_auth_header($request,'authorization'=>$auth)}return}sub _add_basic_auth_header {my ($self,$request,$header,$auth)=@_;require MIME::Base64;$request->{headers}{$header}="Basic " .MIME::Base64::encode_base64($auth,"");return}sub _prepare_data_cb {my ($self,$response,$args)=@_;my$data_cb=$args->{data_callback};$response->{content}='';if (!$data_cb || $response->{status}!~ /^2/){if (defined$self->{max_size}){$data_cb=sub {$_[1]->{content}.= $_[0];die(qq/Size of response body exceeds the maximum allowed of $self->{max_size}\n/)if length $_[1]->{content}> $self->{max_size}}}else {$data_cb=sub {$_[1]->{content}.= $_[0]}}}return$data_cb}sub _update_cookie_jar {my ($self,$url,$response)=@_;my$cookies=$response->{headers}->{'set-cookie'};return unless defined$cookies;my@cookies=ref$cookies ? @$cookies : $cookies;$self->cookie_jar->add($url,$_)for@cookies;return}sub _validate_cookie_jar {my ($class,$jar)=@_;for my$method (qw/add cookie_header/){Carp::croak(qq/Cookie jar must provide the '$method' method\n/)unless ref($jar)&& ref($jar)->can($method)}return}sub _maybe_redirect {my ($self,$request,$response,$args)=@_;my$headers=$response->{headers};my ($status,$method)=($response->{status},$request->{method});if (($status eq '303' or ($status =~ /^30[1278]/ && $method =~ /^GET|HEAD$/))and $headers->{location}and ++$args->{redirects}<= $self->{max_redirect}){my$location=($headers->{location}=~ /^\//)? 
"$request->{scheme}://$request->{host_port}$headers->{location}" : $headers->{location};return (($status eq '303' ? 'GET' : $method),$location)}return}sub _split_url {my$url=pop;my ($scheme,$host,$path_query)=$url =~ m<\A([^:/?#]+)://([^/?#]*)([^#]*)> or die(qq/Cannot parse URL: '$url'\n/);$scheme=lc$scheme;$path_query="/$path_query" unless$path_query =~ m<\A/>;my$auth='';if ((my$i=index$host,'@')!=-1){$auth=substr$host,0,$i,'';substr$host,0,1,'';$auth =~ s/%([0-9A-Fa-f]{2})/chr(hex($1))/eg}my$port=$host =~ s/:(\d*)\z// && length $1 ? $1 : $scheme eq 'http' ? 80 : $scheme eq 'https' ? 443 : undef;return ($scheme,(length$host ? lc$host : "localhost"),$port,$path_query,$auth)}my$DoW="Sun|Mon|Tue|Wed|Thu|Fri|Sat";my$MoY="Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec";sub _http_date {my ($sec,$min,$hour,$mday,$mon,$year,$wday)=gmtime($_[1]);return sprintf("%s, %02d %s %04d %02d:%02d:%02d GMT",substr($DoW,$wday*4,3),$mday,substr($MoY,$mon*4,3),$year+1900,$hour,$min,$sec)}sub _parse_http_date {my ($self,$str)=@_;require Time::Local;my@tl_parts;if ($str =~ /^[SMTWF][a-z]+, +(\d{1,2}) ($MoY) +(\d\d\d\d) +(\d\d):(\d\d):(\d\d) +GMT$/){@tl_parts=($6,$5,$4,$1,(index($MoY,$2)/4),$3)}elsif ($str =~ /^[SMTWF][a-z]+, +(\d\d)-($MoY)-(\d{2,4}) +(\d\d):(\d\d):(\d\d) +GMT$/){@tl_parts=($6,$5,$4,$1,(index($MoY,$2)/4),$3)}elsif ($str =~ /^[SMTWF][a-z]+ +($MoY) +(\d{1,2}) +(\d\d):(\d\d):(\d\d) +(?:[^0-9]+ +)?(\d\d\d\d)$/){@tl_parts=($5,$4,$3,$2,(index($MoY,$1)/4),$6)}return eval {my$t=@tl_parts ? Time::Local::timegm(@tl_parts): -1;$t < 0 ? undef : $t}}my%escapes=map {chr($_)=>sprintf("%%%02X",$_)}0..255;$escapes{' '}="+";my$unsafe_char=qr/[^A-Za-z0-9\-\._~]/;sub _uri_escape {my ($self,$str)=@_;if ($] ge '5.008'){utf8::encode($str)}else {$str=pack("U*",unpack("C*",$str))if (length$str==do {use bytes;length$str});$str=pack("C*",unpack("C*",$str))}$str =~ s/($unsafe_char)/$escapes{$1}/ge;return$str}package HTTP::Tiny::Handle;use strict;use warnings;use Errno qw[EINTR EPIPE];use IO::Socket qw[SOCK_STREAM];my$SOCKET_CLASS=$ENV{PERL_HTTP_TINY_IPV4_ONLY}? 'IO::Socket::INET' : eval {require IO::Socket::IP;IO::Socket::IP->VERSION(0.25)}? 'IO::Socket::IP' : 'IO::Socket::INET';sub BUFSIZE () {32768}my$Printable=sub {local $_=shift;s/\r/\\r/g;s/\n/\\n/g;s/\t/\\t/g;s/([^\x20-\x7E])/sprintf('\\x%.2X', ord($1))/ge;$_};my$Token=qr/[\x21\x23-\x27\x2A\x2B\x2D\x2E\x30-\x39\x41-\x5A\x5E-\x7A\x7C\x7E]/;sub new {my ($class,%args)=@_;return bless {rbuf=>'',timeout=>60,max_line_size=>16384,max_header_lines=>64,verify_SSL=>0,SSL_options=>{},%args },$class}sub connect {@_==4 || die(q/Usage: $handle->connect(scheme, host, port)/ ."\n");my ($self,$scheme,$host,$port)=@_;if ($scheme eq 'https'){$self->_assert_ssl}elsif ($scheme ne 'http'){die(qq/Unsupported URL scheme '$scheme'\n/)}$self->{fh}=$SOCKET_CLASS->new(PeerHost=>$host,PeerPort=>$port,$self->{local_address}? 
(LocalAddr=>$self->{local_address}): (),Proto=>'tcp',Type=>SOCK_STREAM,Timeout=>$self->{timeout},KeepAlive=>!!$self->{keep_alive})or die(qq/Could not connect to '$host:$port': $@\n/);binmode($self->{fh})or die(qq/Could not binmode() socket: '$!'\n/);$self->start_ssl($host)if$scheme eq 'https';$self->{scheme}=$scheme;$self->{host}=$host;$self->{port}=$port;$self->{pid}=$$;$self->{tid}=_get_tid();return$self}sub start_ssl {my ($self,$host)=@_;if (ref($self->{fh})eq 'IO::Socket::SSL'){unless ($self->{fh}->stop_SSL){my$ssl_err=IO::Socket::SSL->errstr;die(qq/Error halting prior SSL connection: $ssl_err/)}}my$ssl_args=$self->_ssl_args($host);IO::Socket::SSL->start_SSL($self->{fh},%$ssl_args,SSL_create_ctx_callback=>sub {my$ctx=shift;Net::SSLeay::CTX_set_mode($ctx,Net::SSLeay::MODE_AUTO_RETRY())},);unless (ref($self->{fh})eq 'IO::Socket::SSL'){my$ssl_err=IO::Socket::SSL->errstr;die(qq/SSL connection failed for $host: $ssl_err\n/)}}sub close {@_==1 || die(q/Usage: $handle->close()/ ."\n");my ($self)=@_;CORE::close($self->{fh})or die(qq/Could not close socket: '$!'\n/)}sub write {@_==2 || die(q/Usage: $handle->write(buf)/ ."\n");my ($self,$buf)=@_;if ($] ge '5.008'){utf8::downgrade($buf,1)or die(qq/Wide character in write()\n/)}my$len=length$buf;my$off=0;local$SIG{PIPE}='IGNORE';while (){$self->can_write or die(qq/Timed out while waiting for socket to become ready for writing\n/);my$r=syswrite($self->{fh},$buf,$len,$off);if (defined$r){$len -= $r;$off += $r;last unless$len > 0}elsif ($!==EPIPE){die(qq/Socket closed by remote server: $!\n/)}elsif ($!!=EINTR){if ($self->{fh}->can('errstr')){my$err=$self->{fh}->errstr();die (qq/Could not write to SSL socket: '$err'\n /)}else {die(qq/Could not write to socket: '$!'\n/)}}}return$off}sub read {@_==2 || @_==3 || die(q/Usage: $handle->read(len [, allow_partial])/ ."\n");my ($self,$len,$allow_partial)=@_;my$buf='';my$got=length$self->{rbuf};if ($got){my$take=($got < $len)? 
$got : $len;$buf=substr($self->{rbuf},0,$take,'');$len -= $take}while ($len > 0){$self->can_read or die(q/Timed out while waiting for socket to become ready for reading/ ."\n");my$r=sysread($self->{fh},$buf,$len,length$buf);if (defined$r){last unless$r;$len -= $r}elsif ($!!=EINTR){if ($self->{fh}->can('errstr')){my$err=$self->{fh}->errstr();die (qq/Could not read from SSL socket: '$err'\n /)}else {die(qq/Could not read from socket: '$!'\n/)}}}if ($len &&!$allow_partial){die(qq/Unexpected end of stream\n/)}return$buf}sub readline {@_==1 || die(q/Usage: $handle->readline()/ ."\n");my ($self)=@_;while (){if ($self->{rbuf}=~ s/\A ([^\x0D\x0A]* \x0D?\x0A)//x){return $1}if (length$self->{rbuf}>= $self->{max_line_size}){die(qq/Line size exceeds the maximum allowed size of $self->{max_line_size}\n/)}$self->can_read or die(qq/Timed out while waiting for socket to become ready for reading\n/);my$r=sysread($self->{fh},$self->{rbuf},BUFSIZE,length$self->{rbuf});if (defined$r){last unless$r}elsif ($!!=EINTR){if ($self->{fh}->can('errstr')){my$err=$self->{fh}->errstr();die (qq/Could not read from SSL socket: '$err'\n /)}else {die(qq/Could not read from socket: '$!'\n/)}}}die(qq/Unexpected end of stream while looking for line\n/)}sub read_header_lines {@_==1 || @_==2 || die(q/Usage: $handle->read_header_lines([headers])/ ."\n");my ($self,$headers)=@_;$headers ||= {};my$lines=0;my$val;while (){my$line=$self->readline;if (++$lines >= $self->{max_header_lines}){die(qq/Header lines exceeds maximum number allowed of $self->{max_header_lines}\n/)}elsif ($line =~ /\A ([^\x00-\x1F\x7F:]+) : [\x09\x20]* ([^\x0D\x0A]*)/x){my ($field_name)=lc $1;if (exists$headers->{$field_name}){for ($headers->{$field_name}){$_=[$_]unless ref $_ eq "ARRAY";push @$_,$2;$val=\$_->[-1]}}else {$val=\($headers->{$field_name}=$2)}}elsif ($line =~ /\A [\x09\x20]+ ([^\x0D\x0A]*)/x){$val or die(qq/Unexpected header continuation line\n/);next unless length $1;$$val .= ' ' if length $$val;$$val .= $1}elsif ($line =~ /\A \x0D?\x0A \z/x){last}else {die(q/Malformed header line: / .$Printable->($line)."\n")}}return$headers}sub write_request {@_==2 || die(q/Usage: $handle->write_request(request)/ ."\n");my($self,$request)=@_;$self->write_request_header(@{$request}{qw/method uri headers/});$self->write_body($request)if$request->{cb};return}my%HeaderCase=('content-md5'=>'Content-MD5','etag'=>'ETag','te'=>'TE','www-authenticate'=>'WWW-Authenticate','x-xss-protection'=>'X-XSS-Protection',);sub write_header_lines {(@_==2 || @_==3 && ref $_[1]eq 'HASH')|| die(q/Usage: $handle->write_header_lines(headers[,prefix])/ ."\n");my($self,$headers,$prefix_data)=@_;my$buf=(defined$prefix_data ? $prefix_data : '');while (my ($k,$v)=each %$headers){my$field_name=lc$k;if (exists$HeaderCase{$field_name}){$field_name=$HeaderCase{$field_name}}else {$field_name =~ /\A $Token+ \z/xo or die(q/Invalid HTTP header field name: / .$Printable->($field_name)."\n");$field_name =~ s/\b(\w)/\u$1/g;$HeaderCase{lc$field_name}=$field_name}for (ref$v eq 'ARRAY' ? @$v : $v){$_='' unless defined $_;$buf .= "$field_name: $_\x0D\x0A"}}$buf .= "\x0D\x0A";return$self->write($buf)}sub read_body {@_==3 || die(q/Usage: $handle->read_body(callback, response)/ ."\n");my ($self,$cb,$response)=@_;my$te=$response->{headers}{'transfer-encoding'}|| '';my$chunked=grep {/chunked/i}(ref$te eq 'ARRAY' ? @$te : $te);return$chunked ? 
$self->read_chunked_body($cb,$response): $self->read_content_body($cb,$response)}sub write_body {@_==2 || die(q/Usage: $handle->write_body(request)/ ."\n");my ($self,$request)=@_;if ($request->{headers}{'content-length'}){return$self->write_content_body($request)}else {return$self->write_chunked_body($request)}}sub read_content_body {@_==3 || @_==4 || die(q/Usage: $handle->read_content_body(callback, response, [read_length])/ ."\n");my ($self,$cb,$response,$content_length)=@_;$content_length ||= $response->{headers}{'content-length'};if (defined$content_length){my$len=$content_length;while ($len > 0){my$read=($len > BUFSIZE)? BUFSIZE : $len;$cb->($self->read($read,0),$response);$len -= $read}return length($self->{rbuf})==0}my$chunk;$cb->($chunk,$response)while length($chunk=$self->read(BUFSIZE,1));return}sub write_content_body {@_==2 || die(q/Usage: $handle->write_content_body(request)/ ."\n");my ($self,$request)=@_;my ($len,$content_length)=(0,$request->{headers}{'content-length'});while (){my$data=$request->{cb}->();defined$data && length$data or last;if ($] ge '5.008'){utf8::downgrade($data,1)or die(qq/Wide character in write_content()\n/)}$len += $self->write($data)}$len==$content_length or die(qq/Content-Length mismatch (got: $len expected: $content_length)\n/);return$len}sub read_chunked_body {@_==3 || die(q/Usage: $handle->read_chunked_body(callback, $response)/ ."\n");my ($self,$cb,$response)=@_;while (){my$head=$self->readline;$head =~ /\A ([A-Fa-f0-9]+)/x or die(q/Malformed chunk head: / .$Printable->($head)."\n");my$len=hex($1)or last;$self->read_content_body($cb,$response,$len);$self->read(2)eq "\x0D\x0A" or die(qq/Malformed chunk: missing CRLF after chunk data\n/)}$self->read_header_lines($response->{headers});return 1}sub write_chunked_body {@_==2 || die(q/Usage: $handle->write_chunked_body(request)/ ."\n");my ($self,$request)=@_;my$len=0;while (){my$data=$request->{cb}->();defined$data && length$data or last;if ($] ge '5.008'){utf8::downgrade($data,1)or die(qq/Wide character in write_chunked_body()\n/)}$len += length$data;my$chunk=sprintf '%X',length$data;$chunk .= "\x0D\x0A";$chunk .= $data;$chunk .= "\x0D\x0A";$self->write($chunk)}$self->write("0\x0D\x0A");$self->write_header_lines($request->{trailer_cb}->())if ref$request->{trailer_cb}eq 'CODE';return$len}sub read_response_header {@_==1 || die(q/Usage: $handle->read_response_header()/ ."\n");my ($self)=@_;my$line=$self->readline;$line =~ /\A (HTTP\/(0*\d+\.0*\d+)) [\x09\x20]+ ([0-9]{3}) [\x09\x20]+ ([^\x0D\x0A]*) \x0D?\x0A/x or die(q/Malformed Status-Line: / .$Printable->($line)."\n");my ($protocol,$version,$status,$reason)=($1,$2,$3,$4);die (qq/Unsupported HTTP protocol: $protocol\n/)unless$version =~ /0*1\.0*[01]/;return {status=>$status,reason=>$reason,headers=>$self->read_header_lines,protocol=>$protocol,}}sub write_request_header {@_==4 || die(q/Usage: $handle->write_request_header(method, request_uri, headers)/ ."\n");my ($self,$method,$request_uri,$headers)=@_;return$self->write_header_lines($headers,"$method $request_uri HTTP/1.1\x0D\x0A")}sub _do_timeout {my ($self,$type,$timeout)=@_;$timeout=$self->{timeout}unless defined$timeout && $timeout >= 0;my$fd=fileno$self->{fh};defined$fd && $fd >= 0 or die(qq/select(2): 'Bad file descriptor'\n/);my$initial=time;my$pending=$timeout;my$nfound;vec(my$fdset='',$fd,1)=1;while (){$nfound=($type eq 'read')? 
select($fdset,undef,undef,$pending): select(undef,$fdset,undef,$pending);if ($nfound==-1){$!==EINTR or die(qq/select(2): '$!'\n/);redo if!$timeout || ($pending=$timeout - (time - $initial))> 0;$nfound=0}last}$!=0;return$nfound}sub can_read {@_==1 || @_==2 || die(q/Usage: $handle->can_read([timeout])/ ."\n");my$self=shift;if (ref($self->{fh})eq 'IO::Socket::SSL'){return 1 if$self->{fh}->pending}return$self->_do_timeout('read',@_)}sub can_write {@_==1 || @_==2 || die(q/Usage: $handle->can_write([timeout])/ ."\n");my$self=shift;return$self->_do_timeout('write',@_)}sub _assert_ssl {my($ok,$reason)=HTTP::Tiny->can_ssl();die$reason unless$ok}sub can_reuse {my ($self,$scheme,$host,$port)=@_;return 0 if $self->{pid}!=$$ || $self->{tid}!=_get_tid()|| length($self->{rbuf})|| $scheme ne $self->{scheme}|| $host ne $self->{host}|| $port ne $self->{port}|| eval {$self->can_read(0)}|| $@ ;return 1}sub _find_CA_file {my$self=shift();if ($self->{SSL_options}->{SSL_ca_file}){unless (-r $self->{SSL_options}->{SSL_ca_file}){die qq/SSL_ca_file '$self->{SSL_options}->{SSL_ca_file}' not found or not readable\n/}return$self->{SSL_options}->{SSL_ca_file}}return Mozilla::CA::SSL_ca_file()if eval {require Mozilla::CA;1};for my$ca_bundle ("/etc/ssl/certs/ca-certificates.crt","/etc/pki/tls/certs/ca-bundle.crt","/etc/ssl/ca-bundle.pem","/etc/openssl/certs/ca-certificates.crt","/etc/ssl/cert.pem","/usr/local/share/certs/ca-root-nss.crt","/etc/pki/tls/cacert.pem","/etc/certs/ca-certificates.crt",){return$ca_bundle if -e $ca_bundle}die qq/Couldn't find a CA bundle with which to verify the SSL certificate.\n/ .qq/Try installing Mozilla::CA from CPAN\n/}sub _get_tid {no warnings 'reserved';return threads->can("tid")? threads->tid : 0}sub _ssl_args {my ($self,$host)=@_;my%ssl_args;if (Net::SSLeay::OPENSSL_VERSION_NUMBER()>= 0x01000000){$ssl_args{SSL_hostname}=$host,}if ($self->{verify_SSL}){$ssl_args{SSL_verifycn_scheme}='http';$ssl_args{SSL_verifycn_name}=$host;$ssl_args{SSL_verify_mode}=0x01;$ssl_args{SSL_ca_file}=$self->_find_CA_file}else {$ssl_args{SSL_verifycn_scheme}='none';$ssl_args{SSL_verify_mode}=0x00}for my$k (keys %{$self->{SSL_options}}){$ssl_args{$k}=$self->{SSL_options}{$k}if$k =~ m/^SSL_/}return \%ssl_args}1; - sub $sub_name { - my (\$self, \$url, \$args) = \@_; - \@_ == 2 || (\@_ == 3 && ref \$args eq 'HASH') - or Carp::croak(q/Usage: \$http->$sub_name(URL, [HASHREF])/ . "\n"); - return \$self->request('$req_method', \$url, \$args || {}); - } - HERE -HTTP_TINY - -$fatpacked{"JSON/PP.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'JSON_PP'; - package JSON::PP;use 5.005;use strict;use base qw(Exporter);use overload ();use Carp ();use B ();$JSON::PP::VERSION='2.27300';@JSON::PP::EXPORT=qw(encode_json decode_json from_json to_json);use constant P_ASCII=>0;use constant P_LATIN1=>1;use constant P_UTF8=>2;use constant P_INDENT=>3;use constant P_CANONICAL=>4;use constant P_SPACE_BEFORE=>5;use constant P_SPACE_AFTER=>6;use constant P_ALLOW_NONREF=>7;use constant P_SHRINK=>8;use constant P_ALLOW_BLESSED=>9;use constant P_CONVERT_BLESSED=>10;use constant P_RELAXED=>11;use constant P_LOOSE=>12;use constant P_ALLOW_BIGNUM=>13;use constant P_ALLOW_BAREKEY=>14;use constant P_ALLOW_SINGLEQUOTE=>15;use constant P_ESCAPE_SLASH=>16;use constant P_AS_NONBLESSED=>17;use constant P_ALLOW_UNKNOWN=>18;use constant OLD_PERL=>$] < 5.008 ? 
1 : 0;BEGIN {my@xs_compati_bit_properties=qw(latin1 ascii utf8 indent canonical space_before space_after allow_nonref shrink allow_blessed convert_blessed relaxed allow_unknown);my@pp_bit_properties=qw(allow_singlequote allow_bignum loose allow_barekey escape_slash as_nonblessed);if ($] < 5.008){my$helper=$] >= 5.006 ? 'JSON::PP::Compat5006' : 'JSON::PP::Compat5005';eval qq| require $helper |;if ($@){Carp::croak $@}}for my$name (@xs_compati_bit_properties,@pp_bit_properties){my$flag_name='P_' .uc($name);eval qq/ - sub $name { - my \$enable = defined \$_[1] ? \$_[1] : 1; - - if (\$enable) { - \$_[0]->{PROPS}->[$flag_name] = 1; - } - else { - \$_[0]->{PROPS}->[$flag_name] = 0; - } - - \$_[0]; - } - - sub get_$name { - \$_[0]->{PROPS}->[$flag_name] ? 1 : ''; - } - /}}my%encode_allow_method =map {($_=>1)}qw/utf8 pretty allow_nonref latin1 self_encode escape_slash allow_blessed convert_blessed indent indent_length allow_bignum as_nonblessed/;my%decode_allow_method =map {($_=>1)}qw/utf8 allow_nonref loose allow_singlequote allow_bignum allow_barekey max_size relaxed/;my$JSON;sub encode_json ($) {($JSON ||= __PACKAGE__->new->utf8)->encode(@_)}sub decode_json {($JSON ||= __PACKAGE__->new->utf8)->decode(@_)}sub to_json($) {Carp::croak ("JSON::PP::to_json has been renamed to encode_json.")}sub from_json($) {Carp::croak ("JSON::PP::from_json has been renamed to decode_json.")}sub new {my$class=shift;my$self={max_depth=>512,max_size=>0,indent=>0,FLAGS=>0,fallback=>sub {encode_error('Invalid value. JSON can only reference.')},indent_length=>3,};bless$self,$class}sub encode {return $_[0]->PP_encode_json($_[1])}sub decode {return $_[0]->PP_decode_json($_[1],0x00000000)}sub decode_prefix {return $_[0]->PP_decode_json($_[1],0x00000001)}sub pretty {my ($self,$v)=@_;my$enable=defined$v ? $v : 1;if ($enable){$self->indent(1)->indent_length(3)->space_before(1)->space_after(1)}else {$self->indent(0)->space_before(0)->space_after(0)}$self}sub max_depth {my$max=defined $_[1]? $_[1]: 0x80000000;$_[0]->{max_depth}=$max;$_[0]}sub get_max_depth {$_[0]->{max_depth}}sub max_size {my$max=defined $_[1]? $_[1]: 0;$_[0]->{max_size}=$max;$_[0]}sub get_max_size {$_[0]->{max_size}}sub filter_json_object {$_[0]->{cb_object}=defined $_[1]? $_[1]: 0;$_[0]->{F_HOOK}=($_[0]->{cb_object}or $_[0]->{cb_sk_object})? 1 : 0;$_[0]}sub filter_json_single_key_object {if (@_ > 1){$_[0]->{cb_sk_object}->{$_[1]}=$_[2]}$_[0]->{F_HOOK}=($_[0]->{cb_object}or $_[0]->{cb_sk_object})? 1 : 0;$_[0]}sub indent_length {if (!defined $_[1]or $_[1]> 15 or $_[1]< 0){Carp::carp "The acceptable range of indent_length() is 0 to 15."}else {$_[0]->{indent_length}=$_[1]}$_[0]}sub get_indent_length {$_[0]->{indent_length}}sub sort_by {$_[0]->{sort_by}=defined $_[1]? $_[1]: 1;$_[0]}sub allow_bigint {Carp::carp("allow_bigint() is obsoleted. use allow_bignum() insted.")}{my$max_depth;my$indent;my$ascii;my$latin1;my$utf8;my$space_before;my$space_after;my$canonical;my$allow_blessed;my$convert_blessed;my$indent_length;my$escape_slash;my$bignum;my$as_nonblessed;my$depth;my$indent_count;my$keysort;sub PP_encode_json {my$self=shift;my$obj=shift;$indent_count=0;$depth=0;my$idx=$self->{PROPS};($ascii,$latin1,$utf8,$indent,$canonical,$space_before,$space_after,$allow_blessed,$convert_blessed,$escape_slash,$bignum,$as_nonblessed)=@{$idx}[P_ASCII .. P_SPACE_AFTER,P_ALLOW_BLESSED,P_CONVERT_BLESSED,P_ESCAPE_SLASH,P_ALLOW_BIGNUM,P_AS_NONBLESSED];($max_depth,$indent_length)=@{$self}{qw/max_depth indent_length/};$keysort=$canonical ? 
sub {$a cmp $b}: undef;if ($self->{sort_by}){$keysort=ref($self->{sort_by})eq 'CODE' ? $self->{sort_by}: $self->{sort_by}=~ /\D+/ ? $self->{sort_by}: sub {$a cmp $b}}encode_error("hash- or arrayref expected (not a simple scalar, use allow_nonref to allow this)")if(!ref$obj and!$idx->[P_ALLOW_NONREF ]);my$str=$self->object_to_json($obj);$str .= "\n" if ($indent);unless ($ascii or $latin1 or $utf8){utf8::upgrade($str)}if ($idx->[P_SHRINK ]){utf8::downgrade($str,1)}return$str}sub object_to_json {my ($self,$obj)=@_;my$type=ref($obj);if($type eq 'HASH'){return$self->hash_to_json($obj)}elsif($type eq 'ARRAY'){return$self->array_to_json($obj)}elsif ($type){if (blessed($obj)){return$self->value_to_json($obj)if ($obj->isa('JSON::PP::Boolean'));if ($convert_blessed and $obj->can('TO_JSON')){my$result=$obj->TO_JSON();if (defined$result and ref($result)){if (refaddr($obj)eq refaddr($result)){encode_error(sprintf("%s::TO_JSON method returned same object as was passed instead of a new one",ref$obj))}}return$self->object_to_json($result)}return "$obj" if ($bignum and _is_bignum($obj));return$self->blessed_to_json($obj)if ($allow_blessed and $as_nonblessed);encode_error(sprintf("encountered object '%s', but neither allow_blessed " ."nor convert_blessed settings are enabled",$obj))unless ($allow_blessed);return 'null'}else {return$self->value_to_json($obj)}}else{return$self->value_to_json($obj)}}sub hash_to_json {my ($self,$obj)=@_;my@res;encode_error("json text or perl structure exceeds maximum nesting level (max_depth set too low?)")if (++$depth > $max_depth);my ($pre,$post)=$indent ? $self->_up_indent(): ('','');my$del=($space_before ? ' ' : '').':' .($space_after ? ' ' : '');for my$k (_sort($obj)){if (OLD_PERL){utf8::decode($k)}push@res,string_to_json($self,$k).$del .($self->object_to_json($obj->{$k})|| $self->value_to_json($obj->{$k}))}--$depth;$self->_down_indent()if ($indent);return '{' .(@res ? $pre : '').(@res ? join(",$pre",@res).$post : '').'}'}sub array_to_json {my ($self,$obj)=@_;my@res;encode_error("json text or perl structure exceeds maximum nesting level (max_depth set too low?)")if (++$depth > $max_depth);my ($pre,$post)=$indent ? $self->_up_indent(): ('','');for my$v (@$obj){push@res,$self->object_to_json($v)|| $self->value_to_json($v)}--$depth;$self->_down_indent()if ($indent);return '[' .(@res ? $pre : '').(@res ? join(",$pre",@res).$post : '').']'}sub value_to_json {my ($self,$value)=@_;return 'null' if(!defined$value);my$b_obj=B::svref_2object(\$value);my$flags=$b_obj->FLAGS;return$value if$flags & (B::SVp_IOK | B::SVp_NOK)and!($flags & B::SVp_POK);my$type=ref($value);if(!$type){return string_to_json($self,$value)}elsif(blessed($value)and $value->isa('JSON::PP::Boolean')){return $$value==1 ? 'true' : 'false'}elsif ($type){if ((overload::StrVal($value)=~ /=(\w+)/)[0]){return$self->value_to_json("$value")}if ($type eq 'SCALAR' and defined $$value){return $$value eq '1' ? 'true' : $$value eq '0' ? 'false' : $self->{PROPS}->[P_ALLOW_UNKNOWN ]? 
'null' : encode_error("cannot encode reference to scalar")}if ($self->{PROPS}->[P_ALLOW_UNKNOWN ]){return 'null'}else {if ($type eq 'SCALAR' or $type eq 'REF'){encode_error("cannot encode reference to scalar")}else {encode_error("encountered $value, but JSON can only represent references to arrays or hashes")}}}else {return$self->{fallback}->($value)if ($self->{fallback}and ref($self->{fallback})eq 'CODE');return 'null'}}my%esc=("\n"=>'\n',"\r"=>'\r',"\t"=>'\t',"\f"=>'\f',"\b"=>'\b',"\""=>'\"',"\\"=>'\\\\',"\'"=>'\\\'',);sub string_to_json {my ($self,$arg)=@_;$arg =~ s/([\x22\x5c\n\r\t\f\b])/$esc{$1}/g;$arg =~ s/\//\\\//g if ($escape_slash);$arg =~ s/([\x00-\x08\x0b\x0e-\x1f])/'\\u00' . unpack('H2', $1)/eg;if ($ascii){$arg=JSON_PP_encode_ascii($arg)}if ($latin1){$arg=JSON_PP_encode_latin1($arg)}if ($utf8){utf8::encode($arg)}return '"' .$arg .'"'}sub blessed_to_json {my$reftype=reftype($_[1])|| '';if ($reftype eq 'HASH'){return $_[0]->hash_to_json($_[1])}elsif ($reftype eq 'ARRAY'){return $_[0]->array_to_json($_[1])}else {return 'null'}}sub encode_error {my$error=shift;Carp::croak "$error"}sub _sort {defined$keysort ? (sort$keysort (keys %{$_[0]})): keys %{$_[0]}}sub _up_indent {my$self=shift;my$space=' ' x $indent_length;my ($pre,$post)=('','');$post="\n" .$space x $indent_count;$indent_count++;$pre="\n" .$space x $indent_count;return ($pre,$post)}sub _down_indent {$indent_count--}sub PP_encode_box {{depth=>$depth,indent_count=>$indent_count,}}}sub _encode_ascii {join('',map {$_ <= 127 ? chr($_): $_ <= 65535 ? sprintf('\u%04x',$_): sprintf('\u%x\u%x',_encode_surrogates($_))}unpack('U*',$_[0]))}sub _encode_latin1 {join('',map {$_ <= 255 ? chr($_): $_ <= 65535 ? sprintf('\u%04x',$_): sprintf('\u%x\u%x',_encode_surrogates($_))}unpack('U*',$_[0]))}sub _encode_surrogates {my$uni=$_[0]- 0x10000;return ($uni / 0x400 + 0xD800,$uni % 0x400 + 0xDC00)}sub _is_bignum {$_[0]->isa('Math::BigInt')or $_[0]->isa('Math::BigFloat')}my$max_intsize;BEGIN {my$checkint=1111;for my$d (5..64){$checkint .= 1;my$int=eval qq| $checkint |;if ($int =~ /[eE]/){$max_intsize=$d - 1;last}}}{my%escapes=(b=>"\x8",t=>"\x9",n=>"\xA",f=>"\xC",r=>"\xD",'\\'=>'\\','"'=>'"','/'=>'/',);my$text;my$at;my$ch;my$len;my$depth;my$encoding;my$is_valid_utf8;my$utf8_len;my$utf8;my$max_depth;my$max_size;my$relaxed;my$cb_object;my$cb_sk_object;my$F_HOOK;my$allow_bigint;my$singlequote;my$loose;my$allow_barekey;sub PP_decode_json {my ($self,$opt);($self,$text,$opt)=@_;($at,$ch,$depth)=(0,'',0);if (!defined$text or ref$text){decode_error("malformed JSON string, neither array, object, number, string or atom")}my$idx=$self->{PROPS};($utf8,$relaxed,$loose,$allow_bigint,$allow_barekey,$singlequote)=@{$idx}[P_UTF8,P_RELAXED,P_LOOSE .. P_ALLOW_SINGLEQUOTE];if ($utf8){utf8::downgrade($text,1)or Carp::croak("Wide character in subroutine entry")}else {utf8::upgrade($text);utf8::encode($text)}$len=length$text;($max_depth,$max_size,$cb_object,$cb_sk_object,$F_HOOK)=@{$self}{qw/max_depth max_size cb_object cb_sk_object F_HOOK/};if ($max_size > 1){use bytes;my$bytes=length$text;decode_error(sprintf("attempted decode of JSON text of %s bytes size, but max_size is set to %s" ,$bytes,$max_size),1)if ($bytes > $max_size)}my@octets=unpack('C4',$text);$encoding=($octets[0]and $octets[1])? 'UTF-8' : (!$octets[0]and $octets[1])? 'UTF-16BE' : (!$octets[0]and!$octets[1])? 'UTF-32BE' : ($octets[2])? 'UTF-16LE' : (!$octets[2])? 
'UTF-32LE' : 'unknown';white();my$valid_start=defined$ch;my$result=value();return undef if (!$result && ($opt & 0x10000000));decode_error("malformed JSON string, neither array, object, number, string or atom")unless$valid_start;if (!$idx->[P_ALLOW_NONREF ]and!ref$result){decode_error('JSON text must be an object or array (but found number, string, true, false or null,' .' use allow_nonref to allow this)',1)}Carp::croak('something wrong.')if$len < $at;my$consumed=defined$ch ? $at - 1 : $at;white();if ($ch){return ($result,$consumed)if ($opt & 0x00000001);decode_error("garbage after JSON object")}($opt & 0x00000001)? ($result,$consumed): $result}sub next_chr {return$ch=undef if($at >= $len);$ch=substr($text,$at++,1)}sub value {white();return if(!defined$ch);return object()if($ch eq '{');return array()if($ch eq '[');return string()if($ch eq '"' or ($singlequote and $ch eq "'"));return number()if($ch =~ /[0-9]/ or $ch eq '-');return word()}sub string {my ($i,$s,$t,$u);my$utf16;my$is_utf8;($is_valid_utf8,$utf8_len)=('',0);$s='';if($ch eq '"' or ($singlequote and $ch eq "'")){my$boundChar=$ch;OUTER: while(defined(next_chr())){if($ch eq $boundChar){next_chr();if ($utf16){decode_error("missing low surrogate character in surrogate pair")}utf8::decode($s)if($is_utf8);return$s}elsif($ch eq '\\'){next_chr();if(exists$escapes{$ch}){$s .= $escapes{$ch}}elsif($ch eq 'u'){my$u='';for(1..4){$ch=next_chr();last OUTER if($ch !~ /[0-9a-fA-F]/);$u .= $ch}if ($u =~ /^[dD][89abAB][0-9a-fA-F]{2}/){$utf16=$u}elsif ($u =~ /^[dD][c-fC-F][0-9a-fA-F]{2}/){unless (defined$utf16){decode_error("missing high surrogate character in surrogate pair")}$is_utf8=1;$s .= JSON_PP_decode_surrogates($utf16,$u)|| next;$utf16=undef}else {if (defined$utf16){decode_error("surrogate pair expected")}if ((my$hex=hex($u))> 127){$is_utf8=1;$s .= JSON_PP_decode_unicode($u)|| next}else {$s .= chr$hex}}}else{unless ($loose){$at -= 2;decode_error('illegal backslash escape sequence in string')}$s .= $ch}}else{if (ord$ch > 127){unless($ch=is_valid_utf8($ch)){$at -= 1;decode_error("malformed UTF-8 character in JSON string")}else {$at += $utf8_len - 1}$is_utf8=1}if (!$loose){if ($ch =~ /[\x00-\x1f\x22\x5c]/){$at--;decode_error('invalid character encountered while parsing JSON string')}}$s .= $ch}}}decode_error("unexpected end of string while parsing JSON string")}sub white {while(defined$ch){if($ch le ' '){next_chr()}elsif($ch eq '/'){next_chr();if(defined$ch and $ch eq '/'){1 while(defined(next_chr())and $ch ne "\n" and $ch ne "\r")}elsif(defined$ch and $ch eq '*'){next_chr();while(1){if(defined$ch){if($ch eq '*'){if(defined(next_chr())and $ch eq '/'){next_chr();last}}else{next_chr()}}else{decode_error("Unterminated comment")}}next}else{$at--;decode_error("malformed JSON string, neither array, object, number, string or atom")}}else{if ($relaxed and $ch eq '#'){pos($text)=$at;$text =~ /\G([^\n]*(?:\r\n|\r|\n|$))/g;$at=pos($text);next_chr;next}last}}}sub array {my$a=$_[0]|| [];decode_error('json text or perl structure exceeds maximum nesting level (max_depth set too low?)')if (++$depth > $max_depth);next_chr();white();if(defined$ch and $ch eq ']'){--$depth;next_chr();return$a}else {while(defined($ch)){push @$a,value();white();if (!defined$ch){last}if($ch eq ']'){--$depth;next_chr();return$a}if($ch ne ','){last}next_chr();white();if ($relaxed and $ch eq ']'){--$depth;next_chr();return$a}}}decode_error(", or ] expected while parsing array")}sub object {my$o=$_[0]|| {};my$k;decode_error('json text or perl structure exceeds maximum nesting level 
(max_depth set too low?)')if (++$depth > $max_depth);next_chr();white();if(defined$ch and $ch eq '}'){--$depth;next_chr();if ($F_HOOK){return _json_object_hook($o)}return$o}else {while (defined$ch){$k=($allow_barekey and $ch ne '"' and $ch ne "'")? bareKey(): string();white();if(!defined$ch or $ch ne ':'){$at--;decode_error("':' expected")}next_chr();$o->{$k}=value();white();last if (!defined$ch);if($ch eq '}'){--$depth;next_chr();if ($F_HOOK){return _json_object_hook($o)}return$o}if($ch ne ','){last}next_chr();white();if ($relaxed and $ch eq '}'){--$depth;next_chr();if ($F_HOOK){return _json_object_hook($o)}return$o}}}$at--;decode_error(", or } expected while parsing object/hash")}sub bareKey {my$key;while($ch =~ /[^\x00-\x23\x25-\x2F\x3A-\x40\x5B-\x5E\x60\x7B-\x7F]/){$key .= $ch;next_chr()}return$key}sub word {my$word=substr($text,$at-1,4);if($word eq 'true'){$at += 3;next_chr;return$JSON::PP::true}elsif($word eq 'null'){$at += 3;next_chr;return undef}elsif($word eq 'fals'){$at += 3;if(substr($text,$at,1)eq 'e'){$at++;next_chr;return$JSON::PP::false}}$at--;decode_error("'null' expected")if ($word =~ /^n/);decode_error("'true' expected")if ($word =~ /^t/);decode_error("'false' expected")if ($word =~ /^f/);decode_error("malformed JSON string, neither array, object, number, string or atom")}sub number {my$n='';my$v;if($ch eq '0'){my$peek=substr($text,$at,1);my$hex=$peek =~ /[xX]/;if($hex){decode_error("malformed number (leading zero must not be followed by another digit)");($n)=(substr($text,$at+1)=~ /^([0-9a-fA-F]+)/)}else{($n)=(substr($text,$at)=~ /^([0-7]+)/);if (defined$n and length$n > 1){decode_error("malformed number (leading zero must not be followed by another digit)")}}if(defined$n and length($n)){if (!$hex and length($n)==1){decode_error("malformed number (leading zero must not be followed by another digit)")}$at += length($n)+ $hex;next_chr;return$hex ? hex($n): oct($n)}}if($ch eq '-'){$n='-';next_chr;if (!defined$ch or $ch !~ /\d/){decode_error("malformed number (no digits after initial minus)")}}while(defined$ch and $ch =~ /\d/){$n .= $ch;next_chr}if(defined$ch and $ch eq '.'){$n .= '.';next_chr;if (!defined$ch or $ch !~ /\d/){decode_error("malformed number (no digits after decimal point)")}else {$n .= $ch}while(defined(next_chr)and $ch =~ /\d/){$n .= $ch}}if(defined$ch and ($ch eq 'e' or $ch eq 'E')){$n .= $ch;next_chr;if(defined($ch)and ($ch eq '+' or $ch eq '-')){$n .= $ch;next_chr;if (!defined$ch or $ch =~ /\D/){decode_error("malformed number (no digits after exp sign)")}$n .= $ch}elsif(defined($ch)and $ch =~ /\d/){$n .= $ch}else {decode_error("malformed number (no digits after exp sign)")}while(defined(next_chr)and $ch =~ /\d/){$n .= $ch}}$v .= $n;if ($v !~ /[.eE]/ and length$v > $max_intsize){if ($allow_bigint){require Math::BigInt;return Math::BigInt->new($v)}else {return "$v"}}elsif ($allow_bigint){require Math::BigFloat;return Math::BigFloat->new($v)}return 0+$v}sub is_valid_utf8 {$utf8_len=$_[0]=~ /[\x00-\x7F]/ ? 1 : $_[0]=~ /[\xC2-\xDF]/ ? 2 : $_[0]=~ /[\xE0-\xEF]/ ? 3 : $_[0]=~ /[\xF0-\xF4]/ ? 4 : 0 ;return unless$utf8_len;my$is_valid_utf8=substr($text,$at - 1,$utf8_len);return ($is_valid_utf8 =~ /^(?: - [\x00-\x7F] - |[\xC2-\xDF][\x80-\xBF] - |[\xE0][\xA0-\xBF][\x80-\xBF] - |[\xE1-\xEC][\x80-\xBF][\x80-\xBF] - |[\xED][\x80-\x9F][\x80-\xBF] - |[\xEE-\xEF][\x80-\xBF][\x80-\xBF] - |[\xF0][\x90-\xBF][\x80-\xBF][\x80-\xBF] - |[\xF1-\xF3][\x80-\xBF][\x80-\xBF][\x80-\xBF] - |[\xF4][\x80-\x8F][\x80-\xBF][\x80-\xBF] - )$/x)? 
$is_valid_utf8 : ''}sub decode_error {my$error=shift;my$no_rep=shift;my$str=defined$text ? substr($text,$at): '';my$mess='';my$type=$] >= 5.008 ? 'U*' : $] < 5.006 ? 'C*' : utf8::is_utf8($str)? 'U*' : 'C*' ;for my$c (unpack($type,$str)){$mess .= $c==0x07 ? '\a' : $c==0x09 ? '\t' : $c==0x0a ? '\n' : $c==0x0d ? '\r' : $c==0x0c ? '\f' : $c < 0x20 ? sprintf('\x{%x}',$c): $c==0x5c ? '\\\\' : $c < 0x80 ? chr($c): sprintf('\x{%x}',$c);if (length$mess >= 20){$mess .= '...';last}}unless (length$mess){$mess='(end of string)'}Carp::croak ($no_rep ? "$error" : "$error, at character offset $at (before \"$mess\")")}sub _json_object_hook {my$o=$_[0];my@ks=keys %{$o};if ($cb_sk_object and @ks==1 and exists$cb_sk_object->{$ks[0]}and ref$cb_sk_object->{$ks[0]}){my@val=$cb_sk_object->{$ks[0]}->($o->{$ks[0]});if (@val==1){return$val[0]}}my@val=$cb_object->($o)if ($cb_object);if (@val==0 or @val > 1){return$o}else {return$val[0]}}sub PP_decode_box {{text=>$text,at=>$at,ch=>$ch,len=>$len,depth=>$depth,encoding=>$encoding,is_valid_utf8=>$is_valid_utf8,}}}sub _decode_surrogates {my$uni=0x10000 + (hex($_[0])- 0xD800)* 0x400 + (hex($_[1])- 0xDC00);my$un=pack('U*',$uni);utf8::encode($un);return$un}sub _decode_unicode {my$un=pack('U',hex shift);utf8::encode($un);return$un}BEGIN {unless (defined&utf8::is_utf8){require Encode;*utf8::is_utf8=*Encode::is_utf8}if ($] >= 5.008){*JSON::PP::JSON_PP_encode_ascii=\&_encode_ascii;*JSON::PP::JSON_PP_encode_latin1=\&_encode_latin1;*JSON::PP::JSON_PP_decode_surrogates=\&_decode_surrogates;*JSON::PP::JSON_PP_decode_unicode=\&_decode_unicode}if ($] >= 5.008 and $] < 5.008003){package JSON::PP;require subs;subs->import('join');eval q| - sub join { - return '' if (@_ < 2); - my $j = shift; - my $str = shift; - for (@_) { $str .= $j . $_; } - return $str; - } - |}sub JSON::PP::incr_parse {local$Carp::CarpLevel=1;($_[0]->{_incr_parser}||= JSON::PP::IncrParser->new)->incr_parse(@_)}sub JSON::PP::incr_skip {($_[0]->{_incr_parser}||= JSON::PP::IncrParser->new)->incr_skip}sub JSON::PP::incr_reset {($_[0]->{_incr_parser}||= JSON::PP::IncrParser->new)->incr_reset}eval q{ - sub JSON::PP::incr_text : lvalue { - $_[0]->{_incr_parser} ||= JSON::PP::IncrParser->new; - - if ( $_[0]->{_incr_parser}->{incr_parsing} ) { - Carp::croak("incr_text can not be called when the incremental parser already started parsing"); - } - $_[0]->{_incr_parser}->{incr_text}; - } - } if ($] >= 5.006)}BEGIN {eval 'require Scalar::Util';unless($@){*JSON::PP::blessed=\&Scalar::Util::blessed;*JSON::PP::reftype=\&Scalar::Util::reftype;*JSON::PP::refaddr=\&Scalar::Util::refaddr}else{eval 'sub UNIVERSAL::a_sub_not_likely_to_be_here { ref($_[0]) }';*JSON::PP::blessed=sub {local($@,$SIG{__DIE__},$SIG{__WARN__});ref($_[0])? eval {$_[0]->a_sub_not_likely_to_be_here}: undef};my%tmap=qw(B::NULL SCALAR B::HV HASH B::AV ARRAY B::CV CODE B::IO IO B::GV GLOB B::REGEXP REGEXP);*JSON::PP::reftype=sub {my$r=shift;return undef unless length(ref($r));my$t=ref(B::svref_2object($r));return exists$tmap{$t}? $tmap{$t}: length(ref($$r))? 
'REF' : 'SCALAR'};*JSON::PP::refaddr=sub {return undef unless length(ref($_[0]));my$addr;if(defined(my$pkg=blessed($_[0]))){$addr .= bless $_[0],'Scalar::Util::Fake';bless $_[0],$pkg}else {$addr .= $_[0]}$addr =~ /0x(\w+)/;local $^W;hex($1)}}}$JSON::PP::true=do {bless \(my$dummy=1),"JSON::PP::Boolean"};$JSON::PP::false=do {bless \(my$dummy=0),"JSON::PP::Boolean"};sub is_bool {defined $_[0]and UNIVERSAL::isa($_[0],"JSON::PP::Boolean")}sub true {$JSON::PP::true}sub false {$JSON::PP::false}sub null {undef}package JSON::PP::Boolean;use overload ("0+"=>sub {${$_[0]}},"++"=>sub {$_[0]=${$_[0]}+ 1},"--"=>sub {$_[0]=${$_[0]}- 1},fallback=>1,);package JSON::PP::IncrParser;use strict;use constant INCR_M_WS=>0;use constant INCR_M_STR=>1;use constant INCR_M_BS=>2;use constant INCR_M_JSON=>3;use constant INCR_M_C0=>4;use constant INCR_M_C1=>5;$JSON::PP::IncrParser::VERSION='1.01';my$unpack_format=$] < 5.006 ? 'C*' : 'U*';sub new {my ($class)=@_;bless {incr_nest=>0,incr_text=>undef,incr_parsing=>0,incr_p=>0,},$class}sub incr_parse {my ($self,$coder,$text)=@_;$self->{incr_text}='' unless (defined$self->{incr_text});if (defined$text){if (utf8::is_utf8($text)and!utf8::is_utf8($self->{incr_text})){utf8::upgrade($self->{incr_text});utf8::decode($self->{incr_text})}$self->{incr_text}.= $text}my$max_size=$coder->get_max_size;if (defined wantarray){$self->{incr_mode}=INCR_M_WS unless defined$self->{incr_mode};if (wantarray){my@ret;$self->{incr_parsing}=1;do {push@ret,$self->_incr_parse($coder,$self->{incr_text});unless (!$self->{incr_nest}and $self->{incr_mode}==INCR_M_JSON){$self->{incr_mode}=INCR_M_WS if$self->{incr_mode}!=INCR_M_STR}}until (length$self->{incr_text}>= $self->{incr_p});$self->{incr_parsing}=0;return@ret}else {$self->{incr_parsing}=1;my$obj=$self->_incr_parse($coder,$self->{incr_text});$self->{incr_parsing}=0 if defined$obj;return$obj ? 
$obj : undef}}}sub _incr_parse {my ($self,$coder,$text,$skip)=@_;my$p=$self->{incr_p};my$restore=$p;my@obj;my$len=length$text;if ($self->{incr_mode}==INCR_M_WS){while ($len > $p){my$s=substr($text,$p,1);$p++ and next if (0x20 >= unpack($unpack_format,$s));$self->{incr_mode}=INCR_M_JSON;last}}while ($len > $p){my$s=substr($text,$p++,1);if ($s eq '"'){if (substr($text,$p - 2,1)eq '\\'){next}if ($self->{incr_mode}!=INCR_M_STR){$self->{incr_mode}=INCR_M_STR}else {$self->{incr_mode}=INCR_M_JSON;unless ($self->{incr_nest}){last}}}if ($self->{incr_mode}==INCR_M_JSON){if ($s eq '[' or $s eq '{'){if (++$self->{incr_nest}> $coder->get_max_depth){Carp::croak('json text or perl structure exceeds maximum nesting level (max_depth set too low?)')}}elsif ($s eq ']' or $s eq '}'){last if (--$self->{incr_nest}<= 0)}elsif ($s eq '#'){while ($len > $p){last if substr($text,$p++,1)eq "\n"}}}}$self->{incr_p}=$p;return if ($self->{incr_mode}==INCR_M_STR and not $self->{incr_nest});return if ($self->{incr_mode}==INCR_M_JSON and $self->{incr_nest}> 0);return '' unless (length substr($self->{incr_text},0,$p));local$Carp::CarpLevel=2;$self->{incr_p}=$restore;$self->{incr_c}=$p;my ($obj,$tail)=$coder->PP_decode_json(substr($self->{incr_text},0,$p),0x10000001);$self->{incr_text}=substr($self->{incr_text},$p);$self->{incr_p}=0;return$obj || ''}sub incr_text {if ($_[0]->{incr_parsing}){Carp::croak("incr_text can not be called when the incremental parser already started parsing")}$_[0]->{incr_text}}sub incr_skip {my$self=shift;$self->{incr_text}=substr($self->{incr_text},$self->{incr_c});$self->{incr_p}=0}sub incr_reset {my$self=shift;$self->{incr_text}=undef;$self->{incr_p}=0;$self->{incr_mode}=0;$self->{incr_nest}=0;$self->{incr_parsing}=0}1; -JSON_PP - -$fatpacked{"JSON/PP/Boolean.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'JSON_PP_BOOLEAN'; - use JSON::PP ();use strict;1; -JSON_PP_BOOLEAN - -$fatpacked{"Module/CPANfile.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'MODULE_CPANFILE'; - package Module::CPANfile;use strict;use warnings;use Cwd;use Carp ();use Module::CPANfile::Environment;use Module::CPANfile::Requirement;our$VERSION='1.1000';sub new {my($class,$file)=@_;bless {},$class}sub load {my($proto,$file)=@_;my$self=ref$proto ? 
$proto : $proto->new;$self->parse($file || Cwd::abs_path('cpanfile'));$self}sub save {my($self,$path)=@_;open my$out,">",$path or die "$path: $!";print {$out}$self->to_string}sub parse {my($self,$file)=@_;my$code=do {open my$fh,"<",$file or die "$file: $!";join '',<$fh>};my$env=Module::CPANfile::Environment->new($file);$env->parse($code)or die $@;$self->{_mirrors}=$env->mirrors;$self->{_prereqs}=$env->prereqs}sub from_prereqs {my($proto,$prereqs)=@_;my$self=$proto->new;$self->{_prereqs}=Module::CPANfile::Prereqs->from_cpan_meta($prereqs);$self}sub mirrors {my$self=shift;$self->{_mirrors}|| []}sub features {my$self=shift;map$self->feature($_),$self->{_prereqs}->identifiers}sub feature {my($self,$identifier)=@_;$self->{_prereqs}->feature($identifier)}sub prereq {shift->prereqs}sub prereqs {my$self=shift;$self->{_prereqs}->as_cpan_meta}sub merged_requirements {my$self=shift;$self->{_prereqs}->merged_requirements}sub effective_prereqs {my($self,$features)=@_;$self->prereqs_with(@{$features || []})}sub prereqs_with {my($self,@feature_identifiers)=@_;my$prereqs=$self->prereqs;my@others=map {$self->feature($_)->prereqs}@feature_identifiers;$prereqs->with_merged_prereqs(\@others)}sub prereq_specs {my$self=shift;$self->prereqs->as_string_hash}sub prereq_for_module {my($self,$module)=@_;$self->{_prereqs}->find($module)}sub options_for_module {my($self,$module)=@_;my$prereq=$self->prereq_for_module($module)or return;$prereq->requirement->options}sub merge_meta {my($self,$file,$version)=@_;require CPAN::Meta;$version ||= $file =~ /\.yml$/ ? '1.4' : '2';my$prereq=$self->prereqs;my$meta=CPAN::Meta->load_file($file);my$prereqs_hash=$prereq->with_merged_prereqs($meta->effective_prereqs)->as_string_hash;my$struct={%{$meta->as_struct},prereqs=>$prereqs_hash };CPAN::Meta->new($struct)->save($file,{version=>$version })}sub _dump {my$str=shift;require Data::Dumper;chomp(my$value=Data::Dumper->new([$str])->Terse(1)->Dump);$value}sub to_string {my($self,$include_empty)=@_;my$mirrors=$self->mirrors;my$prereqs=$self->prereq_specs;my$code='';$code .= $self->_dump_mirrors($mirrors);$code .= $self->_dump_prereqs($prereqs,$include_empty);for my$feature ($self->features){$code .= sprintf "feature %s, %s => sub {\n",_dump($feature->{identifier}),_dump($feature->{description});$code .= $self->_dump_prereqs($feature->{spec},$include_empty,4);$code .= "}\n\n"}$code =~ s/\n+$/\n/s;$code}sub _dump_mirrors {my($self,$mirrors)=@_;my$code="";for my$url (@$mirrors){$code .= "mirror '$url';\n"}$code =~ s/\n+$/\n/s;$code}sub _dump_prereqs {my($self,$prereqs,$include_empty,$base_indent)=@_;my$code='';for my$phase (qw(runtime configure build test develop)){my$indent=$phase eq 'runtime' ? '' : ' ';$indent=(' ' x ($base_indent || 0)).$indent;my($phase_code,$requirements);$phase_code .= "on $phase => sub {\n" unless$phase eq 'runtime';for my$type (qw(requires recommends suggests conflicts)){for my$mod (sort keys %{$prereqs->{$phase}{$type}}){my$ver=$prereqs->{$phase}{$type}{$mod};$phase_code .= $ver eq '0' ? "${indent}$type '$mod';\n" : "${indent}$type '$mod', '$ver';\n";$requirements++}}$phase_code .= "\n" unless$requirements;$phase_code .= "};\n" unless$phase eq 'runtime';$code .= $phase_code ."\n" if$requirements or $include_empty}$code =~ s/\n+$/\n/s;$code}1; -MODULE_CPANFILE - -$fatpacked{"Module/CPANfile/Environment.pm"} = '#line '.(1+__LINE__).' 
"'.__FILE__."\"\n".<<'MODULE_CPANFILE_ENVIRONMENT'; - package Module::CPANfile::Environment;use strict;use warnings;use Module::CPANfile::Prereqs;use Carp ();my@bindings=qw(on requires recommends suggests conflicts feature osname mirror configure_requires build_requires test_requires author_requires);my$file_id=1;sub new {my($class,$file)=@_;bless {file=>$file,phase=>'runtime',feature=>undef,features=>{},prereqs=>Module::CPANfile::Prereqs->new,mirrors=>[],},$class}sub bind {my$self=shift;my$pkg=caller;for my$binding (@bindings){no strict 'refs';*{"$pkg\::$binding"}=sub {$self->$binding(@_)}}}sub parse {my($self,$code)=@_;my$err;{local $@;$file_id++;$self->_evaluate(<{file} failed: $err"};return 1}sub _evaluate {my$_environment=$_[0];eval $_[1]}sub prereqs {$_[0]->{prereqs}}sub mirrors {$_[0]->{mirrors}}sub on {my($self,$phase,$code)=@_;local$self->{phase}=$phase;$code->()}sub feature {my($self,$identifier,$description,$code)=@_;if (@_==3 && ref($description)eq 'CODE'){$code=$description;$description=$identifier}unless (ref$description eq '' && ref$code eq 'CODE'){Carp::croak("Usage: feature 'identifier', 'Description' => sub { ... }")}local$self->{feature}=$identifier;$self->prereqs->add_feature($identifier,$description);$code->()}sub osname {die "TODO"}sub mirror {my($self,$url)=@_;push @{$self->{mirrors}},$url}sub requirement_for {my($self,$module,@args)=@_;my$requirement=0;$requirement=shift@args if@args % 2;return Module::CPANfile::Requirement->new(name=>$module,version=>$requirement,@args,)}sub requires {my$self=shift;$self->add_prereq(requires=>@_)}sub recommends {my$self=shift;$self->add_prereq(recommends=>@_)}sub suggests {my$self=shift;$self->add_prereq(suggests=>@_)}sub conflicts {my$self=shift;$self->add_prereq(conflicts=>@_)}sub add_prereq {my($self,$type,$module,@args)=@_;$self->prereqs->add_prereq(feature=>$self->{feature},phase=>$self->{phase},type=>$type,module=>$module,requirement=>$self->requirement_for($module,@args),)}sub configure_requires {my($self,@args)=@_;$self->on(configure=>sub {$self->requires(@args)})}sub build_requires {my($self,@args)=@_;$self->on(build=>sub {$self->requires(@args)})}sub test_requires {my($self,@args)=@_;$self->on(test=>sub {$self->requires(@args)})}sub author_requires {my($self,@args)=@_;$self->on(develop=>sub {$self->requires(@args)})}1; - package Module::CPANfile::Sandbox$file_id; - no warnings; - BEGIN { \$_environment->bind } - - # line 1 "$self->{file}" - $code; - EVAL -MODULE_CPANFILE_ENVIRONMENT - -$fatpacked{"Module/CPANfile/Prereq.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'MODULE_CPANFILE_PREREQ'; - package Module::CPANfile::Prereq;use strict;sub new {my($class,%options)=@_;bless \%options,$class}sub feature {$_[0]->{feature}}sub phase {$_[0]->{phase}}sub type {$_[0]->{type}}sub module {$_[0]->{module}}sub requirement {$_[0]->{requirement}}sub match_feature {my($self,$identifier)=@_;no warnings 'uninitialized';$self->feature eq $identifier}1; -MODULE_CPANFILE_PREREQ - -$fatpacked{"Module/CPANfile/Prereqs.pm"} = '#line '.(1+__LINE__).' 
"'.__FILE__."\"\n".<<'MODULE_CPANFILE_PREREQS'; - package Module::CPANfile::Prereqs;use strict;use Carp ();use CPAN::Meta::Feature;use Module::CPANfile::Prereq;sub from_cpan_meta {my($class,$prereqs)=@_;my$self=$class->new;for my$phase (keys %$prereqs){for my$type (keys %{$prereqs->{$phase}}){while (my($module,$requirement)=each %{$prereqs->{$phase}{$type}}){$self->add_prereq(phase=>$phase,type=>$type,module=>$module,requirement=>Module::CPANfile::Requirement->new(name=>$module,version=>$requirement),)}}}$self}sub new {my$class=shift;bless {prereqs=>[],features=>{},},$class}sub add_feature {my($self,$identifier,$description)=@_;$self->{features}{$identifier}={description=>$description }}sub add_prereq {my($self,%args)=@_;$self->add(Module::CPANfile::Prereq->new(%args))}sub add {my($self,$prereq)=@_;push @{$self->{prereqs}},$prereq}sub as_cpan_meta {my$self=shift;$self->{cpanmeta}||= $self->build_cpan_meta}sub build_cpan_meta {my($self,$identifier)=@_;my$prereq_spec={};$self->prereq_each($identifier,sub {my$prereq=shift;$prereq_spec->{$prereq->phase}{$prereq->type}{$prereq->module}=$prereq->requirement->version});CPAN::Meta::Prereqs->new($prereq_spec)}sub prereq_each {my($self,$identifier,$code)=@_;for my$prereq (@{$self->{prereqs}}){next unless$prereq->match_feature($identifier);$code->($prereq)}}sub merged_requirements {my$self=shift;my$reqs=CPAN::Meta::Requirements->new;for my$prereq (@{$self->{prereqs}}){$reqs->add_string_requirement($prereq->module,$prereq->requirement->version)}$reqs}sub find {my($self,$module)=@_;for my$prereq (@{$self->{prereqs}}){return$prereq if$prereq->module eq $module}return}sub identifiers {my$self=shift;keys %{$self->{features}}}sub feature {my($self,$identifier)=@_;my$data=$self->{features}{$identifier}or Carp::croak("Unknown feature '$identifier'");my$prereqs=$self->build_cpan_meta($identifier);CPAN::Meta::Feature->new($identifier,{description=>$data->{description},prereqs=>$prereqs->as_string_hash,})}1; -MODULE_CPANFILE_PREREQS - -$fatpacked{"Module/CPANfile/Requirement.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'MODULE_CPANFILE_REQUIREMENT'; - package Module::CPANfile::Requirement;use strict;sub new {my ($class,%args)=@_;$args{version}||= 0;bless +{name=>delete$args{name},version=>delete$args{version},options=>\%args,},$class}sub name {$_[0]->{name}}sub version {$_[0]->{version}}sub options {$_[0]->{options}}sub has_options {keys %{$_[0]->{options}}> 0}1; -MODULE_CPANFILE_REQUIREMENT - -$fatpacked{"Module/Metadata.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'MODULE_METADATA'; - package Module::Metadata;sub __clean_eval {eval $_[0]}use strict;use warnings;our$VERSION='1.000027';use Carp qw/croak/;use File::Spec;BEGIN {eval {require Fcntl;Fcntl->import('SEEK_SET');1}or *SEEK_SET=sub {0}}use version 0.87;BEGIN {if ($INC{'Log/Contextual.pm'}){require "Log/Contextual/WarnLogger.pm";Log::Contextual->import('log_info','-default_logger'=>Log::Contextual::WarnLogger->new({env_prefix=>'MODULE_METADATA',}),)}else {*log_info=sub (&) {warn $_[0]->()}}}use File::Find qw(find);my$V_NUM_REGEXP=qr{v?[0-9._]+};my$PKG_FIRST_WORD_REGEXP=qr{ # the FIRST word in a package name - [a-zA-Z_] # the first word CANNOT start with a digit - (?: - [\w']? # can contain letters, digits, _, or ticks - \w # But, NO multi-ticks or trailing ticks - )* - }x;my$PKG_ADDL_WORD_REGEXP=qr{ # the 2nd+ word in a package name - \w # the 2nd+ word CAN start with digits - (?: - [\w']? 
# and can contain letters or ticks - \w # But, NO multi-ticks or trailing ticks - )* - }x;my$PKG_NAME_REGEXP=qr{ # match a package name - (?: :: )? # a pkg name can start with arisdottle - $PKG_FIRST_WORD_REGEXP # a package word - (?: - (?: :: )+ ### arisdottle (allow one or many times) - $PKG_ADDL_WORD_REGEXP ### a package word - )* # ^ zero, one or many times - (?: - :: # allow trailing arisdottle - )? - }x;my$PKG_REGEXP=qr{ # match a package declaration - ^[\s\{;]* # intro chars on a line - package # the word 'package' - \s+ # whitespace - ($PKG_NAME_REGEXP) # a package name - \s* # optional whitespace - ($V_NUM_REGEXP)? # optional version number - \s* # optional whitesapce - [;\{] # semicolon line terminator or block start (since 5.16) - }x;my$VARNAME_REGEXP=qr{ # match fully-qualified VERSION name - ([\$*]) # sigil - $ or * - ( - ( # optional leading package name - (?:::|\')? # possibly starting like just :: (a la $::VERSION) - (?:\w+(?:::|\'))* # Foo::Bar:: ... - )? - VERSION - )\b - }x;my$VERS_REGEXP=qr{ # match a VERSION definition - (?: - \(\s*$VARNAME_REGEXP\s*\) # with parens - | - $VARNAME_REGEXP # without parens - ) - \s* - =[^=~>] # = but not ==, nor =~, nor => - }x;sub new_from_file {my$class=shift;my$filename=File::Spec->rel2abs(shift);return undef unless defined($filename)&& -f $filename;return$class->_init(undef,$filename,@_)}sub new_from_handle {my$class=shift;my$handle=shift;my$filename=shift;return undef unless defined($handle)&& defined($filename);$filename=File::Spec->rel2abs($filename);return$class->_init(undef,$filename,@_,handle=>$handle)}sub new_from_module {my$class=shift;my$module=shift;my%props=@_;$props{inc}||= \@INC;my$filename=$class->find_module_by_name($module,$props{inc});return undef unless defined($filename)&& -f $filename;return$class->_init($module,$filename,%props)}{my$compare_versions=sub {my ($v1,$op,$v2)=@_;$v1=version->new($v1)unless UNIVERSAL::isa($v1,'version');my$eval_str="\$v1 $op \$v2";my$result=eval$eval_str;log_info {"error comparing versions: '$eval_str' $@"}if $@;return$result};my$normalize_version=sub {my ($version)=@_;if ($version =~ /[=<>!,]/){}elsif (ref$version eq 'version'){$version=$version->is_qv ? 
$version->normal : $version->stringify}elsif ($version =~ /^[^v][^.]*\.[^.]+\./){$version="v$version"}else {}return$version};my$resolve_module_versions=sub {my$packages=shift;my($file,$version);my$err='';for my$p (@$packages){if (defined($p->{version})){if (defined($version)){if ($compare_versions->($version,'!=',$p->{version})){$err .= " $p->{file} ($p->{version})\n"}else {}}else {$file=$p->{file};$version=$p->{version}}}$file ||= $p->{file}if defined($p->{file})}if ($err){$err=" $file ($version)\n" .$err}my%result=(file=>$file,version=>$version,err=>$err);return \%result};sub provides {my$class=shift;croak "provides() requires key/value pairs \n" if @_ % 2;my%args=@_;croak "provides() takes only one of 'dir' or 'files'\n" if$args{dir}&& $args{files};croak "provides() requires a 'version' argument" unless defined$args{version};croak "provides() does not support version '$args{version}' metadata" unless grep {$args{version}eq $_}qw/1.4 2/;$args{prefix}='lib' unless defined$args{prefix};my$p;if ($args{dir}){$p=$class->package_versions_from_directory($args{dir})}else {croak "provides() requires 'files' to be an array reference\n" unless ref$args{files}eq 'ARRAY';$p=$class->package_versions_from_directory($args{files})}if (length$args{prefix}){$args{prefix}=~ s{/$}{};for my$v (values %$p){$v->{file}="$args{prefix}/$v->{file}"}}return$p}sub package_versions_from_directory {my ($class,$dir,$files)=@_;my@files;if ($files){@files=@$files}else {find({wanted=>sub {push@files,$_ if -f $_ && /\.pm$/},no_chdir=>1,},$dir)}my(%prime,%alt);for my$file (@files){my$mapped_filename=File::Spec::Unix->abs2rel($file,$dir);my@path=split(/\//,$mapped_filename);(my$prime_package=join('::',@path))=~ s/\.pm$//;my$pm_info=$class->new_from_file($file);for my$package ($pm_info->packages_inside){next if$package eq 'main';next if$package eq 'DB';next if grep /^_/,split(/::/,$package);my$version=$pm_info->version($package);$prime_package=$package if lc($prime_package)eq lc($package);if ($package eq $prime_package){if (exists($prime{$package})){croak "Unexpected conflict in '$package'; multiple versions found.\n"}else {$mapped_filename="$package.pm" if lc("$package.pm")eq lc($mapped_filename);$prime{$package}{file}=$mapped_filename;$prime{$package}{version}=$version if defined($version)}}else {push(@{$alt{$package}},{file=>$mapped_filename,version=>$version,})}}}for my$package (keys(%alt)){my$result=$resolve_module_versions->($alt{$package});if (exists($prime{$package})){if ($result->{err}){log_info {"Found conflicting versions for package '$package'\n" ." $prime{$package}{file} ($prime{$package}{version})\n" .$result->{err}}}elsif (defined($result->{version})){if (exists($prime{$package}{version})&& defined($prime{$package}{version})){if ($compare_versions->($prime{$package}{version},'!=',$result->{version})){log_info {"Found conflicting versions for package '$package'\n" ." $prime{$package}{file} ($prime{$package}{version})\n" ." 
$result->{file} ($result->{version})\n"}}}else {$prime{$package}{file}=$result->{file};$prime{$package}{version}=$result->{version}}}else {}}else {if ($result->{err}){log_info {"Found conflicting versions for package '$package'\n" .$result->{err}}}$prime{$package}{file}=$result->{file};$prime{$package}{version}=$result->{version}if defined($result->{version})}}for (grep defined $_->{version},values%prime){$_->{version}=$normalize_version->($_->{version})}return \%prime}}sub _init {my$class=shift;my$module=shift;my$filename=shift;my%props=@_;my$handle=delete$props{handle};my(%valid_props,@valid_props);@valid_props=qw(collect_pod inc);@valid_props{@valid_props}=delete(@props{@valid_props});warn "Unknown properties: @{[keys %props]}\n" if scalar(%props);my%data=(module=>$module,filename=>$filename,version=>undef,packages=>[],versions=>{},pod=>{},pod_headings=>[],collect_pod=>0,%valid_props,);my$self=bless(\%data,$class);if (not $handle){my$filename=$self->{filename};open$handle,'<',$filename or croak("Can't open '$filename': $!");$self->_handle_bom($handle,$filename)}$self->_parse_fh($handle);unless($self->{module}and length($self->{module})){my ($v,$d,$f)=File::Spec->splitpath($self->{filename});if($f =~ /\.pm$/){$f =~ s/\..+$//;my@candidates=grep /$f$/,@{$self->{packages}};$self->{module}=shift(@candidates)}else {if(grep /main/,@{$self->{packages}}){$self->{module}='main'}else {$self->{module}=$self->{packages}[0]|| ''}}}$self->{version}=$self->{versions}{$self->{module}}if defined($self->{module});return$self}sub _do_find_module {my$class=shift;my$module=shift || croak 'find_module_by_name() requires a package name';my$dirs=shift || \@INC;my$file=File::Spec->catfile(split(/::/,$module));for my$dir (@$dirs){my$testfile=File::Spec->catfile($dir,$file);return [File::Spec->rel2abs($testfile),$dir ]if -e $testfile and!-d _;$testfile .= '.pm';return [File::Spec->rel2abs($testfile),$dir ]if -e $testfile}return}sub find_module_by_name {my$found=shift()->_do_find_module(@_)or return;return$found->[0]}sub find_module_dir_by_name {my$found=shift()->_do_find_module(@_)or return;return$found->[1]}sub _parse_version_expression {my$self=shift;my$line=shift;my($sigil,$variable_name,$package);if ($line =~ /$VERS_REGEXP/o){($sigil,$variable_name,$package)=$2 ? ($1,$2,$3): ($4,$5,$6);if ($package){$package=($package eq '::')? 
'main' : $package;$package =~ s/::$//}}return ($sigil,$variable_name,$package)}sub _handle_bom {my ($self,$fh,$filename)=@_;my$pos=tell$fh;return unless defined$pos;my$buf=' ' x 2;my$count=read$fh,$buf,length$buf;return unless defined$count and $count >= 2;my$encoding;if ($buf eq "\x{FE}\x{FF}"){$encoding='UTF-16BE'}elsif ($buf eq "\x{FF}\x{FE}"){$encoding='UTF-16LE'}elsif ($buf eq "\x{EF}\x{BB}"){$buf=' ';$count=read$fh,$buf,length$buf;if (defined$count and $count >= 1 and $buf eq "\x{BF}"){$encoding='UTF-8'}}if (defined$encoding){if ("$]" >= 5.008){binmode($fh,":encoding($encoding)")}}else {seek$fh,$pos,SEEK_SET or croak(sprintf "Can't reset position to the top of '$filename'")}return$encoding}sub _parse_fh {my ($self,$fh)=@_;my($in_pod,$seen_end,$need_vers)=(0,0,0);my(@packages,%vers,%pod,@pod);my$package='main';my$pod_sect='';my$pod_data='';my$in_end=0;while (defined(my$line=<$fh>)){my$line_num=$.;chomp($line);my$is_cut;if ($line =~ /^=([a-zA-Z].*)/){my$cmd=$1;$is_cut=$cmd =~ /^cut(?:[^a-zA-Z]|$)/;$in_pod=!$is_cut}if ($in_pod){if ($line =~ /^=head[1-4]\s+(.+)\s*$/){push(@pod,$1);if ($self->{collect_pod}&& length($pod_data)){$pod{$pod_sect}=$pod_data;$pod_data=''}$pod_sect=$1}elsif ($self->{collect_pod}){$pod_data .= "$line\n"}}elsif ($is_cut){if ($self->{collect_pod}&& length($pod_data)){$pod{$pod_sect}=$pod_data;$pod_data=''}$pod_sect=''}else {next if$in_end;next if$line =~ /^\s*#/;if ($line eq '__END__'){$in_end++;next}last if$line eq '__DATA__';my($version_sigil,$version_fullname,$version_package)=index($line,'VERSION')>= 1 ? $self->_parse_version_expression($line): ();if ($line =~ /$PKG_REGEXP/o){$package=$1;my$version=$2;push(@packages,$package)unless grep($package eq $_,@packages);$need_vers=defined$version ? 0 : 1;if (not exists$vers{$package}and defined$version){my$dwim_version=eval {_dwim_version($version)};croak "Version '$version' from $self->{filename} does not appear to be valid:\n$line\n\nThe fatal error was: $@\n" unless defined$dwim_version;$vers{$package}=$dwim_version}}elsif ($version_fullname && $version_package){push(@packages,$version_package)unless grep($version_package eq $_,@packages);$need_vers=0 if$version_package eq $package;unless (defined$vers{$version_package}&& length$vers{$version_package}){$vers{$version_package}=$self->_evaluate_version_line($version_sigil,$version_fullname,$line)}}elsif ($package eq 'main' && $version_fullname &&!exists($vers{main})){$need_vers=0;my$v=$self->_evaluate_version_line($version_sigil,$version_fullname,$line);$vers{$package}=$v;push(@packages,'main')}elsif ($package eq 'main' &&!exists($vers{main})&& $line =~ /\w/){$need_vers=1;$vers{main}='';push(@packages,'main')}elsif ($version_fullname && $need_vers){$need_vers=0;my$v=$self->_evaluate_version_line($version_sigil,$version_fullname,$line);unless (defined$vers{$package}&& length$vers{$package}){$vers{$package}=$v}}}}if ($self->{collect_pod}&& length($pod_data)){$pod{$pod_sect}=$pod_data}$self->{versions}=\%vers;$self->{packages}=\@packages;$self->{pod}=\%pod;$self->{pod_headings}=\@pod}{my$pn=0;sub _evaluate_version_line {my$self=shift;my($sigil,$variable_name,$line)=@_;$pn++;my$eval=qq{ my \$dummy = q# Hide from _packages_inside() - #; package Module::Metadata::_version::p${pn}; - use version; - sub { - local $sigil$variable_name; - $line; - \$$variable_name - }; - };$eval=$1 if$eval =~ m{^(.+)}s;local $^W;my$vsub=__clean_eval($eval);if ($@ =~ /Can't locate/ && -d 'lib'){local@INC=('lib',@INC);$vsub=__clean_eval($eval)}warn "Error evaling version line '$eval' in 
$self->{filename}: $@\n" if $@;(ref($vsub)eq 'CODE')or croak "failed to build version sub for $self->{filename}";my$result=eval {$vsub->()};croak "Could not get version from $self->{filename} by executing:\n$eval\n\nThe fatal error was: $@\n" if $@;my$version=eval {_dwim_version($result)};croak "Version '$result' from $self->{filename} does not appear to be valid:\n$eval\n\nThe fatal error was: $@\n" unless defined$version;return$version}}{my@version_prep=(sub {return shift},sub {my$v=shift;$v =~ s{([0-9])[a-z-].*$}{$1}i;return$v},sub {my$v=shift;my$num_dots=()=$v =~ m{(\.)}g;my$num_unders=()=$v =~ m{(_)}g;my$leading_v=substr($v,0,1)eq 'v';if (!$leading_v && $num_dots < 2 && $num_unders > 1){$v =~ s{_}{}g;$num_unders=()=$v =~ m{(_)}g}return$v},sub {my$v=shift;no warnings 'numeric';return 0 + $v},);sub _dwim_version {my ($result)=shift;return$result if ref($result)eq 'version';my ($version,$error);for my$f (@version_prep){$result=$f->($result);$version=eval {version->new($result)};$error ||= $@ if $@;last if defined$version}croak$error unless defined$version;return$version}}sub name {$_[0]->{module}}sub filename {$_[0]->{filename}}sub packages_inside {@{$_[0]->{packages}}}sub pod_inside {@{$_[0]->{pod_headings}}}sub contains_pod {0+@{$_[0]->{pod_headings}}}sub version {my$self=shift;my$mod=shift || $self->{module};my$vers;if (defined($mod)&& length($mod)&& exists($self->{versions}{$mod})){return$self->{versions}{$mod}}else {return undef}}sub pod {my$self=shift;my$sect=shift;if (defined($sect)&& length($sect)&& exists($self->{pod}{$sect})){return$self->{pod}{$sect}}else {return undef}}sub is_indexable {my ($self,$package)=@_;my@indexable_packages=grep {$_ ne 'main'}$self->packages_inside;return!!grep {$_ eq $package}@indexable_packages if$package;return!!@indexable_packages}1; -MODULE_METADATA - -$fatpacked{"Parse/CPAN/Meta.pm"} = '#line '.(1+__LINE__).' 
"'.__FILE__."\"\n".<<'PARSE_CPAN_META'; - use 5.008001;use strict;package Parse::CPAN::Meta;our$VERSION='1.4414';use Exporter;use Carp 'croak';our@ISA=qw/Exporter/;our@EXPORT_OK=qw/Load LoadFile/;sub load_file {my ($class,$filename)=@_;my$meta=_slurp($filename);if ($filename =~ /\.ya?ml$/){return$class->load_yaml_string($meta)}elsif ($filename =~ /\.json$/){return$class->load_json_string($meta)}else {$class->load_string($meta)}}sub load_string {my ($class,$string)=@_;if ($string =~ /^---/){return$class->load_yaml_string($string)}elsif ($string =~ /^\s*\{/){return$class->load_json_string($string)}else {return$class->load_yaml_string($string)}}sub load_yaml_string {my ($class,$string)=@_;my$backend=$class->yaml_backend();my$data=eval {no strict 'refs';&{"$backend\::Load"}($string)};croak $@ if $@;return$data || {}}sub load_json_string {my ($class,$string)=@_;my$data=eval {$class->json_backend()->new->decode($string)};croak $@ if $@;return$data || {}}sub yaml_backend {if (!defined$ENV{PERL_YAML_BACKEND}){_can_load('CPAN::Meta::YAML',0.011)or croak "CPAN::Meta::YAML 0.011 is not available\n";return "CPAN::Meta::YAML"}else {my$backend=$ENV{PERL_YAML_BACKEND};_can_load($backend)or croak "Could not load PERL_YAML_BACKEND '$backend'\n";$backend->can("Load")or croak "PERL_YAML_BACKEND '$backend' does not implement Load()\n";return$backend}}sub json_backend {if (!$ENV{PERL_JSON_BACKEND}or $ENV{PERL_JSON_BACKEND}eq 'JSON::PP'){_can_load('JSON::PP'=>2.27103)or croak "JSON::PP 2.27103 is not available\n";return 'JSON::PP'}else {_can_load('JSON'=>2.5)or croak "JSON 2.5 is required for " ."\$ENV{PERL_JSON_BACKEND} = '$ENV{PERL_JSON_BACKEND}'\n";return "JSON"}}sub _slurp {require Encode;open my$fh,"<:raw","$_[0]" or die "can't open $_[0] for reading: $!";my$content=do {local $/;<$fh>};$content=Encode::decode('UTF-8',$content,Encode::PERLQQ());return$content}sub _can_load {my ($module,$version)=@_;(my$file=$module)=~ s{::}{/}g;$file .= ".pm";return 1 if$INC{$file};return 0 if exists$INC{$file};eval {require$file;1}or return 0;if (defined$version){eval {$module->VERSION($version);1}or return 0}return 1}sub LoadFile ($) {return Load(_slurp(shift))}sub Load ($) {require CPAN::Meta::YAML;my$object=eval {CPAN::Meta::YAML::Load(shift)};croak $@ if $@;return$object}1; -PARSE_CPAN_META - -$fatpacked{"Parse/PMFile.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'PARSE_PMFILE'; - package Parse::PMFile;sub __clean_eval {eval $_[0]}use strict;use warnings;use Safe;use JSON::PP ();use Dumpvalue;use version ();use File::Spec ();our$VERSION='0.36';our$VERBOSE=0;our$ALLOW_DEV_VERSION=0;our$FORK=0;our$UNSAFE=$] < 5.010000 ? 
1 : 0;sub new {my ($class,$meta,$opts)=@_;bless {%{$opts || {}},META_CONTENT=>$meta},$class}sub parse {my ($self,$pmfile)=@_;$pmfile =~ s|\\|/|g;my($filemtime)=(stat$pmfile)[9];$self->{MTIME}=$filemtime;$self->{PMFILE}=$pmfile;unless ($self->_version_from_meta_ok){my$version;unless (eval {$version=$self->_parse_version;1}){$self->_verbose(1,"error with version in $pmfile: $@");return}$self->{VERSION}=$version;if ($self->{VERSION}=~ /^\{.*\}$/){}elsif ($self->{VERSION}=~ /[_\s]/ &&!$self->{ALLOW_DEV_VERSION}&&!$ALLOW_DEV_VERSION){return}}my($ppp)=$self->_packages_per_pmfile;my@keys_ppp=$self->_filter_ppps(sort keys %$ppp);$self->_verbose(1,"Will check keys_ppp[@keys_ppp]\n");my ($package,%errors);my%checked_in;DBPACK: foreach$package (@keys_ppp){if ($package !~ /^\w[\w\:\']*\w?\z/ || $package !~ /\w\z/ || $package =~ /:/ && $package !~ /::/ || $package =~ /\w:\w/ || $package =~ /:::/){$self->_verbose(1,"Package[$package] did not pass the ultimate sanity check");delete$ppp->{$package};next}if ($self->{USERID}&& $self->{PERMISSIONS}&&!$self->_perm_check($package)){delete$ppp->{$package};next}{my (undef,$module)=split m{/lib/},$self->{PMFILE},2;if ($module){$module =~ s{\.pm\z}{};$module =~ s{/}{::}g;if (lc$module eq lc$package && $module ne $package){$errors{$package}={indexing_warning=>"Capitalization of package ($package) does not match filename!",infile=>$self->{PMFILE},}}}}my$pp=$ppp->{$package};if ($pp->{version}&& $pp->{version}=~ /^\{.*\}$/){my$err=JSON::PP::decode_json($pp->{version});if ($err->{x_normalize}){$errors{$package}={normalize=>$err->{version},infile=>$pp->{infile},};$pp->{version}="undef"}elsif ($err->{openerr}){$pp->{version}="undef";$self->_verbose(1,qq{Parse::PMFile was not able to - read the file. It issued the following error: C< $err->{r} >},);$errors{$package}={open=>$err->{r},infile=>$pp->{infile},}}else {$pp->{version}="undef";$self->_verbose(1,qq{Parse::PMFile was not able to - parse the following line in that file: C< $err->{line} > - - Note: the indexer is running in a Safe compartement and cannot - provide the full functionality of perl in the VERSION line. It - is trying hard, but sometime it fails. As a workaround, please - consider writing a META.yml that contains a 'provides' - attribute or contact the CPAN admins to investigate (yet - another) workaround against "Safe" limitations.)},);$errors{$package}={parse_version=>$err->{line},infile=>$err->{file},}}}for ($package,$pp->{version},){if (!defined || /^\s*$/ || /\s/){delete$ppp->{$package};next}}$checked_in{$package}=$ppp->{$package}}return (wantarray && %errors)? (\%checked_in,\%errors): \%checked_in}sub _perm_check {my ($self,$package)=@_;my$userid=$self->{USERID};my$module=$self->{PERMISSIONS}->module_permissions($package);return 1 if!$module;return 1 if defined$module->m && $module->m eq $userid;return 1 if defined$module->f && $module->f eq $userid;return 1 if defined$module->c && grep {$_ eq $userid}@{$module->c};return}sub _parse_version {my$self=shift;use strict;my$pmfile=$self->{PMFILE};my$tmpfile=File::Spec->catfile(File::Spec->tmpdir,"ParsePMFile$$" .rand(1000));my$pmcp=$pmfile;for ($pmcp){s/([^\\](\\\\)*)@/$1\\@/g}my($v);{package main;my$pid;if ($self->{FORK}|| $FORK){$pid=fork();die "Can't fork: $!" 
unless defined$pid}if ($pid){waitpid($pid,0);if (open my$fh,'<',$tmpfile){$v=<$fh>}}else {my($comp)=Safe->new;my$eval=qq{ - local(\$^W) = 0; - Parse::PMFile::_parse_version_safely("$pmcp"); - };$comp->permit("entereval");$comp->share("*Parse::PMFile::_parse_version_safely");$comp->share("*version::new");$comp->share("*version::numify");$comp->share_from('main',['*version::','*charstar::','*Exporter::','*DynaLoader::']);$comp->share_from('version',['&qv']);$comp->permit(":base_math");$comp->deny(qw/enteriter iter unstack goto/);version->import('qv')if$self->{UNSAFE}|| $UNSAFE;{no strict;$v=($self->{UNSAFE}|| $UNSAFE)? eval$eval : $comp->reval($eval)}if ($@){my$err=$@;if (ref$err){if ($err->{line}=~ /([\$*])([\w\:\']*)\bVERSION\b.*?\=(.*)/){local($^W)=0;my ($sigil,$vstr)=($1,$3);$self->_restore_overloaded_stuff(1)if$err->{line}=~ /use\s+version\b|version\->|qv\(/;$v=($self->{UNSAFE}|| $UNSAFE)? eval$vstr : $comp->reval($vstr);$v=$$v if$sigil eq '*' && ref$v}if ($@ or!$v){$self->_verbose(1,sprintf("reval failed: err[%s] for eval[%s]",JSON::PP::encode_json($err),$eval,));$v=JSON::PP::encode_json($err)}}else {$v=JSON::PP::encode_json({openerr=>$err })}}if (defined$v){$v=$v->numify if ref($v)=~ /^version(::vpp)?$/}else {$v=""}if ($self->{FORK}|| $FORK){open my$fh,'>:utf8',$tmpfile;print$fh $v;exit 0}else {utf8::encode($v);$v=undef if defined$v &&!length$v;$comp->erase;$self->_restore_overloaded_stuff}}}unlink$tmpfile if ($self->{FORK}|| $FORK)&& -e $tmpfile;return$self->_normalize_version($v)}sub _restore_overloaded_stuff {my ($self,$used_version_in_safe)=@_;return if$self->{UNSAFE}|| $UNSAFE;no strict 'refs';no warnings 'redefine';my$restored;if ($INC{'version/vxs.pm'}){*{'version::(""'}=\&version::vxs::stringify;*{'version::(0+'}=\&version::vxs::numify;*{'version::(cmp'}=\&version::vxs::VCMP;*{'version::(<=>'}=\&version::vxs::VCMP;*{'version::(bool'}=\&version::vxs::boolean;$restored=1}if ($INC{'version/vpp.pm'}){{package charstar;overload->import}if (!$used_version_in_safe){package version::vpp;overload->import}unless ($restored){*{'version::(""'}=\&version::vpp::stringify;*{'version::(0+'}=\&version::vpp::numify;*{'version::(cmp'}=\&version::vpp::vcmp;*{'version::(<=>'}=\&version::vpp::vcmp;*{'version::(bool'}=\&version::vpp::vbool}*{'version::vpp::(""'}=\&version::vpp::stringify;*{'version::vpp::(0+'}=\&version::vpp::numify;*{'version::vpp::(cmp'}=\&version::vpp::vcmp;*{'version::vpp::(<=>'}=\&version::vpp::vcmp;*{'version::vpp::(bool'}=\&version::vpp::vbool;*{'charstar::(""'}=\&charstar::thischar;*{'charstar::(0+'}=\&charstar::thischar;*{'charstar::(++'}=\&charstar::increment;*{'charstar::(--'}=\&charstar::decrement;*{'charstar::(+'}=\&charstar::plus;*{'charstar::(-'}=\&charstar::minus;*{'charstar::(*'}=\&charstar::multiply;*{'charstar::(cmp'}=\&charstar::cmp;*{'charstar::(<=>'}=\&charstar::spaceship;*{'charstar::(bool'}=\&charstar::thischar;*{'charstar::(='}=\&charstar::clone;$restored=1}if (!$restored){*{'version::(""'}=\&version::stringify;*{'version::(0+'}=\&version::numify;*{'version::(cmp'}=\&version::vcmp;*{'version::(<=>'}=\&version::vcmp;*{'version::(bool'}=\&version::boolean}}sub _packages_per_pmfile {my$self=shift;my$ppp={};my$pmfile=$self->{PMFILE};my$filemtime=$self->{MTIME};my$version=$self->{VERSION};open my$fh,"<","$pmfile" or return$ppp;local $/="\n";my$inpod=0;PLINE: while (<$fh>){chomp;my($pline)=$_;$inpod=$pline =~ /^=(?!cut)/ ? 1 : $pline =~ /^=cut/ ? 
0 : $inpod;next if$inpod;next if substr($pline,0,4)eq "=cut";$pline =~ s/\#.*//;next if$pline =~ /^\s*$/;if ($pline =~ /^__(?:END|DATA)__\b/ and $pmfile !~ /\.PL$/){last PLINE}my$pkg;my$strict_version;if ($pline =~ m{ - # (.*) # takes too much time if $pline is long - (? 128;$ppp->{$pkg}{parsed}++;$ppp->{$pkg}{infile}=$pmfile;if ($self->_simile($pmfile,$pkg)){$ppp->{$pkg}{simile}=$pmfile;if ($self->_version_from_meta_ok){my$provides=$self->{META_CONTENT}{provides};if (exists$provides->{$pkg}){if (defined$provides->{$pkg}{version}){my$v=$provides->{$pkg}{version};if ($v =~ /[_\s]/ &&!$self->{ALLOW_DEV_VERSION}&&!$ALLOW_DEV_VERSION){next PLINE}unless (eval {$version=$self->_normalize_version($v);1}){$self->_verbose(1,"error with version in $pmfile: $@");next}$ppp->{$pkg}{version}=$version}else {$ppp->{$pkg}{version}="undef"}}}else {if (defined$strict_version){$ppp->{$pkg}{version}=$strict_version }else {$ppp->{$pkg}{version}=defined$version ? $version : ""}no warnings;if ($version eq 'undef'){$ppp->{$pkg}{version}=$version unless defined$ppp->{$pkg}{version}}else {$ppp->{$pkg}{version}=$version if$version > $ppp->{$pkg}{version}|| $version gt $ppp->{$pkg}{version}}}}else {$ppp->{$pkg}{version}=$version unless defined$ppp->{$pkg}{version}&& length($ppp->{$pkg}{version})}$ppp->{$pkg}{filemtime}=$filemtime}else {}}close$fh;$ppp}{no strict;sub _parse_version_safely {my($parsefile)=@_;my$result;local*FH;local $/="\n";open(FH,$parsefile)or die "Could not open '$parsefile': $!";my$inpod=0;while (){$inpod=/^=(?!cut)/ ? 1 : /^=cut/ ? 0 : $inpod;next if$inpod || /^\s*#/;last if /^__(?:END|DATA)__\b/;chop;if (my ($ver)=/package \s+ \S+ \s+ (\S+) \s* [;{]/x){return$ver if version::is_lax($ver)}next unless /(?<=])\=(?![=>])/;my$current_parsed_line=$_;my$eval=qq{ - package # - ExtUtils::MakeMaker::_version; - - local $1$2; - \$$2=undef; do { - $_ - }; \$$2 - };local $^W=0;local$SIG{__WARN__}=sub {};$result=__clean_eval($eval);if ($@ or!defined$result){die +{eval=>$eval,line=>$current_parsed_line,file=>$parsefile,err=>$@,}}last}close FH;$result="undef" unless defined$result;if ((ref$result)=~ /^version(?:::vpp)?\b/){$result=$result->numify}return$result}}sub _filter_ppps {my($self,@ppps)=@_;my@res;MANI: for my$ppp (@ppps){if ($self->{META_CONTENT}){my$no_index=$self->{META_CONTENT}{no_index}|| $self->{META_CONTENT}{private};if (ref($no_index)eq 'HASH'){my%map=(package=>qr{\z},namespace=>qr{::},);for my$k (qw(package namespace)){next unless my$v=$no_index->{$k};my$rest=$map{$k};if (ref$v eq "ARRAY"){for my$ve (@$v){$ve =~ s|::$||;if ($ppp =~ /^$ve$rest/){$self->_verbose(1,"Skipping ppp[$ppp] due to ve[$ve]");next MANI}else {$self->_verbose(1,"NOT skipping ppp[$ppp] due to ve[$ve]")}}}else {$v =~ s|::$||;if ($ppp =~ /^$v$rest/){$self->_verbose(1,"Skipping ppp[$ppp] due to v[$v]");next MANI}else {$self->_verbose(1,"NOT skipping ppp[$ppp] due to v[$v]")}}}}else {$self->_verbose(1,"No keyword 'no_index' or 'private' in META_CONTENT")}}else {}push@res,$ppp}$self->_verbose(1,"Result of filter_ppps: res[@res]");@res}sub _simile {my($self,$file,$package)=@_;$file =~ s|.*/||;$file =~ s|\.pm(?:\.PL)?||;my$ret=$package =~ m/\b\Q$file\E$/;$ret ||= 0;unless ($ret){$ret=1 if lc$file eq 'version'}$self->_verbose(1,"Result of simile(): file[$file] package[$package] ret[$ret]\n");$ret}sub _normalize_version {my($self,$v)=@_;$v="undef" unless defined$v;my$dv=Dumpvalue->new;my$sdv=$dv->stringify($v,1);$self->_verbose(1,"Result of normalize_version: sdv[$sdv]\n");return$v if$v eq "undef";return$v if$v =~ /^\{.*\}$/;$v =~ 
s/^\s+//;$v =~ s/\s+\z//;if ($v =~ /_/){return$v }if (!version::is_lax($v)){return JSON::PP::encode_json({x_normalize=>'version::is_lax failed',version=>$v })}my$vv=eval {no warnings;version->new($v)->numify};if ($@){return JSON::PP::encode_json({x_normalize=>$@,version=>$v })}if ($vv eq $v){}else {my$forced=$self->_force_numeric($v);if ($forced eq $vv){}elsif ($forced =~ /^v(.+)/){$vv=version->new($1)->numify}else {if ($forced==$vv){$vv=$forced}}}return$vv}sub _force_numeric {my($self,$v)=@_;$v=$self->_readable($v);if ($v =~ /^(\+?)(\d*)(\.(\d*))?/ && (defined $2 && length $2 || defined $4 && length $4)){my$two=defined $2 ? $2 : "";my$three=defined $3 ? $3 : "";$v="$two$three"}$v}sub _version_from_meta_ok {my($self)=@_;return$self->{VERSION_FROM_META_OK}if exists$self->{VERSION_FROM_META_OK};my$c=$self->{META_CONTENT};return($self->{VERSION_FROM_META_OK}=0)unless$c->{provides};my ($mb_v)=(defined$c->{generated_by}? $c->{generated_by}: '')=~ /Module::Build version ([\d\.]+)/;return($self->{VERSION_FROM_META_OK}=1)unless$mb_v;return($self->{VERSION_FROM_META_OK}=1)if$mb_v eq '0.250.0';if ($mb_v >= 0.19 && $mb_v < 0.26 &&!keys %{$c->{provides}}){return($self->{VERSION_FROM_META_OK}=0)}return($self->{VERSION_FROM_META_OK}=1)}sub _verbose {my($self,$level,@what)=@_;warn@what if$level <= ((ref$self && $self->{VERBOSE})|| $VERBOSE)}sub _vcmp {my($self,$l,$r)=@_;local($^W)=0;$self->_verbose(9,"l[$l] r[$r]");return 0 if$l eq $r;for ($l,$r){s/_//g}$self->_verbose(9,"l[$l] r[$r]");for ($l,$r){next unless tr/.// > 1 || /^v/;s/^v?/v/;1 while s/\.0+(\d)/.$1/}$self->_verbose(9,"l[$l] r[$r]");if ($l=~/^v/ <=> $r=~/^v/){for ($l,$r){next if /^v/;$_=$self->_float2vv($_)}}$self->_verbose(9,"l[$l] r[$r]");my$lvstring="v0";my$rvstring="v0";if ($] >= 5.006 && $l =~ /^v/ && $r =~ /^v/){$lvstring=$self->_vstring($l);$rvstring=$self->_vstring($r);$self->_verbose(9,sprintf "lv[%vd] rv[%vd]",$lvstring,$rvstring)}return (($l ne "undef")<=> ($r ne "undef")|| $lvstring cmp $rvstring || $l <=> $r || $l cmp $r)}sub _vgt {my($self,$l,$r)=@_;$self->_vcmp($l,$r)> 0}sub _vlt {my($self,$l,$r)=@_;$self->_vcmp($l,$r)< 0}sub _vge {my($self,$l,$r)=@_;$self->_vcmp($l,$r)>= 0}sub _vle {my($self,$l,$r)=@_;$self->_vcmp($l,$r)<= 0}sub _vstring {my($self,$n)=@_;$n =~ s/^v// or die "Parse::PMFile::_vstring() called with invalid arg [$n]";pack "U*",split /\./,$n}sub _float2vv {my($self,$n)=@_;my($rev)=int($n);$rev ||= 0;my($mantissa)=$n =~ /\.(\d{1,12})/;$mantissa ||= 0;$mantissa .= "0" while length($mantissa)%3;my$ret="v" .$rev;while ($mantissa){$mantissa =~ s/(\d{1,3})// or die "Panic: length>0 but not a digit? mantissa[$mantissa]";$ret .= ".".int($1)}$ret =~ s/(\.0)+/.0/;$ret}sub _readable {my($self,$n)=@_;$n =~ /^([\w\-\+\.]+)/;return $1 if defined $1 && length($1)>0;if ($] < 5.006){$self->_verbose(9,"Suspicious version string seen [$n]\n");return$n}my$better=sprintf "v%vd",$n;$self->_verbose(9,"n[$n] better[$better]");return$better}1; -PARSE_PMFILE - -$fatpacked{"String/ShellQuote.pm"} = '#line '.(1+__LINE__).' 
"'.__FILE__."\"\n".<<'STRING_SHELLQUOTE'; - package String::ShellQuote;use strict;use vars qw($VERSION @ISA @EXPORT);require Exporter;$VERSION='1.04';@ISA=qw(Exporter);@EXPORT=qw(shell_quote shell_quote_best_effort shell_comment_quote);sub croak {require Carp;goto&Carp::croak}sub _shell_quote_backend {my@in=@_;my@err=();if (0){require RS::Handy;print RS::Handy::data_dump(\@in)}return \@err,'' unless@in;my$ret='';my$saw_non_equal=0;for (@in){if (!defined $_ or $_ eq ''){$_="''";next}if (s/\x00//g){push@err,"No way to quote string containing null (\\000) bytes"}my$escape=0;if (/=/){if (!$saw_non_equal){$escape=1}}else {$saw_non_equal=1}if (m|[^\w!%+,\-./:=@^]|){$escape=1}if ($escape || (!$saw_non_equal && /=/)){s/'/'\\''/g;s|((?:'\\''){2,})|q{'"} . (q{'} x (length($1) / 4)) . q{"'}|ge;$_="'$_'";s/^''//;s/''$//}}continue {$ret .= "$_ "}chop$ret;return \@err,$ret}sub shell_quote {my ($rerr,$s)=_shell_quote_backend @_;if (@$rerr){my%seen;@$rerr=grep {!$seen{$_}++}@$rerr;my$s=join '',map {"shell_quote(): $_\n"}@$rerr;chomp$s;croak$s}return$s}sub shell_quote_best_effort {my ($rerr,$s)=_shell_quote_backend @_;return$s}sub shell_comment_quote {return '' unless @_;unless (@_==1){croak "Too many arguments to shell_comment_quote " ."(got " .@_ ." expected 1)"}local $_=shift;s/\n/\n#/g;return $_}1; -STRING_SHELLQUOTE - -$fatpacked{"lib/core/only.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'LIB_CORE_ONLY'; - package lib::core::only;use strict;use warnings FATAL=>'all';use Config;sub import {@INC=@Config{qw(privlibexp archlibexp)};return}1; -LIB_CORE_ONLY - -$fatpacked{"local/lib.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'LOCAL_LIB'; - package local::lib;use 5.006;use strict;use warnings;use Config;our$VERSION='2.000015';$VERSION=eval$VERSION;BEGIN {*_WIN32=($^O eq 'MSWin32' || $^O eq 'NetWare' || $^O eq 'symbian')? sub(){1}: sub(){0};*_USE_FSPEC=($^O eq 'MacOS' || $^O eq 'VMS' || $INC{'File/Spec.pm'})? sub(){1}: sub(){0}}our$_DIR_JOIN=_WIN32 ? '\\' : '/';our$_DIR_SPLIT=(_WIN32 || $^O eq 'cygwin')? qr{[\\/]} : qr{/};our$_ROOT=_WIN32 ? do {my$UNC=qr{[\\/]{2}[^\\/]+[\\/][^\\/]+};qr{^(?:$UNC|[A-Za-z]:|)$_DIR_SPLIT}}: qr{^/};our$_PERL;sub _cwd {my$drive=shift;if (!$_PERL){($_PERL)=$^X =~ /(.+)/;if (_is_abs($_PERL)){}elsif (-x $Config{perlpath}){$_PERL=$Config{perlpath}}else {($_PERL)=map {/(.*)/}grep {-x $_}map {join($_DIR_JOIN,$_,$_PERL)}split /\Q$Config{path_sep}\E/,$ENV{PATH}}}local@ENV{qw(PATH IFS CDPATH ENV BASH_ENV)};my$cmd=$drive ? "eval { Cwd::getdcwd(q($drive)) }" : 'getcwd';my$cwd=`"$_PERL" -MCwd -le "print $cmd"`;chomp$cwd;if (!length$cwd && $drive){$cwd=$drive}$cwd =~ s/$_DIR_SPLIT?$/$_DIR_JOIN/;$cwd}sub _catdir {if (_USE_FSPEC){require File::Spec;File::Spec->catdir(@_)}else {my$dir=join($_DIR_JOIN,@_);$dir =~ s{($_DIR_SPLIT)(?:\.?$_DIR_SPLIT)+}{$1}g;$dir}}sub _is_abs {if (_USE_FSPEC){require File::Spec;File::Spec->file_name_is_absolute($_[0])}else {$_[0]=~ $_ROOT}}sub _rel2abs {my ($dir,$base)=@_;return$dir if _is_abs($dir);$base=_WIN32 && $dir =~ s/^([A-Za-z]:)// ? _cwd("$1"): $base ? $base : _cwd;return _catdir($base,$dir)}sub import {my ($class,@args)=@_;push@args,@ARGV if $0 eq '-';my@steps;my%opts;my$shelltype;while (@args){my$arg=shift@args;if ($arg =~ /\xE2\x88\x92/ or $arg =~ /−/){die <<'DEATH'}elsif ($arg eq '--self-contained'){die <<'DEATH'}elsif($arg =~ /^--deactivate(?:=(.*))?$/){my$path=defined $1 ? 
$1 : shift@args;push@steps,['deactivate',$path]}elsif ($arg eq '--deactivate-all'){push@steps,['deactivate_all']}elsif ($arg =~ /^--shelltype(?:=(.*))?$/){$shelltype=defined $1 ? $1 : shift@args}elsif ($arg eq '--no-create'){$opts{no_create}=1}elsif ($arg =~ /^--/){die "Unknown import argument: $arg"}else {push@steps,['activate',$arg]}}if (!@steps){push@steps,['activate',undef]}my$self=$class->new(%opts);for (@steps){my ($method,@args)=@$_;$self=$self->$method(@args)}if ($0 eq '-'){print$self->environment_vars_string($shelltype);exit 0}else {$self->setup_local_lib}}sub new {my$class=shift;bless {@_},$class}sub clone {my$self=shift;bless {%$self,@_},ref$self}sub inc {$_[0]->{inc}||= \@INC}sub libs {$_[0]->{libs}||= [\'PERL5LIB' ]}sub bins {$_[0]->{bins}||= [\'PATH' ]}sub roots {$_[0]->{roots}||= [\'PERL_LOCAL_LIB_ROOT' ]}sub extra {$_[0]->{extra}||= {}}sub no_create {$_[0]->{no_create}}my$_archname=$Config{archname};my$_version=$Config{version};my@_inc_version_list=reverse split / /,$Config{inc_version_list};my$_path_sep=$Config{path_sep};sub _as_list {my$list=shift;grep length,map {!(ref $_ && ref $_ eq 'SCALAR')? $_ : (defined$ENV{$$_}? split(/\Q$_path_sep/,$ENV{$$_}): ())}ref$list ? @$list : $list}sub _remove_from {my ($list,@remove)=@_;return @$list if!@remove;my%remove=map {$_=>1}@remove;grep!$remove{$_},_as_list($list)}my@_lib_subdirs=([$_version,$_archname],[$_version],[$_archname],(@_inc_version_list ? \@_inc_version_list : ()),[],);sub install_base_bin_path {my ($class,$path)=@_;return _catdir($path,'bin')}sub install_base_perl_path {my ($class,$path)=@_;return _catdir($path,'lib','perl5')}sub install_base_arch_path {my ($class,$path)=@_;_catdir($class->install_base_perl_path($path),$_archname)}sub lib_paths_for {my ($class,$path)=@_;my$base=$class->install_base_perl_path($path);return map {_catdir($base,@$_)}@_lib_subdirs}sub _mm_escape_path {my$path=shift;$path =~ s/\\/\\\\/g;if ($path =~ s/ /\\ /g){$path=qq{"$path"}}return$path}sub _mb_escape_path {my$path=shift;$path =~ s/\\/\\\\/g;return qq{"$path"}}sub installer_options_for {my ($class,$path)=@_;return (PERL_MM_OPT=>defined$path ? "INSTALL_BASE="._mm_escape_path($path): undef,PERL_MB_OPT=>defined$path ? "--install_base "._mb_escape_path($path): undef,)}sub active_paths {my ($self)=@_;$self=ref$self ? 
$self : $self->new;return grep {my$active_ll=$self->install_base_perl_path($_);grep {$_ eq $active_ll}@{$self->inc}}_as_list($self->roots)}sub deactivate {my ($self,$path)=@_;$self=$self->new unless ref$self;$path=$self->resolve_path($path);$path=$self->normalize_path($path);my@active_lls=$self->active_paths;if (!grep {$_ eq $path}@active_lls){warn "Tried to deactivate inactive local::lib '$path'\n";return$self}my%args=(bins=>[_remove_from($self->bins,$self->install_base_bin_path($path))],libs=>[_remove_from($self->libs,$self->install_base_perl_path($path))],inc=>[_remove_from($self->inc,$self->lib_paths_for($path))],roots=>[_remove_from($self->roots,$path)],);$args{extra}={$self->installer_options_for($args{roots}[0])};$self->clone(%args)}sub deactivate_all {my ($self)=@_;$self=$self->new unless ref$self;my@active_lls=$self->active_paths;my%args;if (@active_lls){%args=(bins=>[_remove_from($self->bins,map$self->install_base_bin_path($_),@active_lls)],libs=>[_remove_from($self->libs,map$self->install_base_perl_path($_),@active_lls)],inc=>[_remove_from($self->inc,map$self->lib_paths_for($_),@active_lls)],roots=>[_remove_from($self->roots,@active_lls)],)}$args{extra}={$self->installer_options_for(undef)};$self->clone(%args)}sub activate {my ($self,$path)=@_;$self=$self->new unless ref$self;$path=$self->resolve_path($path);$self->ensure_dir_structure_for($path)unless$self->no_create;$path=$self->normalize_path($path);my@active_lls=$self->active_paths;if (grep {$_ eq $path}@active_lls[1 .. $#active_lls]){$self=$self->deactivate($path)}my%args;if (!@active_lls || $active_lls[0]ne $path){%args=(bins=>[$self->install_base_bin_path($path),@{$self->bins}],libs=>[$self->install_base_perl_path($path),@{$self->libs}],inc=>[$self->lib_paths_for($path),@{$self->inc}],roots=>[$path,@{$self->roots}],)}$args{extra}={$self->installer_options_for($path)};$self->clone(%args)}sub normalize_path {my ($self,$path)=@_;$path=(Win32::GetShortPathName($path)|| $path)if $^O eq 'MSWin32';return$path}sub build_environment_vars_for {my$self=$_[0]->new->activate($_[1]);$self->build_environment_vars}sub build_activate_environment_vars_for {my$self=$_[0]->new->activate($_[1]);$self->build_environment_vars}sub build_deactivate_environment_vars_for {my$self=$_[0]->new->deactivate($_[1]);$self->build_environment_vars}sub build_deact_all_environment_vars_for {my$self=$_[0]->new->deactivate_all;$self->build_environment_vars}sub build_environment_vars {my$self=shift;(PATH=>join($_path_sep,_as_list($self->bins)),PERL5LIB=>join($_path_sep,_as_list($self->libs)),PERL_LOCAL_LIB_ROOT=>join($_path_sep,_as_list($self->roots)),%{$self->extra},)}sub setup_local_lib_for {my$self=$_[0]->new->activate($_[1]);$self->setup_local_lib}sub setup_local_lib {my$self=shift;require Carp::Heavy if$INC{'Carp.pm'};$self->setup_env_hash;@INC=@{$self->inc}}sub setup_env_hash_for {my$self=$_[0]->new->activate($_[1]);$self->setup_env_hash}sub setup_env_hash {my$self=shift;my%env=$self->build_environment_vars;for my$key (keys%env){if (defined$env{$key}){$ENV{$key}=$env{$key}}else {delete$ENV{$key}}}}sub print_environment_vars_for {print $_[0]->environment_vars_string_for(@_[1..$#_])}sub environment_vars_string_for {my$self=$_[0]->new->activate($_[1]);$self->environment_vars_string}sub environment_vars_string {my ($self,$shelltype)=@_;$shelltype ||= $self->guess_shelltype;my$extra=$self->extra;my@envs=(PATH=>$self->bins,PERL5LIB=>$self->libs,PERL_LOCAL_LIB_ROOT=>$self->roots,map {$_=>$extra->{$_}}sort keys 
%$extra,);$self->_build_env_string($shelltype,\@envs)}sub _build_env_string {my ($self,$shelltype,$envs)=@_;my@envs=@$envs;my$build_method="build_${shelltype}_env_declaration";my$out='';while (@envs){my ($name,$value)=(shift(@envs),shift(@envs));if (ref$value && @$value==1 && ref$value->[0]&& ref$value->[0]eq 'SCALAR' && ${$value->[0]}eq $name){next}$out .= $self->$build_method($name,$value)}my$wrap_method="wrap_${shelltype}_output";if ($self->can($wrap_method)){return$self->$wrap_method($out)}return$out}sub build_bourne_env_declaration {my ($class,$name,$args)=@_;my$value=$class->_interpolate($args,'${%s}',qr/["\\\$!`]/,'\\%s');if (!defined$value){return qq{unset $name;\n}}$value =~ s/(^|\G|$_path_sep)\$\{$name\}$_path_sep/$1\${$name}\${$name+$_path_sep}/g;$value =~ s/$_path_sep\$\{$name\}$/\${$name+$_path_sep}\${$name}/;qq{${name}="$value"; export ${name};\n}}sub build_csh_env_declaration {my ($class,$name,$args)=@_;my ($value,@vars)=$class->_interpolate($args,'${%s}','"','"\\%s"');if (!defined$value){return qq{unsetenv $name;\n}}my$out='';for my$var (@vars){$out .= qq{if ! \$?$name setenv $name '';\n}}my$value_without=$value;if ($value_without =~ s/(?:^|$_path_sep)\$\{$name\}(?:$_path_sep|$)//g){$out .= qq{if "\${$name}" != '' setenv $name "$value";\n};$out .= qq{if "\${$name}" == '' }}$out .= qq{setenv $name "$value_without";\n};return$out}sub build_cmd_env_declaration {my ($class,$name,$args)=@_;my$value=$class->_interpolate($args,'%%%s%%',qr(%),'%s');if (!$value){return qq{\@set $name=\n}}my$out='';my$value_without=$value;if ($value_without =~ s/(?:^|$_path_sep)%$name%(?:$_path_sep|$)//g){$out .= qq{\@if not "%$name%"=="" set "$name=$value"\n};$out .= qq{\@if "%$name%"=="" }}$out .= qq{\@set "$name=$value_without"\n};return$out}sub build_powershell_env_declaration {my ($class,$name,$args)=@_;my$value=$class->_interpolate($args,'$env:%s','"','`%s');if (!$value){return qq{Remove-Item -ErrorAction 0 Env:\\$name;\n}}my$maybe_path_sep=qq{\$(if("\$env:$name"-eq""){""}else{"$_path_sep"})};$value =~ s/(^|\G|$_path_sep)\$env:$name$_path_sep/$1\$env:$name"+$maybe_path_sep+"/g;$value =~ s/$_path_sep\$env:$name$/"+$maybe_path_sep+\$env:$name+"/;qq{\$env:$name = \$("$value");\n}}sub wrap_powershell_output {my ($class,$out)=@_;return$out || " \n"}sub build_fish_env_declaration {my ($class,$name,$args)=@_;my$value=$class->_interpolate($args,'$%s',qr/[\\"' ]/,'\\%s');if (!defined$value){return qq{set -e $name;\n}}$value =~ s/$_path_sep/ /g;qq{set -x $name $value;\n}}sub _interpolate {my ($class,$args,$var_pat,$escape,$escape_pat)=@_;return unless defined$args;my@args=ref$args ? @$args : $args;return unless@args;my@vars=map {$$_}grep {ref $_ eq 'SCALAR'}@args;my$string=join$_path_sep,map {ref $_ eq 'SCALAR' ? sprintf($var_pat,$$_): do {s/($escape)/sprintf($escape_pat, $1)/ge;$_}}@args;return wantarray ? 
($string,\@vars): $string}sub pipeline;sub pipeline {my@methods=@_;my$last=pop(@methods);if (@methods){\sub {my ($obj,@args)=@_;$obj->${pipeline@methods}($obj->$last(@args))}}else {\sub {shift->$last(@_)}}}sub resolve_path {my ($class,$path)=@_;$path=$class->${pipeline qw(resolve_relative_path resolve_home_path resolve_empty_path)}($path);$path}sub resolve_empty_path {my ($class,$path)=@_;if (defined$path){$path}else {'~/perl5'}}sub resolve_home_path {my ($class,$path)=@_;$path =~ /^~([^\/]*)/ or return$path;my$user=$1;my$homedir=do {if (!length($user)&& defined$ENV{HOME}){$ENV{HOME}}else {require File::Glob;File::Glob::bsd_glob("~$user",File::Glob::GLOB_TILDE())}};unless (defined$homedir){require Carp;require Carp::Heavy;Carp::croak("Couldn't resolve homedir for " .(defined$user ? $user : 'current user'))}$path =~ s/^~[^\/]*/$homedir/;$path}sub resolve_relative_path {my ($class,$path)=@_;_rel2abs($path)}sub ensure_dir_structure_for {my ($class,$path)=@_;unless (-d $path){warn "Attempting to create directory ${path}\n"}require File::Basename;my@dirs;while(!-d $path){push@dirs,$path;$path=File::Basename::dirname($path)}mkdir $_ for reverse@dirs;return}sub guess_shelltype {my$shellbin =defined$ENV{SHELL}? ($ENV{SHELL}=~ /([\w.]+)$/)[-1]: ($^O eq 'MSWin32' && exists$ENV{'!EXITCODE'})? 'bash' : ($^O eq 'MSWin32' && $ENV{PROMPT}&& $ENV{COMSPEC})? ($ENV{COMSPEC}=~ /([\w.]+)$/)[-1]: ($^O eq 'MSWin32' &&!$ENV{PROMPT})? 'powershell.exe' : 'sh';for ($shellbin){return /csh$/ ? 'csh' : /fish/ ? 'fish' : /command(?:\.com)?$/i ? 'cmd' : /cmd(?:\.exe)?$/i ? 'cmd' : /4nt(?:\.exe)?$/i ? 'cmd' : /powershell(?:\.exe)?$/i ? 'powershell' : 'bourne'}}1; - WHOA THERE! It looks like you've got some fancy dashes in your commandline! - These are *not* the traditional -- dashes that software recognizes. You - probably got these by copy-pasting from the perldoc for this module as - rendered by a UTF8-capable formatter. This most typically happens on an OS X - terminal, but can happen elsewhere too. Please try again after replacing the - dashes with normal minus signs. - DEATH - FATAL: The local::lib --self-contained flag has never worked reliably and the - original author, Mark Stosberg, was unable or unwilling to maintain it. As - such, this flag has been removed from the local::lib codebase in order to - prevent misunderstandings and potentially broken builds. The local::lib authors - recommend that you look at the lib::core::only module shipped with this - distribution in order to create a more robust environment that is equivalent to - what --self-contained provided (although quite possibly not what you originally - thought it provided due to the poor quality of the documentation, for which we - apologise). - DEATH -LOCAL_LIB - -$fatpacked{"parent.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'PARENT'; - package parent;use strict;use vars qw($VERSION);$VERSION='0.228';sub import {my$class=shift;my$inheritor=caller(0);if (@_ and $_[0]eq '-norequire'){shift @_}else {for (my@filename=@_){if ($_ eq $inheritor){warn "Class '$inheritor' tried to inherit from itself\n"};s{::|'}{/}g;require "$_.pm"}}{no strict 'refs';push @{"$inheritor\::ISA"},@_}};"All your base are belong to us" -PARENT - -$fatpacked{"version.pm"} = '#line '.(1+__LINE__).' 
"'.__FILE__."\"\n".<<'VERSION'; - package version;use 5.006002;use strict;use warnings::register;if ($] >= 5.015){warnings::register_categories(qw/version/)}use vars qw(@ISA $VERSION $CLASS $STRICT $LAX *declare *qv);$VERSION=0.9912;$CLASS='version';{local$SIG{'__DIE__'};if (1){eval "use version::vpp $VERSION";die "$@" if ($@);push@ISA,"version::vpp";local $^W;*version::qv=\&version::vpp::qv;*version::declare=\&version::vpp::declare;*version::_VERSION=\&version::vpp::_VERSION;*version::vcmp=\&version::vpp::vcmp;*version::new=\&version::vpp::new;*version::numify=\&version::vpp::numify;*version::normal=\&version::vpp::normal;if ($] >= 5.009000){no strict 'refs';*version::stringify=\&version::vpp::stringify;*{'version::(""'}=\&version::vpp::stringify;*{'version::(<=>'}=\&version::vpp::vcmp;*version::parse=\&version::vpp::parse}}else {push@ISA,"version::vxs";local $^W;*version::declare=\&version::vxs::declare;*version::qv=\&version::vxs::qv;*version::_VERSION=\&version::vxs::_VERSION;*version::vcmp=\&version::vxs::VCMP;*version::new=\&version::vxs::new;*version::numify=\&version::vxs::numify;*version::normal=\&version::vxs::normal;if ($] >= 5.009000){no strict 'refs';*version::stringify=\&version::vxs::stringify;*{'version::(""'}=\&version::vxs::stringify;*{'version::(<=>'}=\&version::vxs::VCMP;*version::parse=\&version::vxs::parse}}}require version::regex;*version::is_lax=\&version::regex::is_lax;*version::is_strict=\&version::regex::is_strict;*LAX=\$version::regex::LAX;*STRICT=\$version::regex::STRICT;sub import {no strict 'refs';my ($class)=shift;unless ($class eq $CLASS){local $^W;*{$class.'::declare'}=\&{$CLASS.'::declare'};*{$class.'::qv'}=\&{$CLASS.'::qv'}}my%args;if (@_){map {$args{$_}=1}@_}else {%args=(qv=>1,'UNIVERSAL::VERSION'=>1,)}my$callpkg=caller();if (exists($args{declare})){*{$callpkg.'::declare'}=sub {return$class->declare(shift)}unless defined(&{$callpkg.'::declare'})}if (exists($args{qv})){*{$callpkg.'::qv'}=sub {return$class->qv(shift)}unless defined(&{$callpkg.'::qv'})}if (exists($args{'UNIVERSAL::VERSION'})){local $^W;*UNIVERSAL::VERSION =\&{$CLASS.'::_VERSION'}}if (exists($args{'VERSION'})){*{$callpkg.'::VERSION'}=\&{$CLASS.'::_VERSION'}}if (exists($args{'is_strict'})){*{$callpkg.'::is_strict'}=\&{$CLASS.'::is_strict'}unless defined(&{$callpkg.'::is_strict'})}if (exists($args{'is_lax'})){*{$callpkg.'::is_lax'}=\&{$CLASS.'::is_lax'}unless defined(&{$callpkg.'::is_lax'})}}1; -VERSION - -$fatpacked{"version/regex.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'VERSION_REGEX'; - package version::regex;use strict;use vars qw($VERSION $CLASS $STRICT $LAX);$VERSION=0.9912;my$FRACTION_PART=qr/\.[0-9]+/;my$STRICT_INTEGER_PART=qr/0|[1-9][0-9]*/;my$LAX_INTEGER_PART=qr/[0-9]+/;my$STRICT_DOTTED_DECIMAL_PART=qr/\.[0-9]{1,3}/;my$LAX_DOTTED_DECIMAL_PART=qr/\.[0-9]+/;my$LAX_ALPHA_PART=qr/_[0-9]+/;my$STRICT_DECIMAL_VERSION=qr/ $STRICT_INTEGER_PART $FRACTION_PART? /x;my$STRICT_DOTTED_DECIMAL_VERSION=qr/ v $STRICT_INTEGER_PART $STRICT_DOTTED_DECIMAL_PART{2,} /x;$STRICT=qr/ $STRICT_DECIMAL_VERSION | $STRICT_DOTTED_DECIMAL_VERSION /x;my$LAX_DECIMAL_VERSION=qr/ $LAX_INTEGER_PART (?: \. | $FRACTION_PART $LAX_ALPHA_PART? )? - | - $FRACTION_PART $LAX_ALPHA_PART? - /x;my$LAX_DOTTED_DECIMAL_VERSION=qr/ - v $LAX_INTEGER_PART (?: $LAX_DOTTED_DECIMAL_PART+ $LAX_ALPHA_PART? )? - | - $LAX_INTEGER_PART? $LAX_DOTTED_DECIMAL_PART{2,} $LAX_ALPHA_PART? 
- /x;$LAX=qr/ undef | $LAX_DECIMAL_VERSION | $LAX_DOTTED_DECIMAL_VERSION /x;sub is_strict {defined $_[0]&& $_[0]=~ qr/ \A $STRICT \z /x}sub is_lax {defined $_[0]&& $_[0]=~ qr/ \A $LAX \z /x}1; -VERSION_REGEX - -$fatpacked{"version/vpp.pm"} = '#line '.(1+__LINE__).' "'.__FILE__."\"\n".<<'VERSION_VPP'; - package charstar;use overload ('""'=>\&thischar,'0+'=>\&thischar,'++'=>\&increment,'--'=>\&decrement,'+'=>\&plus,'-'=>\&minus,'*'=>\&multiply,'cmp'=>\&cmp,'<=>'=>\&spaceship,'bool'=>\&thischar,'='=>\&clone,);sub new {my ($self,$string)=@_;my$class=ref($self)|| $self;my$obj={string=>[split(//,$string)],current=>0,};return bless$obj,$class}sub thischar {my ($self)=@_;my$last=$#{$self->{string}};my$curr=$self->{current};if ($curr >= 0 && $curr <= $last){return$self->{string}->[$curr]}else {return ''}}sub increment {my ($self)=@_;$self->{current}++}sub decrement {my ($self)=@_;$self->{current}--}sub plus {my ($self,$offset)=@_;my$rself=$self->clone;$rself->{current}+= $offset;return$rself}sub minus {my ($self,$offset)=@_;my$rself=$self->clone;$rself->{current}-= $offset;return$rself}sub multiply {my ($left,$right,$swapped)=@_;my$char=$left->thischar();return$char * $right}sub spaceship {my ($left,$right,$swapped)=@_;unless (ref($right)){$right=$left->new($right)}return$left->{current}<=> $right->{current}}sub cmp {my ($left,$right,$swapped)=@_;unless (ref($right)){if (length($right)==1){return$left->thischar cmp $right}$right=$left->new($right)}return$left->currstr cmp $right->currstr}sub bool {my ($self)=@_;my$char=$self->thischar;return ($char ne '')}sub clone {my ($left,$right,$swapped)=@_;$right={string=>[@{$left->{string}}],current=>$left->{current},};return bless$right,ref($left)}sub currstr {my ($self,$s)=@_;my$curr=$self->{current};my$last=$#{$self->{string}};if (defined($s)&& $s->{current}< $last){$last=$s->{current}}my$string=join('',@{$self->{string}}[$curr..$last]);return$string}package version::vpp;use 5.006002;use strict;use warnings::register;use Config;use vars qw($VERSION $CLASS @ISA $LAX $STRICT $WARN_CATEGORY);$VERSION=0.9912;$CLASS='version::vpp';if ($] > 5.015){warnings::register_categories(qw/version/);$WARN_CATEGORY='version'}else {$WARN_CATEGORY='numeric'}require version::regex;*version::vpp::is_strict=\&version::regex::is_strict;*version::vpp::is_lax=\&version::regex::is_lax;*LAX=\$version::regex::LAX;*STRICT=\$version::regex::STRICT;use overload ('""'=>\&stringify,'0+'=>\&numify,'cmp'=>\&vcmp,'<=>'=>\&vcmp,'bool'=>\&vbool,'+'=>\&vnoop,'-'=>\&vnoop,'*'=>\&vnoop,'/'=>\&vnoop,'+='=>\&vnoop,'-='=>\&vnoop,'*='=>\&vnoop,'/='=>\&vnoop,'abs'=>\&vnoop,);sub import {no strict 'refs';my ($class)=shift;unless ($class eq $CLASS){local $^W;*{$class.'::declare'}=\&{$CLASS.'::declare'};*{$class.'::qv'}=\&{$CLASS.'::qv'}}my%args;if (@_){map {$args{$_}=1}@_}else {%args=(qv=>1,'UNIVERSAL::VERSION'=>1,)}my$callpkg=caller();if (exists($args{declare})){*{$callpkg.'::declare'}=sub {return$class->declare(shift)}unless defined(&{$callpkg.'::declare'})}if (exists($args{qv})){*{$callpkg.'::qv'}=sub {return$class->qv(shift)}unless defined(&{$callpkg.'::qv'})}if (exists($args{'UNIVERSAL::VERSION'})){no warnings qw/redefine/;*UNIVERSAL::VERSION =\&{$CLASS.'::_VERSION'}}if (exists($args{'VERSION'})){*{$callpkg.'::VERSION'}=\&{$CLASS.'::_VERSION'}}if (exists($args{'is_strict'})){*{$callpkg.'::is_strict'}=\&{$CLASS.'::is_strict'}unless defined(&{$callpkg.'::is_strict'})}if (exists($args{'is_lax'})){*{$callpkg.'::is_lax'}=\&{$CLASS.'::is_lax'}unless 
defined(&{$callpkg.'::is_lax'})}}my$VERSION_MAX=0x7FFFFFFF;use constant TRUE=>1;use constant FALSE=>0;sub isDIGIT {my ($char)=shift->thischar();return ($char =~ /\d/)}sub isALPHA {my ($char)=shift->thischar();return ($char =~ /[a-zA-Z]/)}sub isSPACE {my ($char)=shift->thischar();return ($char =~ /\s/)}sub BADVERSION {my ($s,$errstr,$error)=@_;if ($errstr){$$errstr=$error}return$s}sub prescan_version {my ($s,$strict,$errstr,$sqv,$ssaw_decimal,$swidth,$salpha)=@_;my$qv=defined$sqv ? $$sqv : FALSE;my$saw_decimal=defined$ssaw_decimal ? $$ssaw_decimal : 0;my$width=defined$swidth ? $$swidth : 3;my$alpha=defined$salpha ? $$salpha : FALSE;my$d=$s;if ($qv && isDIGIT($d)){goto dotted_decimal_version}if ($d eq 'v'){$d++;if (isDIGIT($d)){$qv=TRUE}else {return BADVERSION($s,$errstr,"Invalid version format (dotted-decimal versions require at least three parts)")}dotted_decimal_version: if ($strict && $d eq '0' && isDIGIT($d+1)){return BADVERSION($s,$errstr,"Invalid version format (no leading zeros)")}while (isDIGIT($d)){$d++}if ($d eq '.'){$saw_decimal++;$d++}else {if ($strict){return BADVERSION($s,$errstr,"Invalid version format (dotted-decimal versions require at least three parts)")}else {goto version_prescan_finish}}{my$i=0;my$j=0;while (isDIGIT($d)){$i++;while (isDIGIT($d)){$d++;$j++;if ($strict && $j > 3){return BADVERSION($s,$errstr,"Invalid version format (maximum 3 digits between decimals)")}}if ($d eq '_'){if ($strict){return BADVERSION($s,$errstr,"Invalid version format (no underscores)")}if ($alpha){return BADVERSION($s,$errstr,"Invalid version format (multiple underscores)")}$d++;$alpha=TRUE}elsif ($d eq '.'){if ($alpha){return BADVERSION($s,$errstr,"Invalid version format (underscores before decimal)")}$saw_decimal++;$d++}elsif (!isDIGIT($d)){last}$j=0}if ($strict && $i < 2){return BADVERSION($s,$errstr,"Invalid version format (dotted-decimal versions require at least three parts)")}}}else {my$j=0;if ($strict){if ($d eq '.'){return BADVERSION($s,$errstr,"Invalid version format (0 before decimal required)")}if ($d eq '0' && isDIGIT($d+1)){return BADVERSION($s,$errstr,"Invalid version format (no leading zeros)")}}if ($d eq '-'){return BADVERSION($s,$errstr,"Invalid version format (negative version number)")}while (isDIGIT($d)){$d++}if ($d eq '.'){$saw_decimal++;$d++}elsif (!$d || $d eq ';' || isSPACE($d)|| $d eq '}'){if ($d==$s){return BADVERSION($s,$errstr,"Invalid version format (version required)")}goto version_prescan_finish}elsif ($d==$s){return BADVERSION($s,$errstr,"Invalid version format (non-numeric data)")}elsif ($d eq '_'){if ($strict){return BADVERSION($s,$errstr,"Invalid version format (no underscores)")}elsif (isDIGIT($d+1)){return BADVERSION($s,$errstr,"Invalid version format (alpha without decimal)")}else {return BADVERSION($s,$errstr,"Invalid version format (misplaced underscore)")}}elsif ($d){return BADVERSION($s,$errstr,"Invalid version format (non-numeric data)")}if ($d &&!isDIGIT($d)&& ($strict ||!($d eq ';' || isSPACE($d)|| $d eq '}'))){return BADVERSION($s,$errstr,"Invalid version format (fractional part required)")}while (isDIGIT($d)){$d++;$j++;if ($d eq '.' 
&& isDIGIT($d-1)){if ($alpha){return BADVERSION($s,$errstr,"Invalid version format (underscores before decimal)")}if ($strict){return BADVERSION($s,$errstr,"Invalid version format (dotted-decimal versions must begin with 'v')")}$d=$s;$qv=TRUE;goto dotted_decimal_version}if ($d eq '_'){if ($strict){return BADVERSION($s,$errstr,"Invalid version format (no underscores)")}if ($alpha){return BADVERSION($s,$errstr,"Invalid version format (multiple underscores)")}if (!isDIGIT($d+1)){return BADVERSION($s,$errstr,"Invalid version format (misplaced underscore)")}$width=$j;$d++;$alpha=TRUE}}}version_prescan_finish: while (isSPACE($d)){$d++}if ($d &&!isDIGIT($d)&& (!($d eq ';' || $d eq '}'))){return BADVERSION($s,$errstr,"Invalid version format (non-numeric data)")}if ($saw_decimal > 1 && ($d-1)eq '.'){return BADVERSION($s,$errstr,"Invalid version format (trailing decimal)")}if (defined$sqv){$$sqv=$qv}if (defined$swidth){$$swidth=$width}if (defined$ssaw_decimal){$$ssaw_decimal=$saw_decimal}if (defined$salpha){$$salpha=$alpha}return$d}sub scan_version {my ($s,$rv,$qv)=@_;my$start;my$pos;my$last;my$errstr;my$saw_decimal=0;my$width=3;my$alpha=FALSE;my$vinf=FALSE;my@av;$s=new charstar$s;while (isSPACE($s)){$s++}$last=prescan_version($s,FALSE,\$errstr,\$qv,\$saw_decimal,\$width,\$alpha);if ($errstr){if ($s ne 'undef'){require Carp;Carp::croak($errstr)}}$start=$s;if ($s eq 'v'){$s++}$pos=$s;if ($qv){$$rv->{qv}=$qv}if ($alpha){$$rv->{alpha}=$alpha}if (!$qv && $width < 3){$$rv->{width}=$width}while (isDIGIT($pos)){$pos++}if (!isALPHA($pos)){my$rev;for (;;){$rev=0;{my$end=$pos;my$mult=1;my$orev;if (!$qv && $s > $start && $saw_decimal==1){$mult *= 100;while ($s < $end){$orev=$rev;$rev += $s * $mult;$mult /= 10;if ((abs($orev)> abs($rev))|| (abs($rev)> $VERSION_MAX)){warn("Integer overflow in version %d",$VERSION_MAX);$s=$end - 1;$rev=$VERSION_MAX;$vinf=1}$s++;if ($s eq '_'){$s++}}}else {while (--$end >= $s){$orev=$rev;$rev += $end * $mult;$mult *= 10;if ((abs($orev)> abs($rev))|| (abs($rev)> $VERSION_MAX)){warn("Integer overflow in version");$end=$s - 1;$rev=$VERSION_MAX;$vinf=1}}}}push@av,$rev;if ($vinf){$s=$last;last}elsif ($pos eq '.'){$pos++;if ($qv){while ($pos eq '0'){$pos++}}$s=$pos}elsif ($pos eq '_' && isDIGIT($pos+1)){$s=++$pos}elsif ($pos eq ',' && isDIGIT($pos+1)){$s=++$pos}elsif (isDIGIT($pos)){$s=$pos}else {$s=$pos;last}if ($qv){while (isDIGIT($pos)){$pos++}}else {my$digits=0;while ((isDIGIT($pos)|| $pos eq '_')&& $digits < 3){if ($pos ne '_'){$digits++}$pos++}}}}if ($qv){my$len=$#av;$len=2 - $len;while ($len-- > 0){push@av,0}}if ($vinf){$$rv->{original}="v.Inf";$$rv->{vinf}=1}elsif ($s > $start){$$rv->{original}=$start->currstr($s);if ($qv && $saw_decimal==1 && $start ne 'v'){$$rv->{original}='v' .$$rv->{original}}}else {$$rv->{original}='0';push(@av,0)}$$rv->{version}=\@av;if ($s eq 'undef'){$s += 5}return$s}sub new {my$class=shift;unless (defined$class or $#_ > 1){require Carp;Carp::croak('Usage: version::new(class, version)')}my$self=bless ({},ref ($class)|| $class);my$qv=FALSE;if ($#_==1){$qv=TRUE}my$value=pop;if (ref($value)&& eval('$value->isa("version")')){$self->{version}=[@{$value->{version}}];$self->{qv}=1 if$value->{qv};$self->{alpha}=1 if$value->{alpha};$self->{original}=''.$value->{original};return$self}if (not defined$value or $value =~ /^undef$/){push @{$self->{version}},0;$self->{original}="0";return ($self)}if (ref($value)=~ m/ARRAY|HASH/){require Carp;Carp::croak("Invalid version format (non-numeric data)")}$value=_un_vstring($value);if ($Config{d_setlocale}){use POSIX 
qw/locale_h/;use if$Config{d_setlocale},'locale';my$currlocale=setlocale(LC_ALL);if (localeconv()->{decimal_point}eq ','){$value =~ tr/,/./}}if ($value =~ /\d+.?\d*e[-+]?\d+/){$value=sprintf("%.9f",$value);$value =~ s/(0+)$//}my$s=scan_version($value,\$self,$qv);if ($s){warn("Version string '%s' contains invalid data; " ."ignoring: '%s'",$value,$s)}return ($self)}*parse=\&new;sub numify {my ($self)=@_;unless (_verify($self)){require Carp;Carp::croak("Invalid version object")}my$width=$self->{width}|| 3;my$alpha=$self->{alpha}|| "";my$len=$#{$self->{version}};my$digit=$self->{version}[0];my$string=sprintf("%d.",$digit);if ($alpha and warnings::enabled()){warnings::warn($WARN_CATEGORY,'alpha->numify() is lossy')}for (my$i=1 ;$i < $len ;$i++ ){$digit=$self->{version}[$i];if ($width < 3){my$denom=10**(3-$width);my$quot=int($digit/$denom);my$rem=$digit - ($quot * $denom);$string .= sprintf("%0".$width."d_%d",$quot,$rem)}else {$string .= sprintf("%03d",$digit)}}if ($len > 0){$digit=$self->{version}[$len];if ($alpha && $width==3){$string .= "_"}$string .= sprintf("%0".$width."d",$digit)}else {$string .= sprintf("000")}return$string}sub normal {my ($self)=@_;unless (_verify($self)){require Carp;Carp::croak("Invalid version object")}my$alpha=$self->{alpha}|| "";my$qv=$self->{qv}|| "";my$len=$#{$self->{version}};my$digit=$self->{version}[0];my$string=sprintf("v%d",$digit);for (my$i=1 ;$i < $len ;$i++ ){$digit=$self->{version}[$i];$string .= sprintf(".%d",$digit)}if ($len > 0){$digit=$self->{version}[$len];if ($alpha){$string .= sprintf("_%0d",$digit)}else {$string .= sprintf(".%0d",$digit)}}if ($len <= 2){for ($len=2 - $len;$len!=0;$len-- ){$string .= sprintf(".%0d",0)}}return$string}sub stringify {my ($self)=@_;unless (_verify($self)){require Carp;Carp::croak("Invalid version object")}return exists$self->{original}? $self->{original}: exists$self->{qv}? $self->normal : $self->numify}sub vcmp {require UNIVERSAL;my ($left,$right,$swap)=@_;my$class=ref($left);unless (UNIVERSAL::isa($right,$class)){$right=$class->new($right)}if ($swap){($left,$right)=($right,$left)}unless (_verify($left)){require Carp;Carp::croak("Invalid version object")}unless (_verify($right)){require Carp;Carp::croak("Invalid version format")}my$l=$#{$left->{version}};my$r=$#{$right->{version}};my$m=$l < $r ? 
$l : $r;my$lalpha=$left->is_alpha;my$ralpha=$right->is_alpha;my$retval=0;my$i=0;while ($i <= $m && $retval==0){$retval=$left->{version}[$i]<=> $right->{version}[$i];$i++}if ($retval==0 && $l==$r && $left->{version}[$m]==$right->{version}[$m]&& ($lalpha || $ralpha)){if ($lalpha &&!$ralpha){$retval=-1}elsif ($ralpha &&!$lalpha){$retval=+1}}if ($retval==0 && $l!=$r){if ($l < $r){while ($i <= $r && $retval==0){if ($right->{version}[$i]!=0){$retval=-1}$i++}}else {while ($i <= $l && $retval==0){if ($left->{version}[$i]!=0){$retval=+1}$i++}}}return$retval}sub vbool {my ($self)=@_;return vcmp($self,$self->new("0"),1)}sub vnoop {require Carp;Carp::croak("operation not supported with version object")}sub is_alpha {my ($self)=@_;return (exists$self->{alpha})}sub qv {my$value=shift;my$class=$CLASS;if (@_){$class=ref($value)|| $value;$value=shift}$value=_un_vstring($value);$value='v'.$value unless$value =~ /(^v|\d+\.\d+\.\d)/;my$obj=$CLASS->new($value);return bless$obj,$class}*declare=\&qv;sub is_qv {my ($self)=@_;return (exists$self->{qv})}sub _verify {my ($self)=@_;if (ref($self)&& eval {exists$self->{version}}&& ref($self->{version})eq 'ARRAY'){return 1}else {return 0}}sub _is_non_alphanumeric {my$s=shift;$s=new charstar$s;while ($s){return 0 if isSPACE($s);return 1 unless (isALPHA($s)|| isDIGIT($s)|| $s =~ /[.-]/);$s++}return 0}sub _un_vstring {my$value=shift;if (length($value)>= 1 && $value !~ /[,._]/ && _is_non_alphanumeric($value)){my$tvalue;if ($] >= 5.008_001){$tvalue=_find_magic_vstring($value);$value=$tvalue if length$tvalue}elsif ($] >= 5.006_000){$tvalue=sprintf("v%vd",$value);if ($tvalue =~ /^v\d+(\.\d+)*$/){$value=$tvalue}}}return$value}sub _find_magic_vstring {my$value=shift;my$tvalue='';require B;my$sv=B::svref_2object(\$value);my$magic=ref($sv)eq 'B::PVMG' ? $sv->MAGIC : undef;while ($magic){if ($magic->TYPE eq 'V'){$tvalue=$magic->PTR;$tvalue =~ s/^v?(.+)$/v$1/;last}else {$magic=$magic->MOREMAGIC}}return$tvalue}sub _VERSION {my ($obj,$req)=@_;my$class=ref($obj)|| $obj;no strict 'refs';if (exists$INC{"$class.pm"}and not %{"$class\::"}and $] >= 5.008){require Carp;Carp::croak("$class defines neither package nor VERSION" ."--version check failed")}my$version=eval "\$$class\::VERSION";if (defined$version){local $^W if $] <= 5.008;$version=version::vpp->new($version)}if (defined$req){unless (defined$version){require Carp;my$msg=$] < 5.006 ? "$class version $req required--this is only version " : "$class does not define \$$class\::VERSION" ."--version check failed";if ($ENV{VERSION_DEBUG}){Carp::confess($msg)}else {Carp::croak($msg)}}$req=version::vpp->new($req);if ($req > $version){require Carp;if ($req->is_qv){Carp::croak(sprintf ("%s version %s required--"."this is only version %s",$class,$req->normal,$version->normal))}else {Carp::croak(sprintf ("%s version %s required--"."this is only version %s",$class,$req->stringify,$version->stringify))}}}return defined$version ? 
$version->stringify : undef}1; -VERSION_VPP - -s/^ //mg for values %fatpacked; - -my $class = 'FatPacked::'.(0+\%fatpacked); -no strict 'refs'; -*{"${class}::files"} = sub { keys %{$_[0]} }; - -if ($] < 5.008) { - *{"${class}::INC"} = sub { - if (my $fat = $_[0]{$_[1]}) { - my $pos = 0; - my $last = length $fat; - return (sub { - return 0 if $pos == $last; - my $next = (1 + index $fat, "\n", $pos) || $last; - $_ .= substr $fat, $pos, $next - $pos; - $pos = $next; - return 1; - }); - } - }; -} - -else { - *{"${class}::INC"} = sub { - if (my $fat = $_[0]{$_[1]}) { - open my $fh, '<', \$fat - or die "FatPacker error loading $_[1] (could be a perl installation issue?)"; - return $fh; - } - return; - }; -} - -unshift @INC, bless \%fatpacked, $class; - } # END OF FATPACK CODE - - - -use strict; -use App::cpanminus::script; - - -unless (caller) { - my $app = App::cpanminus::script->new; - $app->parse_options(@ARGV); - exit $app->doit; -} - -__END__ - -=head1 NAME - -cpanm - get, unpack build and install modules from CPAN - -=head1 SYNOPSIS - - cpanm Test::More # install Test::More - cpanm MIYAGAWA/Plack-0.99_05.tar.gz # full distribution path - cpanm http://example.org/LDS/CGI.pm-3.20.tar.gz # install from URL - cpanm ~/dists/MyCompany-Enterprise-1.00.tar.gz # install from a local file - cpanm --interactive Task::Kensho # Configure interactively - cpanm . # install from local directory - cpanm --installdeps . # install all the deps for the current directory - cpanm -L extlib Plack # install Plack and all non-core deps into extlib - cpanm --mirror http://cpan.cpantesters.org/ DBI # use the fast-syncing mirror - cpanm --from https://cpan.metacpan.org/ Plack # use only the HTTPS mirror - -=head1 COMMANDS - -=over 4 - -=item (arguments) - -Command line arguments can be either a module name, distribution file, -local file path, HTTP URL or git repository URL. Following commands -will all work as you expect. - - cpanm Plack - cpanm Plack/Request.pm - cpanm MIYAGAWA/Plack-1.0000.tar.gz - cpanm /path/to/Plack-1.0000.tar.gz - cpanm http://cpan.metacpan.org/authors/id/M/MI/MIYAGAWA/Plack-0.9990.tar.gz - cpanm git://github.com/plack/Plack.git - -Additionally, you can use the notation using C<~> and C<@> to specify -version for a given module. C<~> specifies the version requirement in -the L format, while C<@> pins the exact version, and -is a shortcut for C<~"== VERSION">. - - cpanm Plack~1.0000 # 1.0000 or later - cpanm Plack~">= 1.0000, < 2.0000" # latest of 1.xxxx - cpanm Plack@0.9990 # specific version. same as Plack~"== 0.9990" - -The version query including specific version or range will be sent to -L to search for previous releases. The query will search for -BackPAN archives by default, unless you specify C<--dev> option, in -which case, archived versions will be filtered out. - -For a git repository, you can specify a branch, tag, or commit SHA to -build. The default is C - - cpanm git://github.com/plack/Plack.git@1.0000 # tag - cpanm git://github.com/plack/Plack.git@devel # branch - -=item -i, --install - -Installs the modules. This is a default behavior and this is just a -compatibility option to make it work like L or L. - -=item --self-upgrade - -Upgrades itself. It's just an alias for: - - cpanm App::cpanminus - -=item --info - -Displays the distribution information in -C format in the standard out. - -=item --installdeps - -Installs the dependencies of the target distribution but won't build -itself. Handy if you want to try the application from a version -controlled repository such as git. 
- - cpanm --installdeps . - -=item --look - -Download and unpack the distribution and then open the directory with -your shell. Handy to poke around the source code or do manual -testing. - -=item -h, --help - -Displays the help message. - -=item -V, --version - -Displays the version number. - -=back - -=head1 OPTIONS - -You can specify the default options in C environment variable. - -=over 4 - -=item -f, --force - -Force install modules even when testing failed. - -=item -n, --notest - -Skip the testing of modules. Use this only when you just want to save -time for installing hundreds of distributions to the same perl and -architecture you've already tested to make sure it builds fine. - -Defaults to false, and you can say C<--no-notest> to override when it -is set in the default options in C. - -=item --test-only - -Run the tests only, and do not install the specified module or -distributions. Handy if you want to verify the new (or even old) -releases pass its unit tests without installing the module. - -Note that if you specify this option with a module or distribution -that has dependencies, these dependencies will be installed if you -don't currently have them. - -=item -S, --sudo - -Switch to the root user with C when installing modules. Use this -if you want to install modules to the system perl include path. - -Defaults to false, and you can say C<--no-sudo> to override when it is -set in the default options in C. - -=item -v, --verbose - -Makes the output verbose. It also enables the interactive -configuration. (See --interactive) - -=item -q, --quiet - -Makes the output even more quiet than the default. It only shows the -successful/failed dependencies to the output. - -=item -l, --local-lib - -Sets the L compatible path to install modules to. You -don't need to set this if you already configure the shell environment -variables using L, but this can be used to override that -as well. - -=item -L, --local-lib-contained - -Same with C<--local-lib> but with L<--self-contained> set. All -non-core dependencies will be installed even if they're already -installed. - -For instance, - - cpanm -L extlib Plack - -would install Plack and all of its non-core dependencies into the -directory C, which can be loaded from your application with: - - use local::lib '/path/to/extlib'; - -Note that this option does B reliably work with perl installations -supplied by operating system vendors that strips standard modules from perl, -such as RHEL, Fedora and CentOS, B you also install packages supplying -all the modules that have been stripped. For these systems you will probably -want to install the C meta-package which does just that. - -=item --self-contained - -When examining the dependencies, assume no non-core modules are -installed on the system. Handy if you want to bundle application -dependencies in one directory so you can distribute to other machines. - -=item --exclude-vendor - -Don't include modules installed under the 'vendor' paths when searching for -core modules when the C<--self-contained> flag is in effect. This restores -the behaviour from before version 1.7023 - -=item --mirror - -Specifies the base URL for the CPAN mirror to use, such as -C (you can omit the trailing slash). You -can specify multiple mirror URLs by repeating the command line option. - -You can use a local directory that has a CPAN mirror structure -(created by tools such as L or L) by using a special -URL scheme C. 
If the given URL begins with `/` (without any -scheme), it is considered as a file scheme as well. - - cpanm --mirror file:///path/to/mirror - cpanm --mirror ~/minicpan # Because shell expands ~ to /home/user - -Defaults to C. - -=item --mirror-only - -Download the mirror's 02packages.details.txt.gz index file instead of -querying the CPAN Meta DB. This will also effectively opt out sending -your local perl versions to backend database servers such as CPAN Meta -DB and MetaCPAN. - -Select this option if you are using a local mirror of CPAN, such as -minicpan when you're offline, or your own CPAN index (a.k.a darkpan). - -=item --from, -M - - cpanm -M https://cpan.metacpan.org/ - cpanm --from https://cpan.metacpan.org/ - -Use the given mirror URL and its index as the I source to search -and download modules from. - -It works similar to C<--mirror> and C<--mirror-only> combined, with a -small difference: unlike C<--mirror> which I the URL to the -list of mirrors, C<--from> (or C<-M> for short) uses the specified URL -as its I source to download index and modules from. This makes -the option always override the default mirror, which might have been -set via global options such as the one set by C -environment variable. - -B It might be useful if you name these options with your shell -aliases, like: - - alias minicpanm='cpanm --from ~/minicpan' - alias darkpan='cpanm --from http://mycompany.example.com/DPAN' - -=item --mirror-index - -B: Specifies the file path to C<02packages.details.txt> -for module search index. - -=item --cpanmetadb - -B: Specifies an alternate URI for CPAN MetaDB index lookups. - -=item --metacpan - -Prefers MetaCPAN API over CPAN MetaDB. - -=item --cpanfile - -B: Specified an alternate path for cpanfile to search for, -when C<--installdeps> command is in use. Defaults to C. - -=item --prompt - -Prompts when a test fails so that you can skip, force install, retry -or look in the shell to see what's going wrong. It also prompts when -one of the dependency failed if you want to proceed the installation. - -Defaults to false, and you can say C<--no-prompt> to override if it's -set in the default options in C. - -=item --dev - -B: search for a newer developer release as well. Defaults to false. - -=item --reinstall - -cpanm, when given a module name in the command line (i.e. C), checks the locally installed version first and skips if it is -already installed. This option makes it skip the check, so: - - cpanm --reinstall Plack - -would reinstall L even if your locally installed version is -latest, or even newer (which would happen if you install a developer -release from version control repositories). - -Defaults to false. - -=item --interactive - -Makes the configuration (such as C and C) -interactive, so you can answer questions in the distribution that -requires custom configuration or Task:: distributions. - -Defaults to false, and you can say C<--no-interactive> to override -when it's set in the default options in C. - -=item --pp, --pureperl - -Prefer Pure perl build of modules by setting C for -MakeMaker and C<--pureperl-only> for Build.PL based -distributions. Note that not all of the CPAN modules support this -convention yet. - -=item --with-recommends, --with-suggests - -B: Installs dependencies declared as C and -C respectively, per META spec. When these dependencies fail -to install, cpanm continues the installation, since they're just -recommendation/suggestion. 
- -Enabling this could potentially make a circular dependency for a few -modules on CPAN, when C adds a module that C -back the module in return. - -There's also C<--without-recommend> and C<--without-suggests> to -override the default decision made earlier in C. - -Defaults to false for both. - -=item --with-develop - -B: Installs develop phase dependencies in META files or -C when used with C<--installdeps>. Defaults to false. - -=item --with-configure - -B: Installs configure phase dependencies in C -when used with C<--installdeps>. Defaults to false. - -=item --with-feature, --without-feature, --with-all-features - -B: Specifies the feature to enable, if a module supports -optional features per META spec 2.0. - - cpanm --with-feature=opt_csv Spreadsheet::Read - -the features can also be interactively chosen when C<--interactive> -option is enabled. - -C<--with-all-features> enables all the optional features, and -C<--without-feature> can select a feature to disable. - -=item --configure-timeout, --build-timeout, --test-timeout - -Specify the timeout length (in seconds) to wait for the configure, -build and test process. Current default values are: 60 for configure, -3600 for build and 1800 for test. - -=item --configure-args, --build-args, --test-args, --install-args - -B: Pass arguments for configure/build/test/install -commands respectively, for a given module to install. - - cpanm DBD::mysql --configure-args="--cflags=... --libs=..." - -The argument is only enabled for the module passed as a command line -argument, not dependencies. - -=item --scandeps - -B: Scans the depencencies of given modules and output the -tree in a text format. (See C<--format> below for more options) - -Because this command doesn't actually install any distributions, it -will be useful that by typing: - - cpanm --scandeps Catalyst::Runtime - -you can make sure what modules will be installed. - -This command takes into account which modules you already have -installed in your system. If you want to see what modules will be -installed against a vanilla perl installation, you might want to -combine it with C<-L> option. - -=item --format - -B: Determines what format to display the scanned -dependency tree. Available options are C, C, C and -C. - -=over 8 - -=item tree - -Displays the tree in a plain text format. This is the default value. - -=item json, yaml - -Outputs the tree in a JSON or YAML format. L and L modules -need to be installed respectively. The output tree is represented as a -recursive tuple of: - - [ distribution, dependencies ] - -and the container is an array containing the root elements. Note that -there may be multiple root nodes, since you can give multiple modules -to the C<--scandeps> command. - -=item dists - -C is a special output format, where it prints the distribution -filename in the I after the dependency resolution, -like: - - GAAS/MIME-Base64-3.13.tar.gz - GAAS/URI-1.58.tar.gz - PETDANCE/HTML-Tagset-3.20.tar.gz - GAAS/HTML-Parser-3.68.tar.gz - GAAS/libwww-perl-5.837.tar.gz - -which means you can install these distributions in this order without -extra dependencies. When combined with C<-L> option, it will be useful -to replay installations on other machines. - -=back - -=item --save-dists - -Specifies the optional directory path to copy downloaded tarballs in -the CPAN mirror compatible directory structure -i.e. I - -If the distro tarball did not come from CPAN, for example from a local -file or from GitHub, then it will be saved under -I. 
- -=item --uninst-shadows - -Uninstalls the shadow files of the distribution that you're -installing. This eliminates the confusion if you're trying to install -core (dual-life) modules from CPAN against perl 5.10 or older, or -modules that used to be XS-based but switched to pure perl at some -version. - -If you run cpanm as root and use C or equivalent to -specify custom installation path, you SHOULD disable this option so -you won't accidentally uninstall dual-life modules from the core -include path. - -Defaults to true if your perl version is smaller than 5.12, and you -can disable that with C<--no-uninst-shadows>. - -B: Since version 1.3000 this flag is turned off by default for -perl newer than 5.12, since with 5.12 @INC contains site_perl directory -I the perl core library path, and uninstalling shadows is not -necessary anymore and does more harm by deleting files from the core -library path. - -=item --uninstall, -U - -Uninstalls a module from the library path. It finds a packlist for -given modules, and removes all the files included in the same -distribution. - -If you enable local::lib, it only removes files from the local::lib -directory. - -If you try to uninstall a module in C directory (i.e. core -module), an error will be thrown. - -A dialog will be prompted to confirm the files to be deleted. If you pass -C<-f> option as well, the dialog will be skipped and uninstallation -will be forced. - -=item --cascade-search - -B: Specifies whether to cascade search when you specify -multiple mirrors and a mirror doesn't have a module or has a lower -version of the module than requested. Defaults to false. - -=item --skip-installed - -Specifies whether a module given in the command line is skipped if its latest -version is already installed. Defaults to true. - -B: The C environment variable have to be correctly set -for this to work with modules installed using L, unless -you always use the C<-l> option. - -=item --skip-satisfied - -B: Specifies whether a module (and version) given in the -command line is skipped if it's already installed. - -If you run: - - cpanm --skip-satisfied CGI DBI~1.2 - -cpanm won't install them if you already have CGI (for whatever -versions) or have DBI with version higher than 1.2. It is similar to -C<--skip-installed> but while C<--skip-installed> checks if the -I version of CPAN is installed, C<--skip-satisfied> checks if -a requested version (or not, which means any version) is installed. - -Defaults to false. - -=item --verify - -Verify the integrity of distribution files retrieved from PAUSE using -CHECKSUMS and SIGNATURES (if found). Defaults to false. - -=item --report-perl-version - -Whether it reports the locally installed perl version to the various -web server as part of User-Agent. Defaults to true unless CI related -environment variables such as C, C or C -is enabled. You can disable it by using C<--no-report-perl-version>. - -=item --auto-cleanup - -Specifies the number of days in which cpanm's work directories -expire. Defaults to 7, which means old work directories will be -cleaned up in one week. - -You can set the value to C<0> to make cpan never cleanup those -directories. - -=item --man-pages - -Generates man pages for executables (man1) and libraries (man3). - -Defaults to true (man pages generated) unless C<-L|--local-lib-contained> -option is supplied in which case it's set to false. You can disable -it with C<--no-man-pages>. - -=item --lwp - -Uses L module to download stuff over HTTP. 
Defaults to true, and -you can say C<--no-lwp> to disable using LWP, when you want to upgrade -LWP from CPAN on some broken perl systems. - -=item --wget - -Uses GNU Wget (if available) to download stuff. Defaults to true, and -you can say C<--no-wget> to disable using Wget (versions of Wget older -than 1.9 don't support the C<--retry-connrefused> option used by cpanm). - -=item --curl - -Uses cURL (if available) to download stuff. Defaults to true, and -you can say C<--no-curl> to disable using cURL. - -Normally with C<--lwp>, C<--wget> and C<--curl> options set to true -(which is the default) cpanm tries L, Wget, cURL and L -(in that order) and uses the first one available. - -=back - -=head1 ENVIRONMENT VARIABLES - -=over 4 - -=item PERL_CPANM_HOME - -The directory cpanm should use to store downloads and build and test -modules. Defaults to the C<.cpanm> directory in your user's home -directory. - -=item PERL_CPANM_OPT - -If set, adds a set of default options to every cpanm command. These -options come first, and so are overridden by command-line options. - -=back - -=head1 SEE ALSO - -L - -=head1 COPYRIGHT - -Copyright 2010- Tatsuhiko Miyagawa. - -=head1 AUTHOR - -Tatsuhiko Miyagawa - -=cut From 2929fb0a4d25e17ff84590c65275793682f63d24 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Tue, 2 Aug 2016 11:50:12 -0500 Subject: [PATCH 197/284] Move archive_version setting to if block The archive_version variable should only get set if versions will be checked and that is only if there is a list_url in the package file. For VCS repos setting the variable triggers an error from web.py as it parses the default_fetcher object. This should fix #1422. --- lib/spack/spack/stage.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 1a8b1a169a6..553c4ad05f4 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -318,10 +318,11 @@ def fetch(self, mirror_only=False): fetchers.insert(0, spack.cache.fetcher(self.mirror_path, digest)) # Look for the archive in list_url - archive_version = spack.url.parse_version(self.default_fetcher.url) package_name = os.path.dirname(self.mirror_path) pkg = spack.repo.get(package_name) if pkg.list_url is not None and pkg.url is not None: + archive_version = spack.url.parse_version( + self.default_fetcher.url) versions = pkg.fetch_remote_versions() try: url_from_list = versions[Version(archive_version)] From fa70a837d4d2f5ba7e96298d57d92833c3c9b09c Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Tue, 2 Aug 2016 13:00:47 -0400 Subject: [PATCH 198/284] Remove accidentally committed cabal-install bits --- .../packages/cabal-install/bootstrap.patch | 11 ---- .../builtin/packages/cabal-install/package.py | 51 ------------------- 2 files changed, 62 deletions(-) delete mode 100644 var/spack/repos/builtin/packages/cabal-install/bootstrap.patch delete mode 100644 var/spack/repos/builtin/packages/cabal-install/package.py diff --git a/var/spack/repos/builtin/packages/cabal-install/bootstrap.patch b/var/spack/repos/builtin/packages/cabal-install/bootstrap.patch deleted file mode 100644 index 374706ccf46..00000000000 --- a/var/spack/repos/builtin/packages/cabal-install/bootstrap.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- a/bootstrap.sh 2016-05-02 14:15:09.000000000 +0300 -+++ b/bootstrap.sh 2016-07-03 22:35:15.000000000 +0300 -@@ -69,7 +69,7 @@ - # Find the correct linker/linker-wrapper. 
- LINK="$(for link in collect2 ld; do - [ $($CC -print-prog-name=$link) = $link ] && continue || -- $CC -print-prog-name=$link -+ $CC -print-prog-name=$link && break - done)" - - # Fall back to "ld"... might work. diff --git a/var/spack/repos/builtin/packages/cabal-install/package.py b/var/spack/repos/builtin/packages/cabal-install/package.py deleted file mode 100644 index 53009704524..00000000000 --- a/var/spack/repos/builtin/packages/cabal-install/package.py +++ /dev/null @@ -1,51 +0,0 @@ -############################################################################## -# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://github.com/llnl/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License (as -# published by the Free Software Foundation) version 2.1, February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - - -class CabalInstall(Package): - """The 'cabal' command-line program simplifies the process of managing - Haskell software by automating the fetching, configuration, - compilation and installation of Haskell libraries and programs.""" - - homepage = "http://www.haskell.org/cabal/" - url = "http://hackage.haskell.org/package/cabal-install-1.24.0.0/cabal-install-1.24.0.0.tar.gz" - - version('1.24.0.0', 'beb998cdc385523935620381abe393f4') - - depends_on('zlib') - depends_on('ghc') - - # @mvkorpel's fix from: - # https://github.com/haskell/cabal/issues/3440 - # It works around problem deciding whether to use collect2 or ld. - # The symptom is complaint about "Setup: Unrecognized flags:..." - patch('bootstrap.patch') - - def install(self, spec, prefix): - bash=which("bash") - bash("bootstrap.sh", "--sandbox", prefix) - #bin.install ".cabal-sandbox/bin/cabal" - #bash_completion.install "bash-completion/cabal" From 941acef009f28ed47bff54e78b72d075fcfc5986 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Tue, 2 Aug 2016 12:20:28 -0500 Subject: [PATCH 199/284] spack create should use setup_py() instead of python() --- lib/spack/spack/cmd/create.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index da74ceb2f6e..51bf17a44b8 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -165,7 +165,7 @@ def install(self, spec, prefix): 'python': """\ # FIXME: Add logic to build and install here. - python('setup.py', 'install', '--prefix={0}'.format(prefix))""", + setup_py('install', '--prefix={0}'.format(prefix))""", 'R': """\ # FIXME: Add logic to build and install here. 
From f90692cf812ba9313c66dee405dee7d0e47535d3 Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Wed, 13 Jul 2016 10:30:54 +0200 Subject: [PATCH 200/284] arpack-ng: fix blas/lapack libraries --- .../builtin/packages/arpack-ng/package.py | 39 ++++++++++++++----- 1 file changed, 30 insertions(+), 9 deletions(-) diff --git a/var/spack/repos/builtin/packages/arpack-ng/package.py b/var/spack/repos/builtin/packages/arpack-ng/package.py index d5dc703c063..2874930cdda 100644 --- a/var/spack/repos/builtin/packages/arpack-ng/package.py +++ b/var/spack/repos/builtin/packages/arpack-ng/package.py @@ -86,10 +86,19 @@ def install(self, spec, prefix): options.extend(std_cmake_args) options.append('-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib' % prefix) - # TODO: - # Arpack calls directly find_package(BLAS REQUIRED) and - # find_package(LAPACK REQUIRED). Make sure correct Blas/Lapack are - # picked up. + # Make sure we use Spack's blas/lapack: + options.extend([ + '-DLAPACK_FOUND=true', + '-DLAPACK_INCLUDE_DIRS=%s' % spec['lapack'].prefix.include, + '-DLAPACK_LIBRARIES=%s' % ( + spec['lapack'].lapack_shared_lib if '+shared' in spec else + spec['lapack'].lapack_static_lib), + '-DBLAS_FOUND=true', + '-DBLAS_INCLUDE_DIRS=%s' % spec['blas'].prefix.include, + '-DBLAS_LIBRARIES=%s' % ( + spec['blas'].blas_shared_lib if '+shared' in spec else + spec['blas'].blas_static_lib) + ]) if '+mpi' in spec: options.append('-DMPI=ON') @@ -101,9 +110,8 @@ def install(self, spec, prefix): cmake('.', *options) make() - # TODO: make test does not work - # make('test') - + if self.run_tests: + make('test') make('install') @when('@3.3.0') @@ -120,10 +128,23 @@ def install(self, spec, prefix): 'F77=%s' % spec['mpi'].mpif77 ]) - if '~shared' in spec: - options.append('--enable-shared=no') + if '+shared' in spec: + options.extend([ + '--with-blas=%s' % to_link_flags( + spec['blas'].blas_shared_lib), + '--with-lapack=%s' % to_link_flags( + spec['lapack'].lapack_shared_lib) + ]) + else: + options.extend([ + '--with-blas=%s' % spec['blas'].blas_static_lib, + '--with-lapack=%s' % spec['lapack'].lapack_static_lib, + '--enable-shared=no' + ]) bootstrap() configure(*options) make() + if self.run_tests: + make('check') make('install') From d7d12aa2dc5deea4330d13ed73a804d1ab83adcc Mon Sep 17 00:00:00 2001 From: "Kelly (KT) Thompson" Date: Tue, 2 Aug 2016 19:05:48 -0600 Subject: [PATCH 201/284] One more formatting change to make flake8 happy. 
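The remaining complaint is the string concatenation in glib's url_for_version(), where the '+' carried no surrounding whitespace. Worth noting in passing: because '%' binds tighter than '+', both spellings format the version tuple first and only then append it to url, so the change below is purely cosmetic. A before/after sketch (both lines appear verbatim in the diff):

    return url+'/%s/glib-%s.tar.xz' % (version.up_to(2), version)    # flagged by flake8
    return url + '/%s/glib-%s.tar.xz' % (version.up_to(2), version)  # equivalent, and clean
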
--- var/spack/repos/builtin/packages/glib/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/glib/package.py b/var/spack/repos/builtin/packages/glib/package.py index 98704eaa4ce..1e5a53ab145 100644 --- a/var/spack/repos/builtin/packages/glib/package.py +++ b/var/spack/repos/builtin/packages/glib/package.py @@ -50,7 +50,7 @@ class Glib(Package): def url_for_version(self, version): """Handle glib's version-based custom URLs.""" url = 'http://ftp.gnome.org/pub/gnome/sources/glib' - return url+'/%s/glib-%s.tar.xz' % (version.up_to(2), version) + return url + '/%s/glib-%s.tar.xz' % (version.up_to(2), version) def install(self, spec, prefix): configure("--prefix=%s" % prefix) From 2f1c000f6231b7dbba899bf6e99b4535eba6a6a8 Mon Sep 17 00:00:00 2001 From: alalazo Date: Wed, 3 Aug 2016 08:26:58 +0200 Subject: [PATCH 202/284] qa : flake8 issues --- var/spack/repos/builtin/packages/plumed/package.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/plumed/package.py b/var/spack/repos/builtin/packages/plumed/package.py index e8cd6d18947..32571455ebb 100644 --- a/var/spack/repos/builtin/packages/plumed/package.py +++ b/var/spack/repos/builtin/packages/plumed/package.py @@ -44,7 +44,6 @@ class Plumed(Package): version('2.2.3', 'a6e3863e40aac07eb8cf739cbd14ecf8') - variant('shared', default=True, description='Builds shared libraries') variant('mpi', default=True, description='Activates MPI support') variant('gsl', default=True, description='Activates GSL support') @@ -65,14 +64,17 @@ def install(self, spec, prefix): # Also consider that this is different with respect to what some other # configure script does in that variables such as MPICXX are # completely ignored here. In case you work on a machine where CXX is - # set to a serial compiler and MPICXX to a MPI compiler, to compile with - # MPI you should use: + # set to a serial compiler and MPICXX to a MPI compiler, to compile + # with MPI you should use: # # > ./configure CXX="$MPICXX" - configure_opts = ['CXX={0}'.format(spec['mpi'].mpicxx)] if '+mpi' in self.spec else [] + configure_opts = [ + 'CXX={0}'.format(spec['mpi'].mpicxx) + ] if '+mpi' in self.spec else [] + configure_opts.extend([ '--prefix={0}'.format(prefix), - '--enable-shared={0}'.format('yes' if '+shared' in spec else 'no'), + '--enable-shared={0}'.format('yes' if '+shared' in spec else 'no'), # NOQA: ignore=E501 '--enable-mpi={0}'.format('yes' if '+mpi' in spec else 'no'), '--enable-gsl={0}'.format('yes' if '+gsl' in spec else 'no') ]) From 7c46a4c0e4c255f3c7969d6681cd8fd7c239ed98 Mon Sep 17 00:00:00 2001 From: "Kelly (KT) Thompson" Date: Wed, 3 Aug 2016 13:26:02 -0600 Subject: [PATCH 203/284] Provide minor updates after code review + Always depend on the gettext package. This simplifies the logic and I no longer need to 'import sys' + Only apply the patch for the older version of glib. 
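In Spack's package DSL both of the points above reduce to one-line directives; the resulting stanza looks roughly like this (the exact lines are in the diff below):

    # gettext is required on every platform now, so no 'import sys' / darwin check is needed
    depends_on('gettext')

    # the gcc-6.1 workaround is only applied to the older 2.42.1 release
    patch('g_date_strftime.patch', when='@2.42.1')
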
--- var/spack/repos/builtin/packages/glib/package.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/glib/package.py b/var/spack/repos/builtin/packages/glib/package.py index 1e5a53ab145..2720831e4f9 100644 --- a/var/spack/repos/builtin/packages/glib/package.py +++ b/var/spack/repos/builtin/packages/glib/package.py @@ -23,7 +23,6 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import sys class Glib(Package): @@ -40,12 +39,11 @@ class Glib(Package): depends_on('libffi') depends_on('zlib') depends_on('pkg-config', type='build') - depends_on('gettext', when=sys.platform == 'darwin') + depends_on('gettext') depends_on('pcre+utf', when='@2.49:') - depends_on('gettext', when='@2.49:') # The following patch is needed for gcc-6.1 - patch('g_date_strftime.patch') + patch('g_date_strftime.patch', when='@2.42.1') def url_for_version(self, version): """Handle glib's version-based custom URLs.""" From 46e9d85283075680f1419092e0a50751b603ce9f Mon Sep 17 00:00:00 2001 From: "Kelly (KT) Thompson" Date: Wed, 3 Aug 2016 14:15:44 -0600 Subject: [PATCH 204/284] Cray: Fix a typo that could cause an infinite recursion when calling /env/cc. fixes #1428 --- lib/spack/spack/platforms/cray.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/platforms/cray.py b/lib/spack/spack/platforms/cray.py index 2a3b81cf9cd..0059b49ff18 100644 --- a/lib/spack/spack/platforms/cray.py +++ b/lib/spack/spack/platforms/cray.py @@ -98,7 +98,7 @@ def setup_platform_environment(self, pkg, env): cray_wrapper_names = join_path(spack.build_env_path, 'cray') if os.path.isdir(cray_wrapper_names): env.prepend_path('PATH', cray_wrapper_names) - env.prepend_path('SPACK_ENV_PATHS', cray_wrapper_names) + env.prepend_path('SPACK_ENV_PATH', cray_wrapper_names) @classmethod def detect(self): From a7bfaa37970b6cbbd5ef0821bf2483a3161f991a Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Wed, 27 Jul 2016 14:26:23 -0400 Subject: [PATCH 205/284] Add package for ocaml compiler Installs the ocaml compiler. --- .../repos/builtin/packages/ocaml/package.py | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 var/spack/repos/builtin/packages/ocaml/package.py diff --git a/var/spack/repos/builtin/packages/ocaml/package.py b/var/spack/repos/builtin/packages/ocaml/package.py new file mode 100644 index 00000000000..9488d3b7a62 --- /dev/null +++ b/var/spack/repos/builtin/packages/ocaml/package.py @@ -0,0 +1,43 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Ocaml(Package): + """OCaml is an industrial strength programming language supporting + functional, imperative and object-oriented styles""" + + homepage = "http://ocaml.org/" + url = "http://caml.inria.fr/pub/distrib/ocaml-4.03/ocaml-4.03.0.tar.gz" + + version('4.03.0', '43812739ea1b4641cf480f57f977c149') + + depends_on('ncurses') + + def install(self, spec, prefix): + configure('-prefix', '{0}'.format(prefix)) + + make('world.opt') + make('install') From 4acf6d76bf7ec982573331835f7bcddd8487b18b Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Wed, 27 Jul 2016 14:27:05 -0400 Subject: [PATCH 206/284] Add package for unison Add package for unison (the file synchronizer). Needs the ocaml compiler to build. --- .../repos/builtin/packages/unison/package.py | 50 +++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 var/spack/repos/builtin/packages/unison/package.py diff --git a/var/spack/repos/builtin/packages/unison/package.py b/var/spack/repos/builtin/packages/unison/package.py new file mode 100644 index 00000000000..15c57db6653 --- /dev/null +++ b/var/spack/repos/builtin/packages/unison/package.py @@ -0,0 +1,50 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Unison(Package): + """Unison is a file-synchronization tool for OSX, Unix, and + Windows. 
It allows two replicas of a collection of files and + directories to be stored on different hosts (or different disks + on the same host), modified separately, and then brought up to + date by propagating the changes in each replica to the + other.""" + + homepage = "https://www.cis.upenn.edu/~bcpierce/unison/" + url = "https://www.seas.upenn.edu/~bcpierce/unison//download/releases/stable/unison-2.48.3.tar.gz" + + version('2.48.4', '5334b78c7e68169df7de95f4c6c4b60f') + + depends_on('ocaml', type='build') + + parallel = False + def install(self, spec, prefix): + make('./mkProjectInfo') + make('UISTYLE=text') + + mkdirp(prefix.bin) + install('unison', prefix.bin) + set_executable(join_path(prefix.bin, 'unison')) From 43371c31a2fab552f2421ca9d355b10f5ff8098e Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Wed, 3 Aug 2016 16:51:53 -0400 Subject: [PATCH 207/284] Flake8 cleanup --- var/spack/repos/builtin/packages/unison/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/unison/package.py b/var/spack/repos/builtin/packages/unison/package.py index 15c57db6653..181e1e6410e 100644 --- a/var/spack/repos/builtin/packages/unison/package.py +++ b/var/spack/repos/builtin/packages/unison/package.py @@ -41,6 +41,7 @@ class Unison(Package): depends_on('ocaml', type='build') parallel = False + def install(self, spec, prefix): make('./mkProjectInfo') make('UISTYLE=text') From f0609699bab84ae71fce57acf81355c0e567e440 Mon Sep 17 00:00:00 2001 From: James Riley Wynne III Date: Wed, 3 Aug 2016 17:16:04 -0400 Subject: [PATCH 208/284] Added parens to %'s arguments. Fixes #1432 --- lib/spack/spack/concretize.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index eced9917c96..6f11c86ce87 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -495,7 +495,7 @@ class UnavailableCompilerVersionError(spack.error.SpackError): def __init__(self, compiler_spec, operating_system): super(UnavailableCompilerVersionError, self).__init__( - "No available compiler version matches '%s' on operating_system %s" % compiler_spec, operating_system, # NOQA: ignore=E501 + "No available compiler version matches '%s' on operating_system %s" % (compiler_spec, operating_system), # NOQA: ignore=E501 "Run 'spack compilers' to see available compiler Options.") From 638f779841cc8be8d51c5cb407142045e4735031 Mon Sep 17 00:00:00 2001 From: Jim Galarowicz Date: Wed, 3 Aug 2016 21:04:17 -0500 Subject: [PATCH 209/284] Update the version of xerces-c, where the developers remove the old version, so there is no fallback. 
--- var/spack/repos/builtin/packages/xerces-c/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/xerces-c/package.py b/var/spack/repos/builtin/packages/xerces-c/package.py index 2efccc3c083..3927a949f79 100644 --- a/var/spack/repos/builtin/packages/xerces-c/package.py +++ b/var/spack/repos/builtin/packages/xerces-c/package.py @@ -32,8 +32,8 @@ class XercesC(Package): """ homepage = "https://xerces.apache.org/xerces-c" - url = "https://www.apache.org/dist/xerces/c/3/sources/xerces-c-3.1.3.tar.bz2" - version('3.1.3', '5e333b55cb43e6b025ddf0e5d0f0fb0d') + url = "https://www.apache.org/dist/xerces/c/3/sources/xerces-c-3.1.4.tar.bz2" + version('3.1.4', 'd04ae9d8b2dee2157c6db95fa908abfd') def install(self, spec, prefix): configure("--prefix=%s" % prefix, From e68b7d8c6315c5fc153466105a218b6e5445465d Mon Sep 17 00:00:00 2001 From: Jim Galarowicz Date: Wed, 3 Aug 2016 21:08:48 -0500 Subject: [PATCH 210/284] Update libmonitor to use its new github location, it does not exist where the package file now points to. --- var/spack/repos/builtin/packages/libmonitor/package.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/libmonitor/package.py b/var/spack/repos/builtin/packages/libmonitor/package.py index 883d8af4059..0d462ae89de 100644 --- a/var/spack/repos/builtin/packages/libmonitor/package.py +++ b/var/spack/repos/builtin/packages/libmonitor/package.py @@ -26,9 +26,8 @@ class Libmonitor(Package): """Libmonitor is a library for process and thread control.""" - homepage = "http://hpctoolkit.org" - - version('20130218', svn='http://libmonitor.googlecode.com/svn/trunk/', revision=146) + homepage = "https://github.com/HPCToolkit/libmonitor" + version('20130218', git='https://github.com/HPCToolkit/libmonitor.git', commit='4f2311e') variant('krellpatch', default=False, description="build with openspeedshop based patch.") From e69423a154b54c5e2605442d97ab9454028e60d2 Mon Sep 17 00:00:00 2001 From: Jim Galarowicz Date: Wed, 3 Aug 2016 21:28:56 -0500 Subject: [PATCH 211/284] Fix flake8 errors for Update the version of xerces-c, where the developers remove the old version, so there is no fallback. --- var/spack/repos/builtin/packages/xerces-c/package.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/xerces-c/package.py b/var/spack/repos/builtin/packages/xerces-c/package.py index 3927a949f79..d0c2d3d497b 100644 --- a/var/spack/repos/builtin/packages/xerces-c/package.py +++ b/var/spack/repos/builtin/packages/xerces-c/package.py @@ -24,11 +24,13 @@ ############################################################################## from spack import * + class XercesC(Package): """ Xerces-C++ is a validating XML parser written in a portable subset of C++. Xerces-C++ makes it easy to give your application the ability to read and write XML data. A shared library is provided for parsing, generating, - manipulating, and validating XML documents using the DOM, SAX, and SAX2 APIs. + manipulating, and validating XML documents using the DOM, SAX, and SAX2 + APIs. 
""" homepage = "https://xerces.apache.org/xerces-c" @@ -41,4 +43,3 @@ def install(self, spec, prefix): make("clean") make() make("install") - From 2e1dbd0697ef2a07154ed83a0d31d144c1bb8b5e Mon Sep 17 00:00:00 2001 From: Jim Galarowicz Date: Wed, 3 Aug 2016 21:32:15 -0500 Subject: [PATCH 212/284] Fix flake8 errors for: Update libmonitor to use its new github location, it does not exist where the package file now points to. --- var/spack/repos/builtin/packages/libmonitor/package.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/libmonitor/package.py b/var/spack/repos/builtin/packages/libmonitor/package.py index 0d462ae89de..611e602e2f9 100644 --- a/var/spack/repos/builtin/packages/libmonitor/package.py +++ b/var/spack/repos/builtin/packages/libmonitor/package.py @@ -24,18 +24,17 @@ ############################################################################## from spack import * + class Libmonitor(Package): """Libmonitor is a library for process and thread control.""" homepage = "https://github.com/HPCToolkit/libmonitor" version('20130218', git='https://github.com/HPCToolkit/libmonitor.git', commit='4f2311e') variant('krellpatch', default=False, description="build with openspeedshop based patch.") - patch('libmonitorkrell-0000.patch', when='@20130218+krellpatch') patch('libmonitorkrell-0001.patch', when='@20130218+krellpatch') patch('libmonitorkrell-0002.patch', when='@20130218+krellpatch') - def install(self, spec, prefix): configure("--prefix=" + prefix) make() From b7fa2c4e2161f34fcdaa355b36b3c63832844b3d Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 3 Aug 2016 21:36:08 -0700 Subject: [PATCH 213/284] Fix #1429: Remove git dep from gettext; break circular dependency. - It's not really a circular dependency -- git is a run dependency of gettext - We can revert this change when Spack is smart enough to make git a run dependency and build it. --- var/spack/repos/builtin/packages/gettext/package.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/gettext/package.py b/var/spack/repos/builtin/packages/gettext/package.py index 0a0b163a746..cf260c3f8ad 100644 --- a/var/spack/repos/builtin/packages/gettext/package.py +++ b/var/spack/repos/builtin/packages/gettext/package.py @@ -49,7 +49,6 @@ class Gettext(Package): depends_on('libxml2', when='+libxml2') # Java runtime and compiler (e.g. GNU gcj or kaffe) # C# runtime and compiler (e.g. pnet or mono) - depends_on('git@1.6:', when='+git') depends_on('tar', when='+tar') # depends_on('gzip', when='+gzip') depends_on('bzip2', when='+bzip2') @@ -86,9 +85,6 @@ def install(self, spec, prefix): else: config_args.append('--with-included-libxml') - if '+git' not in spec: - config_args.append('--without-git') - if '+bzip2' not in spec: config_args.append('--without-bzip2') From 7affaca32135c920da5e8ec7473017cbd36c8e30 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Thu, 21 Jul 2016 16:28:19 +0200 Subject: [PATCH 214/284] Fix graph command with ASCII output. 
--- lib/spack/spack/graph.py | 13 +++++++------ lib/spack/spack/spec.py | 4 ++-- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py index 063e4647b60..6a80c7d4725 100644 --- a/lib/spack/spack/graph.py +++ b/lib/spack/spack/graph.py @@ -94,6 +94,7 @@ def topological_sort(spec, **kwargs): nodes = spec.index() topo_order = [] + par = {name: parents(nodes[name]) for name in nodes.keys()} remaining = [name for name in nodes.keys() if not parents(nodes[name])] heapify(remaining) @@ -102,12 +103,12 @@ def topological_sort(spec, **kwargs): topo_order.append(name) node = nodes[name] - for dep in children(node).values(): - del parents(dep)[node.name] - if not parents(dep): + for dep in children(node): + par[dep.name].remove(node) + if not par[dep.name]: heappush(remaining, dep.name) - if any(parents(s) for s in spec.traverse()): + if any(par.get(s.name, []) for s in spec.traverse()): raise ValueError("Spec has cycles!") else: return topo_order @@ -477,8 +478,8 @@ def write(self, spec, **kwargs): # Replace node with its dependencies self._frontier.pop(i) - if node.dependencies: - deps = sorted((d for d in node.dependencies), reverse=True) + if node.dependencies(): + deps = sorted((d.name for d in node.dependencies()), reverse=True) self._connect_deps(i, deps, "new-deps") # anywhere. elif self._frontier: diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index c6277fc8d2b..8e44075f42f 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -460,7 +460,7 @@ def concrete(self): def __str__(self): return ''.join( - ["^" + str(self[name].spec) for name in sorted(self.keys())]) + ["^" + self[name].format() for name in sorted(self.keys())]) @key_ordering @@ -861,7 +861,7 @@ def return_val(res): for name in sorted(successors): child = successors[name] children = child.spec.traverse_with_deptype( - visited, d=d + 1, deptype=deptype_query, + visited, d=d + 1, deptype=deptype, deptype_query=deptype_query, _self_deptype=child.deptypes, **kwargs) for elt in children: From 1578a959823bbb8a5ab229a1ba0d11d3d96cc180 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Thu, 21 Jul 2016 17:07:15 +0200 Subject: [PATCH 215/284] Fix tests. 
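These fixes line up the test suite (plus one leftover in graph.py) with the traversal changes from the previous commit. The dict comprehension added there becomes a dict() call over a generator expression, presumably to keep Python 2.6 working, since dict comprehensions require 2.7:

    par = {name: parents(nodes[name]) for name in nodes.keys()}         # needs Python >= 2.7
    par = dict((name, parents(nodes[name])) for name in nodes.keys())   # also runs on 2.6

The deptype traversal tests now request the dependency types they mean explicitly -- dag.traverse(deptype=('build', 'link')) for build/link chains, dag.traverse(deptype=spack.alldeps) for everything, dag.traverse(deptype='run') for run-only -- with the expected node lists updated to match.

For reference, topological_sort() itself follows a heap-based, Kahn-style scheme: count unsatisfied parent edges per node, repeatedly emit a node with none left, and release its children. A minimal, self-contained sketch of that idea -- the names here (topo_sort, children, parents, ready) are illustrative only, not Spack's API:

    from heapq import heapify, heappop, heappush

    def topo_sort(children):
        """children maps every node name to a list of its child names."""
        # Record each node's remaining (unvisited) parents.
        parents = dict((n, set()) for n in children)
        for n, kids in children.items():
            for k in kids:
                parents[k].add(n)

        ready = [n for n in children if not parents[n]]  # roots: nothing points at them
        heapify(ready)                                   # heap makes the order deterministic

        order = []
        while ready:
            n = heappop(ready)
            order.append(n)
            for k in children[n]:
                parents[k].discard(n)                    # edge n -> k is now satisfied
                if not parents[k]:
                    heappush(ready, k)

        if any(parents.values()):                        # unsatisfied edges remain: a cycle
            raise ValueError("graph has a cycle")
        return order

    # e.g. topo_sort({'top': ['a', 'b'], 'a': ['b'], 'b': []}) == ['top', 'a', 'b']
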
--- lib/spack/spack/graph.py | 2 +- lib/spack/spack/test/spec_dag.py | 21 ++++++++++----------- 2 files changed, 11 insertions(+), 12 deletions(-) diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py index 6a80c7d4725..5ecd40bd3d1 100644 --- a/lib/spack/spack/graph.py +++ b/lib/spack/spack/graph.py @@ -94,7 +94,7 @@ def topological_sort(spec, **kwargs): nodes = spec.index() topo_order = [] - par = {name: parents(nodes[name]) for name in nodes.keys()} + par = dict((name, parents(nodes[name])) for name in nodes.keys()) remaining = [name for name in nodes.keys() if not parents(nodes[name])] heapify(remaining) diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py index 972e79aa20f..fd57a4ea4b3 100644 --- a/lib/spack/spack/test/spec_dag.py +++ b/lib/spack/spack/test/spec_dag.py @@ -476,20 +476,20 @@ def test_deptype_traversal(self): dag = Spec('dtuse') dag.normalize() - names = ['dtuse', 'dttop', 'dtlink1', 'dtlink3', 'dtlink4', - 'dtrun1', 'dtlink5', 'dtrun3'] + names = ['dtuse', 'dttop', 'dtbuild1', 'dtbuild2', 'dtlink2', + 'dtlink1', 'dtlink3', 'dtlink4'] - traversal = dag.traverse() + traversal = dag.traverse(deptype=('build', 'link')) self.assertEqual([x.name for x in traversal], names) def test_deptype_traversal_with_builddeps(self): dag = Spec('dttop') dag.normalize() - names = ['dttop', 'dtbuild1', 'dtlink2', 'dtrun2', 'dtlink1', - 'dtlink3', 'dtlink4', 'dtrun1', 'dtlink5', 'dtrun3'] + names = ['dttop', 'dtbuild1', 'dtbuild2', 'dtlink2', + 'dtlink1', 'dtlink3', 'dtlink4'] - traversal = dag.traverse() + traversal = dag.traverse(deptype=('build', 'link')) self.assertEqual([x.name for x in traversal], names) def test_deptype_traversal_full(self): @@ -500,15 +500,14 @@ def test_deptype_traversal_full(self): 'dtlink1', 'dtlink3', 'dtlink4', 'dtrun1', 'dtlink5', 'dtrun3', 'dtbuild3'] - traversal = dag.traverse(deptype_query=spack.alldeps) + traversal = dag.traverse(deptype=spack.alldeps) self.assertEqual([x.name for x in traversal], names) - def test_deptype_traversal_pythonpath(self): + def test_deptype_traversal_run(self): dag = Spec('dttop') dag.normalize() - names = ['dttop', 'dtbuild1', 'dtrun2', 'dtlink1', 'dtrun1', - 'dtrun3'] + names = ['dttop', 'dtrun1', 'dtrun3'] - traversal = dag.traverse(deptype=spack.nolink, deptype_query='run') + traversal = dag.traverse(deptype='run') self.assertEqual([x.name for x in traversal], names) From 2bd1a5119382c90865ab135a88d406740c2c5871 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Thu, 21 Jul 2016 17:40:14 +0200 Subject: [PATCH 216/284] flake8 fixes --- lib/spack/spack/graph.py | 79 +++++++++++------------ lib/spack/spack/test/spec_dag.py | 107 +++++++++++++------------------ 2 files changed, 83 insertions(+), 103 deletions(-) diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py index 5ecd40bd3d1..80d1199ef5f 100644 --- a/lib/spack/spack/graph.py +++ b/lib/spack/spack/graph.py @@ -61,7 +61,6 @@ can take a number of specs as input. """ -__all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot'] from heapq import * @@ -71,6 +70,8 @@ import spack from spack.spec import Spec +__all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot'] + def topological_sort(spec, **kwargs): """Topological sort for specs. 
@@ -133,6 +134,7 @@ def find(seq, predicate): states = ('node', 'collapse', 'merge-right', 'expand-right', 'back-edge') NODE, COLLAPSE, MERGE_RIGHT, EXPAND_RIGHT, BACK_EDGE = states + class AsciiGraph(object): def __init__(self): # These can be set after initialization or after a call to @@ -154,18 +156,15 @@ def __init__(self): self._prev_state = None # State of previous line self._prev_index = None # Index of expansion point of prev line - def _indent(self): self._out.write(self.indent * ' ') - def _write_edge(self, string, index, sub=0): """Write a colored edge to the output stream.""" name = self._frontier[index][sub] edge = "@%s{%s}" % (self._name_to_color[name], string) self._out.write(edge) - def _connect_deps(self, i, deps, label=None): """Connect dependencies to existing edges in the frontier. @@ -200,7 +199,8 @@ def _connect_deps(self, i, deps, label=None): collapse = True if self._prev_state == EXPAND_RIGHT: # Special case where previous line expanded and i is off by 1. - self._back_edge_line([], j, i+1, True, label + "-1.5 " + str((i+1,j))) + self._back_edge_line([], j, i + 1, True, + label + "-1.5 " + str((i + 1, j))) collapse = False else: @@ -208,19 +208,20 @@ def _connect_deps(self, i, deps, label=None): if self._prev_state == NODE and self._prev_index < i: i += 1 - if i-j > 1: + if i - j > 1: # We need two lines to connect if distance > 1 - self._back_edge_line([], j, i, True, label + "-1 " + str((i,j))) + self._back_edge_line([], j, i, True, + label + "-1 " + str((i, j))) collapse = False - self._back_edge_line([j], -1, -1, collapse, label + "-2 " + str((i,j))) + self._back_edge_line([j], -1, -1, collapse, + label + "-2 " + str((i, j))) return True elif deps: self._frontier.insert(i, deps) return False - def _set_state(self, state, index, label=None): if state not in states: raise ValueError("Invalid graph state!") @@ -234,7 +235,6 @@ def _set_state(self, state, index, label=None): self._out.write("%-20s" % (str(label) if label else '')) self._out.write("%s" % self._frontier) - def _back_edge_line(self, prev_ends, end, start, collapse, label=None): """Write part of a backwards edge in the graph. 
@@ -288,27 +288,26 @@ def advance(to_pos, edges): self._indent() for p in prev_ends: - advance(p, lambda: [("| ", self._pos)] ) - advance(p+1, lambda: [("|/", self._pos)] ) + advance(p, lambda: [("| ", self._pos)]) # NOQA: ignore=E272 + advance(p + 1, lambda: [("|/", self._pos)]) # NOQA: ignore=E272 if end >= 0: - advance(end + 1, lambda: [("| ", self._pos)] ) - advance(start - 1, lambda: [("|", self._pos), ("_", end)] ) + advance(end + 1, lambda: [("| ", self._pos)]) # NOQA: ignore=E272 + advance(start - 1, lambda: [("|", self._pos), ("_", end)]) # NOQA: ignore=E272 else: - advance(start - 1, lambda: [("| ", self._pos)] ) + advance(start - 1, lambda: [("| ", self._pos)]) # NOQA: ignore=E272 if start >= 0: - advance(start, lambda: [("|", self._pos), ("/", end)] ) + advance(start, lambda: [("|", self._pos), ("/", end)]) # NOQA: ignore=E272 if collapse: - advance(flen, lambda: [(" /", self._pos)] ) + advance(flen, lambda: [(" /", self._pos)]) # NOQA: ignore=E272 else: - advance(flen, lambda: [("| ", self._pos)] ) + advance(flen, lambda: [("| ", self._pos)]) # NOQA: ignore=E272 self._set_state(BACK_EDGE, end, label) self._out.write("\n") - def _node_line(self, index, name): """Writes a line with a node at index.""" self._indent() @@ -317,14 +316,13 @@ def _node_line(self, index, name): self._out.write("%s " % self.node_character) - for c in range(index+1, len(self._frontier)): + for c in range(index + 1, len(self._frontier)): self._write_edge("| ", c) self._out.write(" %s" % name) self._set_state(NODE, index) self._out.write("\n") - def _collapse_line(self, index): """Write a collapsing line after a node was added at index.""" self._indent() @@ -336,36 +334,33 @@ def _collapse_line(self, index): self._set_state(COLLAPSE, index) self._out.write("\n") - def _merge_right_line(self, index): """Edge at index is same as edge to right. Merge directly with '\'""" self._indent() for c in range(index): self._write_edge("| ", c) self._write_edge("|", index) - self._write_edge("\\", index+1) - for c in range(index+1, len(self._frontier)): - self._write_edge("| ", c ) + self._write_edge("\\", index + 1) + for c in range(index + 1, len(self._frontier)): + self._write_edge("| ", c) self._set_state(MERGE_RIGHT, index) self._out.write("\n") - def _expand_right_line(self, index): self._indent() for c in range(index): self._write_edge("| ", c) self._write_edge("|", index) - self._write_edge("\\", index+1) + self._write_edge("\\", index + 1) - for c in range(index+2, len(self._frontier)): + for c in range(index + 2, len(self._frontier)): self._write_edge(" \\", c) self._set_state(EXPAND_RIGHT, index) self._out.write("\n") - def write(self, spec, **kwargs): """Write out an ascii graph of the provided spec. @@ -399,7 +394,7 @@ def write(self, spec, **kwargs): # Colors associated with each node in the DAG. # Edges are colored by the node they point to. self._name_to_color = dict((name, self.colors[i % len(self.colors)]) - for i, name in enumerate(topo_order)) + for i, name in enumerate(topo_order)) # Frontier tracks open edges of the graph as it's written out. self._frontier = [[spec.name]] @@ -408,7 +403,8 @@ def write(self, spec, **kwargs): i = find(self._frontier, lambda f: len(f) > 1) if i >= 0: - # Expand frontier until there are enough columns for all children. + # Expand frontier until there are enough columns for all + # children. 
# Figure out how many back connections there are and # sort them so we do them in order @@ -425,8 +421,9 @@ def write(self, spec, **kwargs): prev_ends = [] for j, (b, d) in enumerate(back): self._frontier[i].remove(d) - if i-b > 1: - self._back_edge_line(prev_ends, b, i, False, 'left-1') + if i - b > 1: + self._back_edge_line(prev_ends, b, i, False, + 'left-1') del prev_ends[:] prev_ends.append(b) @@ -440,12 +437,13 @@ def write(self, spec, **kwargs): elif len(self._frontier[i]) > 1: # Expand forward after doing all back connections - if (i+1 < len(self._frontier) and len(self._frontier[i+1]) == 1 - and self._frontier[i+1][0] in self._frontier[i]): + if (i + 1 < len(self._frontier) and + len(self._frontier[i + 1]) == 1 and + self._frontier[i + 1][0] in self._frontier[i]): # We need to connect to the element to the right. # Keep lines straight by connecting directly and # avoiding unnecessary expand/contract. - name = self._frontier[i+1][0] + name = self._frontier[i + 1][0] self._frontier[i].remove(name) self._merge_right_line(i) @@ -459,9 +457,8 @@ def write(self, spec, **kwargs): self._frontier.pop(i) self._connect_deps(i, deps, "post-expand") - # Handle any remaining back edges to the right - j = i+1 + j = i + 1 while j < len(self._frontier): deps = self._frontier.pop(j) if not self._connect_deps(j, deps, "back-from-right"): @@ -479,8 +476,9 @@ def write(self, spec, **kwargs): # Replace node with its dependencies self._frontier.pop(i) if node.dependencies(): - deps = sorted((d.name for d in node.dependencies()), reverse=True) - self._connect_deps(i, deps, "new-deps") # anywhere. + deps = sorted((d.name for d in node.dependencies()), + reverse=True) + self._connect_deps(i, deps, "new-deps") # anywhere. elif self._frontier: self._collapse_line(i) @@ -502,7 +500,6 @@ def graph_ascii(spec, **kwargs): graph.write(spec, color=color, out=out) - def graph_dot(*specs, **kwargs): """Generate a graph in dot format of all provided specs. 
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py index fd57a4ea4b3..8522431fbbe 100644 --- a/lib/spack/spack/test/spec_dag.py +++ b/lib/spack/spack/test/spec_dag.py @@ -32,8 +32,6 @@ import spack.architecture import spack.package -from llnl.util.lang import list_modules - from spack.spec import Spec from spack.test.mock_packages_test import * @@ -51,21 +49,19 @@ def test_conflicting_package_constraints(self): self.assertRaises(spack.spec.UnsatisfiableVersionSpecError, spec.normalize) - def test_preorder_node_traversal(self): dag = Spec('mpileaks ^zmpi') dag.normalize() names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf', 'zmpi', 'fake'] - pairs = zip([0,1,2,3,4,2,3], names) + pairs = zip([0, 1, 2, 3, 4, 2, 3], names) traversal = dag.traverse() self.assertEqual([x.name for x in traversal], names) traversal = dag.traverse(depth=True) - self.assertEqual([(x, y.name) for x,y in traversal], pairs) - + self.assertEqual([(x, y.name) for x, y in traversal], pairs) def test_preorder_edge_traversal(self): dag = Spec('mpileaks ^zmpi') @@ -73,14 +69,13 @@ def test_preorder_edge_traversal(self): names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf', 'libelf', 'zmpi', 'fake', 'zmpi'] - pairs = zip([0,1,2,3,4,3,2,3,1], names) + pairs = zip([0, 1, 2, 3, 4, 3, 2, 3, 1], names) traversal = dag.traverse(cover='edges') self.assertEqual([x.name for x in traversal], names) traversal = dag.traverse(cover='edges', depth=True) - self.assertEqual([(x, y.name) for x,y in traversal], pairs) - + self.assertEqual([(x, y.name) for x, y in traversal], pairs) def test_preorder_path_traversal(self): dag = Spec('mpileaks ^zmpi') @@ -88,14 +83,13 @@ def test_preorder_path_traversal(self): names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf', 'libelf', 'zmpi', 'fake', 'zmpi', 'fake'] - pairs = zip([0,1,2,3,4,3,2,3,1,2], names) + pairs = zip([0, 1, 2, 3, 4, 3, 2, 3, 1, 2], names) traversal = dag.traverse(cover='paths') self.assertEqual([x.name for x in traversal], names) traversal = dag.traverse(cover='paths', depth=True) - self.assertEqual([(x, y.name) for x,y in traversal], pairs) - + self.assertEqual([(x, y.name) for x, y in traversal], pairs) def test_postorder_node_traversal(self): dag = Spec('mpileaks ^zmpi') @@ -103,14 +97,13 @@ def test_postorder_node_traversal(self): names = ['libelf', 'libdwarf', 'dyninst', 'fake', 'zmpi', 'callpath', 'mpileaks'] - pairs = zip([4,3,2,3,2,1,0], names) + pairs = zip([4, 3, 2, 3, 2, 1, 0], names) traversal = dag.traverse(order='post') self.assertEqual([x.name for x in traversal], names) traversal = dag.traverse(depth=True, order='post') - self.assertEqual([(x, y.name) for x,y in traversal], pairs) - + self.assertEqual([(x, y.name) for x, y in traversal], pairs) def test_postorder_edge_traversal(self): dag = Spec('mpileaks ^zmpi') @@ -118,14 +111,13 @@ def test_postorder_edge_traversal(self): names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi', 'callpath', 'zmpi', 'mpileaks'] - pairs = zip([4,3,3,2,3,2,1,1,0], names) + pairs = zip([4, 3, 3, 2, 3, 2, 1, 1, 0], names) traversal = dag.traverse(cover='edges', order='post') self.assertEqual([x.name for x in traversal], names) traversal = dag.traverse(cover='edges', depth=True, order='post') - self.assertEqual([(x, y.name) for x,y in traversal], pairs) - + self.assertEqual([(x, y.name) for x, y in traversal], pairs) def test_postorder_path_traversal(self): dag = Spec('mpileaks ^zmpi') @@ -133,14 +125,13 @@ def test_postorder_path_traversal(self): names = 
['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi', 'callpath', 'fake', 'zmpi', 'mpileaks'] - pairs = zip([4,3,3,2,3,2,1,2,1,0], names) + pairs = zip([4, 3, 3, 2, 3, 2, 1, 2, 1, 0], names) traversal = dag.traverse(cover='paths', order='post') self.assertEqual([x.name for x in traversal], names) traversal = dag.traverse(cover='paths', depth=True, order='post') - self.assertEqual([(x, y.name) for x,y in traversal], pairs) - + self.assertEqual([(x, y.name) for x, y in traversal], pairs) def test_conflicting_spec_constraints(self): mpileaks = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf') @@ -153,8 +144,7 @@ def test_conflicting_spec_constraints(self): spec._dependencies['mpich'].spec = Spec('mpich@2.0') self.assertRaises(spack.spec.InconsistentSpecError, - lambda: mpileaks.flat_dependencies(copy=False)) - + lambda: mpileaks.flat_dependencies(copy=False)) def test_normalize_twice(self): """Make sure normalize can be run twice on the same spec, @@ -166,7 +156,6 @@ def test_normalize_twice(self): spec.normalize() self.assertEqual(n1, spec) - def test_normalize_a_lot(self): spec = Spec('mpileaks') spec.normalize() @@ -174,7 +163,6 @@ def test_normalize_a_lot(self): spec.normalize() spec.normalize() - def test_normalize_with_virtual_spec(self): dag = Spec('mpileaks', Spec('callpath', @@ -189,80 +177,80 @@ def test_normalize_with_virtual_spec(self): # make sure nothing with the same name occurs twice counts = {} for spec in dag.traverse(key=id): - if not spec.name in counts: + if spec.name not in counts: counts[spec.name] = 0 counts[spec.name] += 1 for name in counts: self.assertEqual(counts[name], 1, "Count for %s was not 1!" % name) - def check_links(self, spec_to_check): for spec in spec_to_check.traverse(): for dependent in spec.dependents(): self.assertTrue( spec.name in dependent.dependencies_dict(), "%s not in dependencies of %s" % - (spec.name, dependent.name)) + (spec.name, dependent.name)) for dependency in spec.dependencies(): self.assertTrue( spec.name in dependency.dependents_dict(), "%s not in dependents of %s" % - (spec.name, dependency.name)) - + (spec.name, dependency.name)) def test_dependents_and_dependencies_are_correct(self): spec = Spec('mpileaks', - Spec('callpath', - Spec('dyninst', - Spec('libdwarf', - Spec('libelf')), - Spec('libelf')), - Spec('mpi')), - Spec('mpi')) + Spec('callpath', + Spec('dyninst', + Spec('libdwarf', + Spec('libelf')), + Spec('libelf')), + Spec('mpi')), + Spec('mpi')) self.check_links(spec) spec.normalize() self.check_links(spec) - def test_unsatisfiable_version(self): self.set_pkg_dep('mpileaks', 'mpich@1.0') spec = Spec('mpileaks ^mpich@2.0 ^callpath ^dyninst ^libelf ^libdwarf') - self.assertRaises(spack.spec.UnsatisfiableVersionSpecError, spec.normalize) - + self.assertRaises(spack.spec.UnsatisfiableVersionSpecError, + spec.normalize) def test_unsatisfiable_compiler(self): self.set_pkg_dep('mpileaks', 'mpich%gcc') - spec = Spec('mpileaks ^mpich%intel ^callpath ^dyninst ^libelf ^libdwarf') - self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize) - + spec = Spec('mpileaks ^mpich%intel ^callpath ^dyninst ^libelf' + ' ^libdwarf') + self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, + spec.normalize) def test_unsatisfiable_compiler_version(self): self.set_pkg_dep('mpileaks', 'mpich%gcc@4.6') - spec = Spec('mpileaks ^mpich%gcc@4.5 ^callpath ^dyninst ^libelf ^libdwarf') - self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize) - + spec = Spec('mpileaks ^mpich%gcc@4.5 ^callpath 
^dyninst ^libelf' + ' ^libdwarf') + self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, + spec.normalize) def test_unsatisfiable_architecture(self): - platform = spack.architecture.platform() - self.set_pkg_dep('mpileaks', 'mpich platform=test target=be') - spec = Spec('mpileaks ^mpich platform=test target=fe ^callpath ^dyninst ^libelf ^libdwarf') - self.assertRaises(spack.spec.UnsatisfiableArchitectureSpecError, spec.normalize) - + spec = Spec('mpileaks ^mpich platform=test target=fe ^callpath' + ' ^dyninst ^libelf ^libdwarf') + self.assertRaises(spack.spec.UnsatisfiableArchitectureSpecError, + spec.normalize) def test_invalid_dep(self): spec = Spec('libelf ^mpich') - self.assertRaises(spack.spec.InvalidDependencyException, spec.normalize) + self.assertRaises(spack.spec.InvalidDependencyException, + spec.normalize) spec = Spec('libelf ^libdwarf') - self.assertRaises(spack.spec.InvalidDependencyException, spec.normalize) + self.assertRaises(spack.spec.InvalidDependencyException, + spec.normalize) spec = Spec('mpich ^dyninst ^libelf') - self.assertRaises(spack.spec.InvalidDependencyException, spec.normalize) - + self.assertRaises(spack.spec.InvalidDependencyException, + spec.normalize) def test_equal(self): # Different spec structures to test for equality @@ -301,10 +289,10 @@ def test_equal(self): self.assertFalse(flip_flat.eq_dag(flip_dag)) self.assertFalse(dag.eq_dag(flip_dag)) - def test_normalize_mpileaks(self): # Spec parsed in from a string - spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf@1.8.11 ^libdwarf') + spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf@1.8.11' + ' ^libdwarf') # What that spec should look like after parsing expected_flat = Spec( @@ -367,7 +355,6 @@ def test_normalize_mpileaks(self): self.assertEqual(spec, non_unique_nodes) self.assertFalse(spec.eq_dag(non_unique_nodes)) - def test_normalize_with_virtual_package(self): spec = Spec('mpileaks ^mpi ^libelf@1.8.11 ^libdwarf') spec.normalize() @@ -383,7 +370,6 @@ def test_normalize_with_virtual_package(self): self.assertEqual(str(spec), str(expected_normalized)) - def test_contains(self): spec = Spec('mpileaks ^mpi ^libelf@1.8.11 ^libdwarf') self.assertTrue(Spec('mpi') in spec) @@ -394,7 +380,6 @@ def test_contains(self): self.assertFalse(Spec('libgoblin') in spec) self.assertTrue(Spec('mpileaks') in spec) - def test_copy_simple(self): orig = Spec('mpileaks') copy = orig.copy() @@ -411,7 +396,6 @@ def test_copy_simple(self): copy_ids = set(id(s) for s in copy.traverse()) self.assertFalse(orig_ids.intersection(copy_ids)) - def test_copy_normalized(self): orig = Spec('mpileaks') orig.normalize() @@ -429,7 +413,6 @@ def test_copy_normalized(self): copy_ids = set(id(s) for s in copy.traverse()) self.assertFalse(orig_ids.intersection(copy_ids)) - def test_copy_concretized(self): orig = Spec('mpileaks') orig.concretize() From 584e5506f225142c5d026d54a47ab56d150e8186 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Thu, 4 Aug 2016 14:24:25 +0200 Subject: [PATCH 217/284] Update Score-P to 2.0 --- var/spack/repos/builtin/packages/cube/package.py | 5 ++++- var/spack/repos/builtin/packages/opari2/package.py | 2 ++ var/spack/repos/builtin/packages/scorep/package.py | 6 ++++++ 3 files changed, 12 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/cube/package.py b/var/spack/repos/builtin/packages/cube/package.py index 40c2cc68936..01a933f6576 100644 --- a/var/spack/repos/builtin/packages/cube/package.py +++ b/var/spack/repos/builtin/packages/cube/package.py @@ -38,14 +38,17 @@ class 
Cube(Package): homepage = "http://www.scalasca.org/software/cube-4.x/download.html" url = "http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz" + version('4.3.4', '50f73060f55311cb12c5b3cb354d59fa', + url='http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3.4.tar.gz') version('4.3.3', '07e109248ed8ffc7bdcce614264a2909', url='http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3.3.tar.gz') - version('4.2.3', '8f95b9531f5a8f8134f279c2767c9b20', url="http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz") # TODO : add variant that builds GUI on top of Qt + depends_on('zlib') + def install(self, spec, prefix): configure_args = ["--prefix=%s" % prefix, "--without-paraver", diff --git a/var/spack/repos/builtin/packages/opari2/package.py b/var/spack/repos/builtin/packages/opari2/package.py index 510fff9fc2b..749350fb36c 100644 --- a/var/spack/repos/builtin/packages/opari2/package.py +++ b/var/spack/repos/builtin/packages/opari2/package.py @@ -37,6 +37,8 @@ class Opari2(Package): homepage = "http://www.vi-hps.org/projects/score-p" url = "http://www.vi-hps.org/upload/packages/opari2/opari2-1.1.2.tar.gz" + version('2.0', '72350dbdb6139f2e68a5055a4f0ba16c', + url='http://www.vi-hps.org/upload/packages/opari2/opari2-2.0.tar.gz') version('1.1.4', '245d3d11147a06de77909b0805f530c0', url='http://www.vi-hps.org/upload/packages/opari2/opari2-1.1.4.tar.gz') version('1.1.2', '9a262c7ca05ff0ab5f7775ae96f3539e') diff --git a/var/spack/repos/builtin/packages/scorep/package.py b/var/spack/repos/builtin/packages/scorep/package.py index 633511a15ad..0ffdcb9a1d3 100644 --- a/var/spack/repos/builtin/packages/scorep/package.py +++ b/var/spack/repos/builtin/packages/scorep/package.py @@ -35,6 +35,8 @@ class Scorep(Package): homepage = "http://www.vi-hps.org/projects/score-p" url = "http://www.vi-hps.org/upload/packages/scorep/scorep-1.2.3.tar.gz" + version('2.0.2', '8f00e79e1b5b96e511c5ebecd10b2888', + url='http://www.vi-hps.org/upload/packages/scorep/scorep-2.0.2.tar.gz') version('1.4.2', '3b9a042b13bdd5836452354e6567f71e', url='http://www.vi-hps.org/upload/packages/scorep/scorep-1.4.2.tar.gz') version('1.3', '9db6f957b7f51fa01377a9537867a55c', @@ -42,6 +44,10 @@ class Scorep(Package): ########## # Dependencies for SCORE-P are quite tight. See the homepage for more information. + # SCOREP 2.0.2 + depends_on('otf2@2.0', when='@2.0.2') + depends_on('opari2@2.0', when='@2.0.2') + depends_on('cube@4.3:4.4', when='@2.0.2') # SCOREP 1.4.2 depends_on('otf2@1.5:1.6', when='@1.4.2') depends_on('opari2@1.1.4', when='@1.4.2') From a591e183bcb0c23fe48016b3e018ca48c4ff45c3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torbj=C3=B6rn=20L=C3=B6nnemark?= Date: Thu, 4 Aug 2016 15:39:45 +0200 Subject: [PATCH 218/284] Fix incorrect indentation --- lib/spack/spack/util/web.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py index 47abc507e0e..6af2b76affc 100644 --- a/lib/spack/spack/util/web.py +++ b/lib/spack/spack/util/web.py @@ -125,11 +125,11 @@ def _spider(args): if abs_link in visited: continue - # If we're not at max depth, follow links. - if depth < max_depth: - subcalls.append((abs_link, visited, root, None, - depth+1, max_depth, raise_on_error)) - visited.add(abs_link) + # If we're not at max depth, follow links. 
+ if depth < max_depth: + subcalls.append((abs_link, visited, root, None, + depth+1, max_depth, raise_on_error)) + visited.add(abs_link) if subcalls: try: From af8741c23cf0671ba14ab6b90a583f39afc6902f Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Thu, 4 Aug 2016 09:57:50 +0200 Subject: [PATCH 219/284] dealii: add optional python bindings --- .../repos/builtin/packages/dealii/package.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/dealii/package.py b/var/spack/repos/builtin/packages/dealii/package.py index 54604d351ff..18c0849f68d 100644 --- a/var/spack/repos/builtin/packages/dealii/package.py +++ b/var/spack/repos/builtin/packages/dealii/package.py @@ -51,14 +51,21 @@ class Dealii(Package): variant('petsc', default=True, description='Compile with Petsc (only with MPI)') variant('slepc', default=True, description='Compile with Slepc (only with Petsc and MPI)') variant('trilinos', default=True, description='Compile with Trilinos (only with MPI)') + variant('python', default=True, description='Compile with Python bindings') # required dependencies, light version depends_on("blas") # Boost 1.58 is blacklisted, see # https://github.com/dealii/dealii/issues/1591 # Require at least 1.59 - depends_on("boost@1.59.0:+thread+system+serialization+iostreams", when='~mpi') # NOQA: ignore=E501 - depends_on("boost@1.59.0:+mpi+thread+system+serialization+iostreams", when='+mpi') # NOQA: ignore=E501 + # +python won't affect @:8.4.1 + depends_on("boost@1.59.0:+thread+system+serialization+iostreams", when='@:8.4.1~mpi') + depends_on("boost@1.59.0:+thread+system+serialization+iostreams+mpi", when='@:8.4.1+mpi') + # since @8.5.0: (and @develop) python bindings are introduced: + depends_on("boost@1.59.0:+thread+system+serialization+iostreams", when='@8.5.0:~mpi~python') + depends_on("boost@1.59.0:+thread+system+serialization+iostreams+mpi", when='@8.5.0:+mpi~python') + depends_on("boost@1.59.0:+thread+system+serialization+iostreams+python", when='@8.5.0:~mpi+python') + depends_on("boost@1.59.0:+thread+system+serialization+iostreams+mpi+python", when='@8.5.0:+mpi+python') depends_on("bzip2") depends_on("cmake", type='build') depends_on("lapack") @@ -120,6 +127,12 @@ def install(self, spec, prefix): '-DZLIB_DIR=%s' % spec['zlib'].prefix ]) + if spec.satisfies('@8.5.0:'): + options.extend([ + '-DDEAL_II_COMPONENT_PYTHON_BINDINGS=%s' % + ('ON' if '+python' in spec else 'OFF') + ]) + # Set directory structure: if spec.satisfies('@:8.2.1'): options.extend(['-DDEAL_II_COMPONENT_COMPAT_FILES=OFF']) From 61b3ecb6d2399dfc5df03cac10818e066bbb5aae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torbj=C3=B6rn=20L=C3=B6nnemark?= Date: Thu, 4 Aug 2016 16:28:14 +0200 Subject: [PATCH 220/284] Fix flake8 issues --- lib/spack/spack/util/web.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py index 6af2b76affc..cac783a3688 100644 --- a/lib/spack/spack/util/web.py +++ b/lib/spack/spack/util/web.py @@ -25,8 +25,7 @@ import re import os import sys -import subprocess -import urllib2, cookielib +import urllib2 import urlparse from multiprocessing import Pool from HTMLParser import HTMLParser, HTMLParseError @@ -84,7 +83,7 @@ def _spider(args): req.get_method = lambda: "HEAD" resp = urllib2.urlopen(req, timeout=TIMEOUT) - if not "Content-type" in resp.headers: + if "Content-type" not in resp.headers: tty.debug("ignoring page " + url) return pages, links @@ -128,7 +127,7 
@@ def _spider(args): # If we're not at max depth, follow links. if depth < max_depth: subcalls.append((abs_link, visited, root, None, - depth+1, max_depth, raise_on_error)) + depth + 1, max_depth, raise_on_error)) visited.add(abs_link) if subcalls: @@ -142,22 +141,22 @@ def _spider(args): pool.terminate() pool.join() - except urllib2.URLError, e: + except urllib2.URLError as e: tty.debug(e) if raise_on_error: raise spack.error.NoNetworkConnectionError(str(e), url) - except HTMLParseError, e: + except HTMLParseError as e: # This error indicates that Python's HTML parser sucks. msg = "Got an error parsing HTML." # Pre-2.7.3 Pythons in particular have rather prickly HTML parsing. - if sys.version_info[:3] < (2,7,3): + if sys.version_info[:3] < (2, 7, 3): msg += " Use Python 2.7.3 or newer for better HTML parsing." tty.warn(msg, url, "HTMLParseError: " + str(e)) - except Exception, e: + except Exception as e: # Other types of errors are completely ignored, except in debug mode. tty.debug("Error in _spider: %s" % e) @@ -173,7 +172,8 @@ def spider(root_url, **kwargs): performance over a sequential fetch. """ max_depth = kwargs.setdefault('depth', 1) - pages, links = _spider((root_url, set(), root_url, None, 1, max_depth, False)) + pages, links = _spider((root_url, set(), root_url, None, + 1, max_depth, False)) return pages, links @@ -235,7 +235,7 @@ def find_versions_of_archive(*archive_urls, **kwargs): try: ver = spack.url.parse_version(url) versions[ver] = url - except spack.url.UndetectableVersionError as e: + except spack.url.UndetectableVersionError: continue return versions From d7665a63e39e403ff97be127e8801dafea51c848 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Thu, 4 Aug 2016 17:58:59 +0200 Subject: [PATCH 221/284] flake8 fixes --- .../repos/builtin/packages/cube/package.py | 4 +-- .../repos/builtin/packages/opari2/package.py | 14 +++++---- .../repos/builtin/packages/scorep/package.py | 29 ++++++++++--------- 3 files changed, 27 insertions(+), 20 deletions(-) diff --git a/var/spack/repos/builtin/packages/cube/package.py b/var/spack/repos/builtin/packages/cube/package.py index 01a933f6576..09237f87c30 100644 --- a/var/spack/repos/builtin/packages/cube/package.py +++ b/var/spack/repos/builtin/packages/cube/package.py @@ -28,8 +28,8 @@ class Cube(Package): """ - Cube the profile viewer for Score-P and Scalasca profiles. It displays a multi-dimensional performance space - consisting of the dimensions: + Cube the profile viewer for Score-P and Scalasca profiles. It displays a + multi-dimensional performance space consisting of the dimensions: - performance metric - call path - system resource diff --git a/var/spack/repos/builtin/packages/opari2/package.py b/var/spack/repos/builtin/packages/opari2/package.py index 749350fb36c..e901f8ed393 100644 --- a/var/spack/repos/builtin/packages/opari2/package.py +++ b/var/spack/repos/builtin/packages/opari2/package.py @@ -25,13 +25,17 @@ from spack import * + class Opari2(Package): """ - OPARI2 is a source-to-source instrumentation tool for OpenMP and hybrid codes. It surrounds OpenMP directives and - runtime library calls with calls to the POMP2 measurement interface. OPARI2 will provide you with a new - initialization method that allows for multi-directory and parallel builds as well as the usage of pre-instrumented - libraries. Furthermore, an efficient way of tracking parent-child relationships was added. Additionally, we extended - OPARI2 to support instrumentation of OpenMP 3.0 tied tasks. 
+ OPARI2 is a source-to-source instrumentation tool for OpenMP and hybrid + codes. It surrounds OpenMP directives and runtime library calls with calls + to the POMP2 measurement interface. OPARI2 will provide you with a new + initialization method that allows for multi-directory and parallel builds + as well as the usage of pre-instrumented libraries. Furthermore, an + efficient way of tracking parent-child relationships was added. + Additionally, we extended OPARI2 to support instrumentation of OpenMP 3.0 + tied tasks. """ homepage = "http://www.vi-hps.org/projects/score-p" diff --git a/var/spack/repos/builtin/packages/scorep/package.py b/var/spack/repos/builtin/packages/scorep/package.py index 0ffdcb9a1d3..d40e08740f7 100644 --- a/var/spack/repos/builtin/packages/scorep/package.py +++ b/var/spack/repos/builtin/packages/scorep/package.py @@ -28,8 +28,9 @@ class Scorep(Package): """ - The Score-P measurement infrastructure is a highly scalable and easy-to-use tool suite for profiling, event - tracing, and online analysis of HPC applications. + The Score-P measurement infrastructure is a highly scalable and easy-to-use + tool suite for profiling, event tracing, and online analysis of HPC + applications. """ homepage = "http://www.vi-hps.org/projects/score-p" @@ -43,7 +44,8 @@ class Scorep(Package): url='http://www.vi-hps.org/upload/packages/scorep/scorep-1.3.tar.gz') ########## - # Dependencies for SCORE-P are quite tight. See the homepage for more information. + # Dependencies for SCORE-P are quite tight. See the homepage for more + # information. # SCOREP 2.0.2 depends_on('otf2@2.0', when='@2.0.2') depends_on('opari2@2.0', when='@2.0.2') @@ -62,17 +64,18 @@ class Scorep(Package): depends_on("papi") def install(self, spec, prefix): - configure = Executable( join_path(self.stage.source_path, 'configure') ) + configure = Executable(join_path(self.stage.source_path, 'configure')) with working_dir('spack-build', create=True): - configure_args = ["--prefix=%s" % prefix, - "--with-otf2=%s" % spec['otf2'].prefix.bin, - "--with-opari2=%s" % spec['opari2'].prefix.bin, - "--with-cube=%s" % spec['cube'].prefix.bin, - "--with-papi-header=%s" % spec['papi'].prefix.include, - "--with-papi-lib=%s" % spec['papi'].prefix.lib, - "--enable-shared", - "CFLAGS=-fPIC", - "CXXFLAGS=-fPIC"] + configure_args = [ + "--prefix=%s" % prefix, + "--with-otf2=%s" % spec['otf2'].prefix.bin, + "--with-opari2=%s" % spec['opari2'].prefix.bin, + "--with-cube=%s" % spec['cube'].prefix.bin, + "--with-papi-header=%s" % spec['papi'].prefix.include, + "--with-papi-lib=%s" % spec['papi'].prefix.lib, + "--enable-shared", + "CFLAGS=-fPIC", + "CXXFLAGS=-fPIC"] configure(*configure_args) make() make("install") From 6ab78eb88e940cfe38b5fa8353639b574c6ee65f Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 4 Aug 2016 10:46:45 -0700 Subject: [PATCH 222/284] sbang filtering now works on non-writable files. (#1445) - sbang now changes mode to writable and restores mode if a file is not writable. 
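In outline this is the usual save-and-restore pattern around the rewrite. A minimal
standalone sketch of that pattern (illustrative only, not the patch code itself;
rewrite_file() is a stand-in for the real shebang filtering) would be:

    import os
    import stat

    def rewrite_preserving_mode(path, rewrite_file):
        # Temporarily grant write permission if the file is read-only.
        saved_mode = None
        if not os.access(path, os.W_OK):
            saved_mode = os.stat(path).st_mode
            os.chmod(path, saved_mode | stat.S_IWRITE)
        try:
            rewrite_file(path)
        finally:
            # Put the original permissions back afterwards.
            if saved_mode is not None:
                os.chmod(path, saved_mode)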
--- lib/spack/spack/hooks/sbang.py | 12 ++++++++++++ lib/spack/spack/test/sbang.py | 17 ++++++++++++++--- 2 files changed, 26 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/hooks/sbang.py b/lib/spack/spack/hooks/sbang.py index 3a957c6e0e2..02c1ce38167 100644 --- a/lib/spack/spack/hooks/sbang.py +++ b/lib/spack/spack/hooks/sbang.py @@ -23,6 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os +import stat import re import llnl.util.tty as tty @@ -62,10 +63,21 @@ def filter_shebang(path): if re.search(r'^#!(/[^/]*)*lua\b', original): original = re.sub(r'^#', '--', original) + # Change non-writable files to be writable if needed. + saved_mode = None + if not os.access(path, os.W_OK): + st = os.stat(path) + saved_mode = st.st_mode + os.chmod(path, saved_mode | stat.S_IWRITE) + with open(path, 'w') as new_file: new_file.write(new_sbang_line) new_file.write(original) + # Restore original permissions. + if saved_mode is not None: + os.chmod(path, saved_mode) + tty.warn("Patched overlong shebang in %s" % path) diff --git a/lib/spack/spack/test/sbang.py b/lib/spack/spack/test/sbang.py index ed54ff90b00..4ce854a1d80 100644 --- a/lib/spack/spack/test/sbang.py +++ b/lib/spack/spack/test/sbang.py @@ -26,6 +26,7 @@ Test that Spack's shebang filtering works correctly. """ import os +import stat import unittest import tempfile import shutil @@ -41,6 +42,7 @@ sbang_line = '#!/bin/bash %s/bin/sbang\n' % spack.spack_root last_line = "last!\n" + class SbangTest(unittest.TestCase): def setUp(self): self.tempdir = tempfile.mkdtemp() @@ -74,10 +76,8 @@ def setUp(self): f.write(long_line) f.write(last_line) - def tearDown(self): - shutil.rmtree(self.tempdir, ignore_errors=True) - + shutil.rmtree(self.tempdir, ignore_errors=True) def test_shebang_handling(self): filter_shebangs_in_directory(self.tempdir) @@ -104,3 +104,14 @@ def test_shebang_handling(self): self.assertEqual(f.readline(), sbang_line) self.assertEqual(f.readline(), long_line) self.assertEqual(f.readline(), last_line) + + def test_shebang_handles_non_writable_files(self): + # make a file non-writable + st = os.stat(self.long_shebang) + not_writable_mode = st.st_mode & ~stat.S_IWRITE + os.chmod(self.long_shebang, not_writable_mode) + + self.test_shebang_handling() + + st = os.stat(self.long_shebang) + self.assertEqual(oct(not_writable_mode), oct(st.st_mode)) From d4274b32f283a056bec7048f1ab8b78bf1518bb6 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Tue, 26 Jul 2016 17:16:12 -0400 Subject: [PATCH 223/284] Make flake8 happy (long lines) --- .../repos/builtin/packages/bwa/package.py | 52 +++++++++++++++++++ 1 file changed, 52 insertions(+) create mode 100644 var/spack/repos/builtin/packages/bwa/package.py diff --git a/var/spack/repos/builtin/packages/bwa/package.py b/var/spack/repos/builtin/packages/bwa/package.py new file mode 100644 index 00000000000..bb6763629b7 --- /dev/null +++ b/var/spack/repos/builtin/packages/bwa/package.py @@ -0,0 +1,52 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Bwa(Package): + """Burrow-Wheeler Aligner for pairwise alignment between DNA sequences.""" + + homepage = "http://github.com/lh3/bwa" + url = "https://github.com/lh3/bwa/releases/download/v0.7.15/bwa-0.7.15.tar.bz2" + + version('0.7.15', 'fcf470a46a1dbe2f96a1c5b87c530554') + + depends_on('zlib') + + def install(self, spec, prefix): + filter_file(r'^INCLUDES=', + "INCLUDES=-I%s" % spec['zlib'].prefix.include, 'Makefile') + filter_file(r'^LIBS=', "LIBS=-L%s " % spec['zlib'].prefix.lib, + 'Makefile') + make() + + mkdirp(prefix.bin) + install('bwa', join_path(prefix.bin, 'bwa')) + set_executable(join_path(prefix.bin, 'bwa')) + mkdirp(prefix.doc) + install('README.md', prefix.doc) + install('NEWS.md', prefix.doc) + mkdirp(prefix.man1) + install('bwa.1', prefix.man1) From d5bb2955b19ed8fba29b6c0f11ea9cafcec6db9b Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Mon, 1 Aug 2016 17:37:56 -0400 Subject: [PATCH 224/284] Add package for libgtextutils Gordon's Text utils Library, in support of fastx toolkit. --- .../builtin/packages/libgtextutils/package.py | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 var/spack/repos/builtin/packages/libgtextutils/package.py diff --git a/var/spack/repos/builtin/packages/libgtextutils/package.py b/var/spack/repos/builtin/packages/libgtextutils/package.py new file mode 100644 index 00000000000..90a981b3579 --- /dev/null +++ b/var/spack/repos/builtin/packages/libgtextutils/package.py @@ -0,0 +1,43 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Libgtextutils(Package): + """Gordon's Text utils Library.""" + + homepage = "https://github.com/agordon/libgtextutils" + url = "https://github.com/agordon/libgtextutils/releases/download/0.7/libgtextutils-0.7.tar.gz" + + version('0.7', '593c7c62e3c76ec49f5736eed4f96806') + + # FIXME: Add dependencies if required. + # depends_on('foo') + + def install(self, spec, prefix): + configure('--prefix={0}'.format(prefix)) + + make() + make('install') From e17862c421c3b8fdc25a06256afb6fd38b15aef2 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Mon, 1 Aug 2016 17:38:38 -0400 Subject: [PATCH 225/284] Add package for fastx toolkit Tools for working with FASTA/FASTQ files. --- .../builtin/packages/fastx_toolkit/package.py | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 var/spack/repos/builtin/packages/fastx_toolkit/package.py diff --git a/var/spack/repos/builtin/packages/fastx_toolkit/package.py b/var/spack/repos/builtin/packages/fastx_toolkit/package.py new file mode 100644 index 00000000000..04b4d24b39c --- /dev/null +++ b/var/spack/repos/builtin/packages/fastx_toolkit/package.py @@ -0,0 +1,43 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class FastxToolkit(Package): + """The FASTX-Toolkit is a collection of command line tools for + Short-Reads FASTA/FASTQ files preprocessing.""" + + homepage = "http://hannonlab.cshl.edu/fastx_toolkit/" + url = "https://github.com/agordon/fastx_toolkit/releases/download/0.0.14/fastx_toolkit-0.0.14.tar.bz2" + + version('0.0.14', 'bf1993c898626bb147de3d6695c20b40') + + depends_on('libgtextutils') + + def install(self, spec, prefix): + configure('--prefix={0}'.format(prefix)) + + make() + make('install') From ef1369c365cb6d8ebaba2dd6b4ebc2f06acdf0ba Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Wed, 27 Jul 2016 13:19:17 -0400 Subject: [PATCH 226/284] Add package for htslib Htslib was pulled out of samtools and made into a standalone thing. 
This commit adds a package for it, in support of the samtools package.
---
 .../repos/builtin/packages/htslib/package.py | 41 +++++++++++++++++++
 1 file changed, 41 insertions(+)
 create mode 100644 var/spack/repos/builtin/packages/htslib/package.py

diff --git a/var/spack/repos/builtin/packages/htslib/package.py b/var/spack/repos/builtin/packages/htslib/package.py
new file mode 100644
index 00000000000..1a8b8fd2f5e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/htslib/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Htslib(Package):
+    """C library for high-throughput sequencing data formats."""
+
+    homepage = "https://github.com/samtools/htslib"
+    url = "https://github.com/samtools/htslib/releases/download/1.3.1/htslib-1.3.1.tar.bz2"
+
+    version('1.3.1', '16d78f90b72f29971b042e8da8be6843')
+
+    depends_on('zlib')
+
+    def install(self, spec, prefix):
+        configure('--prefix={0}'.format(prefix))
+        make()
+        make('install')

From ba0577dc96597d35aaa75472e8ce014c307a03da Mon Sep 17 00:00:00 2001
From: George Hartzell
Date: Wed, 27 Jul 2016 13:20:49 -0400
Subject: [PATCH 227/284] Update samtools to support v1.3.1

Update the samtools package to support v1.3.1, which

- now uses configure script; and
- now depends on external htslib package.

The dependency on mpc seems to have been bogus, it's never linked in,
nor is it mentioned in the source tree. I *do* have a version in
/usr/lib64, but ldd does not show it being linked in either....

By depending on 'ncurses' I can do away with the need for the patch.
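The heart of the change is the version switch in install(); trimmed to just that
logic (the package boilerplate around it is omitted here), it reads:

    def install(self, spec, prefix):
        if self.spec.version >= Version('1.3.1'):
            # 1.3.1 and newer: configure-based build; the external htslib
            # comes in through the new depends_on declaration.
            configure('--prefix={0}'.format(prefix), '--with-ncurses')
            make()
            make('install')
        else:
            # 1.2 and older: plain Makefile build using the bundled htslib.
            make("prefix=%s" % prefix)
            make("prefix=%s" % prefix, "install")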
--- .../builtin/packages/samtools/package.py | 25 ++++++++++++------- .../packages/samtools/samtools1.2.patch | 20 --------------- 2 files changed, 16 insertions(+), 29 deletions(-) delete mode 100644 var/spack/repos/builtin/packages/samtools/samtools1.2.patch diff --git a/var/spack/repos/builtin/packages/samtools/package.py b/var/spack/repos/builtin/packages/samtools/package.py index f5c7f4431f1..7df3f5bcf89 100644 --- a/var/spack/repos/builtin/packages/samtools/package.py +++ b/var/spack/repos/builtin/packages/samtools/package.py @@ -25,18 +25,25 @@ from spack import * class Samtools(Package): - """SAM Tools provide various utilities for manipulating alignments in the SAM format, - including sorting, merging, indexing and generating + """SAM Tools provide various utilities for manipulating alignments in + the SAM format, including sorting, merging, indexing and generating alignments in a per-position format""" homepage = "www.htslib.org" - version('1.2','988ec4c3058a6ceda36503eebecd4122',url = "https://github.com/samtools/samtools/releases/download/1.2/samtools-1.2.tar.bz2") + url = "https://github.com/samtools/samtools/releases/download/1.3.1/samtools-1.3.1.tar.bz2" - depends_on("zlib") - depends_on("mpc") - parallel=False - patch("samtools1.2.patch",level=0) + version('1.3.1','a7471aa5a1eb7fc9cc4c6491d73c2d88') + version('1.2','988ec4c3058a6ceda36503eebecd4122') + + depends_on("ncurses") + depends_on("htslib", when='@1.3.1') # htslib became standalone + depends_on('zlib', when='@1.2') # needed for builtin htslib def install(self, spec, prefix): - make("prefix=%s" % prefix, "install") - + if self.spec.version >= Version('1.3.1'): + configure('--prefix={0}'.format(prefix), '--with-ncurses') + make() + make('install') + else: + make("prefix=%s" % prefix) + make("prefix=%s" % prefix, "install") diff --git a/var/spack/repos/builtin/packages/samtools/samtools1.2.patch b/var/spack/repos/builtin/packages/samtools/samtools1.2.patch deleted file mode 100644 index ead3ab4e2cb..00000000000 --- a/var/spack/repos/builtin/packages/samtools/samtools1.2.patch +++ /dev/null @@ -1,20 +0,0 @@ ---- Makefile 2015-02-03 08:27:34.000000000 -0800 -+++ Makefile.new 2015-07-21 10:38:27.881406892 -0700 -@@ -26,7 +26,7 @@ - CFLAGS = -g -Wall -O2 - LDFLAGS = - LDLIBS = --DFLAGS= -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_CURSES_LIB=1 -+DFLAGS= -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_CURSES_LIB=0 - LOBJS= bam_aux.o bam.o bam_import.o sam.o \ - sam_header.o bam_plbuf.o - AOBJS= bam_index.o bam_plcmd.o sam_view.o \ -@@ -37,7 +37,7 @@ - faidx.o stats.o stats_isize.o bam_flags.o bam_split.o \ - bam_tview.o bam_tview_curses.o bam_tview_html.o bam_lpileup.o - INCLUDES= -I. 
-I$(HTSDIR) --LIBCURSES= -lcurses # -lXCurses -+#LIBCURSES= -lcurses # -lXCurses - - prefix = /usr/local - exec_prefix = $(prefix) From fd11db92e16475d370dd7ac8a39d9c35fc261e63 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Wed, 27 Jul 2016 13:25:11 -0400 Subject: [PATCH 228/284] Add package for bcftools --- .../builtin/packages/bcftools/package.py | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 var/spack/repos/builtin/packages/bcftools/package.py diff --git a/var/spack/repos/builtin/packages/bcftools/package.py b/var/spack/repos/builtin/packages/bcftools/package.py new file mode 100644 index 00000000000..a1b4a06dbb3 --- /dev/null +++ b/var/spack/repos/builtin/packages/bcftools/package.py @@ -0,0 +1,43 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Bcftools(Package): + """BCFtools is a set of utilities that manipulate variant calls in the + Variant Call Format (VCF) and its binary counterpart BCF. All + commands work transparently with both VCFs and BCFs, both + uncompressed and BGZF-compressed.""" + + homepage = "http://samtools.github.io/bcftools/" + url = "https://github.com/samtools/bcftools/releases/download/1.3.1/bcftools-1.3.1.tar.bz2" + + version('1.3.1', '575001e9fca37cab0c7a7287ad4b1cdb') + + depends_on('zlib') + + def install(self, spec, prefix): + make("prefix=%s" % prefix, "all") + make("prefix=%s" % prefix, "install") From e12d7aaddab5453bcefabb90cb01aef130d48d15 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Wed, 27 Jul 2016 15:01:34 -0400 Subject: [PATCH 229/284] Add package for seqtk Seqtk is a tool for working with fast[aq] files. --- .../repos/builtin/packages/seqtk/package.py | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 var/spack/repos/builtin/packages/seqtk/package.py diff --git a/var/spack/repos/builtin/packages/seqtk/package.py b/var/spack/repos/builtin/packages/seqtk/package.py new file mode 100644 index 00000000000..ca168c176cb --- /dev/null +++ b/var/spack/repos/builtin/packages/seqtk/package.py @@ -0,0 +1,43 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. 
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Seqtk(Package): + """Toolkit for processing sequences in FASTA/Q formats.""" + + homepage = "https://github.com/lh3/seqtk" + url = "https://github.com/lh3/seqtk/archive/v1.1.tar.gz" + + version('1.2', '255ffe05bf2f073dc57abcff97f11a37') + version('1.1', 'ebf5cc57698a217150c2250494e039a2') + + depends_on('zlib') + + def install(self, spec, prefix): + make() + mkdirp(prefix.bin) + install('seqtk', prefix.bin) + set_executable(join_path(prefix.bin, 'seqtk')) From 0c02ee86a752f4e60680ea42a6525622b57bff48 Mon Sep 17 00:00:00 2001 From: Bruno Turcksin Date: Thu, 4 Aug 2016 14:29:10 -0400 Subject: [PATCH 230/284] Add Trilinos 12.6.4 (#1447) --- var/spack/repos/builtin/packages/trilinos/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py index 4d1d27e74ac..1d83e055c9c 100644 --- a/var/spack/repos/builtin/packages/trilinos/package.py +++ b/var/spack/repos/builtin/packages/trilinos/package.py @@ -45,6 +45,7 @@ class Trilinos(Package): homepage = "https://trilinos.org/" url = "http://trilinos.csbsju.edu/download/files/trilinos-12.2.1-Source.tar.gz" + version('12.6.4', 'db25056617c688f6f25092376a03200f') version('12.6.3', '960f5f4d3f7c3da818e5a5fb4684559eff7e0c25f959ef576561b8a52f0e4d1e') version('12.6.2', '0c076090508170ddee5efeed317745027f9418319720dc40a072e478775279f9') version('12.6.1', 'adcf2d3aab74cdda98f88fee19cd1442604199b0515ee3da4d80cbe8f37d00e4') From d6dedee6ff9c31c31705e3fdfba1c4e07954b477 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Thu, 4 Aug 2016 15:23:04 -0400 Subject: [PATCH 231/284] Flake8 cleanup --- var/spack/repos/builtin/packages/samtools/package.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/samtools/package.py b/var/spack/repos/builtin/packages/samtools/package.py index 7df3f5bcf89..aafda8ce3ca 100644 --- a/var/spack/repos/builtin/packages/samtools/package.py +++ b/var/spack/repos/builtin/packages/samtools/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Samtools(Package): """SAM Tools provide various utilities for manipulating alignments in the SAM format, including sorting, merging, indexing and generating @@ -32,12 +33,12 @@ class Samtools(Package): homepage = "www.htslib.org" url = "https://github.com/samtools/samtools/releases/download/1.3.1/samtools-1.3.1.tar.bz2" - 
version('1.3.1','a7471aa5a1eb7fc9cc4c6491d73c2d88') - version('1.2','988ec4c3058a6ceda36503eebecd4122') + version('1.3.1', 'a7471aa5a1eb7fc9cc4c6491d73c2d88') + version('1.2', '988ec4c3058a6ceda36503eebecd4122') depends_on("ncurses") - depends_on("htslib", when='@1.3.1') # htslib became standalone - depends_on('zlib', when='@1.2') # needed for builtin htslib + depends_on("htslib", when='@1.3.1') # htslib became standalone + depends_on('zlib', when='@1.2') # needed for builtin htslib def install(self, spec, prefix): if self.spec.version >= Version('1.3.1'): From 20221ee3aa3003d4180c86bf43bb7156a04d08e7 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Fri, 5 Aug 2016 11:06:07 -0500 Subject: [PATCH 232/284] Catch error for version in VCS This PR will catch the error where the url can not be determined from a VCS URL, such as git. It will print a message to the console and move on because it should not be a fatal error at this point in the process. This should fix #1459. --- lib/spack/spack/stage.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 553c4ad05f4..8fcc331482a 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -321,15 +321,19 @@ def fetch(self, mirror_only=False): package_name = os.path.dirname(self.mirror_path) pkg = spack.repo.get(package_name) if pkg.list_url is not None and pkg.url is not None: - archive_version = spack.url.parse_version( - self.default_fetcher.url) - versions = pkg.fetch_remote_versions() try: - url_from_list = versions[Version(archive_version)] - fetchers.append(fs.URLFetchStrategy(url_from_list, digest)) - except KeyError: - tty.msg("Can not find version %s in url_list" % - archive_version) + archive_version = spack.url.parse_version( + self.default_fetcher.url) + versions = pkg.fetch_remote_versions() + try: + url_from_list = versions[Version(archive_version)] + fetchers.append(fs.URLFetchStrategy( + url_from_list, digest)) + except KeyError: + tty.msg("Can not find version %s in url_list" % + archive_version) + except: + tty.msg("Could not determine url from list_url.") for fetcher in fetchers: try: From 769408130a6dcf0c6ae44570a7577c2000bffe4f Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Fri, 5 Aug 2016 17:47:11 -0400 Subject: [PATCH 233/284] Samtools depends on htslib from 1.3.1 onward Samtools used to (before 1.3.1) include it's own copy of htslib. Going forward, it needs to use the standalone htslib. 
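In Spack's version syntax the trailing colon is what makes a constraint open-ended,
which is exactly the distinction the fix below relies on (a two-line sketch, not
part of the diff):

    depends_on('htslib', when='@1.3.1')    # applies to samtools 1.3.1 only
    depends_on('htslib', when='@1.3.1:')   # applies to samtools 1.3.1 and anything newer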
--- var/spack/repos/builtin/packages/samtools/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/samtools/package.py b/var/spack/repos/builtin/packages/samtools/package.py index aafda8ce3ca..6b0b2247855 100644 --- a/var/spack/repos/builtin/packages/samtools/package.py +++ b/var/spack/repos/builtin/packages/samtools/package.py @@ -37,8 +37,8 @@ class Samtools(Package): version('1.2', '988ec4c3058a6ceda36503eebecd4122') depends_on("ncurses") - depends_on("htslib", when='@1.3.1') # htslib became standalone - depends_on('zlib', when='@1.2') # needed for builtin htslib + depends_on("htslib", when='@1.3.1:') # htslib became standalone + depends_on('zlib', when='@1.2') # needed for builtin htslib def install(self, spec, prefix): if self.spec.version >= Version('1.3.1'): From a6afaeb9748396688f2c6d411790d5defc4b541c Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Fri, 5 Aug 2016 17:51:24 -0400 Subject: [PATCH 234/284] Remove lingering FIXME commentary --- var/spack/repos/builtin/packages/libgtextutils/package.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/libgtextutils/package.py b/var/spack/repos/builtin/packages/libgtextutils/package.py index 90a981b3579..201e0318694 100644 --- a/var/spack/repos/builtin/packages/libgtextutils/package.py +++ b/var/spack/repos/builtin/packages/libgtextutils/package.py @@ -33,9 +33,6 @@ class Libgtextutils(Package): version('0.7', '593c7c62e3c76ec49f5736eed4f96806') - # FIXME: Add dependencies if required. - # depends_on('foo') - def install(self, spec, prefix): configure('--prefix={0}'.format(prefix)) From 4983ebcf7caf0236c37a2f6ca24bf65615cbb1d0 Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Sat, 6 Aug 2016 21:35:52 +0200 Subject: [PATCH 235/284] opium: add new package --- .../repos/builtin/packages/opium/package.py | 55 +++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 var/spack/repos/builtin/packages/opium/package.py diff --git a/var/spack/repos/builtin/packages/opium/package.py b/var/spack/repos/builtin/packages/opium/package.py new file mode 100644 index 00000000000..2c81d92cc0a --- /dev/null +++ b/var/spack/repos/builtin/packages/opium/package.py @@ -0,0 +1,55 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Opium(Package): + """DFT pseudopotential generation project""" + + homepage = "https://opium.sourceforge.net/index.html" + url = "https://downloads.sourceforge.net/project/opium/opium/opium-v3.8/opium-v3.8-src.tgz" + + version('3.8', 'f710c0f869e70352b4a510c31e13bf9f') + + depends_on('blas') + depends_on('lapack') + + def install(self, spec, prefix): + options = [ + 'LDFLAGS=%s %s' % ( + to_link_flags(spec['lapack'].lapack_shared_lib), + to_link_flags(spec['blas'].blas_shared_lib) + ) + ] + + configure(*options) + with working_dir("src", create=False): + make("all-subdirs") + make("opium") + + # opium not have a make install :-(( + mkdirp(self.prefix.bin) + install(join_path(self.stage.source_path, 'opium'), + self.prefix.bin) From 63592096c5ec7ddf4072512303b96ad1505d5659 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Tue, 26 Jul 2016 17:25:40 -0400 Subject: [PATCH 236/284] Add package for prank --- .../repos/builtin/packages/prank/package.py | 44 +++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 var/spack/repos/builtin/packages/prank/package.py diff --git a/var/spack/repos/builtin/packages/prank/package.py b/var/spack/repos/builtin/packages/prank/package.py new file mode 100644 index 00000000000..f551da39665 --- /dev/null +++ b/var/spack/repos/builtin/packages/prank/package.py @@ -0,0 +1,44 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Prank(Package): + """A powerful multiple sequence alignment browser.""" + + homepage = "http://wasabiapp.org/software/prank/" + url = "http://wasabiapp.org/download/prank/prank.source.140603.tgz" + + version('150803', '71ac2659e91c385c96473712c0a23e8a') + + depends_on('mafft') + depends_on('exonerate') + depends_on('bppsuite') # for bppancestor + + def install(self, spec, prefix): + with working_dir('src'): + make() + mkdirp(prefix.bin) + install('prank', prefix.bin) From 9e16902397245cf2dd448710362ac003f2a2711d Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Thu, 28 Jul 2016 19:48:17 -0400 Subject: [PATCH 237/284] Fix name of bpp-suite (missing -...) --- var/spack/repos/builtin/packages/prank/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/prank/package.py b/var/spack/repos/builtin/packages/prank/package.py index f551da39665..d627e8a0b69 100644 --- a/var/spack/repos/builtin/packages/prank/package.py +++ b/var/spack/repos/builtin/packages/prank/package.py @@ -35,7 +35,7 @@ class Prank(Package): depends_on('mafft') depends_on('exonerate') - depends_on('bppsuite') # for bppancestor + depends_on('bpp-suite') # for bppancestor def install(self, spec, prefix): with working_dir('src'): From e0db1f0268c192350fe38baac85620a504b2ce70 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Tue, 26 Jul 2016 17:22:44 -0400 Subject: [PATCH 238/284] Add package for exonerate --- .../builtin/packages/exonerate/package.py | 44 +++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 var/spack/repos/builtin/packages/exonerate/package.py diff --git a/var/spack/repos/builtin/packages/exonerate/package.py b/var/spack/repos/builtin/packages/exonerate/package.py new file mode 100644 index 00000000000..dcc20af2f9e --- /dev/null +++ b/var/spack/repos/builtin/packages/exonerate/package.py @@ -0,0 +1,44 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Exonerate(Package): + """Pairwise sequence alignment of DNA and proteins""" + + homepage = "http://www.ebi.ac.uk/about/vertebrate-genomics/software/exonerate" + url = "http://ftp.ebi.ac.uk/pub/software/vertebrategenomics/exonerate/exonerate-2.2.0.tar.gz" + + version('2.4.0', '126fbade003b80b663a1d530c56f1904') + + depends_on('pkg-config', type="build") + depends_on('glib') + + parallel = False + + def install(self, spec, prefix): + configure('--prefix={0}'.format(prefix), '--disable-debug', '--disable-dependency-tracking') + make() + make('install') From 3c81bb44baab2e3c7d038a61545fcd880a3f261a Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Tue, 26 Jul 2016 17:23:56 -0400 Subject: [PATCH 239/284] Add package for mafft --- .../repos/builtin/packages/mafft/package.py | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 var/spack/repos/builtin/packages/mafft/package.py diff --git a/var/spack/repos/builtin/packages/mafft/package.py b/var/spack/repos/builtin/packages/mafft/package.py new file mode 100644 index 00000000000..131b8c58f9d --- /dev/null +++ b/var/spack/repos/builtin/packages/mafft/package.py @@ -0,0 +1,42 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Mafft(Package): + """MAFFT is a multiple sequence alignment program for unix-like + operating systems. 
It offers a range of multiple alignment + methods, L-INS-i (accurate; for alignment of <~200 sequences), + FFT-NS-2 (fast; for alignment of <~30,000 sequences), etc.""" + + homepage = "http://mafft.cbrc.jp/alignment/software/index.html" + url = "http://mafft.cbrc.jp/alignment/software/mafft-7.221-with-extensions-src.tgz" + + version('7.221', 'b1aad911e51024d631722a2e061ba215') + + def install(self, spec, prefix): + with working_dir('core'): + make('PREFIX=%s' % prefix) + make('PREFIX=%s' % prefix, 'install') From a07a0ef54b43089eb608c0b464db2e2f5f79c115 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Tue, 26 Jul 2016 17:18:37 -0400 Subject: [PATCH 240/284] Add Bio++ tool suite and supporting libs Add the Bio++ suite and its supporting libraries. --- .../builtin/packages/bpp-core/package.py | 41 ++++++++++++++++ .../builtin/packages/bpp-phyl/package.py | 43 +++++++++++++++++ .../repos/builtin/packages/bpp-seq/package.py | 42 +++++++++++++++++ .../builtin/packages/bpp-suite/package.py | 47 +++++++++++++++++++ 4 files changed, 173 insertions(+) create mode 100644 var/spack/repos/builtin/packages/bpp-core/package.py create mode 100644 var/spack/repos/builtin/packages/bpp-phyl/package.py create mode 100644 var/spack/repos/builtin/packages/bpp-seq/package.py create mode 100644 var/spack/repos/builtin/packages/bpp-suite/package.py diff --git a/var/spack/repos/builtin/packages/bpp-core/package.py b/var/spack/repos/builtin/packages/bpp-core/package.py new file mode 100644 index 00000000000..0a109147261 --- /dev/null +++ b/var/spack/repos/builtin/packages/bpp-core/package.py @@ -0,0 +1,41 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class BppCore(Package): + """Bio++ core library.""" + + homepage = "http://biopp.univ-montp2.fr/wiki/index.php/Installation" + url = "http://biopp.univ-montp2.fr/repos/sources/bpp-core-2.2.0.tar.gz" + + version('2.2.0', '5789ed2ae8687d13664140cd77203477') + + depends_on('cmake') + + def install(self, spec, prefix): + cmake('-DCMAKE_INSTALL_PREFIX=%s' % prefix, '-DBUILD_TESTING=FALSE', '.') + make() + make('install') diff --git a/var/spack/repos/builtin/packages/bpp-phyl/package.py b/var/spack/repos/builtin/packages/bpp-phyl/package.py new file mode 100644 index 00000000000..2240f7bd2f0 --- /dev/null +++ b/var/spack/repos/builtin/packages/bpp-phyl/package.py @@ -0,0 +1,43 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class BppPhyl(Package): + """Bio++ phylogeny library.""" + + homepage = "http://biopp.univ-montp2.fr/wiki/index.php/Installation" + url = "http://biopp.univ-montp2.fr/repos/sources/bpp-phyl-2.2.0.tar.gz" + + version('2.2.0', '5c40667ec0bf37e0ecaba321be932770') + + depends_on('cmake') + depends_on('bpp-core') + depends_on('bpp-seq') + + def install(self, spec, prefix): + cmake('-DCMAKE_INSTALL_PREFIX=%s' % prefix, '-DBUILD_TESTING=FALSE', '.') + make() + make('install') diff --git a/var/spack/repos/builtin/packages/bpp-seq/package.py b/var/spack/repos/builtin/packages/bpp-seq/package.py new file mode 100644 index 00000000000..398fda528e9 --- /dev/null +++ b/var/spack/repos/builtin/packages/bpp-seq/package.py @@ -0,0 +1,42 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class BppSeq(Package): + """Bio++ seq library.""" + + homepage = "http://biopp.univ-montp2.fr/wiki/index.php/Installation" + url = "http://biopp.univ-montp2.fr/repos/sources/bpp-seq-2.2.0.tar.gz" + + version('2.2.0', '44adef0ff4d5ca4e69ccf258c9270633') + + depends_on('cmake') + depends_on('bpp-core') + + def install(self, spec, prefix): + cmake('-DCMAKE_INSTALL_PREFIX=%s' % prefix, '-DBUILD_TESTING=FALSE', '.') + make() + make('install') diff --git a/var/spack/repos/builtin/packages/bpp-suite/package.py b/var/spack/repos/builtin/packages/bpp-suite/package.py new file mode 100644 index 00000000000..872c72d498c --- /dev/null +++ b/var/spack/repos/builtin/packages/bpp-suite/package.py @@ -0,0 +1,47 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class BppSuite(Package): + """BppSuite is a suite of ready-to-use programs for phylogenetic and + sequence analysis.""" + + homepage = "http://biopp.univ-montp2.fr/wiki/index.php/BppSuite" + url = "http://biopp.univ-montp2.fr/repos/sources/bppsuite/bppsuite-2.2.0.tar.gz" + + version('2.2.0', 'd8b29ad7ccf5bd3a7beb701350c9e2a4') + + # FIXME: Add dependencies if required. 
+ depends_on('cmake') + depends_on('texinfo') + depends_on('bpp-core') + depends_on('bpp-seq') + depends_on('bpp-phyl') + + def install(self, spec, prefix): + cmake('-DCMAKE_INSTALL_PREFIX=%s' % prefix, '.') + make() + make('install') From 6a62a6b693f020bb221902c9049afdb947492708 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Thu, 4 Aug 2016 15:10:45 -0400 Subject: [PATCH 241/284] Flake8 whitespace cleanup --- var/spack/repos/builtin/packages/bpp-core/package.py | 3 ++- var/spack/repos/builtin/packages/bpp-phyl/package.py | 3 ++- var/spack/repos/builtin/packages/bpp-seq/package.py | 3 ++- var/spack/repos/builtin/packages/exonerate/package.py | 3 ++- 4 files changed, 8 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/bpp-core/package.py b/var/spack/repos/builtin/packages/bpp-core/package.py index 0a109147261..987fd964b9e 100644 --- a/var/spack/repos/builtin/packages/bpp-core/package.py +++ b/var/spack/repos/builtin/packages/bpp-core/package.py @@ -36,6 +36,7 @@ class BppCore(Package): depends_on('cmake') def install(self, spec, prefix): - cmake('-DCMAKE_INSTALL_PREFIX=%s' % prefix, '-DBUILD_TESTING=FALSE', '.') + cmake('-DCMAKE_INSTALL_PREFIX=%s' % prefix, + '-DBUILD_TESTING=FALSE', '.') make() make('install') diff --git a/var/spack/repos/builtin/packages/bpp-phyl/package.py b/var/spack/repos/builtin/packages/bpp-phyl/package.py index 2240f7bd2f0..c50f71385a0 100644 --- a/var/spack/repos/builtin/packages/bpp-phyl/package.py +++ b/var/spack/repos/builtin/packages/bpp-phyl/package.py @@ -38,6 +38,7 @@ class BppPhyl(Package): depends_on('bpp-seq') def install(self, spec, prefix): - cmake('-DCMAKE_INSTALL_PREFIX=%s' % prefix, '-DBUILD_TESTING=FALSE', '.') + cmake('-DCMAKE_INSTALL_PREFIX=%s' % prefix, + '-DBUILD_TESTING=FALSE', '.') make() make('install') diff --git a/var/spack/repos/builtin/packages/bpp-seq/package.py b/var/spack/repos/builtin/packages/bpp-seq/package.py index 398fda528e9..dff26b33085 100644 --- a/var/spack/repos/builtin/packages/bpp-seq/package.py +++ b/var/spack/repos/builtin/packages/bpp-seq/package.py @@ -37,6 +37,7 @@ class BppSeq(Package): depends_on('bpp-core') def install(self, spec, prefix): - cmake('-DCMAKE_INSTALL_PREFIX=%s' % prefix, '-DBUILD_TESTING=FALSE', '.') + cmake('-DCMAKE_INSTALL_PREFIX=%s' % prefix, + '-DBUILD_TESTING=FALSE', '.') make() make('install') diff --git a/var/spack/repos/builtin/packages/exonerate/package.py b/var/spack/repos/builtin/packages/exonerate/package.py index dcc20af2f9e..7921e640584 100644 --- a/var/spack/repos/builtin/packages/exonerate/package.py +++ b/var/spack/repos/builtin/packages/exonerate/package.py @@ -39,6 +39,7 @@ class Exonerate(Package): parallel = False def install(self, spec, prefix): - configure('--prefix={0}'.format(prefix), '--disable-debug', '--disable-dependency-tracking') + configure('--prefix={0}'.format(prefix), '--disable-debug', + '--disable-dependency-tracking') make() make('install') From f800708ef30f70a8b3f251ca267dee8afc2395d5 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Fri, 5 Aug 2016 18:11:05 -0400 Subject: [PATCH 242/284] Switch to using *std_cmake_args --- var/spack/repos/builtin/packages/bpp-core/package.py | 3 +-- var/spack/repos/builtin/packages/bpp-phyl/package.py | 3 +-- var/spack/repos/builtin/packages/bpp-seq/package.py | 3 +-- var/spack/repos/builtin/packages/bpp-suite/package.py | 2 +- 4 files changed, 4 insertions(+), 7 deletions(-) diff --git a/var/spack/repos/builtin/packages/bpp-core/package.py b/var/spack/repos/builtin/packages/bpp-core/package.py 
index 987fd964b9e..40360a03b3b 100644 --- a/var/spack/repos/builtin/packages/bpp-core/package.py +++ b/var/spack/repos/builtin/packages/bpp-core/package.py @@ -36,7 +36,6 @@ class BppCore(Package): depends_on('cmake') def install(self, spec, prefix): - cmake('-DCMAKE_INSTALL_PREFIX=%s' % prefix, - '-DBUILD_TESTING=FALSE', '.') + cmake('-DBUILD_TESTING=FALSE', '.', *std_cmake_args) make() make('install') diff --git a/var/spack/repos/builtin/packages/bpp-phyl/package.py b/var/spack/repos/builtin/packages/bpp-phyl/package.py index c50f71385a0..62db8d55459 100644 --- a/var/spack/repos/builtin/packages/bpp-phyl/package.py +++ b/var/spack/repos/builtin/packages/bpp-phyl/package.py @@ -38,7 +38,6 @@ class BppPhyl(Package): depends_on('bpp-seq') def install(self, spec, prefix): - cmake('-DCMAKE_INSTALL_PREFIX=%s' % prefix, - '-DBUILD_TESTING=FALSE', '.') + cmake('-DBUILD_TESTING=FALSE', '.', *std_cmake_args) make() make('install') diff --git a/var/spack/repos/builtin/packages/bpp-seq/package.py b/var/spack/repos/builtin/packages/bpp-seq/package.py index dff26b33085..7132c668b3d 100644 --- a/var/spack/repos/builtin/packages/bpp-seq/package.py +++ b/var/spack/repos/builtin/packages/bpp-seq/package.py @@ -37,7 +37,6 @@ class BppSeq(Package): depends_on('bpp-core') def install(self, spec, prefix): - cmake('-DCMAKE_INSTALL_PREFIX=%s' % prefix, - '-DBUILD_TESTING=FALSE', '.') + cmake('-DBUILD_TESTING=FALSE', '.', *std_cmake_args) make() make('install') diff --git a/var/spack/repos/builtin/packages/bpp-suite/package.py b/var/spack/repos/builtin/packages/bpp-suite/package.py index 872c72d498c..41e90e375dc 100644 --- a/var/spack/repos/builtin/packages/bpp-suite/package.py +++ b/var/spack/repos/builtin/packages/bpp-suite/package.py @@ -42,6 +42,6 @@ class BppSuite(Package): depends_on('bpp-phyl') def install(self, spec, prefix): - cmake('-DCMAKE_INSTALL_PREFIX=%s' % prefix, '.') + cmake('.', *std_cmake_args) make() make('install') From 653905e3a26ac5a0160982f03aac7fd6c3e8804f Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Fri, 22 Jul 2016 19:46:36 -0400 Subject: [PATCH 243/284] Add depends_on('perl') to git package This commit changes the git package to depend_on('perl'). The system perl is not always sufficient to install git (e.g. a CentOS7 system with the development tools group installed has perl but not the ExtUtils::MakeMaker package that git needs) and one can't always update the system's perl. This PR depends_on PR #1339, which adds a perl package to spack. --- var/spack/repos/builtin/packages/git/package.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/git/package.py b/var/spack/repos/builtin/packages/git/package.py index 01364580a0e..0f2fbc73dbd 100644 --- a/var/spack/repos/builtin/packages/git/package.py +++ b/var/spack/repos/builtin/packages/git/package.py @@ -55,7 +55,7 @@ class Git(Package): depends_on("zlib") # Use system perl for now. 
- # depends_on("perl") + depends_on("perl") # depends_on("pcre") def install(self, spec, prefix): @@ -64,8 +64,8 @@ def install(self, spec, prefix): "--without-pcre", "--with-openssl=%s" % spec['openssl'].prefix, "--with-zlib=%s" % spec['zlib'].prefix, - "--with-curl=%s" % spec['curl'].prefix, - "--with-expat=%s" % spec['expat'].prefix + "--with-expat=%s" % spec['expat'].prefix, + "--with-perl=%s" % join_path(spec['perl'].prefix.bin, 'perl'), ] which('autoreconf')('-i') From 3cf2fd40a8dd1abede6962317418b33385082f93 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Sat, 23 Jul 2016 16:18:58 -0400 Subject: [PATCH 244/284] Enable depends_on('pcre') also While I have the patient on the operating table, @adamjstewart asked me to uncomment and test the depends_on('pcre'). Did it and it Works For Me(tm). --- var/spack/repos/builtin/packages/git/package.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/git/package.py b/var/spack/repos/builtin/packages/git/package.py index 0f2fbc73dbd..12b94704340 100644 --- a/var/spack/repos/builtin/packages/git/package.py +++ b/var/spack/repos/builtin/packages/git/package.py @@ -53,10 +53,8 @@ class Git(Package): depends_on("expat") depends_on("gettext") depends_on("zlib") - - # Use system perl for now. + depends_on("pcre") depends_on("perl") - # depends_on("pcre") def install(self, spec, prefix): configure_args = [ From 152fa33a55550db66f35885dc3da1a38250a1606 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Tue, 26 Jul 2016 11:13:39 -0400 Subject: [PATCH 245/284] Call configure `--with-libpcre=...` to use our pcre An earlier commit claimed that this package depends_on('pcre') but I didn't fix the call to configure that enabled it. This fixes that. --- var/spack/repos/builtin/packages/git/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/git/package.py b/var/spack/repos/builtin/packages/git/package.py index 12b94704340..9570f3f8249 100644 --- a/var/spack/repos/builtin/packages/git/package.py +++ b/var/spack/repos/builtin/packages/git/package.py @@ -59,7 +59,7 @@ class Git(Package): def install(self, spec, prefix): configure_args = [ "--prefix=%s" % prefix, - "--without-pcre", + "--with-libpcre=%s" % spec['pcre'].prefix, "--with-openssl=%s" % spec['openssl'].prefix, "--with-zlib=%s" % spec['zlib'].prefix, "--with-expat=%s" % spec['expat'].prefix, From f699d7c08e1454aa1e1e1bc29c3de583be4897dc Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Tue, 26 Jul 2016 11:15:27 -0400 Subject: [PATCH 246/284] [Whitespace] Gather the depends_on's together. --- var/spack/repos/builtin/packages/git/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/git/package.py b/var/spack/repos/builtin/packages/git/package.py index 9570f3f8249..5c559eb19cc 100644 --- a/var/spack/repos/builtin/packages/git/package.py +++ b/var/spack/repos/builtin/packages/git/package.py @@ -55,6 +55,7 @@ class Git(Package): depends_on("zlib") depends_on("pcre") depends_on("perl") + depends_on("zlib") def install(self, spec, prefix): configure_args = [ From 1de5817b58553461bfb1ffe39146a31f588a6df8 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Wed, 3 Aug 2016 20:28:54 -0400 Subject: [PATCH 247/284] Clean up rebase problems My rebase duplicated the depends_on('zlib') and dropped the --with-curl from the configure. This fixes those. 
--- var/spack/repos/builtin/packages/git/package.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/var/spack/repos/builtin/packages/git/package.py b/var/spack/repos/builtin/packages/git/package.py
index 5c559eb19cc..e975da5e79d 100644
--- a/var/spack/repos/builtin/packages/git/package.py
+++ b/var/spack/repos/builtin/packages/git/package.py
@@ -55,7 +55,6 @@ class Git(Package):
     depends_on("zlib")
     depends_on("pcre")
     depends_on("perl")
-    depends_on("zlib")

     def install(self, spec, prefix):
         configure_args = [
@@ -63,6 +62,7 @@ def install(self, spec, prefix):
             "--with-libpcre=%s" % spec['pcre'].prefix,
             "--with-openssl=%s" % spec['openssl'].prefix,
             "--with-zlib=%s" % spec['zlib'].prefix,
+            "--with-curl=%s" % spec['curl'].prefix,
             "--with-expat=%s" % spec['expat'].prefix,
             "--with-perl=%s" % join_path(spec['perl'].prefix.bin, 'perl'),
         ]

From 9ebbde0e0100b8a9c077d6cb1e67fe7a6b2ec2ec Mon Sep 17 00:00:00 2001
From: George Hartzell
Date: Thu, 4 Aug 2016 14:14:08 -0400
Subject: [PATCH 248/284] Make configure use our gettext library

The configure script needs extra encouragement to link against our
gettext/libintl library.
--- var/spack/repos/builtin/packages/git/package.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/var/spack/repos/builtin/packages/git/package.py b/var/spack/repos/builtin/packages/git/package.py
index e975da5e79d..3cc879088db 100644
--- a/var/spack/repos/builtin/packages/git/package.py
+++ b/var/spack/repos/builtin/packages/git/package.py
@@ -57,6 +57,7 @@ class Git(Package):
     depends_on("perl")

     def install(self, spec, prefix):
+        env['LDFLAGS'] = "-L%s" % spec['gettext'].prefix.lib + " -lintl"
         configure_args = [
             "--prefix=%s" % prefix,
             "--with-libpcre=%s" % spec['pcre'].prefix,

From 09d96bef9ff2f00ad18d70f3ff88bd3734ef149b Mon Sep 17 00:00:00 2001
From: George Hartzell
Date: Sat, 6 Aug 2016 17:39:05 -0400
Subject: [PATCH 249/284] No need to patch Lmod after 6.4.4

The Lmod author changed the src so that it uses the tclsh (and shared
libraries) discovered at configure time. He did it differently than I did
in this patch, but his changes solve our problem too, so...
--- var/spack/repos/builtin/packages/lmod/package.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/var/spack/repos/builtin/packages/lmod/package.py b/var/spack/repos/builtin/packages/lmod/package.py
index 01911c1a304..a3ae4a7f512 100644
--- a/var/spack/repos/builtin/packages/lmod/package.py
+++ b/var/spack/repos/builtin/packages/lmod/package.py
@@ -38,6 +38,7 @@ class Lmod(Package):
     homepage = 'https://www.tacc.utexas.edu/research-development/tacc-projects/lmod'  # NOQA: ignore=E501
     url = 'https://github.com/TACC/Lmod/archive/6.4.1.tar.gz'

+    version('6.4.5', '14f6c58dbc0a5a75574d795eac2c1e3c')
     version('6.4.1', '7978ba777c8aa41a4d8c05fec5f780f4')
     version('6.3.7', '0fa4d5a24c41cae03776f781aa2dedc1')
     version('6.0.1', '91abf52fe5033bd419ffe2842ebe7af9')
@@ -55,15 +56,16 @@ def setup_environment(self, spack_env, run_env):
         spack_env.append_path('LUA_PATH', stage_lua_path.format(
             version=self.version), separator=';')

-    patch('fix_tclsh_paths.patch')
+    patch('fix_tclsh_paths.patch', when='@:6.4.3')

     def patch(self):
        """The tcl scripts should use the tclsh that was discovered
        by the configure script. Touch up their #! lines so that the
        sed in the Makefile's install step has something to work on.
Requires the change in the associated patch file.fg""" - for tclscript in glob('src/*.tcl'): - filter_file(r'^#!.*tclsh', '#!@path_to_tclsh@', tclscript) + if self.spec.version <= Version('6.4.3'): + for tclscript in glob('src/*.tcl'): + filter_file(r'^#!.*tclsh', '#!@path_to_tclsh@', tclscript) def install(self, spec, prefix): configure('--prefix=%s' % prefix) From 2ad21e70700845a51beae5855171492232bd99c1 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sat, 6 Aug 2016 18:12:47 -0500 Subject: [PATCH 250/284] Add list_url to pango package Added a `list_url` so the updated pango tar file can be retrieved. --- var/spack/repos/builtin/packages/pango/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/pango/package.py b/var/spack/repos/builtin/packages/pango/package.py index 7ae90f17955..2da20e9dc27 100644 --- a/var/spack/repos/builtin/packages/pango/package.py +++ b/var/spack/repos/builtin/packages/pango/package.py @@ -32,6 +32,8 @@ class Pango(Package): far has been done in the context of the GTK+ widget toolkit.""" homepage = "http://www.pango.org" url = "http://ftp.gnome.org/pub/gnome/sources/pango/1.36/pango-1.36.8.tar.xz" + list_url = "http://ftp.gnome.org/pub/gnome/sources/pango/" + list_depth = 2 version('1.36.8', '217a9a753006275215fa9fa127760ece') version('1.40.1', '6fc88c6529890d6c8e03074d57a3eceb') From e9edfec0ec8385ea48faf17f613433089810f8ee Mon Sep 17 00:00:00 2001 From: Pramod Kumbhar Date: Sun, 7 Aug 2016 01:09:28 +0200 Subject: [PATCH 251/284] PDT should download full installer package pdtoolkit-X.tar.gz (default pdt-X.tar.gz is only for x86 and cray) --- var/spack/repos/builtin/packages/pdt/package.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/pdt/package.py b/var/spack/repos/builtin/packages/pdt/package.py index 60136fc0cdb..32611a7b147 100644 --- a/var/spack/repos/builtin/packages/pdt/package.py +++ b/var/spack/repos/builtin/packages/pdt/package.py @@ -34,10 +34,12 @@ class Pdt(Package): class library supporting common PDB operations. """ homepage = "https://www.cs.uoregon.edu/research/pdt/home.php" - url = "https://www.cs.uoregon.edu/research/tau/pdt_releases/pdt-3.21.tar.gz" - version('3.21', '8df94298b71703decf680709a4ddf68f') - version('3.19', 'ba5591994998771fdab216699e362228') + version('3.21', '3092ca0d8833b69992c17e63ae66c263') + version('3.19', '5c5e1e6607086aa13bf4b1b9befc5864') + + def url_for_version(self, version): + return 'https://www.cs.uoregon.edu/research/tau/pdt_releases/pdtoolkit-%s.tar.gz' % (version) def install(self, spec, prefix): configure('-prefix=%s' % prefix) From d7329d7bc22661f469ac1cfe4d0c69faedbade56 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Sun, 7 Aug 2016 17:53:25 -0500 Subject: [PATCH 252/284] Fix erroneously stripped trailing whitespace in patch (#1467) --- .../builtin/packages/pkg-config/g_date_strftime.patch | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/pkg-config/g_date_strftime.patch b/var/spack/repos/builtin/packages/pkg-config/g_date_strftime.patch index 578cbf4d7c4..9538f23875e 100644 --- a/var/spack/repos/builtin/packages/pkg-config/g_date_strftime.patch +++ b/var/spack/repos/builtin/packages/pkg-config/g_date_strftime.patch @@ -20,14 +20,15 @@ index 4aece02..92c34d2 100644 +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wformat-nonliteral" + - gsize - g_date_strftime (gchar *s, - gsize slen, + gsize + g_date_strftime (gchar *s, + gsize slen, @@ -2549,3 +2552,5 @@ g_date_strftime (gchar *s, return retval; #endif } + +#pragma GCC diagnostic pop --- +-- 2.7.1 + From db87a9d3ce4666d655d6727b9b2179a36d55d5f7 Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Mon, 8 Aug 2016 00:54:01 +0200 Subject: [PATCH 253/284] tar: fix 1.28 on macOS; add 1.29 (#1462) --- .../tar/gnutar-configure-xattrs.patch | 482 ++++++++++++++++++ .../repos/builtin/packages/tar/package.py | 14 +- 2 files changed, 494 insertions(+), 2 deletions(-) create mode 100644 var/spack/repos/builtin/packages/tar/gnutar-configure-xattrs.patch diff --git a/var/spack/repos/builtin/packages/tar/gnutar-configure-xattrs.patch b/var/spack/repos/builtin/packages/tar/gnutar-configure-xattrs.patch new file mode 100644 index 00000000000..e5c183b7203 --- /dev/null +++ b/var/spack/repos/builtin/packages/tar/gnutar-configure-xattrs.patch @@ -0,0 +1,482 @@ +diff --git a/Makefile.in b/Makefile.in +index f9f1d1d..b403e46 100644 +--- a/Makefile.in ++++ b/Makefile.in +@@ -1,4 +1,4 @@ +-# Makefile.in generated by automake 1.14 from Makefile.am. ++# Makefile.in generated by automake 1.14.1 from Makefile.am. + # @configure_input@ + + # Copyright (C) 1994-2013 Free Software Foundation, Inc. +@@ -1583,9 +1583,10 @@ distcheck: dist + && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \ + && am__cwd=`pwd` \ + && $(am__cd) $(distdir)/_build \ +- && ../configure --srcdir=.. --prefix="$$dc_install_base" \ ++ && ../configure \ + $(AM_DISTCHECK_CONFIGURE_FLAGS) \ + $(DISTCHECK_CONFIGURE_FLAGS) \ ++ --srcdir=.. --prefix="$$dc_install_base" \ + && $(MAKE) $(AM_MAKEFLAGS) \ + && $(MAKE) $(AM_MAKEFLAGS) dvi \ + && $(MAKE) $(AM_MAKEFLAGS) check \ +diff --git a/aclocal.m4 b/aclocal.m4 +index 0e09589..804c0b3 100644 +--- a/aclocal.m4 ++++ b/aclocal.m4 +@@ -1,4 +1,4 @@ +-# generated automatically by aclocal 1.14 -*- Autoconf -*- ++# generated automatically by aclocal 1.14.1 -*- Autoconf -*- + + # Copyright (C) 1996-2013 Free Software Foundation, Inc. + +@@ -35,7 +35,7 @@ AC_DEFUN([AM_AUTOMAKE_VERSION], + [am__api_version='1.14' + dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to + dnl require some minimum version. Point them to the right macro. +-m4_if([$1], [1.14], [], ++m4_if([$1], [1.14.1], [], + [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl + ]) + +@@ -51,7 +51,7 @@ m4_define([_AM_AUTOCONF_VERSION], []) + # Call AM_AUTOMAKE_VERSION and AM_AUTOMAKE_VERSION so they can be traced. + # This function is AC_REQUIREd by AM_INIT_AUTOMAKE. 
+ AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION], +-[AM_AUTOMAKE_VERSION([1.14])dnl ++[AM_AUTOMAKE_VERSION([1.14.1])dnl + m4_ifndef([AC_AUTOCONF_VERSION], + [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl + _AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))]) +diff --git a/config.h.in b/config.h.in +index 6646ea5..8571ef4 100644 +--- a/config.h.in ++++ b/config.h.in +@@ -617,7 +617,7 @@ + */ + #undef HAVE_ALLOCA_H + +-/* define to 1 if we have header */ ++/* Define to 1 if you have the header file. */ + #undef HAVE_ATTR_XATTR_H + + /* Define to 1 if you have the header file. */ +@@ -924,21 +924,12 @@ + /* Define to 1 if you have the header file. */ + #undef HAVE_FEATURES_H + +-/* Define to 1 if you have the `fgetxattr' function. */ +-#undef HAVE_FGETXATTR +- +-/* Define to 1 if you have the `flistxattr' function. */ +-#undef HAVE_FLISTXATTR +- + /* Define to 1 if you have the `flockfile' function. */ + #undef HAVE_FLOCKFILE + + /* Define to 1 if fseeko (and presumably ftello) exists and is declared. */ + #undef HAVE_FSEEKO + +-/* Define to 1 if you have the `fsetxattr' function. */ +-#undef HAVE_FSETXATTR +- + /* Define to 1 if you have the `fstatat' function. */ + #undef HAVE_FSTATAT + +@@ -990,9 +981,6 @@ + /* Define to 1 if you have the `gettimeofday' function. */ + #undef HAVE_GETTIMEOFDAY + +-/* Define to 1 if you have the `getxattr' function. */ +-#undef HAVE_GETXATTR +- + /* Define to 1 if you have the `grantpt' function. */ + #undef HAVE_GRANTPT + +@@ -1045,12 +1033,6 @@ + /* Define to 1 if you have the `lchown' function. */ + #undef HAVE_LCHOWN + +-/* Define to 1 if you have the `lgetxattr' function. */ +-#undef HAVE_LGETXATTR +- +-/* Define to 1 if you have the `attr' library (-lattr). */ +-#undef HAVE_LIBATTR +- + /* Define to 1 if you have the header file. */ + #undef HAVE_LIBGEN_H + +@@ -1069,12 +1051,6 @@ + /* Define to 1 if you have the header file. */ + #undef HAVE_LINUX_FD_H + +-/* Define to 1 if you have the `listxattr' function. */ +-#undef HAVE_LISTXATTR +- +-/* Define to 1 if you have the `llistxattr' function. */ +-#undef HAVE_LLISTXATTR +- + /* Define to 1 if you have the header file. */ + #undef HAVE_LOCALE_H + +@@ -1087,9 +1063,6 @@ + /* Define to 1 if the system has the type 'long long int'. */ + #undef HAVE_LONG_LONG_INT + +-/* Define to 1 if you have the `lsetxattr' function. */ +-#undef HAVE_LSETXATTR +- + /* Define to 1 if you have the `lstat' function. */ + #undef HAVE_LSTAT + +@@ -1867,9 +1840,6 @@ + /* Define to 1 if you have the `setlocale' function. */ + #undef HAVE_SETLOCALE + +-/* Define to 1 if you have the `setxattr' function. */ +-#undef HAVE_SETXATTR +- + /* Define to 1 if you have the header file. */ + #undef HAVE_SGTTY_H + +@@ -2074,7 +2044,7 @@ + /* Define to 1 if you have the header file. */ + #undef HAVE_SYS_WAIT_H + +-/* define to 1 if we have header */ ++/* Define to 1 if you have the header file. */ + #undef HAVE_SYS_XATTR_H + + /* Define if struct tm has the tm_gmtoff member. 
*/ +diff --git a/configure b/configure +index cfdd721..8cf6e91 100755 +--- a/configure ++++ b/configure +@@ -663,8 +663,6 @@ RSH + LIBOBJS + TAR_COND_GRANTPT_FALSE + TAR_COND_GRANTPT_TRUE +-TAR_LIB_ATTR_FALSE +-TAR_LIB_ATTR_TRUE + TAR_COND_XATTR_H_FALSE + TAR_COND_XATTR_H_TRUE + GNULIB_TEST_WARN_CFLAGS +@@ -34994,41 +34992,6 @@ else + TAR_COND_XATTR_H_FALSE= + fi + +- if false; then +- TAR_LIB_ATTR_TRUE= +- TAR_LIB_ATTR_FALSE='#' +-else +- TAR_LIB_ATTR_TRUE='#' +- TAR_LIB_ATTR_FALSE= +-fi +- +- if test "$ac_cv_header_sys_xattr_h" = yes; then +- for ac_func in getxattr fgetxattr lgetxattr \ +- setxattr fsetxattr lsetxattr \ +- listxattr flistxattr llistxattr +-do : +- as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` +-ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var" +-if eval test \"x\$"$as_ac_var"\" = x"yes"; then : +- cat >>confdefs.h <<_ACEOF +-#define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 +-_ACEOF +- # only when functions are present +- +-$as_echo "#define HAVE_SYS_XATTR_H 1" >>confdefs.h +- +- if test "$with_xattrs" != no; then +- +-$as_echo "#define HAVE_XATTRS /**/" >>confdefs.h +- +- fi +- +-fi +-done +- +- fi +- +- # If is not found, then check for + if test "$ac_cv_header_sys_xattr_h" != yes; then + for ac_header in attr/xattr.h + do : +@@ -35050,13 +35013,20 @@ else + TAR_COND_XATTR_H_FALSE= + fi + +- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for fgetxattr in -lattr" >&5 +-$as_echo_n "checking for fgetxattr in -lattr... " >&6; } +-if ${ac_cv_lib_attr_fgetxattr+:} false; then : ++ fi ++ ++ if test "$with_xattrs" != no; then ++ for i in getxattr fgetxattr lgetxattr \ ++ setxattr fsetxattr lsetxattr \ ++ listxattr flistxattr llistxattr ++ do ++ as_ac_Search=`$as_echo "ac_cv_search_$i" | $as_tr_sh` ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing $i" >&5 ++$as_echo_n "checking for library containing $i... " >&6; } ++if eval \${$as_ac_Search+:} false; then : + $as_echo_n "(cached) " >&6 + else +- ac_check_lib_save_LIBS=$LIBS +-LIBS="-lattr $LIBS" ++ ac_func_search_save_LIBS=$LIBS + cat confdefs.h - <<_ACEOF >conftest.$ac_ext + /* end confdefs.h. 
*/ + +@@ -35066,67 +35036,56 @@ cat confdefs.h - <<_ACEOF >conftest.$ac_ext + #ifdef __cplusplus + extern "C" + #endif +-char fgetxattr (); ++char $i (); + int + main () + { +-return fgetxattr (); ++return $i (); + ; + return 0; + } + _ACEOF +-if ac_fn_c_try_link "$LINENO"; then : +- ac_cv_lib_attr_fgetxattr=yes +-else +- ac_cv_lib_attr_fgetxattr=no ++for ac_lib in '' attr; do ++ if test -z "$ac_lib"; then ++ ac_res="none required" ++ else ++ ac_res=-l$ac_lib ++ LIBS="-l$ac_lib $ac_func_search_save_LIBS" ++ fi ++ if ac_fn_c_try_link "$LINENO"; then : ++ eval "$as_ac_Search=\$ac_res" + fi + rm -f core conftest.err conftest.$ac_objext \ +- conftest$ac_exeext conftest.$ac_ext +-LIBS=$ac_check_lib_save_LIBS +-fi +-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_attr_fgetxattr" >&5 +-$as_echo "$ac_cv_lib_attr_fgetxattr" >&6; } +-if test "x$ac_cv_lib_attr_fgetxattr" = xyes; then : +- cat >>confdefs.h <<_ACEOF +-#define HAVE_LIBATTR 1 +-_ACEOF +- +- LIBS="-lattr $LIBS" +- ++ conftest$ac_exeext ++ if eval \${$as_ac_Search+:} false; then : ++ break + fi ++done ++if eval \${$as_ac_Search+:} false; then : + +- if test "$ac_cv_lib_attr_fgetxattr" = yes; then +- TAR_LIB_ATTR_TRUE= +- TAR_LIB_ATTR_FALSE='#' + else +- TAR_LIB_ATTR_TRUE='#' +- TAR_LIB_ATTR_FALSE= ++ eval "$as_ac_Search=no" ++fi ++rm conftest.$ac_ext ++LIBS=$ac_func_search_save_LIBS + fi ++eval ac_res=\$$as_ac_Search ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 ++$as_echo "$ac_res" >&6; } ++eval ac_res=\$$as_ac_Search ++if test "$ac_res" != no; then : ++ test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" + +- if test "$ac_cv_header_attr_xattr_h" = yes; then +- for ac_func in getxattr fgetxattr lgetxattr \ +- setxattr fsetxattr lsetxattr \ +- listxattr flistxattr llistxattr +-do : +- as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` +-ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var" +-if eval test \"x\$"$as_ac_var"\" = x"yes"; then : +- cat >>confdefs.h <<_ACEOF +-#define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 +-_ACEOF +- # only when functions are present ++fi + +-$as_echo "#define HAVE_ATTR_XATTR_H 1" >>confdefs.h ++ eval found=\$ac_cv_search_$i ++ test "$found" = "no" && break ++ done + +- if test "$with_xattrs" != no; then ++ if test "$found" != no; then + + $as_echo "#define HAVE_XATTRS /**/" >>confdefs.h + +- fi +- +-fi +-done +- + fi + fi + +@@ -38187,18 +38146,10 @@ if test -z "${TAR_COND_XATTR_H_TRUE}" && test -z "${TAR_COND_XATTR_H_FALSE}"; th + as_fn_error $? "conditional \"TAR_COND_XATTR_H\" was never defined. + Usually this means the macro was only invoked conditionally." "$LINENO" 5 + fi +-if test -z "${TAR_LIB_ATTR_TRUE}" && test -z "${TAR_LIB_ATTR_FALSE}"; then +- as_fn_error $? "conditional \"TAR_LIB_ATTR\" was never defined. +-Usually this means the macro was only invoked conditionally." "$LINENO" 5 +-fi + if test -z "${TAR_COND_XATTR_H_TRUE}" && test -z "${TAR_COND_XATTR_H_FALSE}"; then + as_fn_error $? "conditional \"TAR_COND_XATTR_H\" was never defined. + Usually this means the macro was only invoked conditionally." "$LINENO" 5 + fi +-if test -z "${TAR_LIB_ATTR_TRUE}" && test -z "${TAR_LIB_ATTR_FALSE}"; then +- as_fn_error $? "conditional \"TAR_LIB_ATTR\" was never defined. +-Usually this means the macro was only invoked conditionally." "$LINENO" 5 +-fi + if test -z "${TAR_COND_GRANTPT_TRUE}" && test -z "${TAR_COND_GRANTPT_FALSE}"; then + as_fn_error $? "conditional \"TAR_COND_GRANTPT\" was never defined. + Usually this means the macro was only invoked conditionally." 
"$LINENO" 5 +diff --git a/doc/Makefile.in b/doc/Makefile.in +index ca44f1a..42a06b3 100644 +--- a/doc/Makefile.in ++++ b/doc/Makefile.in +@@ -1,4 +1,4 @@ +-# Makefile.in generated by automake 1.14 from Makefile.am. ++# Makefile.in generated by automake 1.14.1 from Makefile.am. + # @configure_input@ + + # Copyright (C) 1994-2013 Free Software Foundation, Inc. +diff --git a/gnu/Makefile.in b/gnu/Makefile.in +index 03eed58..d908a03 100644 +--- a/gnu/Makefile.in ++++ b/gnu/Makefile.in +@@ -1,4 +1,4 @@ +-# Makefile.in generated by automake 1.14 from Makefile.am. ++# Makefile.in generated by automake 1.14.1 from Makefile.am. + # @configure_input@ + + # Copyright (C) 1994-2013 Free Software Foundation, Inc. +diff --git a/lib/Makefile.in b/lib/Makefile.in +index 41a9aca..1254b8f 100644 +--- a/lib/Makefile.in ++++ b/lib/Makefile.in +@@ -1,4 +1,4 @@ +-# Makefile.in generated by automake 1.14 from Makefile.am. ++# Makefile.in generated by automake 1.14.1 from Makefile.am. + # @configure_input@ + + # Copyright (C) 1994-2013 Free Software Foundation, Inc. +diff --git a/rmt/Makefile.in b/rmt/Makefile.in +index c3f2509..0f3dca4 100644 +--- a/rmt/Makefile.in ++++ b/rmt/Makefile.in +@@ -1,4 +1,4 @@ +-# Makefile.in generated by automake 1.14 from Makefile.am. ++# Makefile.in generated by automake 1.14.1 from Makefile.am. + # @configure_input@ + + # Copyright (C) 1994-2013 Free Software Foundation, Inc. +diff --git a/scripts/Makefile.in b/scripts/Makefile.in +index 57c0d0d..bf344ed 100644 +--- a/scripts/Makefile.in ++++ b/scripts/Makefile.in +@@ -1,4 +1,4 @@ +-# Makefile.in generated by automake 1.14 from Makefile.am. ++# Makefile.in generated by automake 1.14.1 from Makefile.am. + # @configure_input@ + + # Copyright (C) 1994-2013 Free Software Foundation, Inc. +diff --git a/src/Makefile.in b/src/Makefile.in +index 6f9a592..fc6fc87 100644 +--- a/src/Makefile.in ++++ b/src/Makefile.in +@@ -1,4 +1,4 @@ +-# Makefile.in generated by automake 1.14 from Makefile.am. ++# Makefile.in generated by automake 1.14.1 from Makefile.am. + # @configure_input@ + + # Copyright (C) 1994-2013 Free Software Foundation, Inc. +@@ -99,7 +99,6 @@ POST_UNINSTALL = : + build_triplet = @build@ + host_triplet = @host@ + bin_PROGRAMS = tar$(EXEEXT) +-@TAR_LIB_ATTR_TRUE@am__append_1 = -lattr + subdir = src + DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ + $(top_srcdir)/build-aux/depcomp $(noinst_HEADERS) +@@ -254,7 +253,7 @@ am__DEPENDENCIES_2 = ../lib/libtar.a ../gnu/libgnu.a \ + $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) + tar_DEPENDENCIES = $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_2) \ + $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \ +- $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) ++ $(am__DEPENDENCIES_1) + AM_V_P = $(am__v_P_@AM_V@) + am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) + am__v_P_0 = false +@@ -1225,8 +1224,7 @@ tar_SOURCES = \ + AM_CPPFLAGS = -I$(top_srcdir)/gnu -I../ -I../gnu -I$(top_srcdir)/lib -I../lib + AM_CFLAGS = $(WARN_CFLAGS) $(WERROR_CFLAGS) + LDADD = ../lib/libtar.a ../gnu/libgnu.a $(LIBINTL) $(LIBICONV) +-tar_LDADD = $(LIBS) $(LDADD) $(LIB_CLOCK_GETTIME) $(LIB_EACCESS) \ +- $(LIB_SELINUX) $(am__append_1) ++tar_LDADD = $(LIBS) $(LDADD) $(LIB_CLOCK_GETTIME) $(LIB_EACCESS) $(LIB_SELINUX) + all: all-am + + .SUFFIXES: +diff --git a/tests/Makefile.in b/tests/Makefile.in +index 6807509..50d7689 100644 +--- a/tests/Makefile.in ++++ b/tests/Makefile.in +@@ -1,4 +1,4 @@ +-# Makefile.in generated by automake 1.14 from Makefile.am. ++# Makefile.in generated by automake 1.14.1 from Makefile.am. 
+ # @configure_input@ + + # Copyright (C) 1994-2013 Free Software Foundation, Inc. +diff --git a/lib/xattr-at.c b/lib/xattr-at.c +index 443ccae..009bde5 100644 +--- a/lib/xattr-at.c ++++ b/lib/xattr-at.c +@@ -18,6 +18,11 @@ + + #include + ++/* Temporarily don't build. We are unable to build on (probably not only) ++ darwin due to lack of l*xattr callbacks (XATTR_NOFOLLOW is alternative) and ++ different function definitions. */ ++#ifdef HAVE_XATTRS ++ + #include "xattr-at.h" + #include "openat.h" + +@@ -108,3 +113,5 @@ + #undef AT_FUNC_RESULT + #undef AT_FUNC_POST_FILE_PARAM_DECLS + #undef AT_FUNC_POST_FILE_ARGS ++ ++#endif +diff --git a/src/Makefile.am b/src/Makefile.am +index 82b2d46..42daaef 100644 +--- a/src/Makefile.am ++++ b/src/Makefile.am +@@ -52,7 +52,3 @@ AM_CFLAGS = $(WARN_CFLAGS) $(WERROR_CFLAGS) + LDADD = ../lib/libtar.a ../gnu/libgnu.a $(LIBINTL) $(LIBICONV) + + tar_LDADD = $(LIBS) $(LDADD) $(LIB_CLOCK_GETTIME) $(LIB_EACCESS) $(LIB_SELINUX) +- +-if TAR_LIB_ATTR +-tar_LDADD += -lattr +-endif +-- +1.9.3 \ No newline at end of file diff --git a/var/spack/repos/builtin/packages/tar/package.py b/var/spack/repos/builtin/packages/tar/package.py index c55b5165bfa..4dce0e5be18 100644 --- a/var/spack/repos/builtin/packages/tar/package.py +++ b/var/spack/repos/builtin/packages/tar/package.py @@ -23,14 +23,24 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * +import sys +import os + class Tar(Package): - """GNU Tar provides the ability to create tar archives, as well as various other kinds of manipulation.""" + """GNU Tar provides the ability to create tar archives, as well as various + other kinds of manipulation.""" homepage = "https://www.gnu.org/software/tar/" - url = "http://ftp.gnu.org/gnu/tar/tar-1.28.tar.gz" + url = "https://ftp.gnu.org/gnu/tar/tar-1.28.tar.gz" + version('1.29', 'cae466e6e58c7292355e7080248f244db3a4cf755f33f4fa25ca7f9a7ed09af0') version('1.28', '6ea3dbea1f2b0409b234048e021a9fd7') + # see http://lists.gnu.org/archive/html/bug-tar/2014-08/msg00001.html and + # https://github.com/Homebrew/homebrew-core/commit/aef9a1792de4648d0322b4b04d32287532f046bb # NOQA: ignore=E501 + # TODO: when=sys.platform=='darwin' ? + patch('gnutar-configure-xattrs.patch', when='@1.28') + def install(self, spec, prefix): configure("--prefix=%s" % prefix) make() From 5d48c108a3a3b1bbdc4821d7463f331331d941f2 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 8 Aug 2016 15:02:05 -0500 Subject: [PATCH 254/284] Only strip newline chars, not spaces --- lib/spack/llnl/util/filesystem.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 2478f5c1592..4cf99163e06 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -93,7 +93,7 @@ def groupid_to_group(x): try: for line in fileinput.input(filename, inplace=True): - print(re.sub(regex, repl, line.rstrip())) + print(re.sub(regex, repl, line.rstrip('\n'))) except: # clean up the original file on failure. shutil.move(backup_filename, filename) From 7aaad89ba93e3ace4bfd835306850f43218e0ef8 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 24 May 2016 11:50:01 -0700 Subject: [PATCH 255/284] Lazily evaluate all_package_names in repository.py - Don't need to list all packages unless we have to. 
- Only use the list of all packages for existence checks if we have generated it for some other purpose. --- lib/spack/spack/repository.py | 27 +++++++++++++++++---------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index 2c160a5f45d..ae9fd7bee65 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -104,7 +104,7 @@ def __init__(self, *repo_dirs, **kwargs): self.by_namespace = NamespaceTrie() self.by_path = {} - self._all_package_names = [] + self._all_package_names = None self._provider_index = None # If repo_dirs is empty, just use the configuration @@ -163,11 +163,6 @@ def _add(self, repo): self.by_namespace[repo.full_namespace] = repo self.by_path[repo.root] = repo - # add names to the cached name list - new_pkgs = set(repo.all_package_names()) - new_pkgs.update(set(self._all_package_names)) - self._all_package_names = sorted(new_pkgs, key=lambda n:n.lower()) - def put_first(self, repo): """Add repo first in the search path.""" @@ -214,6 +209,12 @@ def first_repo(self): def all_package_names(self): """Return all unique package names in all repositories.""" + if self._all_package_names is None: + all_pkgs = set() + for repo in self.repos: + for name in repo.all_package_names(): + all_pkgs.add(name) + self._all_package_names = sorted(all_pkgs, key=lambda n:n.lower()) return self._all_package_names @@ -682,10 +683,16 @@ def all_packages(self): def exists(self, pkg_name): """Whether a package with the supplied name exists.""" - # This does a binary search in the sorted list. - idx = bisect_left(self.all_package_names(), pkg_name) - return (idx < len(self._all_package_names) and - self._all_package_names[idx] == pkg_name) + if self._all_package_names: + # This does a binary search in the sorted list. + idx = bisect_left(self.all_package_names(), pkg_name) + return (idx < len(self._all_package_names) and + self._all_package_names[idx] == pkg_name) + + # If we haven't generated the full package list, don't. + # Just check whether the file exists. + filename = self.filename_for_package_name(pkg_name) + return os.path.exists(filename) def _get_pkg_module(self, pkg_name): From 025609c63ffa42bce30b15784bd587805aef809c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 27 May 2016 20:50:48 -0500 Subject: [PATCH 256/284] More compact YAML formatting for abstract specs. - Don't add empty/absent fields to Spec YAML when they're not there. 
--- lib/spack/spack/spec.py | 50 ++++++++++++++++--------------- lib/spack/spack/test/spec_yaml.py | 3 +- 2 files changed, 27 insertions(+), 26 deletions(-) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 8e44075f42f..1300f35ca48 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -904,19 +904,25 @@ def dag_hash(self, length=None): return b32_hash def to_node_dict(self): + d = {} + params = dict((name, v.value) for name, v in self.variants.items()) params.update(dict((name, value) for name, value in self.compiler_flags.items())) - deps = self.dependencies_dict(deptype=('link', 'run')) - d = { - 'parameters': params, - 'arch': self.architecture, - 'dependencies': dict( + + if params: + d['parameters'] = params + + if self.architecture is not None: + d['arch'] = self.architecture + + if self.dependencies: + deps = self.dependencies_dict(deptype=('link', 'run')) + d['dependencies'] = dict( (name, { 'hash': dspec.spec.dag_hash(), 'type': [str(s) for s in dspec.deptypes]}) for name, dspec in deps.items()) - } # Older concrete specs do not have a namespace. Omit for # consistent hashing. @@ -932,9 +938,9 @@ def to_node_dict(self): if self.compiler: d.update(self.compiler.to_dict()) - else: - d['compiler'] = None - d.update(self.versions.to_dict()) + + if self.versions: + d.update(self.versions.to_dict()) return {self.name: d} @@ -954,17 +960,17 @@ def from_node_dict(node): spec = Spec(name) spec.namespace = node.get('namespace', None) - spec.versions = VersionList.from_dict(node) + spec._hash = node.get('hash', None) - if 'hash' in node: - spec._hash = node['hash'] + if 'version' in node or 'versions' in node: + spec.versions = VersionList.from_dict(node) spec.architecture = spack.architecture.arch_from_dict(node['arch']) - if node['compiler'] is None: - spec.compiler = None - else: + if 'compiler' in node: spec.compiler = CompilerSpec.from_dict(node) + else: + spec.compiler = None if 'parameters' in node: for name, value in node['parameters'].items(): @@ -972,14 +978,12 @@ def from_node_dict(node): spec.compiler_flags[name] = value else: spec.variants[name] = VariantSpec(name, value) + elif 'variants' in node: for name, value in node['variants'].items(): spec.variants[name] = VariantSpec(name, value) for name in FlagMap.valid_compiler_flags(): spec.compiler_flags[name] = [] - else: - raise SpackRecordError( - "Did not find a valid format for variants in YAML file") # Don't read dependencies here; from_node_dict() is used by # from_yaml() to read the root *and* each dependency spec. @@ -1037,6 +1041,10 @@ def from_yaml(stream): for node in nodes: # get dependency dict from the node. 
name = next(iter(node)) + + if 'dependencies' not in node[name]: + continue + yaml_deps = node[name]['dependencies'] for dname, dhash, dtypes in Spec.read_yaml_dep_specs(yaml_deps): # Fill in dependencies by looking them up by name in deps dict @@ -2824,12 +2832,6 @@ def __init__(self, msg, yaml_error): super(SpackYAMLError, self).__init__(msg, str(yaml_error)) -class SpackRecordError(spack.error.SpackError): - - def __init__(self, msg): - super(SpackRecordError, self).__init__(msg) - - class AmbiguousHashError(SpecError): def __init__(self, msg, *specs): diff --git a/lib/spack/spack/test/spec_yaml.py b/lib/spack/spack/test/spec_yaml.py index 0230fc203a8..fc0ce0b2f3f 100644 --- a/lib/spack/spack/test/spec_yaml.py +++ b/lib/spack/spack/test/spec_yaml.py @@ -30,7 +30,7 @@ from spack.spec import Spec from spack.test.mock_packages_test import * -class SpecDagTest(MockPackagesTest): +class SpecYamlTest(MockPackagesTest): def check_yaml_round_trip(self, spec): yaml_text = spec.to_yaml() @@ -64,7 +64,6 @@ def test_concrete_spec(self): def test_yaml_subdag(self): spec = Spec('mpileaks^mpich+debug') spec.concretize() - yaml_spec = Spec.from_yaml(spec.to_yaml()) for dep in ('callpath', 'mpich', 'dyninst', 'libdwarf', 'libelf'): From bf028990e70927872cf506cf88bbf5a927ced2c4 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 27 May 2016 21:20:40 -0500 Subject: [PATCH 257/284] Make ProviderIndex yaml-izable. - allow a provider index to be stored and re-read. --- lib/spack/spack/repository.py | 1 + lib/spack/spack/test/__init__.py | 52 ++++++++++++++++++++++++------ lib/spack/spack/test/virtual.py | 43 +++++++++++++++++++++++++ lib/spack/spack/virtual.py | 55 +++++++++++++++++++++++++++++--- 4 files changed, 138 insertions(+), 13 deletions(-) create mode 100644 lib/spack/spack/test/virtual.py diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index ae9fd7bee65..bf0dac6a22b 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -51,6 +51,7 @@ # These names describe how repos should be laid out in the filesystem. # repo_config_name = 'repo.yaml' # Top-level filename for repo config. +repo_index_name = 'index.yaml' # Top-level filename for repository index. packages_dir_name = 'packages' # Top-level repo directory containing pkgs. package_file_name = 'package.py' # Filename for packages in a repository. 
diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index 3439764ee6e..11f7298c04f 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -33,15 +33,49 @@ """Names of tests to be included in Spack's test suite""" test_names = [ - 'architecture', 'versions', 'url_parse', 'url_substitution', 'packages', - 'stage', 'spec_syntax', 'spec_semantics', 'spec_dag', 'concretize', - 'multimethod', 'install', 'package_sanity', 'config', 'directory_layout', - 'pattern', 'python_version', 'git_fetch', 'svn_fetch', 'hg_fetch', - 'mirror', 'modules', 'url_extrapolate', 'cc', 'link_tree', 'spec_yaml', - 'optional_deps', 'make_executable', 'build_system_guess', 'lock', - 'database', 'namespace_trie', 'yaml', 'sbang', 'environment', - 'concretize_preferences', 'cmd.find', 'cmd.uninstall', 'cmd.test_install', - 'cmd.test_compiler_cmd', 'cmd.module' + 'architecture', + 'build_system_guess', + 'cc', + 'cmd.find', + 'cmd.module', + 'cmd.test_compiler_cmd', + 'cmd.test_install', + 'cmd.uninstall', + 'concretize', + 'concretize_preferences', + 'config', + 'configure_guess', + 'database', + 'directory_layout', + 'environment', + 'git_fetch', + 'hg_fetch', + 'install', + 'link_tree', + 'lock', + 'make_executable', + 'mirror', + 'modules', + 'multimethod', + 'namespace_trie', + 'optional_deps', + 'package_sanity', + 'packages', + 'pattern', + 'python_version', + 'sbang', + 'spec_dag', + 'spec_semantics', + 'spec_syntax', + 'spec_yaml', + 'stage', + 'svn_fetch', + 'url_extrapolate', + 'url_parse', + 'url_substitution', + 'versions', + 'virtual', + 'yaml', ] diff --git a/lib/spack/spack/test/virtual.py b/lib/spack/spack/test/virtual.py new file mode 100644 index 00000000000..7699165554e --- /dev/null +++ b/lib/spack/spack/test/virtual.py @@ -0,0 +1,43 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from StringIO import StringIO +import unittest + +import spack +from spack.virtual import ProviderIndex + + +class VirtualTest(unittest.TestCase): + + def test_write_and_read(self): + p = ProviderIndex(spack.repo.all_package_names()) + + ostream = StringIO () + p.to_yaml(ostream) + + istream = StringIO(ostream.getvalue()) + q = ProviderIndex.from_yaml(istream) + + self.assertTrue(p == q) diff --git a/lib/spack/spack/virtual.py b/lib/spack/spack/virtual.py index bb8333f0235..bf6d8227a42 100644 --- a/lib/spack/spack/virtual.py +++ b/lib/spack/spack/virtual.py @@ -25,8 +25,12 @@ """ The ``virtual`` module contains utility classes for virtual dependencies. """ -import spack.spec import itertools +import yaml +from yaml.error import MarkedYAMLError + +import spack + class ProviderIndex(object): """This is a dict of dicts used for finding providers of particular @@ -45,10 +49,11 @@ class ProviderIndex(object): Calling providers_for(spec) will find specs that provide a matching implementation of MPI. """ - def __init__(self, specs, **kwargs): + def __init__(self, specs=None, **kwargs): # TODO: come up with another name for this. This "restricts" values to # the verbatim impu specs (i.e., it doesn't pre-apply package's constraints, and # keeps things as broad as possible, so it's really the wrong name) + if specs is None: specs = [] self.restrict = kwargs.setdefault('restrict', False) self.providers = {} @@ -64,7 +69,7 @@ def __init__(self, specs, **kwargs): def update(self, spec): - if type(spec) != spack.spec.Spec: + if not isinstance(spec, spack.spec.Spec): spec = spack.spec.Spec(spec) if not spec.name: @@ -75,7 +80,8 @@ def update(self, spec): pkg = spec.package for provided_spec, provider_spec in pkg.provided.iteritems(): - provider_spec.compiler_flags = spec.compiler_flags.copy()#We want satisfaction other than flags + # We want satisfaction other than flags + provider_spec.compiler_flags = spec.compiler_flags.copy() if provider_spec.satisfies(spec, deps=False): provided_name = provided_spec.name @@ -164,3 +170,44 @@ def satisfies(self, other): result[name] = crossed return all(c in result for c in common) + + + def to_yaml(self, stream=None): + provider_list = dict( + (name, [[vpkg.to_node_dict(), [p.to_node_dict() for p in pset]] + for vpkg, pset in pdict.items()]) + for name, pdict in self.providers.items()) + + yaml.dump({'provider_index': {'providers': provider_list}}, + stream=stream) + + + @staticmethod + def from_yaml(stream): + try: + yfile = yaml.load(stream) + except MarkedYAMLError, e: + raise spack.spec.SpackYAMLError( + "error parsing YAML ProviderIndex cache:", str(e)) + + if not isinstance(yfile, dict): + raise spack.spec.SpackYAMLError( + "YAML ProviderIndex was not a dict.") + + if not 'provider_index' in yfile: + raise spack.spec.SpackYAMLError( + "YAML ProviderIndex does not start with 'provider_index'") + + index = ProviderIndex() + providers = yfile['provider_index']['providers'] + index.providers = dict( + (name, dict((spack.spec.Spec.from_node_dict(vpkg), + set(spack.spec.Spec.from_node_dict(p) for p in plist)) + for vpkg, plist in pdict_list)) + for name, pdict_list in providers.items()) + + return index + + + def __eq__(self, other): + return self.providers == 
other.providers From cf2f902b82a3080243eae58ca728b55189108c10 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 28 May 2016 20:13:13 -0500 Subject: [PATCH 258/284] Make ProviderIndexes mergeable, so we can cache them per-repo. --- lib/spack/spack/repository.py | 31 +++++++++++++++++++++++-------- lib/spack/spack/test/virtual.py | 6 ++++++ lib/spack/spack/virtual.py | 28 ++++++++++++++++++++++++++++ 3 files changed, 57 insertions(+), 8 deletions(-) diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index bf0dac6a22b..6e7e95b8bca 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -224,12 +224,20 @@ def all_packages(self): yield self.get(name) + @property + def provider_index(self): + """Merged ProviderIndex from all Repos in the RepoPath.""" + if self._provider_index is None: + self._provider_index = ProviderIndex() + for repo in reversed(self.repos): + self._provider_index.merge(repo.provider_index) + + return self._provider_index + + @_autospec def providers_for(self, vpkg_spec): - if self._provider_index is None: - self._provider_index = ProviderIndex(self.all_package_names()) - - providers = self._provider_index.providers_for(vpkg_spec) + providers = self.provider_index.providers_for(vpkg_spec) if not providers: raise UnknownPackageError(vpkg_spec.name) return providers @@ -603,12 +611,19 @@ def purge(self): self._instances.clear() + @property + def provider_index(self): + """A provider index with names *specific* to this repo.""" + if self._provider_index is None: + namespaced_names = ['%s.%s' % (self.namespace, n) + for n in self.all_package_names()] + self._provider_index = ProviderIndex(namespaced_names) + return self._provider_index + + @_autospec def providers_for(self, vpkg_spec): - if self._provider_index is None: - self._provider_index = ProviderIndex(self.all_package_names()) - - providers = self._provider_index.providers_for(vpkg_spec) + providers = self.provider_index.providers_for(vpkg_spec) if not providers: raise UnknownPackageError(vpkg_spec.name) return providers diff --git a/lib/spack/spack/test/virtual.py b/lib/spack/spack/test/virtual.py index 7699165554e..1923e7006f8 100644 --- a/lib/spack/spack/test/virtual.py +++ b/lib/spack/spack/test/virtual.py @@ -41,3 +41,9 @@ def test_write_and_read(self): q = ProviderIndex.from_yaml(istream) self.assertTrue(p == q) + + + def test_copy(self): + p = ProviderIndex(spack.repo.all_package_names()) + q = p.copy() + self.assertTrue(p == q) diff --git a/lib/spack/spack/virtual.py b/lib/spack/spack/virtual.py index bf6d8227a42..2c47921a3f1 100644 --- a/lib/spack/spack/virtual.py +++ b/lib/spack/spack/virtual.py @@ -209,5 +209,33 @@ def from_yaml(stream): return index + def merge(self, other): + """Merge `other` ProviderIndex into this one.""" + other = other.copy() # defensive copy. 
+ + for pkg in other.providers: + if pkg not in self.providers: + self.providers[pkg] = other.providers[pkg] + continue + + spdict, opdict = self.providers[pkg], other.providers[pkg] + for provided_spec in opdict: + if provided_spec not in spdict: + spdict[provided_spec] = opdict[provided_spec] + continue + + spdict[provided_spec] += opdict[provided_spec] + + + def copy(self): + """Deep copy of this ProviderIndex.""" + clone = ProviderIndex() + clone.providers = dict( + (name, dict((vpkg, set((p.copy() for p in pset))) + for vpkg, pset in pdict.items())) + for name, pdict in self.providers.items()) + return clone + + def __eq__(self, other): return self.providers == other.providers From faa0a0e4c3a6e1bdfc68b97b3158c8cc14356e5d Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 28 May 2016 20:27:22 -0700 Subject: [PATCH 259/284] Add a ProviderIndex cache. - Spack will check if the index needs updating, and will only parse all package files if it does. - Spack tries to parse as few package files as necessary. --- .gitignore | 2 + lib/spack/spack/repository.py | 140 ++++++++++++++++++++++++++++++---- lib/spack/spack/virtual.py | 14 ++++ 3 files changed, 143 insertions(+), 13 deletions(-) diff --git a/.gitignore b/.gitignore index 960b5b0035b..b1215f0c7e5 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,7 @@ /var/spack/stage /var/spack/cache +/var/spack/repos/*/index.yaml +/var/spack/repos/*/lock *.pyc /opt *~ diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index 6e7e95b8bca..63ae999ce1b 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -23,6 +23,9 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os +import stat +import shutil +import errno import exceptions import sys import inspect @@ -33,6 +36,7 @@ import yaml import llnl.util.tty as tty +from llnl.util.lock import Lock from llnl.util.filesystem import * import spack.error @@ -394,13 +398,25 @@ def check(condition, msg): if not condition: raise BadRepoError(msg) # Validate repository layout. - self.config_file = join_path(self.root, repo_config_name) + self.config_file = join_path(self.root, repo_config_name) check(os.path.isfile(self.config_file), "No %s found in '%s'" % (repo_config_name, root)) + self.packages_path = join_path(self.root, packages_dir_name) check(os.path.isdir(self.packages_path), "No directory '%s' found in '%s'" % (repo_config_name, root)) + self.index_file = join_path(self.root, repo_index_name) + check(not os.path.exists(self.index_file) or + (os.path.isfile(self.index_file) and os.access(self.index_file, os.R_OK|os.W_OK)), + "Cannot access repository index file in %s" % root) + + # lock file for reading/writing the index + self._lock_path = join_path(self.root, 'lock') + if not os.path.exists(self._lock_path): + touch(self._lock_path) + self._lock = Lock(self._lock_path) + # Read configuration and validate namespace config = self._read_config() check('namespace' in config, '%s must define a namespace.' @@ -424,7 +440,14 @@ def check(condition, msg): self._modules = {} self._classes = {} self._instances = {} + + # list of packages that are newer than the index. + self._needs_update = [] + + # Index of virtual dependencies self._provider_index = None + + # Cached list of package names. self._all_package_names = None # make sure the namespace for packages in this repo exists. 
@@ -611,13 +634,56 @@ def purge(self): self._instances.clear() + def _update_provider_index(self): + # Check modification dates of all packages + self._fast_package_check() + + def read(): + with open(self.index_file) as f: + self._provider_index = ProviderIndex.from_yaml(f) + + # Read the old ProviderIndex, or make a new one. + index_existed = os.path.isfile(self.index_file) + if index_existed and not self._needs_update: + self._lock.acquire_read() + try: + read() + finally: + self._lock.release_read() + + else: + self._lock.acquire_write() + try: + if index_existed: + with open(self.index_file) as f: + self._provider_index = ProviderIndex.from_yaml(f) + else: + self._provider_index = ProviderIndex() + + for pkg_name in self._needs_update: + namespaced_name = '%s.%s' % (self.namespace, pkg_name) + self._provider_index.remove_provider(namespaced_name) + self._provider_index.update(namespaced_name) + + + tmp = self.index_file + '.tmp' + with open(tmp, 'w') as f: + self._provider_index.to_yaml(f) + os.rename(tmp, self.index_file) + + except: + shutil.rmtree(tmp, ignore_errors=True) + raise + + finally: + self._lock.release_write() + + @property def provider_index(self): """A provider index with names *specific* to this repo.""" if self._provider_index is None: - namespaced_names = ['%s.%s' % (self.namespace, n) - for n in self.all_package_names()] - self._provider_index = ProviderIndex(namespaced_names) + self._update_provider_index() return self._provider_index @@ -663,21 +729,33 @@ def filename_for_package_name(self, spec): return join_path(pkg_dir, package_file_name) - def all_package_names(self): - """Returns a sorted list of all package names in the Repo.""" + def _fast_package_check(self): + """List packages in the repo and cehck whether index is up to date. + + Both of these opreations require checking all `package.py` + files so we do them at the same time. We list the repo + directory and look at package.py files, and we compare the + index modification date with the ost recently modified package + file, storing the result. + + The implementation here should try to minimize filesystem + calls. At the moment, it is O(number of packages) and makes + about one stat call per package. This is resonably fast, and + avoids actually importing packages in Spack, which is slow. + + """ if self._all_package_names is None: self._all_package_names = [] + # Get index modification time. + index_mtime = 0 + if os.path.exists(self.index_file): + sinfo = os.stat(self.index_file) + index_mtime = sinfo.st_mtime + for pkg_name in os.listdir(self.packages_path): # Skip non-directories in the package root. pkg_dir = join_path(self.packages_path, pkg_name) - if not os.path.isdir(pkg_dir): - continue - - # Skip directories without a package.py in them. - pkg_file = join_path(self.packages_path, pkg_name, package_file_name) - if not os.path.isfile(pkg_file): - continue # Warn about invalid names that look like packages. if not valid_module_name(pkg_name): @@ -685,14 +763,50 @@ def all_package_names(self): % (pkg_dir, pkg_name)) continue + # construct the file name from the directory + pkg_file = join_path( + self.packages_path, pkg_name, package_file_name) + + # Use stat here to avoid lots of calls to the filesystem. + try: + sinfo = os.stat(pkg_file) + except OSError as e: + if e.errno == errno.ENOENT: + # No package.py file here. + continue + elif e.errno == errno.EACCES: + tty.warn("Can't read package file %s." % pkg_file) + continue + raise e + + # if it's not a file, skip it. 
+ if stat.S_ISDIR(sinfo.st_mode): + continue + # All checks passed. Add it to the list. self._all_package_names.append(pkg_name) + + # record the package if it is newer than the index. + if sinfo.st_mtime > index_mtime: + self._needs_update.append(pkg_name) + self._all_package_names.sort() return self._all_package_names + def all_package_names(self): + """Returns a sorted list of all package names in the Repo.""" + self._fast_package_check() + return self._all_package_names + + def all_packages(self): + """Iterator over all packages in the repository. + + Use this with care, because loading packages is slow. + + """ for name in self.all_package_names(): yield self.get(name) diff --git a/lib/spack/spack/virtual.py b/lib/spack/spack/virtual.py index 2c47921a3f1..785ab989186 100644 --- a/lib/spack/spack/virtual.py +++ b/lib/spack/spack/virtual.py @@ -227,6 +227,20 @@ def merge(self, other): spdict[provided_spec] += opdict[provided_spec] + def remove_provider(self, pkg_name): + """Remove a provider from the ProviderIndex.""" + for pkg in self.providers: + pkg_dict = self.providers[pkg] + for provided, pset in pkg_dict.items(): + for provider in pset: + if provider.fullname == pkg_name: + pset.remove(provider) + if not pset: + del pkg_dict[provided] + if not pkg_dict: + del self.providers[pkg] + + def copy(self): """Deep copy of this ProviderIndex.""" clone = ProviderIndex() From 37fc2583136f1abd5d39b15bb05d2102d182003a Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 29 May 2016 23:23:33 -0700 Subject: [PATCH 260/284] Remove vestigial methods from Package. --- lib/spack/spack/package.py | 38 -------------------------------------- 1 file changed, 38 deletions(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index c916bfaaa28..43aefbf65e6 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -630,50 +630,12 @@ def activated(self): exts = spack.install_layout.extension_map(self.extendee_spec) return (self.name in exts) and (exts[self.name] == self.spec) - def preorder_traversal(self, visited=None, **kwargs): - """This does a preorder traversal of the package's dependence DAG.""" - virtual = kwargs.get("virtual", False) - - if visited is None: - visited = set() - - if self.name in visited: - return - visited.add(self.name) - - if not virtual: - yield self - - for name in sorted(self.dependencies.keys()): - dep_spec = self.get_dependency(name) - spec = dep_spec.spec - - # Currently, we do not descend into virtual dependencies, as this - # makes doing a sensible traversal much harder. We just assume - # that ANY of the virtual deps will work, which might not be true - # (due to conflicts or unsatisfiable specs). For now this is ok, - # but we might want to reinvestigate if we start using a lot of - # complicated virtual dependencies - # TODO: reinvestigate this. 
- if spec.virtual: - if virtual: - yield spec - continue - - for pkg in spack.repo.get(name).preorder_traversal(visited, - **kwargs): - yield pkg - def provides(self, vpkg_name): """ True if this package provides a virtual package with the specified name """ return any(s.name == vpkg_name for s in self.provided) - def virtual_dependencies(self, visited=None): - for spec in sorted(set(self.preorder_traversal(virtual=True))): - yield spec - @property def installed(self): return os.path.isdir(self.prefix) From ab049eca4129b389e7dab53d6dd475b24f8099ed Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 29 May 2016 23:51:39 -0700 Subject: [PATCH 261/284] Faster key in FlagMap._cmp_key --- lib/spack/spack/spec.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 1300f35ca48..b9d9d3e0a46 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -438,8 +438,7 @@ def copy(self): return clone def _cmp_key(self): - return ''.join(str(key) + ' '.join(str(v) for v in value) - for key, value in sorted(self.items())) + return tuple((k, tuple(v)) for k, v in sorted(self.iteritems())) def __str__(self): sorted_keys = filter( From 1f5a21decf5aa97897692501337e700c572a25f6 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 30 May 2016 00:26:49 -0700 Subject: [PATCH 262/284] Fix namespace support in Repo.get_pkg_class() --- lib/spack/spack/repository.py | 6 ++++++ lib/spack/spack/spec.py | 4 ++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index 63ae999ce1b..6aa9b8dd2f0 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -864,6 +864,12 @@ def get_pkg_class(self, pkg_name): package. Then extracts the package class from the module according to Spack's naming convention. """ + fullname = pkg_name + namespace, _, pkg_name = pkg_name.rpartition('.') + if namespace and (namespace != self.namespace): + raise InvalidNamespaceError('Invalid namespace for %s repo: %s' + % (self.namespace, namespace)) + class_name = mod_to_class(pkg_name) module = self._get_pkg_module(pkg_name) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index b9d9d3e0a46..24459fd3b3f 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -714,7 +714,7 @@ def package_class(self): """Internal package call gets only the class object for a package. Use this to just get package metadata. """ - return spack.repo.get_pkg_class(self.name) + return spack.repo.get_pkg_class(self.fullname) @property def virtual(self): @@ -1574,7 +1574,7 @@ def validate_names(self): UnsupportedCompilerError. """ for spec in self.traverse(): - # Don't get a package for a virtual name. + # raise an UnknownPackageError if the spec's package isn't real. 
if (not spec.virtual) and spec.name: spack.repo.get(spec.fullname) From ce6ac93abed2a7193745f7082670e1cbe2ffede2 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 5 Jun 2016 00:52:52 -0700 Subject: [PATCH 263/284] rename `virtual` module to `provider_index` --- lib/spack/spack/{virtual.py => provider_index.py} | 0 lib/spack/spack/repository.py | 2 +- lib/spack/spack/spec.py | 15 +++++++++------ lib/spack/spack/test/__init__.py | 5 +++-- .../spack/test/{virtual.py => provider_index.py} | 6 +++--- 5 files changed, 16 insertions(+), 12 deletions(-) rename lib/spack/spack/{virtual.py => provider_index.py} (100%) rename lib/spack/spack/test/{virtual.py => provider_index.py} (93%) diff --git a/lib/spack/spack/virtual.py b/lib/spack/spack/provider_index.py similarity index 100% rename from lib/spack/spack/virtual.py rename to lib/spack/spack/provider_index.py diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index 6aa9b8dd2f0..3b0d3167b37 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -42,7 +42,7 @@ import spack.error import spack.config import spack.spec -from spack.virtual import ProviderIndex +from spack.provider_index import ProviderIndex from spack.util.naming import * # diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 24459fd3b3f..8a47ec95ad0 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -102,23 +102,26 @@ from StringIO import StringIO from operator import attrgetter +import yaml +from yaml.error import MarkedYAMLError + import llnl.util.tty as tty +from llnl.util.filesystem import join_path +from llnl.util.lang import * +from llnl.util.tty.color import * + import spack import spack.architecture import spack.compilers as compilers import spack.error import spack.parse -import yaml -from llnl.util.filesystem import join_path -from llnl.util.lang import * -from llnl.util.tty.color import * from spack.build_environment import get_path_from_module, load_module from spack.util.naming import mod_to_class from spack.util.prefix import Prefix from spack.util.string import * from spack.version import * -from spack.virtual import ProviderIndex -from yaml.error import MarkedYAMLError +from spack.provider_index import ProviderIndex + # Valid pattern for an identifier in Spack identifier_re = r'\w[\w-]*' diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index 11f7298c04f..e092a509135 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -32,6 +32,8 @@ from spack.test.tally_plugin import Tally """Names of tests to be included in Spack's test suite""" +# All the tests Spack knows about. +# Keep these one per line so that it's easy to see changes in diffs. 
test_names = [ 'architecture', 'build_system_guess', @@ -74,11 +76,10 @@ 'url_parse', 'url_substitution', 'versions', - 'virtual', + 'provider_index', 'yaml', ] - def list_tests(): """Return names of all tests that can be run for Spack.""" return test_names diff --git a/lib/spack/spack/test/virtual.py b/lib/spack/spack/test/provider_index.py similarity index 93% rename from lib/spack/spack/test/virtual.py rename to lib/spack/spack/test/provider_index.py index 1923e7006f8..15fb9acff26 100644 --- a/lib/spack/spack/test/virtual.py +++ b/lib/spack/spack/test/provider_index.py @@ -26,15 +26,15 @@ import unittest import spack -from spack.virtual import ProviderIndex +from spack.provider_index import ProviderIndex -class VirtualTest(unittest.TestCase): +class ProviderIndexTest(unittest.TestCase): def test_write_and_read(self): p = ProviderIndex(spack.repo.all_package_names()) - ostream = StringIO () + ostream = StringIO() p.to_yaml(ostream) istream = StringIO(ostream.getvalue()) From 4de45c268417e518c7ee616d7454c1c91a5b8b35 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 5 Jun 2016 01:27:35 -0700 Subject: [PATCH 264/284] fix scoping issue. --- lib/spack/spack/repository.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index 3b0d3167b37..df21810a126 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -652,6 +652,7 @@ def read(): self._lock.release_read() else: + tmp = self.index_file + '.tmp' self._lock.acquire_write() try: if index_existed: @@ -666,7 +667,6 @@ def read(): self._provider_index.update(namespaced_name) - tmp = self.index_file + '.tmp' with open(tmp, 'w') as f: self._provider_index.to_yaml(f) os.rename(tmp, self.index_file) From 5e5024342f95fe3bea86b25ae488c8e738566a2e Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 5 Jun 2016 01:27:54 -0700 Subject: [PATCH 265/284] Fix iterator invalidation issues. --- lib/spack/spack/provider_index.py | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/lib/spack/spack/provider_index.py b/lib/spack/spack/provider_index.py index 785ab989186..6cd2134e962 100644 --- a/lib/spack/spack/provider_index.py +++ b/lib/spack/spack/provider_index.py @@ -50,9 +50,10 @@ class ProviderIndex(object): matching implementation of MPI. """ def __init__(self, specs=None, **kwargs): - # TODO: come up with another name for this. This "restricts" values to - # the verbatim impu specs (i.e., it doesn't pre-apply package's constraints, and - # keeps things as broad as possible, so it's really the wrong name) + # TODO: come up with another name for this. 
This "restricts" + # values to the verbatim impu specs (i.e., it doesn't + # pre-apply package's constraints, and keeps things as broad + # as possible, so it's really the wrong name) if specs is None: specs = [] self.restrict = kwargs.setdefault('restrict', False) @@ -229,16 +230,24 @@ def merge(self, other): def remove_provider(self, pkg_name): """Remove a provider from the ProviderIndex.""" - for pkg in self.providers: - pkg_dict = self.providers[pkg] + empty_pkg_dict = [] + for pkg, pkg_dict in self.providers.items(): + empty_pset = [] for provided, pset in pkg_dict.items(): - for provider in pset: - if provider.fullname == pkg_name: - pset.remove(provider) + same_name = set(p for p in pset if p.fullname == pkg_name) + pset.difference_update(same_name) + if not pset: - del pkg_dict[provided] + empty_pset.append(provided) + + for provided in empty_pset: + del pkg_dict[provided] + if not pkg_dict: - del self.providers[pkg] + empty_pkg_dict.append(pkg) + + for pkg in empty_pkg_dict: + del self.providers[pkg] def copy(self): From d195576fba37672a6a26ebb6208acd5f00e4871f Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 12 Jun 2016 23:50:59 -0700 Subject: [PATCH 266/284] WIP --- lib/spack/spack/provider_index.py | 86 +++++++++++++++++++------- lib/spack/spack/test/concretize.py | 3 - lib/spack/spack/test/provider_index.py | 59 ++++++++++++++++-- 3 files changed, 119 insertions(+), 29 deletions(-) diff --git a/lib/spack/spack/provider_index.py b/lib/spack/spack/provider_index.py index 6cd2134e962..ecdc25c4d2e 100644 --- a/lib/spack/spack/provider_index.py +++ b/lib/spack/spack/provider_index.py @@ -26,6 +26,8 @@ The ``virtual`` module contains utility classes for virtual dependencies. """ import itertools +from pprint import pformat + import yaml from yaml.error import MarkedYAMLError @@ -48,15 +50,30 @@ class ProviderIndex(object): Calling providers_for(spec) will find specs that provide a matching implementation of MPI. - """ - def __init__(self, specs=None, **kwargs): - # TODO: come up with another name for this. This "restricts" - # values to the verbatim impu specs (i.e., it doesn't - # pre-apply package's constraints, and keeps things as broad - # as possible, so it's really the wrong name) - if specs is None: specs = [] - self.restrict = kwargs.setdefault('restrict', False) + """ + + + def __init__(self, specs=None, restrict=False): + """Create a new ProviderIndex. + + Optional arguments: + + specs + List (or sequence) of specs. If provided, will call + `update` on this ProviderIndex with each spec in the list. + + restrict + "restricts" values to the verbatim input specs; do not + pre-apply package's constraints. + + TODO: rename this. It is intended to keep things as broad + as possible without overly restricting results, so it is + not the best name. 
+ """ + if specs is None: specs = [] + + self.restrict = restrict self.providers = {} for spec in specs: @@ -174,10 +191,9 @@ def satisfies(self, other): def to_yaml(self, stream=None): - provider_list = dict( - (name, [[vpkg.to_node_dict(), [p.to_node_dict() for p in pset]] - for vpkg, pset in pdict.items()]) - for name, pdict in self.providers.items()) + provider_list = self._transform( + lambda vpkg, pset: [ + vpkg.to_node_dict(), [p.to_node_dict() for p in pset]], list) yaml.dump({'provider_index': {'providers': provider_list}}, stream=stream) @@ -201,12 +217,11 @@ def from_yaml(stream): index = ProviderIndex() providers = yfile['provider_index']['providers'] - index.providers = dict( - (name, dict((spack.spec.Spec.from_node_dict(vpkg), - set(spack.spec.Spec.from_node_dict(p) for p in plist)) - for vpkg, plist in pdict_list)) - for name, pdict_list in providers.items()) - + index.providers = _transform( + providers, + lambda vpkg, plist: ( + spack.spec.Spec.from_node_dict(vpkg), + set(spack.spec.Spec.from_node_dict(p) for p in plist))) return index @@ -253,12 +268,39 @@ def remove_provider(self, pkg_name): def copy(self): """Deep copy of this ProviderIndex.""" clone = ProviderIndex() - clone.providers = dict( - (name, dict((vpkg, set((p.copy() for p in pset))) - for vpkg, pset in pdict.items())) - for name, pdict in self.providers.items()) + clone.providers = self._transform( + lambda vpkg, pset: (vpkg, set((p.copy() for p in pset)))) return clone def __eq__(self, other): return self.providers == other.providers + + + def _transform(self, transform_fun, out_mapping_type=dict): + return _transform(self.providers, transform_fun, out_mapping_type) + + + def __str__(self): + return pformat( + _transform(self.providers, + lambda k, v: (k, list(v)))) + + +def _transform(providers, transform_fun, out_mapping_type=dict): + """Syntactic sugar for transforming a providers dict. + + transform_fun takes a (vpkg, pset) mapping and runs it on each + pair in nested dicts. 
+ + """ + def mapiter(mappings): + if isinstance(mappings, dict): + return mappings.iteritems() + else: + return iter(mappings) + + return dict( + (name, out_mapping_type([ + transform_fun(vpkg, pset) for vpkg, pset in mapiter(mappings)])) + for name, mappings in providers.items()) diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index ae3ceecfc8d..ec0a2ec2440 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -153,9 +153,6 @@ def test_concretize_with_provides_when(self): self.assertTrue(not any(spec.satisfies('mpich2@:1.1') for spec in spack.repo.providers_for('mpi@2.2'))) - self.assertTrue(not any(spec.satisfies('mpich2@:1.1') - for spec in spack.repo.providers_for('mpi@2.2'))) - self.assertTrue(not any(spec.satisfies('mpich@:1') for spec in spack.repo.providers_for('mpi@2'))) diff --git a/lib/spack/spack/test/provider_index.py b/lib/spack/spack/test/provider_index.py index 15fb9acff26..7d5f997b0a3 100644 --- a/lib/spack/spack/test/provider_index.py +++ b/lib/spack/spack/test/provider_index.py @@ -26,12 +26,27 @@ import unittest import spack +from spack.spec import Spec from spack.provider_index import ProviderIndex +from spack.test.mock_packages_test import * +# Test assume that mock packages provide this: +# +# {'blas': { +# blas: set([netlib-blas, openblas, openblas-with-lapack])}, +# 'lapack': {lapack: set([netlib-lapack, openblas-with-lapack])}, +# 'mpi': {mpi@:1: set([mpich@:1]), +# mpi@:2.0: set([mpich2]), +# mpi@:2.1: set([mpich2@1.1:]), +# mpi@:2.2: set([mpich2@1.2:]), +# mpi@:3: set([mpich@3:]), +# mpi@:10.0: set([zmpi])}, +# 'stuff': {stuff: set([externalvirtual])}} +# -class ProviderIndexTest(unittest.TestCase): +class ProviderIndexTest(MockPackagesTest): - def test_write_and_read(self): + def test_yaml_round_trip(self): p = ProviderIndex(spack.repo.all_package_names()) ostream = StringIO() @@ -40,10 +55,46 @@ def test_write_and_read(self): istream = StringIO(ostream.getvalue()) q = ProviderIndex.from_yaml(istream) - self.assertTrue(p == q) + self.assertEqual(p, q) + + + def test_providers_for_simple(self): + p = ProviderIndex(spack.repo.all_package_names()) + + blas_providers = p.providers_for('blas') + self.assertTrue(Spec('netlib-blas') in blas_providers) + self.assertTrue(Spec('openblas') in blas_providers) + self.assertTrue(Spec('openblas-with-lapack') in blas_providers) + + lapack_providers = p.providers_for('lapack') + self.assertTrue(Spec('netlib-lapack') in lapack_providers) + self.assertTrue(Spec('openblas-with-lapack') in lapack_providers) + + + def test_mpi_providers(self): + p = ProviderIndex(spack.repo.all_package_names()) + + mpi_2_providers = p.providers_for('mpi@2') + self.assertTrue(Spec('mpich2') in mpi_2_providers) + self.assertTrue(Spec('mpich@3:') in mpi_2_providers) + + mpi_3_providers = p.providers_for('mpi@3') + self.assertTrue(Spec('mpich2') not in mpi_3_providers) + self.assertTrue(Spec('mpich@3:') in mpi_3_providers) + self.assertTrue(Spec('zmpi') in mpi_3_providers) + + + def test_equal(self): + p = ProviderIndex(spack.repo.all_package_names()) + q = ProviderIndex(spack.repo.all_package_names()) + self.assertEqual(p, q) def test_copy(self): p = ProviderIndex(spack.repo.all_package_names()) q = p.copy() - self.assertTrue(p == q) + self.assertEqual(p, q) + + + def test_copy(self): + pass From e5743db9b996b5f0aa1934471a4d603bc24f3725 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 7 Aug 2016 17:39:18 -0700 Subject: [PATCH 267/284] Fix issues with import order in 
tests. - modules weren't set properly as attributes in parent modules --- lib/spack/spack/repository.py | 45 +++++++++++++++++++++++------------ 1 file changed, 30 insertions(+), 15 deletions(-) diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index df21810a126..58747ba25df 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -33,6 +33,8 @@ import re import traceback from bisect import bisect_left +from types import ModuleType + import yaml import llnl.util.tty as tty @@ -73,12 +75,21 @@ def converter(self, spec_like, *args, **kwargs): return converter -def _make_namespace_module(ns): - module = imp.new_module(ns) - module.__file__ = "(spack namespace)" - module.__path__ = [] - module.__package__ = ns - return module +class SpackNamespace(ModuleType): + """ Allow lazy loading of modules.""" + def __init__(self, namespace): + super(ModuleType, self).__init__(self, namespace) + self.__file__ = "(spack namespace)" + self.__path__ = [] + self.__name__ = namespace + self.__package__ = namespace + self.__modules = {} + + def __getattr__(self, name): + """Getattr lazily loads modules if they're not already loaded.""" + submodule = self.__package__ + '.' + name + setattr(self, name, __import__(submodule)) + return getattr(self, name) def substitute_spack_prefix(path): @@ -287,13 +298,10 @@ def load_module(self, fullname): if fullname in sys.modules: return sys.modules[fullname] - # partition fullname into prefix and module name. - namespace, dot, module_name = fullname.rpartition('.') - if not self.by_namespace.is_prefix(fullname): raise ImportError("No such Spack repo: %s" % fullname) - module = _make_namespace_module(namespace) + module = SpackNamespace(fullname) module.__loader__ = self sys.modules[fullname] = module return module @@ -464,8 +472,9 @@ def _create_namespace(self): parent = None for l in range(1, len(self._names)+1): ns = '.'.join(self._names[:l]) + if not ns in sys.modules: - module = _make_namespace_module(ns) + module = SpackNamespace(ns) module.__loader__ = self sys.modules[ns] = module @@ -476,11 +485,12 @@ def _create_namespace(self): # import spack.pkg.builtin.mpich as mpich if parent: modname = self._names[l-1] - if not hasattr(parent, modname): - setattr(parent, modname, module) + setattr(parent, modname, module) else: - # no need to set up a module, but keep track of the parent. + # no need to set up a module module = sys.modules[ns] + + # but keep track of the parent in this loop parent = module @@ -543,7 +553,7 @@ def load_module(self, fullname): namespace, dot, module_name = fullname.rpartition('.') if self.is_prefix(fullname): - module = _make_namespace_module(fullname) + module = SpackNamespace(fullname) elif namespace == self.full_namespace: real_name = self.real_name(module_name) @@ -556,6 +566,11 @@ def load_module(self, fullname): module.__loader__ = self sys.modules[fullname] = module + if namespace != fullname: + parent = sys.modules[namespace] + if not hasattr(parent, module_name): + setattr(parent, module_name, module) + return module From 5d690c9270bd20366923da4bc1b8498621c2ff69 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 7 Aug 2016 17:53:59 -0700 Subject: [PATCH 268/284] Make compiler command test last until caching is fixed. - global compiler cache breaks tests that come after this one. 
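For context, a hypothetical sketch (not Spack code; every name below is made up)
of the kind of module-level caching that makes test order matter: once one test
module fills a global cache, every test module that runs after it sees the stale
contents instead of its own mock configuration.

    # Hypothetical illustration of global state leaking across test modules.
    _compilers_cache = {}      # stands in for the global compiler cache

    def all_compilers(scope='site'):
        # Memoized lookup: the first test to call this fixes the contents
        # for the rest of the run, so later tests cannot swap in mock data.
        if scope not in _compilers_cache:
            _compilers_cache[scope] = ['gcc@4.9.3', 'clang@3.8']
        return _compilers_cache[scope]

    if __name__ == '__main__':
        print(all_compilers())  # later callers get whatever was cached first

Running the polluting test last keeps its side effects from reaching any other test.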
--- lib/spack/spack/test/__init__.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index e092a509135..3cc7ed512b1 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -23,6 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import sys +import os import llnl.util.tty as tty import nose @@ -40,13 +41,11 @@ 'cc', 'cmd.find', 'cmd.module', - 'cmd.test_compiler_cmd', 'cmd.test_install', 'cmd.uninstall', 'concretize', 'concretize_preferences', 'config', - 'configure_guess', 'database', 'directory_layout', 'environment', @@ -78,6 +77,8 @@ 'versions', 'provider_index', 'yaml', + # This test needs to be last until global compiler cache is fixed. + 'cmd.test_compiler_cmd', ] def list_tests(): From 1339714eecc927e46a1336241512483dd8d11eab Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 7 Aug 2016 17:54:37 -0700 Subject: [PATCH 269/284] Restore text output in verbose mode. --- lib/spack/spack/test/__init__.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index 3cc7ed512b1..7795cb59c72 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -89,6 +89,10 @@ def list_tests(): def run(names, outputDir, verbose=False): """Run tests with the supplied names. Names should be a list. If it's empty, run ALL of Spack's tests.""" + # Print output to stdout if verbose is 1. + if verbose: + os.environ['NOSE_NOCAPTURE'] = '1' + if not names: names = test_names else: From 2042e9a6d85d02adc9424ce6f973e17341ebb292 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 7 Aug 2016 18:36:11 -0700 Subject: [PATCH 270/284] Fix bugs with sparse spec printing. - Make namespace, arch, and dependnecies show up in spec yaml only if they're set. - Lost some of this functionality with deptypes --- lib/spack/spack/architecture.py | 7 +++++++ lib/spack/spack/spec.py | 14 ++++---------- lib/spack/spack/test/architecture.py | 23 +++++++++++++++++++++++ 3 files changed, 34 insertions(+), 10 deletions(-) diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py index 974505ee3a0..886e170b1aa 100644 --- a/lib/spack/spack/architecture.py +++ b/lib/spack/spack/architecture.py @@ -383,6 +383,13 @@ def __str__(self): def __contains__(self, string): return string in str(self) + # TODO: make this unnecessary: don't include an empty arch on *every* spec. + def __nonzero__(self): + return (self.platform is not None or + self.platform_os is not None or + self.target is not None) + __bool__ = __nonzero__ + def _cmp_key(self): if isinstance(self.platform, Platform): platform = self.platform.name diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 8a47ec95ad0..a37b39be671 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -915,10 +915,7 @@ def to_node_dict(self): if params: d['parameters'] = params - if self.architecture is not None: - d['arch'] = self.architecture - - if self.dependencies: + if self.dependencies(): deps = self.dependencies_dict(deptype=('link', 'run')) d['dependencies'] = dict( (name, { @@ -926,17 +923,13 @@ def to_node_dict(self): 'type': [str(s) for s in dspec.deptypes]}) for name, dspec in deps.items()) - # Older concrete specs do not have a namespace. Omit for - # consistent hashing. 
- if not self.concrete or self.namespace: + if self.namespace: d['namespace'] = self.namespace if self.architecture: # TODO: Fix the target.to_dict to account for the tuple # Want it to be a dict of dicts d['arch'] = self.architecture.to_dict() - else: - d['arch'] = None if self.compiler: d.update(self.compiler.to_dict()) @@ -967,7 +960,8 @@ def from_node_dict(node): if 'version' in node or 'versions' in node: spec.versions = VersionList.from_dict(node) - spec.architecture = spack.architecture.arch_from_dict(node['arch']) + if 'arch' in node: + spec.architecture = spack.architecture.arch_from_dict(node['arch']) if 'compiler' in node: spec.compiler = CompilerSpec.from_dict(node) diff --git a/lib/spack/spack/test/architecture.py b/lib/spack/spack/test/architecture.py index 42dd9f4c040..b8441bdac42 100644 --- a/lib/spack/spack/test/architecture.py +++ b/lib/spack/spack/test/architecture.py @@ -86,6 +86,29 @@ def test_platform(self): self.assertEqual(str(output_platform_class), str(my_platform_class)) + def test_boolness(self): + # Make sure architecture reports that it's False when nothing's set. + arch = spack.architecture.Arch() + self.assertFalse(arch) + + # Dummy architecture parts + plat = spack.architecture.platform() + plat_os = plat.operating_system('default_os') + plat_target = plat.target('default_target') + + # Make sure architecture reports that it's True when anything is set. + arch = spack.architecture.Arch() + arch.platform = plat + self.assertTrue(arch) + + arch = spack.architecture.Arch() + arch.platform_os = plat_os + self.assertTrue(arch) + + arch = spack.architecture.Arch() + arch.target = plat_target + self.assertTrue(arch) + def test_user_front_end_input(self): """Test when user inputs just frontend that both the frontend target and frontend operating system match From 15d9fb187916d94d4a11ed4654cbbb3c978001c4 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Thu, 4 Aug 2016 09:42:02 +0200 Subject: [PATCH 271/284] plumed : adding dependents * cp2k : added plumed * gromacs : patched pme load balancing * gromacs : added plumed --- .../repos/builtin/packages/cp2k/package.py | 51 ++++++++++++------- .../repos/builtin/packages/gromacs/package.py | 10 +++- .../repos/builtin/packages/plumed/package.py | 28 ++++++++++ 3 files changed, 71 insertions(+), 18 deletions(-) diff --git a/var/spack/repos/builtin/packages/cp2k/package.py b/var/spack/repos/builtin/packages/cp2k/package.py index 8fdd1e0ca35..5f592863233 100644 --- a/var/spack/repos/builtin/packages/cp2k/package.py +++ b/var/spack/repos/builtin/packages/cp2k/package.py @@ -40,6 +40,7 @@ class Cp2k(Package): version('3.0', 'c05bc47335f68597a310b1ed75601d35') variant('mpi', default=True, description='Enable MPI support') + variant('plumed', default=False, description='Enable PLUMED support') depends_on('python') # Build dependency @@ -49,6 +50,8 @@ class Cp2k(Package): depends_on('mpi', when='+mpi') depends_on('scalapack', when='+mpi') + depends_on('plumed+shared+mpi', when='+plumed+mpi') + depends_on('plumed+shared~mpi', when='+plumed~mpi') # TODO : add dependency on libint # TODO : add dependency on libsmm, libxsmm @@ -56,7 +59,6 @@ class Cp2k(Package): # TODO : add dependency on CUDA # TODO : add dependency on PEXSI # TODO : add dependency on QUIP - # TODO : add dependency on plumed # TODO : add dependency on libwannier90 parallel = False @@ -70,22 +72,6 @@ def install(self, spec, prefix): # Write the custom makefile with open(makefile, 'w') as mkf: - mkf.write('CC = {0.compiler.cc}\n'.format(self)) - if '%intel' in 
self.spec: - # CPP is a commented command in Intel arch of CP2K - # This is the hack through which cp2k developers avoid doing : - # - # ${CPP} .F > .f90 - # - # and use `-fpp` instead - mkf.write('CPP = # {0.compiler.cc} -P\n'.format(self)) - mkf.write('AR = xiar -r\n') - else: - mkf.write('CPP = {0.compiler.cc} -E\n'.format(self)) - mkf.write('AR = ar -r\n') - fc = self.compiler.fc if '~mpi' in spec else self.spec['mpi'].mpifc - mkf.write('FC = {0}\n'.format(fc)) - mkf.write('LD = {0}\n'.format(fc)) # Optimization flags optflags = { 'gcc': ['-O2', @@ -110,6 +96,37 @@ def install(self, spec, prefix): ]) ldflags = ['-L' + spec['fftw'].prefix.lib] libs = [] + if '+plumed' in self.spec: + # Include Plumed.inc in the Makefile + mkf.write('include {0}\n'.format( + join_path(self.spec['plumed'].prefix.lib, + 'plumed', + 'src', + 'lib', + 'Plumed.inc') + )) + # Add required macro + cppflags.extend(['-D__PLUMED2']) + libs.extend([ + join_path(self.spec['plumed'].prefix.lib, 'libplumed.so') + ]) + + mkf.write('CC = {0.compiler.cc}\n'.format(self)) + if '%intel' in self.spec: + # CPP is a commented command in Intel arch of CP2K + # This is the hack through which cp2k developers avoid doing : + # + # ${CPP} .F > .f90 + # + # and use `-fpp` instead + mkf.write('CPP = # {0.compiler.cc} -P\n'.format(self)) + mkf.write('AR = xiar -r\n') + else: + mkf.write('CPP = {0.compiler.cc} -E\n'.format(self)) + mkf.write('AR = ar -r\n') + fc = self.compiler.fc if '~mpi' in spec else self.spec['mpi'].mpifc + mkf.write('FC = {0}\n'.format(fc)) + mkf.write('LD = {0}\n'.format(fc)) # Intel if '%intel' in self.spec: cppflags.extend([ diff --git a/var/spack/repos/builtin/packages/gromacs/package.py b/var/spack/repos/builtin/packages/gromacs/package.py index 55eacc8d38d..99c6701bc38 100644 --- a/var/spack/repos/builtin/packages/gromacs/package.py +++ b/var/spack/repos/builtin/packages/gromacs/package.py @@ -22,6 +22,8 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## +import subprocess + from spack import * @@ -46,14 +48,20 @@ class Gromacs(Package): variant('shared', default=True, description='Enables the build of shared libraries') variant('debug', default=False, description='Enables debug mode') variant('double', default=False, description='Produces a double precision version of the executables') + variant('plumed', default=False, description='Enable PLUMED support') depends_on('mpi', when='+mpi') - + depends_on('plumed+mpi', when='+plumed+mpi') + depends_on('plumed~mpi', when='+plumed~mpi') depends_on('fftw') depends_on('cmake', type='build') # TODO : add GPU support + def patch(self): + if '+plumed' in self.spec: + self.spec['plumed'].package.apply_patch(self) + def install(self, spec, prefix): options = [] diff --git a/var/spack/repos/builtin/packages/plumed/package.py b/var/spack/repos/builtin/packages/plumed/package.py index 32571455ebb..4a1dd1baa56 100644 --- a/var/spack/repos/builtin/packages/plumed/package.py +++ b/var/spack/repos/builtin/packages/plumed/package.py @@ -22,6 +22,7 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## +import subprocess from spack import * @@ -55,6 +56,33 @@ class Plumed(Package): depends_on('mpi', when='+mpi') depends_on('gsl', 
when='+gsl') + # Dictionary mapping PLUMED versions to the patches it provides + # interactively + patches = { + '2.2.3': { + 'amber-14': '1', + 'gromacs-4.5.7': '2', + 'gromacs-4.6.7': '3', + 'gromacs-5.0.7': '4', + 'gromacs-5.1.2': '5', + 'lammps-6Apr13': '6', + 'namd-2.8': '7', + 'namd-2.9': '8', + 'espresso-5.0.2': '9' + } + } + + def apply_patch(self, other): + plumed = subprocess.Popen( + [join_path(self.spec.prefix.bin, 'plumed'), 'patch', '-p'], + stdin=subprocess.PIPE + ) + opts = Plumed.patches[str(self.version)] + search = '{0.name}-{0.version}'.format(other) + choice = opts[search] + '\n' + plumed.stdin.write(choice) + plumed.wait() + def setup_dependent_package(self, module, ext_spec): # Make plumed visible from dependent packages module.plumed = Executable(join_path(self.spec.prefix.bin, 'plumed')) From 5362864cc9003d8f630097f12d4b560d970fd306 Mon Sep 17 00:00:00 2001 From: alalazo Date: Fri, 5 Aug 2016 07:49:47 +0200 Subject: [PATCH 272/284] plumed : fixed name clash on attribute --- var/spack/repos/builtin/packages/plumed/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/plumed/package.py b/var/spack/repos/builtin/packages/plumed/package.py index 4a1dd1baa56..abb03e07703 100644 --- a/var/spack/repos/builtin/packages/plumed/package.py +++ b/var/spack/repos/builtin/packages/plumed/package.py @@ -58,7 +58,7 @@ class Plumed(Package): # Dictionary mapping PLUMED versions to the patches it provides # interactively - patches = { + plumed_patches = { '2.2.3': { 'amber-14': '1', 'gromacs-4.5.7': '2', @@ -77,7 +77,7 @@ def apply_patch(self, other): [join_path(self.spec.prefix.bin, 'plumed'), 'patch', '-p'], stdin=subprocess.PIPE ) - opts = Plumed.patches[str(self.version)] + opts = Plumed.plumed_patches[str(self.version)] search = '{0.name}-{0.version}'.format(other) choice = opts[search] + '\n' plumed.stdin.write(choice) From 102ac7bcf1bc7fd134b10a9c54e40302d4f1345b Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 9 Aug 2016 00:24:54 -0700 Subject: [PATCH 273/284] Move provider cache to home directory and refactor Transactions Major stuff: - Created a FileCache for managing user cache files in Spack. Currently just handles virtuals. - Moved virtual cache from the repository to the home directory so that users do not need write access to Spack repositories to use them. - Refactored `Transaction` class in `database.py` -- moved it to `LockTransaction` in `lock.py` and made it reusable by other classes. Other additions: - Added tests for file cache and transactions. - Added a few more tests for database - Fixed bug in DB where writes could happen even if exceptions were raised during a transaction. - `spack uninstall` now attempts to repair the database when it discovers that a prefix doesn't exist but a DB record does. 
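For reference, a minimal usage sketch of the FileCache transaction API described
above. The cache key below is made up, and the calls simply mirror the ones this
patch adds to lib/spack/spack/repository.py, so treat it as an illustration rather
than canonical usage.

    # Hypothetical usage of the new user cache (key name is illustrative).
    import spack

    key = 'providers/builtin-index.yaml'

    # init_entry() sets up the per-key lock and reports whether the
    # cached file already exists.
    if spack.user_cache.init_entry(key):
        # read_transaction() takes a read lock and yields the open cache file.
        with spack.user_cache.read_transaction(key) as f:
            cached = f.read()

    # write_transaction() takes a write lock and yields (old_file, new_file);
    # old_file is None when the entry does not exist yet.  On success the
    # temporary file replaces the cache entry atomically.
    with spack.user_cache.write_transaction(key) as (old, new):
        new.write(old.read() if old else '')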
--- lib/spack/llnl/util/lock.py | 69 +++++++++- lib/spack/spack/__init__.py | 9 +- lib/spack/spack/cmd/purge.py | 10 +- lib/spack/spack/cmd/test.py | 10 +- lib/spack/spack/cmd/uninstall.py | 3 +- lib/spack/spack/config.py | 2 +- lib/spack/spack/database.py | 61 ++------- lib/spack/spack/file_cache.py | 181 +++++++++++++++++++++++++ lib/spack/spack/modules.py | 3 +- lib/spack/spack/package.py | 10 +- lib/spack/spack/repository.py | 58 +++----- lib/spack/spack/stage.py | 5 +- lib/spack/spack/test/__init__.py | 1 + lib/spack/spack/test/database.py | 34 +++++ lib/spack/spack/test/file_cache.py | 84 ++++++++++++ lib/spack/spack/test/lock.py | 163 +++++++++++++++++++++- lib/spack/spack/test/mock_database.py | 6 +- lib/spack/spack/test/provider_index.py | 4 - 18 files changed, 600 insertions(+), 113 deletions(-) create mode 100644 lib/spack/spack/file_cache.py create mode 100644 lib/spack/spack/test/file_cache.py diff --git a/lib/spack/llnl/util/lock.py b/lib/spack/llnl/util/lock.py index 479a1b01670..e1f5b4878a0 100644 --- a/lib/spack/llnl/util/lock.py +++ b/lib/spack/llnl/util/lock.py @@ -28,6 +28,9 @@ import time import socket +__all__ = ['Lock', 'LockTransaction', 'WriteTransaction', 'ReadTransaction', + 'LockError'] + # Default timeout in seconds, after which locks will raise exceptions. _default_timeout = 60 @@ -63,7 +66,9 @@ def _lock(self, op, timeout): fcntl.lockf(self._fd, op | fcntl.LOCK_NB) if op == fcntl.LOCK_EX: - os.write(self._fd, "pid=%s,host=%s" % (os.getpid(), socket.getfqdn())) + os.write( + self._fd, + "pid=%s,host=%s" % (os.getpid(), socket.getfqdn())) return except IOError as error: @@ -170,6 +175,66 @@ def release_write(self): return False +class LockTransaction(object): + """Simple nested transaction context manager that uses a file lock. + + This class can trigger actions when the lock is acquired for the + first time and released for the last. + + If the acquire_fn returns a value, it is used as the return value for + __enter__, allowing it to be passed as the `as` argument of a `with` + statement. + + If acquire_fn returns a context manager, *its* `__enter__` function will be + called in `__enter__` after acquire_fn, and its `__exit__` funciton will be + called before `release_fn` in `__exit__`, allowing you to nest a context + manager to be used along with the lock. + + Timeout for lock is customizable. 
+ + """ + + def __init__(self, lock, acquire_fn=None, release_fn=None, + timeout=_default_timeout): + self._lock = lock + self._timeout = timeout + self._acquire_fn = acquire_fn + self._release_fn = release_fn + self._as = None + + def __enter__(self): + if self._enter() and self._acquire_fn: + self._as = self._acquire_fn() + if hasattr(self._as, '__enter__'): + return self._as.__enter__() + else: + return self._as + + def __exit__(self, type, value, traceback): + if self._exit(): + if self._as and hasattr(self._as, '__exit__'): + self._as.__exit__(type, value, traceback) + if self._release_fn: + self._release_fn(type, value, traceback) + if value: + raise value + + +class ReadTransaction(LockTransaction): + def _enter(self): + return self._lock.acquire_read(self._timeout) + + def _exit(self): + return self._lock.release_read() + + +class WriteTransaction(LockTransaction): + def _enter(self): + return self._lock.acquire_write(self._timeout) + + def _exit(self): + return self._lock.release_write() + + class LockError(Exception): """Raised when an attempt to acquire a lock times out.""" - pass diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index d67585aac49..a6e21987c8a 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -50,8 +50,15 @@ share_path = join_path(spack_root, "share", "spack") cache_path = join_path(var_path, "cache") +# User configuration location +user_config_path = os.path.expanduser('~/.spack') + import spack.fetch_strategy -cache = spack.fetch_strategy.FsCache(cache_path) +fetch_cache = spack.fetch_strategy.FsCache(cache_path) + +from spack.file_cache import FileCache +user_cache_path = join_path(user_config_path, 'cache') +user_cache = FileCache(user_cache_path) prefix = spack_root opt_path = join_path(prefix, "opt") diff --git a/lib/spack/spack/cmd/purge.py b/lib/spack/spack/cmd/purge.py index f4e27a39691..26745810a83 100644 --- a/lib/spack/spack/cmd/purge.py +++ b/lib/spack/spack/cmd/purge.py @@ -33,7 +33,11 @@ def setup_parser(subparser): '-s', '--stage', action='store_true', default=True, help="Remove all temporary build stages (default).") subparser.add_argument( - '-c', '--cache', action='store_true', help="Remove cached downloads.") + '-d', '--downloads', action='store_true', + help="Remove cached downloads.") + subparser.add_argument( + '-u', '--user-cache', action='store_true', + help="Remove caches in user home directory. 
Includes virtual indices.") subparser.add_argument( '-a', '--all', action='store_true', help="Remove all of the above.") @@ -49,4 +53,6 @@ def purge(parser, args): if args.stage or args.all: stage.purge() if args.cache or args.all: - spack.cache.destroy() + spack.fetch_cache.destroy() + if args.user_cache or args.all: + spack.user_cache.destroy() diff --git a/lib/spack/spack/cmd/test.py b/lib/spack/spack/cmd/test.py index 36810321ef7..2667b428209 100644 --- a/lib/spack/spack/cmd/test.py +++ b/lib/spack/spack/cmd/test.py @@ -41,10 +41,10 @@ def setup_parser(subparser): subparser.add_argument( '-l', '--list', action='store_true', dest='list', help="Show available tests") subparser.add_argument( - '--createXmlOutput', action='store_true', dest='createXmlOutput', + '--createXmlOutput', action='store_true', dest='createXmlOutput', help="Create JUnit XML from test results") subparser.add_argument( - '--xmlOutputDir', dest='xmlOutputDir', + '--xmlOutputDir', dest='xmlOutputDir', help="Nose creates XML files in this directory") subparser.add_argument( '-v', '--verbose', action='store_true', dest='verbose', @@ -62,7 +62,7 @@ def fetcher(self, targetPath, digest): class MockCacheFetcher(object): def set_stage(self, stage): pass - + def fetch(self): raise FetchError("Mock cache always fails for tests") @@ -82,8 +82,8 @@ def test(parser, args): outputDir = join_path(os.getcwd(), "test-output") else: outputDir = os.path.abspath(args.xmlOutputDir) - + if not os.path.exists(outputDir): mkdirp(outputDir) - spack.cache = MockCache() + spack.fetch_cache = MockCache() spack.test.run(args.names, outputDir, args.verbose) diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py index a17b7c685c3..dbe6cd65847 100644 --- a/lib/spack/spack/cmd/uninstall.py +++ b/lib/spack/spack/cmd/uninstall.py @@ -184,7 +184,8 @@ def uninstall(parser, args): uninstall_list = list(set(uninstall_list)) if has_error: - tty.die('You can use spack uninstall --dependents to uninstall these dependencies as well') # NOQA: ignore=E501 + tty.die('You can use spack uninstall --dependents ' + 'to uninstall these dependencies as well') if not args.yes_to_all: tty.msg("The following packages will be uninstalled : ") diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index 31f0eb3a560..a4e274893c0 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -525,7 +525,7 @@ def clear(self): ConfigScope('site', os.path.join(spack.etc_path, 'spack')) """User configuration can override both spack defaults and site config.""" -ConfigScope('user', os.path.expanduser('~/.spack')) +ConfigScope('user', spack.user_config_path) def highest_precedence_scope(): diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index 317b0d5784f..5ce42b2e670 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -165,11 +165,11 @@ def __init__(self, root, db_dir=None): def write_transaction(self, timeout=_db_lock_timeout): """Get a write lock context manager for use in a `with` block.""" - return WriteTransaction(self, self._read, self._write, timeout) + return WriteTransaction(self.lock, self._read, self._write, timeout) def read_transaction(self, timeout=_db_lock_timeout): """Get a read lock context manager for use in a `with` block.""" - return ReadTransaction(self, self._read, None, timeout) + return ReadTransaction(self.lock, self._read, timeout=timeout) def _write_to_yaml(self, stream): """Write out the databsae to a YAML file. 
@@ -352,12 +352,22 @@ def _check_ref_counts(self): "Invalid ref_count: %s: %d (expected %d), in DB %s" % (key, found, expected, self._index_path)) - def _write(self): + def _write(self, type, value, traceback): """Write the in-memory database index to its file path. - Does no locking. + This is a helper function called by the WriteTransaction context + manager. If there is an exception while the write lock is active, + nothing will be written to the database file, but the in-memory database + *may* be left in an inconsistent state. It will be consistent after the + start of the next transaction, when it read from disk again. + + This routine does no locking. """ + # Do not write if exceptions were raised + if type is not None: + return + temp_file = self._index_path + ( '.%s.%s.temp' % (socket.getfqdn(), os.getpid())) @@ -589,49 +599,6 @@ def missing(self, spec): return key in self._data and not self._data[key].installed -class _Transaction(object): - """Simple nested transaction context manager that uses a file lock. - - This class can trigger actions when the lock is acquired for the - first time and released for the last. - - Timeout for lock is customizable. - """ - - def __init__(self, db, - acquire_fn=None, - release_fn=None, - timeout=_db_lock_timeout): - self._db = db - self._timeout = timeout - self._acquire_fn = acquire_fn - self._release_fn = release_fn - - def __enter__(self): - if self._enter() and self._acquire_fn: - self._acquire_fn() - - def __exit__(self, type, value, traceback): - if self._exit() and self._release_fn: - self._release_fn() - - -class ReadTransaction(_Transaction): - def _enter(self): - return self._db.lock.acquire_read(self._timeout) - - def _exit(self): - return self._db.lock.release_read() - - -class WriteTransaction(_Transaction): - def _enter(self): - return self._db.lock.acquire_write(self._timeout) - - def _exit(self): - return self._db.lock.release_write() - - class CorruptDatabaseError(SpackError): def __init__(self, path, msg=''): super(CorruptDatabaseError, self).__init__( diff --git a/lib/spack/spack/file_cache.py b/lib/spack/spack/file_cache.py new file mode 100644 index 00000000000..2124df9c9cb --- /dev/null +++ b/lib/spack/spack/file_cache.py @@ -0,0 +1,181 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import os +import shutil + +from llnl.util.filesystem import * +from llnl.util.lock import * + +import spack +from spack.error import SpackError + + +class FileCache(object): + """This class manages cached data in the filesystem. + + - Cache files are fetched and stored by unique keys. Keys can be relative + paths, so that thre can be some hierarchy in the cache. + + - The FileCache handles locking cache files for reading and writing, so + client code need not manage locks for cache entries. + + """ + def __init__(self, root): + """Create a file cache object. + + This will create the cache directory if it does not exist yet. + + """ + self.root = root.rstrip(os.path.sep) + if not os.path.exists(self.root): + mkdirp(self.root) + + self._locks = {} + + def purge(self): + """Remove all files under the cache root.""" + for f in os.listdir(self.root): + path = join_path(self.root, f) + shutil.rmtree(f) + + def cache_path(self, key): + """Path to the file in the cache for a particular key.""" + return join_path(self.root, key) + + def _lock_path(self, key): + """Path to the file in the cache for a particular key.""" + keyfile = os.path.basename(key) + keydir = os.path.dirname(key) + + return join_path(self.root, keydir, '.' + keyfile + '.lock') + + def _get_lock(self, key): + """Create a lock for a key, if necessary, and return a lock object.""" + if key not in self._locks: + lock_file = self._lock_path(key) + if not os.path.exists(lock_file): + touch(lock_file) + self._locks[key] = Lock(lock_file) + return self._locks[key] + + def init_entry(self, key): + """Ensure we can access a cache file. Create a lock for it if needed. + + Return whether the cache file exists yet or not. + """ + cache_path = self.cache_path(key) + + exists = os.path.exists(cache_path) + if exists: + if not os.path.isfile(cache_path): + raise CacheError("Cache file is not a file: %s" % cache_path) + + if not os.access(cache_path, os.R_OK|os.W_OK): + raise CacheError("Cannot access cache file: %s" % cache_path) + else: + # if the file is hierarchical, make parent directories + parent = os.path.dirname(cache_path) + if parent.rstrip(os.path.sep) != self.root: + mkdirp(parent) + + if not os.access(parent, os.R_OK|os.W_OK): + raise CacheError("Cannot access cache directory: %s" % parent) + + # ensure lock is created for this key + self._get_lock(key) + return exists + + def read_transaction(self, key): + """Get a read transaction on a file cache item. + + Returns a ReadTransaction context manager and opens the cache file for + reading. You can use it like this: + + with spack.user_cache.read_transaction(key) as cache_file: + cache_file.read() + + """ + return ReadTransaction( + self._get_lock(key), lambda: open(self.cache_path(key))) + + def write_transaction(self, key): + """Get a write transaction on a file cache item. + + Returns a WriteTransaction context manager that opens a temporary file + for writing. Once the context manager finishes, if nothing went wrong, + moves the file into place on top of the old file atomically. 
+ + """ + class WriteContextManager(object): + def __enter__(cm): + cm.orig_filename = self.cache_path(key) + cm.orig_file = None + if os.path.exists(cm.orig_filename): + cm.orig_file = open(cm.orig_filename, 'r') + + cm.tmp_filename = self.cache_path(key) + '.tmp' + cm.tmp_file = open(cm.tmp_filename, 'w') + + return cm.orig_file, cm.tmp_file + + def __exit__(cm, type, value, traceback): + if cm.orig_file: + cm.orig_file.close() + cm.tmp_file.close() + + if value: + # remove tmp on exception & raise it + shutil.rmtree(cm.tmp_filename, True) + raise value + else: + os.rename(cm.tmp_filename, cm.orig_filename) + + return WriteTransaction(self._get_lock(key), WriteContextManager) + + + def mtime(self, key): + """Return modification time of cache file, or 0 if it does not exist. + + Time is in units returned by os.stat in the mtime field, which is + platform-dependent. + + """ + if not self.init_entry(key): + return 0 + else: + sinfo = os.stat(self.cache_path(key)) + return sinfo.st_mtime + + + def remove(self, key): + lock = self._get_lock(key) + try: + lock.acquire_write() + os.unlink(self.cache_path(key)) + finally: + lock.release_write() + os.unlink(self._lock_path(key)) + +class CacheError(SpackError): pass diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py index 8701a31c496..8ac6a77d13e 100644 --- a/lib/spack/spack/modules.py +++ b/lib/spack/spack/modules.py @@ -520,7 +520,8 @@ def header(self): def prerequisite(self, spec): tty.warn('prerequisites: not supported by dotkit module files') - tty.warn('\tYou may want to check ~/.spack/modules.yaml') + tty.warn('\tYou may want to check %s/modules.yaml' + % spack.user_config_path) return '' diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 43aefbf65e6..475155937c6 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -1198,7 +1198,15 @@ def install(self, spec, prefix): def do_uninstall(self, force=False): if not self.installed: - raise InstallError(str(self.spec) + " is not installed.") + # prefix may not exist, but DB may be inconsistent. Try to fix by + # removing, but omit hooks. + specs = spack.installed_db.query(self.spec, installed=True) + if specs: + spack.installed_db.remove(specs[0]) + tty.msg("Removed stale DB entry for %s" % self.spec.short_spec) + return + else: + raise InstallError(str(self.spec) + " is not installed.") if not force: dependents = self.installed_dependents diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index 58747ba25df..a0904a2cde1 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -41,6 +41,7 @@ from llnl.util.lock import Lock from llnl.util.filesystem import * +import spack import spack.error import spack.config import spack.spec @@ -414,17 +415,6 @@ def check(condition, msg): check(os.path.isdir(self.packages_path), "No directory '%s' found in '%s'" % (repo_config_name, root)) - self.index_file = join_path(self.root, repo_index_name) - check(not os.path.exists(self.index_file) or - (os.path.isfile(self.index_file) and os.access(self.index_file, os.R_OK|os.W_OK)), - "Cannot access repository index file in %s" % root) - - # lock file for reading/writing the index - self._lock_path = join_path(self.root, 'lock') - if not os.path.exists(self._lock_path): - touch(self._lock_path) - self._lock = Lock(self._lock_path) - # Read configuration and validate namespace config = self._read_config() check('namespace' in config, '%s must define a namespace.' 
@@ -461,6 +451,8 @@ def check(condition, msg): # make sure the namespace for packages in this repo exists. self._create_namespace() + # Unique filename for cache of virtual dependency providers + self._cache_file = 'providers/%s-index.yaml' % self.namespace def _create_namespace(self): """Create this repo's namespace module and insert it into sys.modules. @@ -658,21 +650,15 @@ def read(): self._provider_index = ProviderIndex.from_yaml(f) # Read the old ProviderIndex, or make a new one. - index_existed = os.path.isfile(self.index_file) + key = self._cache_file + index_existed = spack.user_cache.init_entry(key) if index_existed and not self._needs_update: - self._lock.acquire_read() - try: - read() - finally: - self._lock.release_read() - + with spack.user_cache.read_transaction(key) as f: + self._provider_index = ProviderIndex.from_yaml(f) else: - tmp = self.index_file + '.tmp' - self._lock.acquire_write() - try: - if index_existed: - with open(self.index_file) as f: - self._provider_index = ProviderIndex.from_yaml(f) + with spack.user_cache.write_transaction(key) as (old, new): + if old: + self._provider_index = ProviderIndex.from_yaml(old) else: self._provider_index = ProviderIndex() @@ -681,17 +667,7 @@ def read(): self._provider_index.remove_provider(namespaced_name) self._provider_index.update(namespaced_name) - - with open(tmp, 'w') as f: - self._provider_index.to_yaml(f) - os.rename(tmp, self.index_file) - - except: - shutil.rmtree(tmp, ignore_errors=True) - raise - - finally: - self._lock.release_write() + self._provider_index.to_yaml(new) @property @@ -745,7 +721,7 @@ def filename_for_package_name(self, spec): def _fast_package_check(self): - """List packages in the repo and cehck whether index is up to date. + """List packages in the repo and check whether index is up to date. Both of these opreations require checking all `package.py` files so we do them at the same time. We list the repo @@ -763,10 +739,7 @@ def _fast_package_check(self): self._all_package_names = [] # Get index modification time. - index_mtime = 0 - if os.path.exists(self.index_file): - sinfo = os.stat(self.index_file) - index_mtime = sinfo.st_mtime + index_mtime = spack.user_cache.mtime(self._cache_file) for pkg_name in os.listdir(self.packages_path): # Skip non-directories in the package root. @@ -774,8 +747,9 @@ def _fast_package_check(self): # Warn about invalid names that look like packages. if not valid_module_name(pkg_name): - tty.warn("Skipping package at %s. '%s' is not a valid Spack module name." - % (pkg_dir, pkg_name)) + msg = ("Skipping package at %s. 
" + "'%s' is not a valid Spack module name.") + tty.warn(msg % (pkg_dir, pkg_name)) continue # construct the file name from the directory diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 8fcc331482a..7676cb9ab6f 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -315,7 +315,8 @@ def fetch(self, mirror_only=False): # Add URL strategies for all the mirrors with the digest for url in urls: fetchers.insert(0, fs.URLFetchStrategy(url, digest)) - fetchers.insert(0, spack.cache.fetcher(self.mirror_path, digest)) + fetchers.insert(0, spack.fetch_cache.fetcher(self.mirror_path, + digest)) # Look for the archive in list_url package_name = os.path.dirname(self.mirror_path) @@ -365,7 +366,7 @@ def check(self): self.fetcher.check() def cache_local(self): - spack.cache.store(self.fetcher, self.mirror_path) + spack.fetch_cache.store(self.fetcher, self.mirror_path) def expand_archive(self): """Changes to the stage directory and attempt to expand the downloaded diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index 7795cb59c72..4969081e638 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -49,6 +49,7 @@ 'database', 'directory_layout', 'environment', + 'file_cache', 'git_fetch', 'hg_fetch', 'install', diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index e1322f20816..a2f09450bc9 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -273,3 +273,37 @@ def test_090_non_root_ref_counts(self): # mpich ref count updated properly. mpich_rec = self.installed_db.get_record('mpich') self.assertEqual(mpich_rec.ref_count, 0) + + def test_100_no_write_with_exception_on_remove(self): + def fail_while_writing(): + with self.installed_db.write_transaction(): + self._mock_remove('mpileaks ^zmpi') + raise Exception() + + with self.installed_db.read_transaction(): + self.assertEqual( + len(self.installed_db.query('mpileaks ^zmpi', installed=any)), 1) + + self.assertRaises(Exception, fail_while_writing) + + # reload DB and make sure zmpi is still there. + with self.installed_db.read_transaction(): + self.assertEqual( + len(self.installed_db.query('mpileaks ^zmpi', installed=any)), 1) + + def test_110_no_write_with_exception_on_install(self): + def fail_while_writing(): + with self.installed_db.write_transaction(): + self._mock_install('cmake') + raise Exception() + + with self.installed_db.read_transaction(): + self.assertEqual( + self.installed_db.query('cmake', installed=any), []) + + self.assertRaises(Exception, fail_while_writing) + + # reload DB and make sure cmake was not written. + with self.installed_db.read_transaction(): + self.assertEqual( + self.installed_db.query('cmake', installed=any), []) diff --git a/lib/spack/spack/test/file_cache.py b/lib/spack/spack/test/file_cache.py new file mode 100644 index 00000000000..6142b135ebe --- /dev/null +++ b/lib/spack/spack/test/file_cache.py @@ -0,0 +1,84 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +""" +Test Spack's FileCache. +""" +import os +import shutil +import tempfile +import unittest + +import spack +from spack.file_cache import FileCache + + +class FileCacheTest(unittest.TestCase): + """Ensure that a file cache can properly write to a file and recover its + contents.""" + + def setUp(self): + self.scratch_dir = tempfile.mkdtemp() + self.cache = FileCache(self.scratch_dir) + + def tearDown(self): + shutil.rmtree(self.scratch_dir) + + def test_write_and_read_cache_file(self): + """Test writing then reading a cached file.""" + with self.cache.write_transaction('test.yaml') as (old, new): + self.assertTrue(old is None) + self.assertTrue(new is not None) + new.write("foobar\n") + + with self.cache.read_transaction('test.yaml') as stream: + text = stream.read() + self.assertEqual("foobar\n", text) + + def test_remove(self): + """Test removing an entry from the cache.""" + self.test_write_and_write_cache_file() + + self.cache.remove('test.yaml') + + self.assertFalse(os.path.exists(self.cache.cache_path('test.yaml'))) + self.assertFalse(os.path.exists(self.cache._lock_path('test.yaml'))) + + def test_write_and_write_cache_file(self): + """Test two write transactions on a cached file.""" + with self.cache.write_transaction('test.yaml') as (old, new): + self.assertTrue(old is None) + self.assertTrue(new is not None) + new.write("foobar\n") + + with self.cache.write_transaction('test.yaml') as (old, new): + self.assertTrue(old is not None) + text = old.read() + self.assertEqual("foobar\n", text) + self.assertTrue(new is not None) + new.write("barbaz\n") + + with self.cache.read_transaction('test.yaml') as stream: + text = stream.read() + self.assertEqual("barbaz\n", text) diff --git a/lib/spack/spack/test/lock.py b/lib/spack/spack/test/lock.py index 0e9f6daf4d5..aaf573241bf 100644 --- a/lib/spack/spack/test/lock.py +++ b/lib/spack/spack/test/lock.py @@ -187,7 +187,6 @@ def p1(barrier): barrier.wait() # ---------------------------------------- 13 lock.release_read() - def p2(barrier): lock = Lock(self.lock_path) @@ -224,7 +223,6 @@ def p2(barrier): barrier.wait() # ---------------------------------------- 13 lock.release_read() - def p3(barrier): lock = Lock(self.lock_path) @@ -262,3 +260,164 @@ def p3(barrier): lock.release_read() self.multiproc_test(p1, p2, p3) + + def test_transaction(self): + def enter_fn(): + vals['entered'] = True + + def exit_fn(t, v, tb): + vals['exited'] = True + vals['exception'] = (t or v or tb) + + lock = Lock(self.lock_path) + vals = {'entered': False, 'exited': False, 'exception': False } + with ReadTransaction(lock, enter_fn, exit_fn): pass + self.assertTrue(vals['entered']) + self.assertTrue(vals['exited']) + self.assertFalse(vals['exception']) + + vals = {'entered': False, 
'exited': False, 'exception': False } + with WriteTransaction(lock, enter_fn, exit_fn): pass + self.assertTrue(vals['entered']) + self.assertTrue(vals['exited']) + self.assertFalse(vals['exception']) + + def test_transaction_with_exception(self): + def enter_fn(): + vals['entered'] = True + + def exit_fn(t, v, tb): + vals['exited'] = True + vals['exception'] = (t or v or tb) + + lock = Lock(self.lock_path) + + def do_read_with_exception(): + with ReadTransaction(lock, enter_fn, exit_fn): + raise Exception() + + def do_write_with_exception(): + with WriteTransaction(lock, enter_fn, exit_fn): + raise Exception() + + vals = {'entered': False, 'exited': False, 'exception': False } + self.assertRaises(Exception, do_read_with_exception) + self.assertTrue(vals['entered']) + self.assertTrue(vals['exited']) + self.assertTrue(vals['exception']) + + vals = {'entered': False, 'exited': False, 'exception': False } + self.assertRaises(Exception, do_write_with_exception) + self.assertTrue(vals['entered']) + self.assertTrue(vals['exited']) + self.assertTrue(vals['exception']) + + def test_transaction_with_context_manager(self): + class TestContextManager(object): + def __enter__(self): + vals['entered'] = True + + def __exit__(self, t, v, tb): + vals['exited'] = True + vals['exception'] = (t or v or tb) + + def exit_fn(t, v, tb): + vals['exited_fn'] = True + vals['exception_fn'] = (t or v or tb) + + lock = Lock(self.lock_path) + + vals = {'entered': False, 'exited': False, 'exited_fn': False, + 'exception': False, 'exception_fn': False } + with ReadTransaction(lock, TestContextManager, exit_fn): pass + self.assertTrue(vals['entered']) + self.assertTrue(vals['exited']) + self.assertFalse(vals['exception']) + self.assertTrue(vals['exited_fn']) + self.assertFalse(vals['exception_fn']) + + vals = {'entered': False, 'exited': False, 'exited_fn': False, + 'exception': False, 'exception_fn': False } + with ReadTransaction(lock, TestContextManager): pass + self.assertTrue(vals['entered']) + self.assertTrue(vals['exited']) + self.assertFalse(vals['exception']) + self.assertFalse(vals['exited_fn']) + self.assertFalse(vals['exception_fn']) + + vals = {'entered': False, 'exited': False, 'exited_fn': False, + 'exception': False, 'exception_fn': False } + with WriteTransaction(lock, TestContextManager, exit_fn): pass + self.assertTrue(vals['entered']) + self.assertTrue(vals['exited']) + self.assertFalse(vals['exception']) + self.assertTrue(vals['exited_fn']) + self.assertFalse(vals['exception_fn']) + + vals = {'entered': False, 'exited': False, 'exited_fn': False, + 'exception': False, 'exception_fn': False } + with WriteTransaction(lock, TestContextManager): pass + self.assertTrue(vals['entered']) + self.assertTrue(vals['exited']) + self.assertFalse(vals['exception']) + self.assertFalse(vals['exited_fn']) + self.assertFalse(vals['exception_fn']) + + def test_transaction_with_context_manager_and_exception(self): + class TestContextManager(object): + def __enter__(self): + vals['entered'] = True + + def __exit__(self, t, v, tb): + vals['exited'] = True + vals['exception'] = (t or v or tb) + + def exit_fn(t, v, tb): + vals['exited_fn'] = True + vals['exception_fn'] = (t or v or tb) + + lock = Lock(self.lock_path) + + def do_read_with_exception(exit_fn): + with ReadTransaction(lock, TestContextManager, exit_fn): + raise Exception() + + def do_write_with_exception(exit_fn): + with WriteTransaction(lock, TestContextManager, exit_fn): + raise Exception() + + vals = {'entered': False, 'exited': False, 'exited_fn': False, + 
'exception': False, 'exception_fn': False } + self.assertRaises(Exception, do_read_with_exception, exit_fn) + self.assertTrue(vals['entered']) + self.assertTrue(vals['exited']) + self.assertTrue(vals['exception']) + self.assertTrue(vals['exited_fn']) + self.assertTrue(vals['exception_fn']) + + vals = {'entered': False, 'exited': False, 'exited_fn': False, + 'exception': False, 'exception_fn': False } + self.assertRaises(Exception, do_read_with_exception, None) + self.assertTrue(vals['entered']) + self.assertTrue(vals['exited']) + self.assertTrue(vals['exception']) + self.assertFalse(vals['exited_fn']) + self.assertFalse(vals['exception_fn']) + + vals = {'entered': False, 'exited': False, 'exited_fn': False, + 'exception': False, 'exception_fn': False } + self.assertRaises(Exception, do_write_with_exception, exit_fn) + self.assertTrue(vals['entered']) + self.assertTrue(vals['exited']) + self.assertTrue(vals['exception']) + self.assertTrue(vals['exited_fn']) + self.assertTrue(vals['exception_fn']) + + vals = {'entered': False, 'exited': False, 'exited_fn': False, + 'exception': False, 'exception_fn': False } + self.assertRaises(Exception, do_write_with_exception, None) + self.assertTrue(vals['entered']) + self.assertTrue(vals['exited']) + self.assertTrue(vals['exception']) + self.assertFalse(vals['exited_fn']) + self.assertFalse(vals['exception_fn']) diff --git a/lib/spack/spack/test/mock_database.py b/lib/spack/spack/test/mock_database.py index b1194f2451b..da01e82bfad 100644 --- a/lib/spack/spack/test/mock_database.py +++ b/lib/spack/spack/test/mock_database.py @@ -95,8 +95,10 @@ def setUp(self): self._mock_install('mpileaks ^zmpi') def tearDown(self): - for spec in spack.installed_db.query(): - spec.package.do_uninstall(spec) + with spack.installed_db.write_transaction(): + for spec in spack.installed_db.query(): + spec.package.do_uninstall(spec) + super(MockDatabase, self).tearDown() shutil.rmtree(self.install_path) spack.install_path = self.spack_install_path diff --git a/lib/spack/spack/test/provider_index.py b/lib/spack/spack/test/provider_index.py index 7d5f997b0a3..861814e0ae4 100644 --- a/lib/spack/spack/test/provider_index.py +++ b/lib/spack/spack/test/provider_index.py @@ -94,7 +94,3 @@ def test_copy(self): p = ProviderIndex(spack.repo.all_package_names()) q = p.copy() self.assertEqual(p, q) - - - def test_copy(self): - pass From 38dcd6bce95f0167f6e22d65f3a8ff982059df15 Mon Sep 17 00:00:00 2001 From: alalazo Date: Tue, 9 Aug 2016 09:54:24 +0200 Subject: [PATCH 274/284] qa : flake8 issues --- .../repos/builtin/packages/gromacs/package.py | 23 ++++++++++--------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/var/spack/repos/builtin/packages/gromacs/package.py b/var/spack/repos/builtin/packages/gromacs/package.py index 99c6701bc38..607927fe8b8 100644 --- a/var/spack/repos/builtin/packages/gromacs/package.py +++ b/var/spack/repos/builtin/packages/gromacs/package.py @@ -22,21 +22,21 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import subprocess from spack import * class Gromacs(Package): - """ - GROMACS (GROningen MAchine for Chemical Simulations) is a molecular dynamics package primarily designed for - simulations of proteins, lipids and nucleic acids. 
It was originally developed in the Biophysical Chemistry - department of University of Groningen, and is now maintained by contributors in universities and research centers - across the world. + """GROMACS (GROningen MAchine for Chemical Simulations) is a molecular + dynamics package primarily designed for simulations of proteins, lipids + and nucleic acids. It was originally developed in the Biophysical + Chemistry department of University of Groningen, and is now maintained + by contributors in universities and research centers across the world. - GROMACS is one of the fastest and most popular software packages available and can run on CPUs as well as GPUs. - It is free, open source released under the GNU General Public License. Starting from version 4.6, GROMACS is - released under the GNU Lesser General Public License. + GROMACS is one of the fastest and most popular software packages + available and can run on CPUs as well as GPUs. It is free, open source + released under the GNU General Public License. Starting from version 4.6, + GROMACS is released under the GNU Lesser General Public License. """ homepage = 'http://www.gromacs.org' @@ -45,9 +45,10 @@ class Gromacs(Package): version('5.1.2', '614d0be372f1a6f1f36382b7a6fcab98') variant('mpi', default=True, description='Activate MPI support') - variant('shared', default=True, description='Enables the build of shared libraries') + variant('shared', default=True, + description='Enables the build of shared libraries') variant('debug', default=False, description='Enables debug mode') - variant('double', default=False, description='Produces a double precision version of the executables') + variant('double', default=False, description='Produces a double precision version of the executables') # NOQA: ignore=E501 variant('plumed', default=False, description='Enable PLUMED support') depends_on('mpi', when='+mpi') From 0c75c13cc0b26e10dd4c06cca24d597a18230f8c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 9 Aug 2016 01:37:19 -0700 Subject: [PATCH 275/284] Flake8 fixes --- lib/spack/llnl/util/lock.py | 18 +-- lib/spack/spack/__init__.py | 22 +-- lib/spack/spack/cmd/purge.py | 4 +- lib/spack/spack/cmd/test.py | 9 +- lib/spack/spack/database.py | 6 +- lib/spack/spack/file_cache.py | 18 +-- lib/spack/spack/package.py | 1 + lib/spack/spack/provider_index.py | 32 ++--- lib/spack/spack/repository.py | 110 +++++---------- lib/spack/spack/test/__init__.py | 1 + lib/spack/spack/test/concretize.py | 100 ++++++------- lib/spack/spack/test/database.py | 74 +++++----- lib/spack/spack/test/file_cache.py | 1 - lib/spack/spack/test/lock.py | 187 ++++++++++++++----------- lib/spack/spack/test/provider_index.py | 33 ++--- lib/spack/spack/test/spec_yaml.py | 6 +- 16 files changed, 293 insertions(+), 329 deletions(-) diff --git a/lib/spack/llnl/util/lock.py b/lib/spack/llnl/util/lock.py index e1f5b4878a0..bef20025baf 100644 --- a/lib/spack/llnl/util/lock.py +++ b/lib/spack/llnl/util/lock.py @@ -39,13 +39,20 @@ class Lock(object): - def __init__(self,file_path): + """This is an implementation of a filesystem lock using Python's lockf. + + In Python, `lockf` actually calls `fcntl`, so this should work with any + filesystem implementation that supports locking through the fcntl calls. + This includes distributed filesystems like Lustre (when flock is enabled) + and recent NFS versions. 
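+
+    A minimal usage sketch (illustrative only; assumes the lock file at the
+    given path already exists and is writable):
+
+        lock = Lock('/path/to/lockfile')
+        lock.acquire_read()
+        try:
+            pass  # read the resource guarded by the lock
+        finally:
+            lock.release_read()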
+ + """ + def __init__(self, file_path): self._file_path = file_path self._fd = None self._reads = 0 self._writes = 0 - def _lock(self, op, timeout): """This takes a lock using POSIX locks (``fnctl.lockf``). @@ -80,7 +87,6 @@ def _lock(self, op, timeout): raise LockError("Timed out waiting for lock.") - def _unlock(self): """Releases a lock using POSIX locks (``fcntl.lockf``) @@ -88,11 +94,10 @@ def _unlock(self): be masquerading as write locks, but this removes either. """ - fcntl.lockf(self._fd,fcntl.LOCK_UN) + fcntl.lockf(self._fd, fcntl.LOCK_UN) os.close(self._fd) self._fd = None - def acquire_read(self, timeout=_default_timeout): """Acquires a recursive, shared lock for reading. @@ -112,7 +117,6 @@ def acquire_read(self, timeout=_default_timeout): self._reads += 1 return False - def acquire_write(self, timeout=_default_timeout): """Acquires a recursive, exclusive lock for writing. @@ -132,7 +136,6 @@ def acquire_write(self, timeout=_default_timeout): self._writes += 1 return False - def release_read(self): """Releases a read lock. @@ -153,7 +156,6 @@ def release_read(self): self._reads -= 1 return False - def release_write(self): """Releases a write lock. diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index a6e21987c8a..3d508d0fde5 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -1,3 +1,4 @@ +# flake8: noqa ############################################################################## # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. @@ -147,7 +148,7 @@ _tmp_candidates = (_default_tmp, '/nfs/tmp2', '/tmp', '/var/tmp') for path in _tmp_candidates: # don't add a second username if it's already unique by user. - if not _tmp_user in path: + if _tmp_user not in path: tmp_dirs.append(join_path(path, '%u', 'spack-stage')) else: tmp_dirs.append(join_path(path, 'spack-stage')) @@ -179,12 +180,13 @@ # Spack internal code should call 'import spack' and accesses other # variables (spack.repo, paths, etc.) directly. # -# TODO: maybe this should be separated out and should go in build_environment.py? -# TODO: it's not clear where all the stuff that needs to be included in packages -# should live. This file is overloaded for spack core vs. for packages. +# TODO: maybe this should be separated out to build_environment.py? +# TODO: it's not clear where all the stuff that needs to be included in +# packages should live. This file is overloaded for spack core vs. +# for packages. 
# -__all__ = ['Package', 'StagedPackage', 'CMakePackage', \ - 'Version', 'when', 'ver', 'alldeps', 'nolink'] +__all__ = ['Package', 'StagedPackage', 'CMakePackage', + 'Version', 'when', 'ver', 'alldeps', 'nolink'] from spack.package import Package, ExtensionConflictError from spack.package import StagedPackage, CMakePackage from spack.version import Version, ver @@ -204,8 +206,8 @@ __all__ += spack.util.executable.__all__ from spack.package import \ - install_dependency_symlinks, flatten_dependencies, DependencyConflictError, \ - InstallError, ExternalPackageError + install_dependency_symlinks, flatten_dependencies, \ + DependencyConflictError, InstallError, ExternalPackageError __all__ += [ - 'install_dependency_symlinks', 'flatten_dependencies', 'DependencyConflictError', - 'InstallError', 'ExternalPackageError'] + 'install_dependency_symlinks', 'flatten_dependencies', + 'DependencyConflictError', 'InstallError', 'ExternalPackageError'] diff --git a/lib/spack/spack/cmd/purge.py b/lib/spack/spack/cmd/purge.py index 26745810a83..26d160635cc 100644 --- a/lib/spack/spack/cmd/purge.py +++ b/lib/spack/spack/cmd/purge.py @@ -45,14 +45,14 @@ def setup_parser(subparser): def purge(parser, args): # Special case: no flags. - if not any((args.stage, args.cache, args.all)): + if not any((args.stage, args.downloads, args.user_cache, args.all)): stage.purge() return # handle other flags with fall through. if args.stage or args.all: stage.purge() - if args.cache or args.all: + if args.downloads or args.all: spack.fetch_cache.destroy() if args.user_cache or args.all: spack.user_cache.destroy() diff --git a/lib/spack/spack/cmd/test.py b/lib/spack/spack/cmd/test.py index 2667b428209..b9f2a449aee 100644 --- a/lib/spack/spack/cmd/test.py +++ b/lib/spack/spack/cmd/test.py @@ -23,23 +23,23 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os -from pprint import pprint from llnl.util.filesystem import join_path, mkdirp from llnl.util.tty.colify import colify -from llnl.util.lang import list_modules import spack import spack.test from spack.fetch_strategy import FetchError -description ="Run unit tests" +description = "Run unit tests" + def setup_parser(subparser): subparser.add_argument( 'names', nargs='*', help="Names of tests to run.") subparser.add_argument( - '-l', '--list', action='store_true', dest='list', help="Show available tests") + '-l', '--list', action='store_true', dest='list', + help="Show available tests") subparser.add_argument( '--createXmlOutput', action='store_true', dest='createXmlOutput', help="Create JUnit XML from test results") @@ -69,6 +69,7 @@ def fetch(self): def __str__(self): return "[mock fetcher]" + def test(parser, args): if args.list: print "Available tests:" diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index 5ce42b2e670..16814429dcd 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -357,9 +357,9 @@ def _write(self, type, value, traceback): This is a helper function called by the WriteTransaction context manager. If there is an exception while the write lock is active, - nothing will be written to the database file, but the in-memory database - *may* be left in an inconsistent state. It will be consistent after the - start of the next transaction, when it read from disk again. + nothing will be written to the database file, but the in-memory + database *may* be left in an inconsistent state. 
It will be consistent + after the start of the next transaction, when it read from disk again. This routine does no locking. diff --git a/lib/spack/spack/file_cache.py b/lib/spack/spack/file_cache.py index 2124df9c9cb..fb9ccf46b8f 100644 --- a/lib/spack/spack/file_cache.py +++ b/lib/spack/spack/file_cache.py @@ -28,7 +28,6 @@ from llnl.util.filesystem import * from llnl.util.lock import * -import spack from spack.error import SpackError @@ -54,11 +53,14 @@ def __init__(self, root): self._locks = {} - def purge(self): + def destroy(self): """Remove all files under the cache root.""" for f in os.listdir(self.root): path = join_path(self.root, f) - shutil.rmtree(f) + if os.path.isdir(path): + shutil.rmtree(path, True) + else: + os.remove(path) def cache_path(self, key): """Path to the file in the cache for a particular key.""" @@ -92,7 +94,7 @@ def init_entry(self, key): if not os.path.isfile(cache_path): raise CacheError("Cache file is not a file: %s" % cache_path) - if not os.access(cache_path, os.R_OK|os.W_OK): + if not os.access(cache_path, os.R_OK | os.W_OK): raise CacheError("Cannot access cache file: %s" % cache_path) else: # if the file is hierarchical, make parent directories @@ -100,7 +102,7 @@ def init_entry(self, key): if parent.rstrip(os.path.sep) != self.root: mkdirp(parent) - if not os.access(parent, os.R_OK|os.W_OK): + if not os.access(parent, os.R_OK | os.W_OK): raise CacheError("Cannot access cache directory: %s" % parent) # ensure lock is created for this key @@ -154,7 +156,6 @@ def __exit__(cm, type, value, traceback): return WriteTransaction(self._get_lock(key), WriteContextManager) - def mtime(self, key): """Return modification time of cache file, or 0 if it does not exist. @@ -168,7 +169,6 @@ def mtime(self, key): sinfo = os.stat(self.cache_path(key)) return sinfo.st_mtime - def remove(self, key): lock = self._get_lock(key) try: @@ -178,4 +178,6 @@ def remove(self, key): lock.release_write() os.unlink(self._lock_path(key)) -class CacheError(SpackError): pass + +class CacheError(SpackError): + pass diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 475155937c6..25e07541d07 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -1416,6 +1416,7 @@ def use_cray_compiler_names(): os.environ['FC'] = 'ftn' os.environ['F77'] = 'ftn' + def flatten_dependencies(spec, flat_dir): """Make each dependency of spec present in dir via symlink.""" for dep in spec.traverse(root=False): diff --git a/lib/spack/spack/provider_index.py b/lib/spack/spack/provider_index.py index ecdc25c4d2e..b5fbb67c6ea 100644 --- a/lib/spack/spack/provider_index.py +++ b/lib/spack/spack/provider_index.py @@ -25,7 +25,7 @@ """ The ``virtual`` module contains utility classes for virtual dependencies. """ -import itertools +from itertools import product as iproduct from pprint import pformat import yaml @@ -52,8 +52,6 @@ class ProviderIndex(object): matching implementation of MPI. """ - - def __init__(self, specs=None, restrict=False): """Create a new ProviderIndex. @@ -71,7 +69,8 @@ def __init__(self, specs=None, restrict=False): as possible without overly restricting results, so it is not the best name. 
""" - if specs is None: specs = [] + if specs is None: + specs = [] self.restrict = restrict self.providers = {} @@ -85,7 +84,6 @@ def __init__(self, specs=None, restrict=False): self.update(spec) - def update(self, spec): if not isinstance(spec, spack.spec.Spec): spec = spack.spec.Spec(spec) @@ -104,7 +102,7 @@ def update(self, spec): provided_name = provided_spec.name provider_map = self.providers.setdefault(provided_name, {}) - if not provided_spec in provider_map: + if provided_spec not in provider_map: provider_map[provided_spec] = set() if self.restrict: @@ -126,7 +124,6 @@ def update(self, spec): constrained.constrain(provider_spec) provider_map[provided_spec].add(constrained) - def providers_for(self, *vpkg_specs): """Gives specs of all packages that provide virtual packages with the supplied specs.""" @@ -138,26 +135,25 @@ def providers_for(self, *vpkg_specs): # Add all the providers that satisfy the vpkg spec. if vspec.name in self.providers: - for provider_spec, spec_set in self.providers[vspec.name].items(): - if provider_spec.satisfies(vspec, deps=False): + for p_spec, spec_set in self.providers[vspec.name].items(): + if p_spec.satisfies(vspec, deps=False): providers.update(spec_set) # Return providers in order return sorted(providers) - # TODO: this is pretty darned nasty, and inefficient, but there # are not that many vdeps in most specs. def _cross_provider_maps(self, lmap, rmap): result = {} - for lspec, rspec in itertools.product(lmap, rmap): + for lspec, rspec in iproduct(lmap, rmap): try: constrained = lspec.constrained(rspec) except spack.spec.UnsatisfiableSpecError: continue # lp and rp are left and right provider specs. - for lp_spec, rp_spec in itertools.product(lmap[lspec], rmap[rspec]): + for lp_spec, rp_spec in iproduct(lmap[lspec], rmap[rspec]): if lp_spec.name == rp_spec.name: try: const = lp_spec.constrained(rp_spec, deps=False) @@ -166,12 +162,10 @@ def _cross_provider_maps(self, lmap, rmap): continue return result - def __contains__(self, name): """Whether a particular vpkg name is in the index.""" return name in self.providers - def satisfies(self, other): """Check that providers of virtual specs are compatible.""" common = set(self.providers) & set(other.providers) @@ -189,7 +183,6 @@ def satisfies(self, other): return all(c in result for c in common) - def to_yaml(self, stream=None): provider_list = self._transform( lambda vpkg, pset: [ @@ -198,7 +191,6 @@ def to_yaml(self, stream=None): yaml.dump({'provider_index': {'providers': provider_list}}, stream=stream) - @staticmethod def from_yaml(stream): try: @@ -211,7 +203,7 @@ def from_yaml(stream): raise spack.spec.SpackYAMLError( "YAML ProviderIndex was not a dict.") - if not 'provider_index' in yfile: + if 'provider_index' not in yfile: raise spack.spec.SpackYAMLError( "YAML ProviderIndex does not start with 'provider_index'") @@ -224,7 +216,6 @@ def from_yaml(stream): set(spack.spec.Spec.from_node_dict(p) for p in plist))) return index - def merge(self, other): """Merge `other` ProviderIndex into this one.""" other = other.copy() # defensive copy. 
@@ -242,7 +233,6 @@ def merge(self, other): spdict[provided_spec] += opdict[provided_spec] - def remove_provider(self, pkg_name): """Remove a provider from the ProviderIndex.""" empty_pkg_dict = [] @@ -264,7 +254,6 @@ def remove_provider(self, pkg_name): for pkg in empty_pkg_dict: del self.providers[pkg] - def copy(self): """Deep copy of this ProviderIndex.""" clone = ProviderIndex() @@ -272,15 +261,12 @@ def copy(self): lambda vpkg, pset: (vpkg, set((p.copy() for p in pset)))) return clone - def __eq__(self, other): return self.providers == other.providers - def _transform(self, transform_fun, out_mapping_type=dict): return _transform(self.providers, transform_fun, out_mapping_type) - def __str__(self): return pformat( _transform(self.providers, diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index a0904a2cde1..eada10f7cb6 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -38,7 +38,6 @@ import yaml import llnl.util.tty as tty -from llnl.util.lock import Lock from llnl.util.filesystem import * import spack @@ -142,7 +141,6 @@ def __init__(self, *repo_dirs, **kwargs): "To remove the bad repository, run this command:", " spack repo rm %s" % root) - def swap(self, other): """Convenience function to make swapping repostiories easier. @@ -160,7 +158,6 @@ def swap(self, other): setattr(self, attr, getattr(other, attr)) setattr(other, attr, tmp) - def _add(self, repo): """Add a repository to the namespace and path indexes. @@ -174,31 +171,28 @@ def _add(self, repo): if repo.namespace in self.by_namespace: raise DuplicateRepoError( "Package repos '%s' and '%s' both provide namespace %s" - % (repo.root, self.by_namespace[repo.namespace].root, repo.namespace)) + % (repo.root, self.by_namespace[repo.namespace].root, + repo.namespace)) # Add repo to the pkg indexes self.by_namespace[repo.full_namespace] = repo self.by_path[repo.root] = repo - def put_first(self, repo): """Add repo first in the search path.""" self._add(repo) self.repos.insert(0, repo) - def put_last(self, repo): """Add repo last in the search path.""" self._add(repo) self.repos.append(repo) - def remove(self, repo): """Remove a repo from the search path.""" if repo in self.repos: self.repos.remove(repo) - def get_repo(self, namespace, default=NOT_PROVIDED): """Get a repository by namespace. 
Arguments @@ -218,12 +212,10 @@ def get_repo(self, namespace, default=NOT_PROVIDED): return default return self.by_namespace[fullspace] - def first_repo(self): """Get the first repo in precedence order.""" return self.repos[0] if self.repos else None - def all_package_names(self): """Return all unique package names in all repositories.""" if self._all_package_names is None: @@ -231,15 +223,13 @@ def all_package_names(self): for repo in self.repos: for name in repo.all_package_names(): all_pkgs.add(name) - self._all_package_names = sorted(all_pkgs, key=lambda n:n.lower()) + self._all_package_names = sorted(all_pkgs, key=lambda n: n.lower()) return self._all_package_names - def all_packages(self): for name in self.all_package_names(): yield self.get(name) - @property def provider_index(self): """Merged ProviderIndex from all Repos in the RepoPath.""" @@ -250,7 +240,6 @@ def provider_index(self): return self._provider_index - @_autospec def providers_for(self, vpkg_spec): providers = self.provider_index.providers_for(vpkg_spec) @@ -258,12 +247,10 @@ def providers_for(self, vpkg_spec): raise UnknownPackageError(vpkg_spec.name) return providers - @_autospec def extensions_for(self, extendee_spec): return [p for p in self.all_packages() if p.extends(extendee_spec)] - def find_module(self, fullname, path=None): """Implements precedence for overlaid namespaces. @@ -290,7 +277,6 @@ def find_module(self, fullname, path=None): return None - def load_module(self, fullname): """Handles loading container namespaces when necessary. @@ -307,7 +293,6 @@ def load_module(self, fullname): sys.modules[fullname] = module return module - @_autospec def repo_for_pkg(self, spec): """Given a spec, get the repository for its package.""" @@ -329,7 +314,6 @@ def repo_for_pkg(self, spec): # that can operate on packages that don't exist yet. return self.first_repo() - @_autospec def get(self, spec, new=False): """Find a repo that contains the supplied spec's package. @@ -338,12 +322,10 @@ def get(self, spec, new=False): """ return self.repo_for_pkg(spec).get(spec) - def get_pkg_class(self, pkg_name): """Find a class for the spec's package and return the class object.""" return self.repo_for_pkg(pkg_name).get_pkg_class(pkg_name) - @_autospec def dump_provenance(self, spec, path): """Dump provenance information for a spec to a particular path. @@ -353,24 +335,19 @@ def dump_provenance(self, spec, path): """ return self.repo_for_pkg(spec).dump_provenance(spec, path) - def dirname_for_package_name(self, pkg_name): return self.repo_for_pkg(pkg_name).dirname_for_package_name(pkg_name) - def filename_for_package_name(self, pkg_name): return self.repo_for_pkg(pkg_name).filename_for_package_name(pkg_name) - def exists(self, pkg_name): return any(repo.exists(pkg_name) for repo in self.repos) - def __contains__(self, pkg_name): return self.exists(pkg_name) - class Repo(object): """Class representing a package repository in the filesystem. @@ -404,7 +381,8 @@ def __init__(self, root, namespace=repo_namespace): # check and raise BadRepoError on fail. def check(condition, msg): - if not condition: raise BadRepoError(msg) + if not condition: + raise BadRepoError(msg) # Validate repository layout. self.config_file = join_path(self.root, repo_config_name) @@ -422,12 +400,14 @@ def check(condition, msg): self.namespace = config['namespace'] check(re.match(r'[a-zA-Z][a-zA-Z0-9_.]+', self.namespace), - ("Invalid namespace '%s' in repo '%s'. " % (self.namespace, self.root)) + + ("Invalid namespace '%s' in repo '%s'. 
" + % (self.namespace, self.root)) + "Namespaces must be valid python identifiers separated by '.'") # Set up 'full_namespace' to include the super-namespace if self.super_namespace: - self.full_namespace = "%s.%s" % (self.super_namespace, self.namespace) + self.full_namespace = "%s.%s" % ( + self.super_namespace, self.namespace) else: self.full_namespace = self.namespace @@ -462,10 +442,10 @@ def _create_namespace(self): """ parent = None - for l in range(1, len(self._names)+1): + for l in range(1, len(self._names) + 1): ns = '.'.join(self._names[:l]) - if not ns in sys.modules: + if ns not in sys.modules: module = SpackNamespace(ns) module.__loader__ = self sys.modules[ns] = module @@ -476,7 +456,7 @@ def _create_namespace(self): # This ensures that we can do things like: # import spack.pkg.builtin.mpich as mpich if parent: - modname = self._names[l-1] + modname = self._names[l - 1] setattr(parent, modname, module) else: # no need to set up a module @@ -485,7 +465,6 @@ def _create_namespace(self): # but keep track of the parent in this loop parent = module - def real_name(self, import_name): """Allow users to import Spack packages using Python identifiers. @@ -511,13 +490,11 @@ def real_name(self, import_name): return name return None - def is_prefix(self, fullname): """True if fullname is a prefix of this Repo's namespace.""" parts = fullname.split('.') return self._names[:len(parts)] == parts - def find_module(self, fullname, path=None): """Python find_module import hook. @@ -533,7 +510,6 @@ def find_module(self, fullname, path=None): return None - def load_module(self, fullname): """Python importer load hook. @@ -565,7 +541,6 @@ def load_module(self, fullname): return module - def _read_config(self): """Check for a YAML config file in this db's root directory.""" try: @@ -573,40 +548,39 @@ def _read_config(self): yaml_data = yaml.load(reponame_file) if (not yaml_data or 'repo' not in yaml_data or - not isinstance(yaml_data['repo'], dict)): - tty.die("Invalid %s in repository %s" - % (repo_config_name, self.root)) + not isinstance(yaml_data['repo'], dict)): + tty.die("Invalid %s in repository %s" % ( + repo_config_name, self.root)) return yaml_data['repo'] - except exceptions.IOError, e: + except exceptions.IOError: tty.die("Error reading %s when opening %s" % (self.config_file, self.root)) - @_autospec def get(self, spec, new=False): if spec.virtual: raise UnknownPackageError(spec.name) if spec.namespace and spec.namespace != self.namespace: - raise UnknownPackageError("Repository %s does not contain package %s" - % (self.namespace, spec.fullname)) + raise UnknownPackageError( + "Repository %s does not contain package %s" + % (self.namespace, spec.fullname)) key = hash(spec) if new or key not in self._instances: package_class = self.get_pkg_class(spec.name) try: - copy = spec.copy() # defensive copy. Package owns its spec. + copy = spec.copy() # defensive copy. Package owns its spec. self._instances[key] = package_class(copy) - except Exception, e: + except Exception: if spack.debug: sys.excepthook(*sys.exc_info()) raise FailedConstructorError(spec.fullname, *sys.exc_info()) return self._instances[key] - @_autospec def dump_provenance(self, spec, path): """Dump provenance information for a spec to a particular path. @@ -619,8 +593,9 @@ def dump_provenance(self, spec, path): raise UnknownPackageError(spec.name) if spec.namespace and spec.namespace != self.namespace: - raise UnknownPackageError("Repository %s does not contain package %s." 
- % (self.namespace, spec.fullname)) + raise UnknownPackageError( + "Repository %s does not contain package %s." + % (self.namespace, spec.fullname)) # Install any patch files needed by packages. mkdirp(path) @@ -635,12 +610,10 @@ def dump_provenance(self, spec, path): # Install the package.py file itself. install(self.filename_for_package_name(spec), path) - def purge(self): """Clear entire package instance cache.""" self._instances.clear() - def _update_provider_index(self): # Check modification dates of all packages self._fast_package_check() @@ -669,7 +642,6 @@ def read(): self._provider_index.to_yaml(new) - @property def provider_index(self): """A provider index with names *specific* to this repo.""" @@ -677,7 +649,6 @@ def provider_index(self): self._update_provider_index() return self._provider_index - @_autospec def providers_for(self, vpkg_spec): providers = self.provider_index.providers_for(vpkg_spec) @@ -685,18 +656,15 @@ def providers_for(self, vpkg_spec): raise UnknownPackageError(vpkg_spec.name) return providers - @_autospec def extensions_for(self, extendee_spec): return [p for p in self.all_packages() if p.extends(extendee_spec)] - def _check_namespace(self, spec): """Check that the spec's namespace is the same as this repository's.""" if spec.namespace and spec.namespace != self.namespace: raise UnknownNamespaceError(spec.namespace) - @_autospec def dirname_for_package_name(self, spec): """Get the directory name for a particular package. This is the @@ -704,7 +672,6 @@ def dirname_for_package_name(self, spec): self._check_namespace(spec) return join_path(self.packages_path, spec.name) - @_autospec def filename_for_package_name(self, spec): """Get the filename for the module we should load for a particular @@ -719,7 +686,6 @@ def filename_for_package_name(self, spec): pkg_dir = self.dirname_for_package_name(spec.name) return join_path(pkg_dir, package_file_name) - def _fast_package_check(self): """List packages in the repo and check whether index is up to date. @@ -783,13 +749,11 @@ def _fast_package_check(self): return self._all_package_names - def all_package_names(self): """Returns a sorted list of all package names in the Repo.""" self._fast_package_check() return self._all_package_names - def all_packages(self): """Iterator over all packages in the repository. @@ -799,7 +763,6 @@ def all_packages(self): for name in self.all_package_names(): yield self.get(name) - def exists(self, pkg_name): """Whether a package with the supplied name exists.""" if self._all_package_names: @@ -813,7 +776,6 @@ def exists(self, pkg_name): filename = self.filename_for_package_name(pkg_name) return os.path.exists(filename) - def _get_pkg_module(self, pkg_name): """Create a module for a particular package. @@ -845,7 +807,6 @@ def _get_pkg_module(self, pkg_name): return self._modules[pkg_name] - def get_pkg_class(self, pkg_name): """Get the class for the package out of its module. @@ -853,7 +814,6 @@ def get_pkg_class(self, pkg_name): package. Then extracts the package class from the module according to Spack's naming convention. 
""" - fullname = pkg_name namespace, _, pkg_name = pkg_name.rpartition('.') if namespace and (namespace != self.namespace): raise InvalidNamespaceError('Invalid namespace for %s repo: %s' @@ -868,15 +828,12 @@ def get_pkg_class(self, pkg_name): return cls - def __str__(self): return "[Repo '%s' at '%s']" % (self.namespace, self.root) - def __repr__(self): return self.__str__() - def __contains__(self, pkg_name): return self.exists(pkg_name) @@ -885,30 +842,37 @@ def create_repo(root, namespace=None): """Create a new repository in root with the specified namespace. If the namespace is not provided, use basename of root. - Return the canonicalized path and the namespace of the created repository. + Return the canonicalized path and namespace of the created repository. """ root = canonicalize_path(root) if not namespace: namespace = os.path.basename(root) if not re.match(r'\w[\.\w-]*', namespace): - raise InvalidNamespaceError("'%s' is not a valid namespace." % namespace) + raise InvalidNamespaceError( + "'%s' is not a valid namespace." % namespace) existed = False if os.path.exists(root): if os.path.isfile(root): - raise BadRepoError('File %s already exists and is not a directory' % root) + raise BadRepoError('File %s already exists and is not a directory' + % root) elif os.path.isdir(root): if not os.access(root, os.R_OK | os.W_OK): - raise BadRepoError('Cannot create new repo in %s: cannot access directory.' % root) + raise BadRepoError( + 'Cannot create new repo in %s: cannot access directory.' + % root) if os.listdir(root): - raise BadRepoError('Cannot create new repo in %s: directory is not empty.' % root) + raise BadRepoError( + 'Cannot create new repo in %s: directory is not empty.' + % root) existed = True full_path = os.path.realpath(root) parent = os.path.dirname(full_path) if not os.access(parent, os.R_OK | os.W_OK): - raise BadRepoError("Cannot create repository in %s: can't access parent!" % root) + raise BadRepoError( + "Cannot create repository in %s: can't access parent!" 
% root) try: config_path = os.path.join(root, repo_config_name) diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index 4969081e638..db683917b56 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -82,6 +82,7 @@ 'cmd.test_compiler_cmd', ] + def list_tests(): """Return names of all tests that can be run for Spack.""" return test_names diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index ec0a2ec2440..8ecbddbda2e 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -29,6 +29,7 @@ from spack.concretize import find_spec from spack.test.mock_packages_test import * + class ConcretizeTest(MockPackagesTest): def check_spec(self, abstract, concrete): @@ -59,7 +60,6 @@ def check_spec(self, abstract, concrete): if abstract.architecture and abstract.architecture.concrete: self.assertEqual(abstract.architecture, concrete.architecture) - def check_concretize(self, abstract_spec): abstract = Spec(abstract_spec) concrete = abstract.concretized() @@ -70,29 +70,24 @@ def check_concretize(self, abstract_spec): return concrete - def test_concretize_no_deps(self): self.check_concretize('libelf') self.check_concretize('libelf@0.8.13') - def test_concretize_dag(self): self.check_concretize('callpath') self.check_concretize('mpileaks') self.check_concretize('libelf') - def test_concretize_variant(self): self.check_concretize('mpich+debug') self.check_concretize('mpich~debug') self.check_concretize('mpich debug=2') self.check_concretize('mpich') - def test_conretize_compiler_flags(self): self.check_concretize('mpich cppflags="-O3"') - def test_concretize_preferred_version(self): spec = self.check_concretize('python') self.assertEqual(spec.versions, ver('2.7.11')) @@ -100,7 +95,6 @@ def test_concretize_preferred_version(self): spec = self.check_concretize('python@3.5.1') self.assertEqual(spec.versions, ver('3.5.1')) - def test_concretize_with_virtual(self): self.check_concretize('mpileaks ^mpi') self.check_concretize('mpileaks ^mpi@:1.1') @@ -111,7 +105,6 @@ def test_concretize_with_virtual(self): self.check_concretize('mpileaks ^mpi@:1') self.check_concretize('mpileaks ^mpi@1.2:2') - def test_concretize_with_restricted_virtual(self): self.check_concretize('mpileaks ^mpich2') @@ -142,55 +135,55 @@ def test_concretize_with_restricted_virtual(self): concrete = self.check_concretize('mpileaks ^mpich2@1.3.1:1.4') self.assertTrue(concrete['mpich2'].satisfies('mpich2@1.3.1:1.4')) - def test_concretize_with_provides_when(self): """Make sure insufficient versions of MPI are not in providers list when we ask for some advanced version. 
""" - self.assertTrue(not any(spec.satisfies('mpich2@:1.0') - for spec in spack.repo.providers_for('mpi@2.1'))) + self.assertTrue( + not any(spec.satisfies('mpich2@:1.0') + for spec in spack.repo.providers_for('mpi@2.1'))) - self.assertTrue(not any(spec.satisfies('mpich2@:1.1') - for spec in spack.repo.providers_for('mpi@2.2'))) + self.assertTrue( + not any(spec.satisfies('mpich2@:1.1') + for spec in spack.repo.providers_for('mpi@2.2'))) - self.assertTrue(not any(spec.satisfies('mpich@:1') - for spec in spack.repo.providers_for('mpi@2'))) + self.assertTrue( + not any(spec.satisfies('mpich@:1') + for spec in spack.repo.providers_for('mpi@2'))) - self.assertTrue(not any(spec.satisfies('mpich@:1') - for spec in spack.repo.providers_for('mpi@3'))) - - self.assertTrue(not any(spec.satisfies('mpich2') - for spec in spack.repo.providers_for('mpi@3'))) + self.assertTrue( + not any(spec.satisfies('mpich@:1') + for spec in spack.repo.providers_for('mpi@3'))) + self.assertTrue( + not any(spec.satisfies('mpich2') + for spec in spack.repo.providers_for('mpi@3'))) def test_concretize_two_virtuals(self): """Test a package with multiple virtual dependencies.""" - s = Spec('hypre').concretize() - + Spec('hypre').concretize() def test_concretize_two_virtuals_with_one_bound(self): """Test a package with multiple virtual dependencies and one preset.""" - s = Spec('hypre ^openblas').concretize() - + Spec('hypre ^openblas').concretize() def test_concretize_two_virtuals_with_two_bound(self): - """Test a package with multiple virtual dependencies and two of them preset.""" - s = Spec('hypre ^openblas ^netlib-lapack').concretize() - + """Test a package with multiple virtual deps and two of them preset.""" + Spec('hypre ^openblas ^netlib-lapack').concretize() def test_concretize_two_virtuals_with_dual_provider(self): """Test a package with multiple virtual dependencies and force a provider that provides both.""" - s = Spec('hypre ^openblas-with-lapack').concretize() - + Spec('hypre ^openblas-with-lapack').concretize() def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self): - """Test a package with multiple virtual dependencies and force a provider - that provides both, and another conflicting package that provides one.""" + """Test a package with multiple virtual dependencies and force a + provider that provides both, and another conflicting package that + provides one. + """ s = Spec('hypre ^openblas-with-lapack ^netlib-lapack') self.assertRaises(spack.spec.MultipleProviderError, s.concretize) - def test_virtual_is_fully_expanded_for_callpath(self): # force dependence on fake "zmpi" by asking for MPI 10.0 spec = Spec('callpath ^mpi@10.0') @@ -207,7 +200,6 @@ def test_virtual_is_fully_expanded_for_callpath(self): self.assertTrue('fake' in spec._dependencies['zmpi'].spec) - def test_virtual_is_fully_expanded_for_mpileaks(self): spec = Spec('mpileaks ^mpi@10.0') self.assertTrue('mpi' in spec._dependencies) @@ -217,23 +209,24 @@ def test_virtual_is_fully_expanded_for_mpileaks(self): self.assertTrue('zmpi' in spec._dependencies) self.assertTrue('callpath' in spec._dependencies) - self.assertTrue('zmpi' in spec._dependencies['callpath']. - spec._dependencies) - self.assertTrue('fake' in spec._dependencies['callpath']. - spec._dependencies['zmpi']. 
- spec._dependencies) + self.assertTrue( + 'zmpi' in spec._dependencies['callpath'] + .spec._dependencies) + self.assertTrue( + 'fake' in spec._dependencies['callpath'] + .spec._dependencies['zmpi'] + .spec._dependencies) - self.assertTrue(all(not 'mpi' in d._dependencies for d in spec.traverse())) + self.assertTrue( + all('mpi' not in d._dependencies for d in spec.traverse())) self.assertTrue('zmpi' in spec) self.assertTrue('mpi' in spec) - def test_my_dep_depends_on_provider_of_my_virtual_dep(self): spec = Spec('indirect_mpich') spec.normalize() spec.concretize() - def test_compiler_inheritance(self): spec = Spec('mpileaks') spec.normalize() @@ -245,26 +238,26 @@ def test_compiler_inheritance(self): self.assertTrue(spec['libdwarf'].compiler.satisfies('clang')) self.assertTrue(spec['libelf'].compiler.satisfies('clang')) - def test_external_package(self): spec = Spec('externaltool%gcc') spec.concretize() - self.assertEqual(spec['externaltool'].external, '/path/to/external_tool') + self.assertEqual( + spec['externaltool'].external, '/path/to/external_tool') self.assertFalse('externalprereq' in spec) self.assertTrue(spec['externaltool'].compiler.satisfies('gcc')) - def test_external_package_module(self): # No tcl modules on darwin/linux machines # TODO: improved way to check for this. - if (spack.architecture.platform().name == 'darwin' or - spack.architecture.platform().name == 'linux'): + platform = spack.architecture.platform().name + if (platform == 'darwin' or platform == 'linux'): return spec = Spec('externalmodule') spec.concretize() - self.assertEqual(spec['externalmodule'].external_module, 'external-module') + self.assertEqual( + spec['externalmodule'].external_module, 'external-module') self.assertFalse('externalprereq' in spec) self.assertTrue(spec['externalmodule'].compiler.satisfies('gcc')) @@ -277,16 +270,16 @@ def test_nobuild_package(self): got_error = True self.assertTrue(got_error) - def test_external_and_virtual(self): spec = Spec('externaltest') spec.concretize() - self.assertEqual(spec['externaltool'].external, '/path/to/external_tool') - self.assertEqual(spec['stuff'].external, '/path/to/external_virtual_gcc') + self.assertEqual( + spec['externaltool'].external, '/path/to/external_tool') + self.assertEqual( + spec['stuff'].external, '/path/to/external_virtual_gcc') self.assertTrue(spec['externaltool'].compiler.satisfies('gcc')) self.assertTrue(spec['stuff'].compiler.satisfies('gcc')) - def test_find_spec_parents(self): """Tests the spec finding logic used by concretization. 
""" s = Spec('a +foo', @@ -297,7 +290,6 @@ def test_find_spec_parents(self): self.assertEqual('a', find_spec(s['b'], lambda s: '+foo' in s).name) - def test_find_spec_children(self): s = Spec('a', Spec('b +foo', @@ -312,7 +304,6 @@ def test_find_spec_children(self): Spec('e +foo')) self.assertEqual('c', find_spec(s['b'], lambda s: '+foo' in s).name) - def test_find_spec_sibling(self): s = Spec('a', Spec('b +foo', @@ -330,7 +321,6 @@ def test_find_spec_sibling(self): Spec('f +foo'))) self.assertEqual('f', find_spec(s['b'], lambda s: '+foo' in s).name) - def test_find_spec_self(self): s = Spec('a', Spec('b +foo', @@ -339,7 +329,6 @@ def test_find_spec_self(self): Spec('e')) self.assertEqual('b', find_spec(s['b'], lambda s: '+foo' in s).name) - def test_find_spec_none(self): s = Spec('a', Spec('b', @@ -348,7 +337,6 @@ def test_find_spec_none(self): Spec('e')) self.assertEqual(None, find_spec(s['b'], lambda s: '+foo' in s)) - def test_compiler_child(self): s = Spec('mpileaks%clang ^dyninst%gcc') s.concretize() diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index a2f09450bc9..0d44a27b7e8 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -31,7 +31,6 @@ import spack from llnl.util.filesystem import join_path -from llnl.util.lock import * from llnl.util.tty.colify import colify from spack.test.mock_database import MockDatabase @@ -88,26 +87,28 @@ def test_010_all_install_sanity(self): # query specs with multiple configurations mpileaks_specs = [s for s in all_specs if s.satisfies('mpileaks')] callpath_specs = [s for s in all_specs if s.satisfies('callpath')] - mpi_specs = [s for s in all_specs if s.satisfies('mpi')] + mpi_specs = [s for s in all_specs if s.satisfies('mpi')] self.assertEqual(len(mpileaks_specs), 3) self.assertEqual(len(callpath_specs), 3) self.assertEqual(len(mpi_specs), 3) # query specs with single configurations - dyninst_specs = [s for s in all_specs if s.satisfies('dyninst')] + dyninst_specs = [s for s in all_specs if s.satisfies('dyninst')] libdwarf_specs = [s for s in all_specs if s.satisfies('libdwarf')] - libelf_specs = [s for s in all_specs if s.satisfies('libelf')] + libelf_specs = [s for s in all_specs if s.satisfies('libelf')] self.assertEqual(len(dyninst_specs), 1) self.assertEqual(len(libdwarf_specs), 1) self.assertEqual(len(libelf_specs), 1) # Query by dependency - self.assertEqual(len([s for s in all_specs if s.satisfies('mpileaks ^mpich')]), 1) - self.assertEqual(len([s for s in all_specs if s.satisfies('mpileaks ^mpich2')]), 1) - self.assertEqual(len([s for s in all_specs if s.satisfies('mpileaks ^zmpi')]), 1) - + self.assertEqual( + len([s for s in all_specs if s.satisfies('mpileaks ^mpich')]), 1) + self.assertEqual( + len([s for s in all_specs if s.satisfies('mpileaks ^mpich2')]), 1) + self.assertEqual( + len([s for s in all_specs if s.satisfies('mpileaks ^zmpi')]), 1) def test_015_write_and_read(self): # write and read DB @@ -122,7 +123,6 @@ def test_015_write_and_read(self): self.assertEqual(new_rec.path, rec.path) self.assertEqual(new_rec.installed, rec.installed) - def _check_db_sanity(self): """Utiilty function to check db against install layout.""" expected = sorted(spack.install_layout.all_specs()) @@ -132,12 +132,10 @@ def _check_db_sanity(self): for e, a in zip(expected, actual): self.assertEqual(e, a) - def test_020_db_sanity(self): """Make sure query() returns what's actually in the db.""" self._check_db_sanity() - def test_030_db_sanity_from_another_process(self): def 
read_and_modify(): self._check_db_sanity() # check that other process can read DB @@ -152,30 +150,28 @@ def read_and_modify(): with self.installed_db.read_transaction(): self.assertEqual(len(self.installed_db.query('mpileaks ^zmpi')), 0) - def test_040_ref_counts(self): """Ensure that we got ref counts right when we read the DB.""" self.installed_db._check_ref_counts() - def test_050_basic_query(self): - """Ensure that querying the database is consistent with what is installed.""" + """Ensure querying database is consistent with what is installed.""" # query everything self.assertEqual(len(spack.installed_db.query()), 13) # query specs with multiple configurations mpileaks_specs = self.installed_db.query('mpileaks') callpath_specs = self.installed_db.query('callpath') - mpi_specs = self.installed_db.query('mpi') + mpi_specs = self.installed_db.query('mpi') self.assertEqual(len(mpileaks_specs), 3) self.assertEqual(len(callpath_specs), 3) self.assertEqual(len(mpi_specs), 3) # query specs with single configurations - dyninst_specs = self.installed_db.query('dyninst') + dyninst_specs = self.installed_db.query('dyninst') libdwarf_specs = self.installed_db.query('libdwarf') - libelf_specs = self.installed_db.query('libelf') + libelf_specs = self.installed_db.query('libelf') self.assertEqual(len(dyninst_specs), 1) self.assertEqual(len(libdwarf_specs), 1) @@ -186,7 +182,6 @@ def test_050_basic_query(self): self.assertEqual(len(self.installed_db.query('mpileaks ^mpich2')), 1) self.assertEqual(len(self.installed_db.query('mpileaks ^zmpi')), 1) - def _check_remove_and_add_package(self, spec): """Remove a spec from the DB, then add it and make sure everything's still ok once it is added. This checks that it was @@ -215,15 +210,12 @@ def _check_remove_and_add_package(self, spec): self._check_db_sanity() self.installed_db._check_ref_counts() - def test_060_remove_and_add_root_package(self): self._check_remove_and_add_package('mpileaks ^mpich') - def test_070_remove_and_add_dependency_package(self): self._check_remove_and_add_package('dyninst') - def test_080_root_ref_counts(self): rec = self.installed_db.get_record('mpileaks ^mpich') @@ -231,44 +223,52 @@ def test_080_root_ref_counts(self): self.installed_db.remove('mpileaks ^mpich') # record no longer in DB - self.assertEqual(self.installed_db.query('mpileaks ^mpich', installed=any), []) + self.assertEqual( + self.installed_db.query('mpileaks ^mpich', installed=any), []) # record's deps have updated ref_counts - self.assertEqual(self.installed_db.get_record('callpath ^mpich').ref_count, 0) + self.assertEqual( + self.installed_db.get_record('callpath ^mpich').ref_count, 0) self.assertEqual(self.installed_db.get_record('mpich').ref_count, 1) - # put the spec back + # Put the spec back self.installed_db.add(rec.spec, rec.path) # record is present again - self.assertEqual(len(self.installed_db.query('mpileaks ^mpich', installed=any)), 1) + self.assertEqual( + len(self.installed_db.query('mpileaks ^mpich', installed=any)), 1) # dependencies have ref counts updated - self.assertEqual(self.installed_db.get_record('callpath ^mpich').ref_count, 1) + self.assertEqual( + self.installed_db.get_record('callpath ^mpich').ref_count, 1) self.assertEqual(self.installed_db.get_record('mpich').ref_count, 2) - def test_090_non_root_ref_counts(self): - mpileaks_mpich_rec = self.installed_db.get_record('mpileaks ^mpich') - callpath_mpich_rec = self.installed_db.get_record('callpath ^mpich') + self.installed_db.get_record('mpileaks ^mpich') + 
self.installed_db.get_record('callpath ^mpich') # "force remove" a non-root spec from the DB self.installed_db.remove('callpath ^mpich') # record still in DB but marked uninstalled - self.assertEqual(self.installed_db.query('callpath ^mpich', installed=True), []) - self.assertEqual(len(self.installed_db.query('callpath ^mpich', installed=any)), 1) + self.assertEqual( + self.installed_db.query('callpath ^mpich', installed=True), []) + self.assertEqual( + len(self.installed_db.query('callpath ^mpich', installed=any)), 1) # record and its deps have same ref_counts - self.assertEqual(self.installed_db.get_record('callpath ^mpich', installed=any).ref_count, 1) + self.assertEqual(self.installed_db.get_record( + 'callpath ^mpich', installed=any).ref_count, 1) self.assertEqual(self.installed_db.get_record('mpich').ref_count, 2) # remove only dependent of uninstalled callpath record self.installed_db.remove('mpileaks ^mpich') # record and parent are completely gone. - self.assertEqual(self.installed_db.query('mpileaks ^mpich', installed=any), []) - self.assertEqual(self.installed_db.query('callpath ^mpich', installed=any), []) + self.assertEqual( + self.installed_db.query('mpileaks ^mpich', installed=any), []) + self.assertEqual( + self.installed_db.query('callpath ^mpich', installed=any), []) # mpich ref count updated properly. mpich_rec = self.installed_db.get_record('mpich') @@ -282,14 +282,16 @@ def fail_while_writing(): with self.installed_db.read_transaction(): self.assertEqual( - len(self.installed_db.query('mpileaks ^zmpi', installed=any)), 1) + len(self.installed_db.query('mpileaks ^zmpi', installed=any)), + 1) self.assertRaises(Exception, fail_while_writing) # reload DB and make sure zmpi is still there. with self.installed_db.read_transaction(): self.assertEqual( - len(self.installed_db.query('mpileaks ^zmpi', installed=any)), 1) + len(self.installed_db.query('mpileaks ^zmpi', installed=any)), + 1) def test_110_no_write_with_exception_on_install(self): def fail_while_writing(): diff --git a/lib/spack/spack/test/file_cache.py b/lib/spack/spack/test/file_cache.py index 6142b135ebe..cc66beda2e4 100644 --- a/lib/spack/spack/test/file_cache.py +++ b/lib/spack/spack/test/file_cache.py @@ -30,7 +30,6 @@ import tempfile import unittest -import spack from spack.file_cache import FileCache diff --git a/lib/spack/spack/test/lock.py b/lib/spack/spack/test/lock.py index aaf573241bf..b24050aa749 100644 --- a/lib/spack/spack/test/lock.py +++ b/lib/spack/spack/test/lock.py @@ -46,21 +46,21 @@ def setUp(self): self.lock_path = join_path(self.tempdir, 'lockfile') touch(self.lock_path) - def tearDown(self): - shutil.rmtree(self.tempdir, ignore_errors=True) - + shutil.rmtree(self.tempdir, ignore_errors=True) def multiproc_test(self, *functions): """Order some processes using simple barrier synchronization.""" b = Barrier(len(functions), timeout=barrier_timeout) procs = [Process(target=f, args=(b,)) for f in functions] - for p in procs: p.start() + + for p in procs: + p.start() + for p in procs: p.join() self.assertEqual(p.exitcode, 0) - # # Process snippets below can be composed into tests. # @@ -68,27 +68,26 @@ def acquire_write(self, barrier): lock = Lock(self.lock_path) lock.acquire_write() # grab exclusive lock barrier.wait() - barrier.wait() # hold the lock until exception raises in other procs. + barrier.wait() # hold the lock until exception raises in other procs. 
def acquire_read(self, barrier): lock = Lock(self.lock_path) lock.acquire_read() # grab shared lock barrier.wait() - barrier.wait() # hold the lock until exception raises in other procs. + barrier.wait() # hold the lock until exception raises in other procs. def timeout_write(self, barrier): lock = Lock(self.lock_path) - barrier.wait() # wait for lock acquire in first process + barrier.wait() # wait for lock acquire in first process self.assertRaises(LockError, lock.acquire_write, 0.1) barrier.wait() def timeout_read(self, barrier): lock = Lock(self.lock_path) - barrier.wait() # wait for lock acquire in first process + barrier.wait() # wait for lock acquire in first process self.assertRaises(LockError, lock.acquire_read, 0.1) barrier.wait() - # # Test that exclusive locks on other processes time out when an # exclusive lock is held. @@ -97,11 +96,13 @@ def test_write_lock_timeout_on_write(self): self.multiproc_test(self.acquire_write, self.timeout_write) def test_write_lock_timeout_on_write_2(self): - self.multiproc_test(self.acquire_write, self.timeout_write, self.timeout_write) + self.multiproc_test( + self.acquire_write, self.timeout_write, self.timeout_write) def test_write_lock_timeout_on_write_3(self): - self.multiproc_test(self.acquire_write, self.timeout_write, self.timeout_write, self.timeout_write) - + self.multiproc_test( + self.acquire_write, self.timeout_write, self.timeout_write, + self.timeout_write) # # Test that shared locks on other processes time out when an @@ -111,11 +112,13 @@ def test_read_lock_timeout_on_write(self): self.multiproc_test(self.acquire_write, self.timeout_read) def test_read_lock_timeout_on_write_2(self): - self.multiproc_test(self.acquire_write, self.timeout_read, self.timeout_read) + self.multiproc_test( + self.acquire_write, self.timeout_read, self.timeout_read) def test_read_lock_timeout_on_write_3(self): - self.multiproc_test(self.acquire_write, self.timeout_read, self.timeout_read, self.timeout_read) - + self.multiproc_test( + self.acquire_write, self.timeout_read, self.timeout_read, + self.timeout_read) # # Test that exclusive locks time out when shared locks are held. @@ -124,27 +127,35 @@ def test_write_lock_timeout_on_read(self): self.multiproc_test(self.acquire_read, self.timeout_write) def test_write_lock_timeout_on_read_2(self): - self.multiproc_test(self.acquire_read, self.timeout_write, self.timeout_write) + self.multiproc_test( + self.acquire_read, self.timeout_write, self.timeout_write) def test_write_lock_timeout_on_read_3(self): - self.multiproc_test(self.acquire_read, self.timeout_write, self.timeout_write, self.timeout_write) - + self.multiproc_test( + self.acquire_read, self.timeout_write, self.timeout_write, + self.timeout_write) # # Test that exclusive locks time while lots of shared locks are held. 
# def test_write_lock_timeout_with_multiple_readers_2_1(self): - self.multiproc_test(self.acquire_read, self.acquire_read, self.timeout_write) + self.multiproc_test( + self.acquire_read, self.acquire_read, self.timeout_write) def test_write_lock_timeout_with_multiple_readers_2_2(self): - self.multiproc_test(self.acquire_read, self.acquire_read, self.timeout_write, self.timeout_write) + self.multiproc_test( + self.acquire_read, self.acquire_read, self.timeout_write, + self.timeout_write) def test_write_lock_timeout_with_multiple_readers_3_1(self): - self.multiproc_test(self.acquire_read, self.acquire_read, self.acquire_read, self.timeout_write) + self.multiproc_test( + self.acquire_read, self.acquire_read, self.acquire_read, + self.timeout_write) def test_write_lock_timeout_with_multiple_readers_3_2(self): - self.multiproc_test(self.acquire_read, self.acquire_read, self.acquire_read, self.timeout_write, self.timeout_write) - + self.multiproc_test( + self.acquire_read, self.acquire_read, self.acquire_read, + self.timeout_write, self.timeout_write) # # Longer test case that ensures locks are reusable. Ordering is @@ -155,108 +166,108 @@ def p1(barrier): lock = Lock(self.lock_path) lock.acquire_write() - barrier.wait() # ---------------------------------------- 1 + barrier.wait() # ---------------------------------------- 1 # others test timeout - barrier.wait() # ---------------------------------------- 2 + barrier.wait() # ---------------------------------------- 2 lock.release_write() # release and others acquire read - barrier.wait() # ---------------------------------------- 3 + barrier.wait() # ---------------------------------------- 3 self.assertRaises(LockError, lock.acquire_write, 0.1) lock.acquire_read() - barrier.wait() # ---------------------------------------- 4 + barrier.wait() # ---------------------------------------- 4 lock.release_read() - barrier.wait() # ---------------------------------------- 5 + barrier.wait() # ---------------------------------------- 5 # p2 upgrades read to write - barrier.wait() # ---------------------------------------- 6 + barrier.wait() # ---------------------------------------- 6 self.assertRaises(LockError, lock.acquire_write, 0.1) self.assertRaises(LockError, lock.acquire_read, 0.1) - barrier.wait() # ---------------------------------------- 7 + barrier.wait() # ---------------------------------------- 7 # p2 releases write and read - barrier.wait() # ---------------------------------------- 8 + barrier.wait() # ---------------------------------------- 8 # p3 acquires read - barrier.wait() # ---------------------------------------- 9 + barrier.wait() # ---------------------------------------- 9 # p3 upgrades read to write - barrier.wait() # ---------------------------------------- 10 + barrier.wait() # ---------------------------------------- 10 self.assertRaises(LockError, lock.acquire_write, 0.1) self.assertRaises(LockError, lock.acquire_read, 0.1) - barrier.wait() # ---------------------------------------- 11 + barrier.wait() # ---------------------------------------- 11 # p3 releases locks - barrier.wait() # ---------------------------------------- 12 + barrier.wait() # ---------------------------------------- 12 lock.acquire_read() - barrier.wait() # ---------------------------------------- 13 + barrier.wait() # ---------------------------------------- 13 lock.release_read() def p2(barrier): lock = Lock(self.lock_path) # p1 acquires write - barrier.wait() # ---------------------------------------- 1 + barrier.wait() # 
---------------------------------------- 1 self.assertRaises(LockError, lock.acquire_write, 0.1) self.assertRaises(LockError, lock.acquire_read, 0.1) - barrier.wait() # ---------------------------------------- 2 + barrier.wait() # ---------------------------------------- 2 lock.acquire_read() - barrier.wait() # ---------------------------------------- 3 + barrier.wait() # ---------------------------------------- 3 # p1 tests shared read - barrier.wait() # ---------------------------------------- 4 + barrier.wait() # ---------------------------------------- 4 # others release reads - barrier.wait() # ---------------------------------------- 5 + barrier.wait() # ---------------------------------------- 5 - lock.acquire_write() # upgrade read to write - barrier.wait() # ---------------------------------------- 6 + lock.acquire_write() # upgrade read to write + barrier.wait() # ---------------------------------------- 6 # others test timeout - barrier.wait() # ---------------------------------------- 7 + barrier.wait() # ---------------------------------------- 7 lock.release_write() # release read AND write (need both) lock.release_read() - barrier.wait() # ---------------------------------------- 8 + barrier.wait() # ---------------------------------------- 8 # p3 acquires read - barrier.wait() # ---------------------------------------- 9 + barrier.wait() # ---------------------------------------- 9 # p3 upgrades read to write - barrier.wait() # ---------------------------------------- 10 + barrier.wait() # ---------------------------------------- 10 self.assertRaises(LockError, lock.acquire_write, 0.1) self.assertRaises(LockError, lock.acquire_read, 0.1) - barrier.wait() # ---------------------------------------- 11 + barrier.wait() # ---------------------------------------- 11 # p3 releases locks - barrier.wait() # ---------------------------------------- 12 + barrier.wait() # ---------------------------------------- 12 lock.acquire_read() - barrier.wait() # ---------------------------------------- 13 + barrier.wait() # ---------------------------------------- 13 lock.release_read() def p3(barrier): lock = Lock(self.lock_path) # p1 acquires write - barrier.wait() # ---------------------------------------- 1 + barrier.wait() # ---------------------------------------- 1 self.assertRaises(LockError, lock.acquire_write, 0.1) self.assertRaises(LockError, lock.acquire_read, 0.1) - barrier.wait() # ---------------------------------------- 2 + barrier.wait() # ---------------------------------------- 2 lock.acquire_read() - barrier.wait() # ---------------------------------------- 3 + barrier.wait() # ---------------------------------------- 3 # p1 tests shared read - barrier.wait() # ---------------------------------------- 4 + barrier.wait() # ---------------------------------------- 4 lock.release_read() - barrier.wait() # ---------------------------------------- 5 + barrier.wait() # ---------------------------------------- 5 # p2 upgrades read to write - barrier.wait() # ---------------------------------------- 6 + barrier.wait() # ---------------------------------------- 6 self.assertRaises(LockError, lock.acquire_write, 0.1) self.assertRaises(LockError, lock.acquire_read, 0.1) - barrier.wait() # ---------------------------------------- 7 + barrier.wait() # ---------------------------------------- 7 # p2 releases write & read - barrier.wait() # ---------------------------------------- 8 + barrier.wait() # ---------------------------------------- 8 lock.acquire_read() - barrier.wait() # 
---------------------------------------- 9 + barrier.wait() # ---------------------------------------- 9 lock.acquire_write() - barrier.wait() # ---------------------------------------- 10 + barrier.wait() # ---------------------------------------- 10 # others test timeout - barrier.wait() # ---------------------------------------- 11 + barrier.wait() # ---------------------------------------- 11 lock.release_read() # release read AND write in opposite lock.release_write() # order from before on p2 - barrier.wait() # ---------------------------------------- 12 + barrier.wait() # ---------------------------------------- 12 lock.acquire_read() - barrier.wait() # ---------------------------------------- 13 + barrier.wait() # ---------------------------------------- 13 lock.release_read() self.multiproc_test(p1, p2, p3) @@ -270,14 +281,18 @@ def exit_fn(t, v, tb): vals['exception'] = (t or v or tb) lock = Lock(self.lock_path) - vals = {'entered': False, 'exited': False, 'exception': False } - with ReadTransaction(lock, enter_fn, exit_fn): pass + vals = {'entered': False, 'exited': False, 'exception': False} + with ReadTransaction(lock, enter_fn, exit_fn): + pass + self.assertTrue(vals['entered']) self.assertTrue(vals['exited']) self.assertFalse(vals['exception']) - vals = {'entered': False, 'exited': False, 'exception': False } - with WriteTransaction(lock, enter_fn, exit_fn): pass + vals = {'entered': False, 'exited': False, 'exception': False} + with WriteTransaction(lock, enter_fn, exit_fn): + pass + self.assertTrue(vals['entered']) self.assertTrue(vals['exited']) self.assertFalse(vals['exception']) @@ -300,13 +315,13 @@ def do_write_with_exception(): with WriteTransaction(lock, enter_fn, exit_fn): raise Exception() - vals = {'entered': False, 'exited': False, 'exception': False } + vals = {'entered': False, 'exited': False, 'exception': False} self.assertRaises(Exception, do_read_with_exception) self.assertTrue(vals['entered']) self.assertTrue(vals['exited']) self.assertTrue(vals['exception']) - vals = {'entered': False, 'exited': False, 'exception': False } + vals = {'entered': False, 'exited': False, 'exception': False} self.assertRaises(Exception, do_write_with_exception) self.assertTrue(vals['entered']) self.assertTrue(vals['exited']) @@ -328,8 +343,10 @@ def exit_fn(t, v, tb): lock = Lock(self.lock_path) vals = {'entered': False, 'exited': False, 'exited_fn': False, - 'exception': False, 'exception_fn': False } - with ReadTransaction(lock, TestContextManager, exit_fn): pass + 'exception': False, 'exception_fn': False} + with ReadTransaction(lock, TestContextManager, exit_fn): + pass + self.assertTrue(vals['entered']) self.assertTrue(vals['exited']) self.assertFalse(vals['exception']) @@ -337,8 +354,10 @@ def exit_fn(t, v, tb): self.assertFalse(vals['exception_fn']) vals = {'entered': False, 'exited': False, 'exited_fn': False, - 'exception': False, 'exception_fn': False } - with ReadTransaction(lock, TestContextManager): pass + 'exception': False, 'exception_fn': False} + with ReadTransaction(lock, TestContextManager): + pass + self.assertTrue(vals['entered']) self.assertTrue(vals['exited']) self.assertFalse(vals['exception']) @@ -346,8 +365,10 @@ def exit_fn(t, v, tb): self.assertFalse(vals['exception_fn']) vals = {'entered': False, 'exited': False, 'exited_fn': False, - 'exception': False, 'exception_fn': False } - with WriteTransaction(lock, TestContextManager, exit_fn): pass + 'exception': False, 'exception_fn': False} + with WriteTransaction(lock, TestContextManager, exit_fn): + 
pass + self.assertTrue(vals['entered']) self.assertTrue(vals['exited']) self.assertFalse(vals['exception']) @@ -355,8 +376,10 @@ def exit_fn(t, v, tb): self.assertFalse(vals['exception_fn']) vals = {'entered': False, 'exited': False, 'exited_fn': False, - 'exception': False, 'exception_fn': False } - with WriteTransaction(lock, TestContextManager): pass + 'exception': False, 'exception_fn': False} + with WriteTransaction(lock, TestContextManager): + pass + self.assertTrue(vals['entered']) self.assertTrue(vals['exited']) self.assertFalse(vals['exception']) @@ -387,7 +410,7 @@ def do_write_with_exception(exit_fn): raise Exception() vals = {'entered': False, 'exited': False, 'exited_fn': False, - 'exception': False, 'exception_fn': False } + 'exception': False, 'exception_fn': False} self.assertRaises(Exception, do_read_with_exception, exit_fn) self.assertTrue(vals['entered']) self.assertTrue(vals['exited']) @@ -396,7 +419,7 @@ def do_write_with_exception(exit_fn): self.assertTrue(vals['exception_fn']) vals = {'entered': False, 'exited': False, 'exited_fn': False, - 'exception': False, 'exception_fn': False } + 'exception': False, 'exception_fn': False} self.assertRaises(Exception, do_read_with_exception, None) self.assertTrue(vals['entered']) self.assertTrue(vals['exited']) @@ -405,7 +428,7 @@ def do_write_with_exception(exit_fn): self.assertFalse(vals['exception_fn']) vals = {'entered': False, 'exited': False, 'exited_fn': False, - 'exception': False, 'exception_fn': False } + 'exception': False, 'exception_fn': False} self.assertRaises(Exception, do_write_with_exception, exit_fn) self.assertTrue(vals['entered']) self.assertTrue(vals['exited']) @@ -414,7 +437,7 @@ def do_write_with_exception(exit_fn): self.assertTrue(vals['exception_fn']) vals = {'entered': False, 'exited': False, 'exited_fn': False, - 'exception': False, 'exception_fn': False } + 'exception': False, 'exception_fn': False} self.assertRaises(Exception, do_write_with_exception, None) self.assertTrue(vals['entered']) self.assertTrue(vals['exited']) diff --git a/lib/spack/spack/test/provider_index.py b/lib/spack/spack/test/provider_index.py index 861814e0ae4..9847dd05a69 100644 --- a/lib/spack/spack/test/provider_index.py +++ b/lib/spack/spack/test/provider_index.py @@ -22,27 +22,28 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## +"""Tests for provider index cache files. 
+ +Tests assume that mock packages provide this: + + {'blas': { + blas: set([netlib-blas, openblas, openblas-with-lapack])}, + 'lapack': {lapack: set([netlib-lapack, openblas-with-lapack])}, + 'mpi': {mpi@:1: set([mpich@:1]), + mpi@:2.0: set([mpich2]), + mpi@:2.1: set([mpich2@1.1:]), + mpi@:2.2: set([mpich2@1.2:]), + mpi@:3: set([mpich@3:]), + mpi@:10.0: set([zmpi])}, + 'stuff': {stuff: set([externalvirtual])}} +""" from StringIO import StringIO -import unittest import spack from spack.spec import Spec from spack.provider_index import ProviderIndex from spack.test.mock_packages_test import * -# Test assume that mock packages provide this: -# -# {'blas': { -# blas: set([netlib-blas, openblas, openblas-with-lapack])}, -# 'lapack': {lapack: set([netlib-lapack, openblas-with-lapack])}, -# 'mpi': {mpi@:1: set([mpich@:1]), -# mpi@:2.0: set([mpich2]), -# mpi@:2.1: set([mpich2@1.1:]), -# mpi@:2.2: set([mpich2@1.2:]), -# mpi@:3: set([mpich@3:]), -# mpi@:10.0: set([zmpi])}, -# 'stuff': {stuff: set([externalvirtual])}} -# class ProviderIndexTest(MockPackagesTest): @@ -57,7 +58,6 @@ def test_yaml_round_trip(self): self.assertEqual(p, q) - def test_providers_for_simple(self): p = ProviderIndex(spack.repo.all_package_names()) @@ -70,7 +70,6 @@ def test_providers_for_simple(self): self.assertTrue(Spec('netlib-lapack') in lapack_providers) self.assertTrue(Spec('openblas-with-lapack') in lapack_providers) - def test_mpi_providers(self): p = ProviderIndex(spack.repo.all_package_names()) @@ -83,13 +82,11 @@ def test_mpi_providers(self): self.assertTrue(Spec('mpich@3:') in mpi_3_providers) self.assertTrue(Spec('zmpi') in mpi_3_providers) - def test_equal(self): p = ProviderIndex(spack.repo.all_package_names()) q = ProviderIndex(spack.repo.all_package_names()) self.assertEqual(p, q) - def test_copy(self): p = ProviderIndex(spack.repo.all_package_names()) q = p.copy() diff --git a/lib/spack/spack/test/spec_yaml.py b/lib/spack/spack/test/spec_yaml.py index fc0ce0b2f3f..964aea94225 100644 --- a/lib/spack/spack/test/spec_yaml.py +++ b/lib/spack/spack/test/spec_yaml.py @@ -30,6 +30,7 @@ from spack.spec import Spec from spack.test.mock_packages_test import * + class SpecYamlTest(MockPackagesTest): def check_yaml_round_trip(self, spec): @@ -37,30 +38,25 @@ def check_yaml_round_trip(self, spec): spec_from_yaml = Spec.from_yaml(yaml_text) self.assertTrue(spec.eq_dag(spec_from_yaml)) - def test_simple_spec(self): spec = Spec('mpileaks') self.check_yaml_round_trip(spec) - def test_normal_spec(self): spec = Spec('mpileaks+debug~opt') spec.normalize() self.check_yaml_round_trip(spec) - def test_ambiguous_version_spec(self): spec = Spec('mpileaks@1.0:5.0,6.1,7.3+debug~opt') spec.normalize() self.check_yaml_round_trip(spec) - def test_concrete_spec(self): spec = Spec('mpileaks+debug~opt') spec.concretize() self.check_yaml_round_trip(spec) - def test_yaml_subdag(self): spec = Spec('mpileaks^mpich+debug') spec.concretize() From 9d4a36a62f0ccd8cffc6fb96d6487f0805928e0b Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 9 Aug 2016 02:06:30 -0700 Subject: [PATCH 276/284] Properly re-raise exceptions from lock context handler. 
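The previous handler re-raised the caught value directly ("raise value"), which
discards the original traceback and ignores whatever the wrapped context manager
decided in its own __exit__. The context-manager protocol already handles
propagation: if __exit__ returns a false value, Python re-raises the original
exception with its traceback intact; returning True suppresses it. A minimal
sketch of that protocol, independent of Spack's LockTransaction (the Guard class
below is illustrative only):

    class Guard(object):
        def __enter__(self):
            return self

        def __exit__(self, type, value, traceback):
            # Returning False (or None) tells Python to re-raise the
            # original exception unchanged; returning True suppresses it.
            return False

    with Guard():
        raise ValueError("boom")  # propagates out of the with block
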
--- lib/spack/llnl/util/lock.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/lib/spack/llnl/util/lock.py b/lib/spack/llnl/util/lock.py index bef20025baf..4a4aec23854 100644 --- a/lib/spack/llnl/util/lock.py +++ b/lib/spack/llnl/util/lock.py @@ -213,13 +213,15 @@ def __enter__(self): return self._as def __exit__(self, type, value, traceback): + suppress = False if self._exit(): if self._as and hasattr(self._as, '__exit__'): - self._as.__exit__(type, value, traceback) + if self._as.__exit__(type, value, traceback): + suppress = True if self._release_fn: - self._release_fn(type, value, traceback) - if value: - raise value + if self._release_fn(type, value, traceback): + suppress = True + return suppress class ReadTransaction(LockTransaction): From 2be065418b4620d3ff37e07e4ef9cd526ba4d7a6 Mon Sep 17 00:00:00 2001 From: Matt Belhorn Date: Thu, 4 Aug 2016 11:11:45 -0400 Subject: [PATCH 277/284] Openssl package should not require openssl (https) to obtain source. --- var/spack/repos/builtin/packages/openssl/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/openssl/package.py b/var/spack/repos/builtin/packages/openssl/package.py index 78bdd88d9c3..8d9049a8f7d 100644 --- a/var/spack/repos/builtin/packages/openssl/package.py +++ b/var/spack/repos/builtin/packages/openssl/package.py @@ -34,7 +34,7 @@ class Openssl(Package): Transport Layer Security (TLS v1) protocols as well as a full-strength general purpose cryptography library.""" homepage = "http://www.openssl.org" - url = "https://www.openssl.org/source/openssl-1.0.1h.tar.gz" + url = "ftp://openssl.org/source/openssl-1.0.1h.tar.gz" version('1.0.1h', '8d6d684a9430d5cc98a62a5d8fbda8cf') version('1.0.1r', '1abd905e079542ccae948af37e393d28') From 4bca1c5440c7539745120e201430b1e5b08f0761 Mon Sep 17 00:00:00 2001 From: George Hartzell Date: Tue, 9 Aug 2016 15:13:08 -0700 Subject: [PATCH 278/284] Fix typo 'flexbile' -> 'flexible' --- lib/spack/docs/basic_usage.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst index df9a3901bfc..a42d9417916 100644 --- a/lib/spack/docs/basic_usage.rst +++ b/lib/spack/docs/basic_usage.rst @@ -1159,7 +1159,7 @@ More than one spec may be placed on the command line here. Module Commands for Shell Scripts `````````````````````````````````` -Although Spack is flexbile, the ``module`` command is much faster. +Although Spack is flexible, the ``module`` command is much faster. This could become an issue when emitting a series of ``spack load`` commands inside a shell script. By adding the ``--shell`` flag, ``spack module find`` may also be used to generate code that can be From e7f4fd404d03468db5a2a8f2d4c43b6fb7c36902 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 10 Aug 2016 13:28:39 -0700 Subject: [PATCH 279/284] Fix superclass constructor for SpackNamespace objects. 
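The old call was "super(ModuleType, self).__init__(self, namespace)": it named
the base class instead of the class being defined, so the MRO lookup skipped
ModuleType.__init__, and it passed self a second time even though super()
returns an already-bound proxy. A minimal sketch of the corrected pattern
(Base and Child are illustrative names, not Spack classes):

    class Base(object):
        def __init__(self, name):
            self.name = name

    class Child(Base):
        def __init__(self, name):
            # Pass the class being defined to super(); the instance is
            # bound automatically, so only the real arguments are needed.
            super(Child, self).__init__(name)
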
--- lib/spack/spack/repository.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index eada10f7cb6..d751a98b359 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -78,7 +78,7 @@ def converter(self, spec_like, *args, **kwargs): class SpackNamespace(ModuleType): """ Allow lazy loading of modules.""" def __init__(self, namespace): - super(ModuleType, self).__init__(self, namespace) + super(SpackNamespace, self).__init__(namespace) self.__file__ = "(spack namespace)" self.__path__ = [] self.__name__ = namespace From bf1072c9022cd161b9cc4860e5403a463bc0e05b Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 9 Aug 2016 13:23:53 -0700 Subject: [PATCH 280/284] Make Spack core PEP8 compliant. --- .flake8 | 2 +- bin/spack | 15 +- lib/spack/llnl/util/filesystem.py | 16 +- lib/spack/llnl/util/lang.py | 77 +++++---- lib/spack/llnl/util/link_tree.py | 10 +- lib/spack/llnl/util/lock.py | 3 + lib/spack/llnl/util/tty/__init__.py | 12 +- lib/spack/llnl/util/tty/colify.py | 32 ++-- lib/spack/llnl/util/tty/color.py | 29 ++-- lib/spack/llnl/util/tty/log.py | 16 +- lib/spack/spack/abi.py | 25 ++- lib/spack/spack/architecture.py | 1 + lib/spack/spack/cmd/__init__.py | 2 +- lib/spack/spack/cmd/activate.py | 4 +- lib/spack/spack/cmd/arch.py | 2 +- lib/spack/spack/cmd/cd.py | 3 +- lib/spack/spack/cmd/clean.py | 1 + lib/spack/spack/cmd/common/arguments.py | 4 +- lib/spack/spack/cmd/compiler.py | 65 +++++--- lib/spack/spack/cmd/compilers.py | 6 +- lib/spack/spack/cmd/config.py | 10 +- lib/spack/spack/cmd/create.py | 1 + lib/spack/spack/cmd/deactivate.py | 11 +- lib/spack/spack/cmd/dependents.py | 7 +- lib/spack/spack/cmd/diy.py | 8 +- lib/spack/spack/cmd/doc.py | 1 + lib/spack/spack/cmd/edit.py | 14 +- lib/spack/spack/cmd/env.py | 8 +- lib/spack/spack/cmd/extensions.py | 11 +- lib/spack/spack/cmd/fetch.py | 12 +- lib/spack/spack/cmd/graph.py | 13 +- lib/spack/spack/cmd/help.py | 4 +- lib/spack/spack/cmd/install.py | 7 +- lib/spack/spack/cmd/load.py | 7 +- lib/spack/spack/cmd/location.py | 29 ++-- lib/spack/spack/cmd/mirror.py | 25 +-- lib/spack/spack/cmd/module.py | 17 +- lib/spack/spack/cmd/package-list.py | 2 +- lib/spack/spack/cmd/patch.py | 6 +- lib/spack/spack/cmd/pkg.py | 53 +++--- lib/spack/spack/cmd/providers.py | 9 +- lib/spack/spack/cmd/python.py | 10 +- lib/spack/spack/cmd/reindex.py | 2 +- lib/spack/spack/cmd/repo.py | 29 ++-- lib/spack/spack/cmd/restage.py | 1 + lib/spack/spack/cmd/setup.py | 9 +- lib/spack/spack/cmd/spec.py | 13 +- lib/spack/spack/cmd/stage.py | 4 +- lib/spack/spack/cmd/test-install.py | 61 ++++--- lib/spack/spack/cmd/test.py | 2 + lib/spack/spack/cmd/uninstall.py | 20 ++- lib/spack/spack/cmd/unload.py | 6 +- lib/spack/spack/cmd/unuse.py | 6 +- lib/spack/spack/cmd/url-parse.py | 14 +- lib/spack/spack/cmd/urls.py | 5 +- lib/spack/spack/cmd/use.py | 6 +- lib/spack/spack/cmd/versions.py | 7 +- lib/spack/spack/compiler.py | 58 ++++--- lib/spack/spack/compilers/__init__.py | 68 ++++---- lib/spack/spack/compilers/clang.py | 12 +- lib/spack/spack/compilers/craype.py | 38 ++--- lib/spack/spack/compilers/gcc.py | 10 +- lib/spack/spack/compilers/intel.py | 10 +- lib/spack/spack/compilers/nag.py | 18 +- lib/spack/spack/compilers/pgi.py | 13 +- lib/spack/spack/compilers/xl.py | 50 +++--- lib/spack/spack/concretize.py | 16 +- lib/spack/spack/config.py | 130 ++++++++------- lib/spack/spack/database.py | 3 + lib/spack/spack/directives.py | 5 +- lib/spack/spack/directory_layout.py | 85 
++++------ lib/spack/spack/environment.py | 17 +- lib/spack/spack/error.py | 8 +- lib/spack/spack/fetch_strategy.py | 2 + lib/spack/spack/file_cache.py | 2 + lib/spack/spack/graph.py | 21 +-- lib/spack/spack/hooks/__init__.py | 2 + lib/spack/spack/hooks/extensions.py | 2 - lib/spack/spack/mirror.py | 27 +-- lib/spack/spack/modules.py | 17 +- lib/spack/spack/multimethod.py | 15 +- lib/spack/spack/operating_systems/cnl.py | 1 + .../spack/operating_systems/linux_distro.py | 2 + lib/spack/spack/operating_systems/mac_os.py | 1 + lib/spack/spack/package.py | 15 +- lib/spack/spack/parse.py | 20 ++- lib/spack/spack/patch.py | 4 +- lib/spack/spack/platforms/bgq.py | 2 +- lib/spack/spack/platforms/darwin.py | 3 +- lib/spack/spack/platforms/linux.py | 3 +- lib/spack/spack/platforms/test.py | 27 ++- lib/spack/spack/preferred_packages.py | 2 +- lib/spack/spack/provider_index.py | 1 + lib/spack/spack/repository.py | 7 + lib/spack/spack/resource.py | 6 +- lib/spack/spack/spec.py | 23 ++- lib/spack/spack/test/architecture.py | 1 + lib/spack/spack/test/cc.py | 64 ++++--- lib/spack/spack/test/cmd/module.py | 14 +- lib/spack/spack/test/cmd/test_compiler_cmd.py | 7 +- lib/spack/spack/test/cmd/uninstall.py | 2 + lib/spack/spack/test/config.py | 61 +++---- lib/spack/spack/test/database.py | 1 + lib/spack/spack/test/directory_layout.py | 13 +- lib/spack/spack/test/environment.py | 3 +- lib/spack/spack/test/git_fetch.py | 18 +- lib/spack/spack/test/hg_fetch.py | 8 +- lib/spack/spack/test/install.py | 8 +- lib/spack/spack/test/link_tree.py | 6 - lib/spack/spack/test/lock.py | 2 + lib/spack/spack/test/make_executable.py | 26 +-- lib/spack/spack/test/mirror.py | 17 +- lib/spack/spack/test/mock_database.py | 1 + lib/spack/spack/test/mock_packages_test.py | 8 +- lib/spack/spack/test/mock_repo.py | 6 +- lib/spack/spack/test/multimethod.py | 17 +- lib/spack/spack/test/namespace_trie.py | 6 - lib/spack/spack/test/operating_system.py | 32 +++- lib/spack/spack/test/optional_deps.py | 36 ++-- lib/spack/spack/test/package_sanity.py | 3 - lib/spack/spack/test/packages.py | 49 ++---- lib/spack/spack/test/pattern.py | 2 + lib/spack/spack/test/python_version.py | 11 +- lib/spack/spack/test/sbang.py | 1 + lib/spack/spack/test/spec_dag.py | 1 + lib/spack/spack/test/spec_semantics.py | 157 +++++++++--------- lib/spack/spack/test/spec_syntax.py | 130 ++++++++++----- lib/spack/spack/test/stage.py | 28 +--- lib/spack/spack/test/svn_fetch.py | 8 +- lib/spack/spack/test/tally_plugin.py | 1 + lib/spack/spack/test/url_extrapolate.py | 20 +-- lib/spack/spack/test/url_parse.py | 7 +- lib/spack/spack/test/url_substitution.py | 32 ++-- lib/spack/spack/test/yaml.py | 12 +- lib/spack/spack/url.py | 79 +++++---- lib/spack/spack/util/compression.py | 4 +- lib/spack/spack/util/crypto.py | 11 +- lib/spack/spack/util/debug.py | 5 +- lib/spack/spack/util/executable.py | 2 +- lib/spack/spack/util/multiproc.py | 18 +- lib/spack/spack/util/naming.py | 35 ++-- lib/spack/spack/util/pattern.py | 10 +- lib/spack/spack/util/prefix.py | 1 + lib/spack/spack/util/spack_yaml.py | 57 ++++--- lib/spack/spack/util/string.py | 4 +- lib/spack/spack/util/web.py | 1 + lib/spack/spack/variant.py | 2 + lib/spack/spack/version.py | 6 +- lib/spack/spack/yaml_version_check.py | 4 +- share/spack/qa/run-flake8 | 17 +- 150 files changed, 1436 insertions(+), 1160 deletions(-) diff --git a/.flake8 b/.flake8 index 286522bc48a..b178a2da577 100644 --- a/.flake8 +++ b/.flake8 @@ -19,5 +19,5 @@ # - F999: name name be undefined or undefined from star imports. 
# [flake8] -ignore = E221,E241,E731,F403,F821,F999,F405 +ignore = E129,E221,E241,E272,E731,F403,F821,F999,F405 max-line-length = 79 diff --git a/bin/spack b/bin/spack index e9307d1485a..9fed11f33bd 100755 --- a/bin/spack +++ b/bin/spack @@ -1,4 +1,5 @@ #!/usr/bin/env python +# flake8: noqa ############################################################################## # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. @@ -24,9 +25,10 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import sys -if not sys.version_info[:2] >= (2,6): +if not sys.version_info[:2] >= (2, 6): v_info = sys.version_info[:3] - sys.exit("Spack requires Python 2.6 or higher. This is Python %d.%d.%d." % v_info) + sys.exit("Spack requires Python 2.6 or higher. " + "This is Python %d.%d.%d." % v_info) import os @@ -62,7 +64,8 @@ for pyc_file in orphaned_pyc_files: try: os.remove(pyc_file) except OSError as e: - print "WARNING: Spack may fail mysteriously. Couldn't remove orphaned .pyc file: %s" % pyc_file + print ("WARNING: Spack may fail mysteriously. " + "Couldn't remove orphaned .pyc file: %s" % pyc_file) # If there is no working directory, use the spack prefix. try: @@ -128,6 +131,7 @@ if len(sys.argv) == 1: # actually parse the args. args = parser.parse_args() + def main(): # Set up environment based on args. tty.set_verbose(args.verbose) @@ -148,7 +152,7 @@ def main(): # If the user asked for it, don't check ssl certs. if args.insecure: - tty.warn("You asked for --insecure, which does not check SSL certificates.") + tty.warn("You asked for --insecure. Will NOT check SSL certificates.") spack.curl.add_default_arg('-k') # Try to load the particular command asked for and run it @@ -167,7 +171,8 @@ def main(): elif isinstance(return_val, int): sys.exit(return_val) else: - tty.die("Bad return value from command %s: %s" % (args.command, return_val)) + tty.die("Bad return value from command %s: %s" + % (args.command, return_val)) if args.profile: import cProfile diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 4cf99163e06..22ca85abf92 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -106,6 +106,7 @@ def groupid_to_group(x): class FileFilter(object): """Convenience class for calling filter_file a lot.""" + def __init__(self, *filenames): self.filenames = filenames @@ -355,7 +356,8 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs): # When follow_nonexisting isn't set, don't descend into dirs # in source that do not exist in dest if follow_nonexisting or os.path.exists(dest_child): - tuples = traverse_tree(source_root, dest_root, rel_child, **kwargs) # NOQA: ignore=E501 + tuples = traverse_tree( + source_root, dest_root, rel_child, **kwargs) for t in tuples: yield t @@ -422,14 +424,20 @@ def fix_darwin_install_name(path): libs = glob.glob(join_path(path, "*.dylib")) for lib in libs: # fix install name first: - subprocess.Popen(["install_name_tool", "-id", lib, lib], stdout=subprocess.PIPE).communicate()[0] # NOQA: ignore=E501 - long_deps = subprocess.Popen(["otool", "-L", lib], stdout=subprocess.PIPE).communicate()[0].split('\n') # NOQA: ignore=E501 + subprocess.Popen( + ["install_name_tool", "-id", lib, lib], + stdout=subprocess.PIPE).communicate()[0] + long_deps = subprocess.Popen( + ["otool", "-L", lib], + 
stdout=subprocess.PIPE).communicate()[0].split('\n') deps = [dep.partition(' ')[0][1::] for dep in long_deps[2:-1]] # fix all dependencies: for dep in deps: for loc in libs: if dep == os.path.basename(loc): - subprocess.Popen(["install_name_tool", "-change", dep, loc, lib], stdout=subprocess.PIPE).communicate()[0] # NOQA: ignore=E501 + subprocess.Popen( + ["install_name_tool", "-change", dep, loc, lib], + stdout=subprocess.PIPE).communicate()[0] break diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py index 63eb08d8031..df32012e2de 100644 --- a/lib/spack/llnl/util/lang.py +++ b/lib/spack/llnl/util/lang.py @@ -24,7 +24,6 @@ ############################################################################## import os import re -import sys import functools import collections import inspect @@ -39,14 +38,15 @@ def index_by(objects, *funcs): Values are used as keys. For example, suppose you have four objects with attributes that look like this: - a = Spec(name="boost", compiler="gcc", arch="bgqos_0") - b = Spec(name="mrnet", compiler="intel", arch="chaos_5_x86_64_ib") - c = Spec(name="libelf", compiler="xlc", arch="bgqos_0") - d = Spec(name="libdwarf", compiler="intel", arch="chaos_5_x86_64_ib") + a = Spec(name="boost", compiler="gcc", arch="bgqos_0") + b = Spec(name="mrnet", compiler="intel", arch="chaos_5_x86_64_ib") + c = Spec(name="libelf", compiler="xlc", arch="bgqos_0") + d = Spec(name="libdwarf", compiler="intel", arch="chaos_5_x86_64_ib") - list_of_specs = [a,b,c,d] - index1 = index_by(list_of_specs, lambda s: s.arch, lambda s: s.compiler) - index2 = index_by(list_of_specs, lambda s: s.compiler) + list_of_specs = [a,b,c,d] + index1 = index_by(list_of_specs, lambda s: s.arch, + lambda s: s.compiler) + index2 = index_by(list_of_specs, lambda s: s.compiler) ``index1'' now has two levels of dicts, with lists at the leaves, like this: @@ -137,7 +137,7 @@ def get_calling_module_name(): finally: del stack - if not '__module__' in caller_locals: + if '__module__' not in caller_locals: raise RuntimeError("Must invoke get_calling_module_name() " "from inside a class definition!") @@ -173,11 +173,11 @@ def has_method(cls, name): class memoized(object): """Decorator that caches the results of a function, storing them in an attribute of that function.""" + def __init__(self, func): self.func = func self.cache = {} - def __call__(self, *args): if not isinstance(args, collections.Hashable): # Not hashable, so just call the function. @@ -187,12 +187,10 @@ def __call__(self, *args): self.cache[args] = self.func(*args) return self.cache[args] - def __get__(self, obj, objtype): """Support instance methods.""" return functools.partial(self.__call__, obj) - def clear(self): """Expunge cache so that self.func will be called again.""" self.cache.clear() @@ -237,13 +235,21 @@ def setter(name, value): if not has_method(cls, '_cmp_key'): raise TypeError("'%s' doesn't define _cmp_key()." 
% cls.__name__) - setter('__eq__', lambda s,o: (s is o) or (o is not None and s._cmp_key() == o._cmp_key())) - setter('__lt__', lambda s,o: o is not None and s._cmp_key() < o._cmp_key()) - setter('__le__', lambda s,o: o is not None and s._cmp_key() <= o._cmp_key()) + setter('__eq__', + lambda s, o: + (s is o) or (o is not None and s._cmp_key() == o._cmp_key())) + setter('__lt__', + lambda s, o: o is not None and s._cmp_key() < o._cmp_key()) + setter('__le__', + lambda s, o: o is not None and s._cmp_key() <= o._cmp_key()) - setter('__ne__', lambda s,o: (s is not o) and (o is None or s._cmp_key() != o._cmp_key())) - setter('__gt__', lambda s,o: o is None or s._cmp_key() > o._cmp_key()) - setter('__ge__', lambda s,o: o is None or s._cmp_key() >= o._cmp_key()) + setter('__ne__', + lambda s, o: + (s is not o) and (o is None or s._cmp_key() != o._cmp_key())) + setter('__gt__', + lambda s, o: o is None or s._cmp_key() > o._cmp_key()) + setter('__ge__', + lambda s, o: o is None or s._cmp_key() >= o._cmp_key()) setter('__hash__', lambda self: hash(self._cmp_key())) @@ -254,10 +260,10 @@ def setter(name, value): class HashableMap(dict): """This is a hashable, comparable dictionary. Hash is performed on a tuple of the values in the dictionary.""" + def _cmp_key(self): return tuple(sorted(self.values())) - def copy(self): """Type-agnostic clone method. Preserves subclass type.""" # Construct a new dict of my type @@ -336,24 +342,39 @@ def match(string): return match - def DictWrapper(dictionary): """Returns a class that wraps a dictionary and enables it to be used like an object.""" class wrapper(object): - def __getattr__(self, name): return dictionary[name] - def __setattr__(self, name, value): dictionary[name] = value - def setdefault(self, *args): return dictionary.setdefault(*args) - def get(self, *args): return dictionary.get(*args) - def keys(self): return dictionary.keys() - def values(self): return dictionary.values() - def items(self): return dictionary.items() - def __iter__(self): return iter(dictionary) + def __getattr__(self, name): + return dictionary[name] + + def __setattr__(self, name, value): + dictionary[name] = value + + def setdefault(self, *args): + return dictionary.setdefault(*args) + + def get(self, *args): + return dictionary.get(*args) + + def keys(self): + return dictionary.keys() + + def values(self): + return dictionary.values() + + def items(self): + return dictionary.items() + + def __iter__(self): + return iter(dictionary) return wrapper() class RequiredAttributeError(ValueError): + def __init__(self, message): super(RequiredAttributeError, self).__init__(message) diff --git a/lib/spack/llnl/util/link_tree.py b/lib/spack/llnl/util/link_tree.py index b6d87960841..d6547e933af 100644 --- a/lib/spack/llnl/util/link_tree.py +++ b/lib/spack/llnl/util/link_tree.py @@ -23,12 +23,13 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## """LinkTree class for setting up trees of symbolic links.""" -__all__ = ['LinkTree'] import os import shutil from llnl.util.filesystem import * +__all__ = ['LinkTree'] + empty_file_name = '.spack-empty' @@ -43,13 +44,13 @@ class LinkTree(object): modified. 
""" + def __init__(self, source_root): if not os.path.exists(source_root): raise IOError("No such file or directory: '%s'", source_root) self._root = source_root - def find_conflict(self, dest_root, **kwargs): """Returns the first file in dest that conflicts with src""" kwargs['follow_nonexisting'] = False @@ -61,9 +62,9 @@ def find_conflict(self, dest_root, **kwargs): return dest return None - def merge(self, dest_root, **kwargs): - """Link all files in src into dest, creating directories if necessary.""" + """Link all files in src into dest, creating directories + if necessary.""" kwargs['order'] = 'pre' for src, dest in traverse_tree(self._root, dest_root, **kwargs): if os.path.isdir(src): @@ -83,7 +84,6 @@ def merge(self, dest_root, **kwargs): assert(not os.path.exists(dest)) os.symlink(src, dest) - def unmerge(self, dest_root, **kwargs): """Unlink all files in dest that exist in src. diff --git a/lib/spack/llnl/util/lock.py b/lib/spack/llnl/util/lock.py index 4a4aec23854..aa8272d5fe9 100644 --- a/lib/spack/llnl/util/lock.py +++ b/lib/spack/llnl/util/lock.py @@ -47,6 +47,7 @@ class Lock(object): and recent NFS versions. """ + def __init__(self, file_path): self._file_path = file_path self._fd = None @@ -225,6 +226,7 @@ def __exit__(self, type, value, traceback): class ReadTransaction(LockTransaction): + def _enter(self): return self._lock.acquire_read(self._timeout) @@ -233,6 +235,7 @@ def _exit(self): class WriteTransaction(LockTransaction): + def _enter(self): return self._lock.acquire_write(self._timeout) diff --git a/lib/spack/llnl/util/tty/__init__.py b/lib/spack/llnl/util/tty/__init__.py index ee81e11a207..db74aaba6b5 100644 --- a/lib/spack/llnl/util/tty/__init__.py +++ b/lib/spack/llnl/util/tty/__init__.py @@ -36,6 +36,7 @@ _verbose = False indent = " " + def is_verbose(): return _verbose @@ -148,7 +149,8 @@ def get_yes_or_no(prompt, **kwargs): elif default_value is False: prompt += ' [y/N] ' else: - raise ValueError("default for get_yes_no() must be True, False, or None.") + raise ValueError( + "default for get_yes_no() must be True, False, or None.") result = None while result is None: @@ -174,8 +176,9 @@ def hline(label=None, **kwargs): char = kwargs.pop('char', '-') max_width = kwargs.pop('max_width', 64) if kwargs: - raise TypeError("'%s' is an invalid keyword argument for this function." - % next(kwargs.iterkeys())) + raise TypeError( + "'%s' is an invalid keyword argument for this function." 
+ % next(kwargs.iterkeys())) rows, cols = terminal_size() if not cols: @@ -200,7 +203,8 @@ def terminal_size(): """Gets the dimensions of the console: (rows, cols).""" def ioctl_GWINSZ(fd): try: - rc = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234')) + rc = struct.unpack('hh', fcntl.ioctl( + fd, termios.TIOCGWINSZ, '1234')) except: return return rc diff --git a/lib/spack/llnl/util/tty/colify.py b/lib/spack/llnl/util/tty/colify.py index 81a83691d7a..67acdfa5179 100644 --- a/lib/spack/llnl/util/tty/colify.py +++ b/lib/spack/llnl/util/tty/colify.py @@ -27,15 +27,14 @@ """ import os import sys -import fcntl -import termios -import struct from StringIO import StringIO from llnl.util.tty import terminal_size from llnl.util.tty.color import clen, cextra + class ColumnConfig: + def __init__(self, cols): self.cols = cols self.line_length = 0 @@ -43,7 +42,8 @@ def __init__(self, cols): self.widths = [0] * cols # does not include ansi colors def __repr__(self): - attrs = [(a,getattr(self, a)) for a in dir(self) if not a.startswith("__")] + attrs = [(a, getattr(self, a)) + for a in dir(self) if not a.startswith("__")] return "" % ", ".join("%s: %r" % a for a in attrs) @@ -68,7 +68,7 @@ def config_variable_cols(elts, console_width, padding, cols=0): max_cols = min(len(elts), max_cols) # Range of column counts to try. If forced, use the supplied value. - col_range = [cols] if cols else xrange(1, max_cols+1) + col_range = [cols] if cols else xrange(1, max_cols + 1) # Determine the most columns possible for the console width. configs = [ColumnConfig(c) for c in col_range] @@ -106,7 +106,6 @@ def config_uniform_cols(elts, console_width, padding, cols=0): # 'clen' ignores length of ansi color sequences. max_len = max(clen(e) for e in elts) + padding - max_clen = max(len(e) for e in elts) + padding if cols == 0: cols = max(1, console_width / max_len) cols = min(len(elts), cols) @@ -130,17 +129,19 @@ def colify(elts, **options): output= A file object to write to. Default is sys.stdout. indent= Optionally indent all columns by some number of spaces. padding= Spaces between columns. Default is 2. - width= Width of the output. Default is 80 if tty is not detected. + width= Width of the output. Default is 80 if tty not detected. cols= Force number of columns. Default is to size to terminal, or single-column if no tty tty= Whether to attempt to write to a tty. Default is to - autodetect a tty. Set to False to force single-column output. + autodetect a tty. Set to False to force + single-column output. - method= Method to use to fit columns. Options are variable or uniform. - Variable-width columns are tighter, uniform columns are all the - same width and fit less data on the screen. + method= Method to use to fit columns. Options are variable or + uniform. Variable-width columns are tighter, uniform + columns are all the same width and fit less data on + the screen. """ # Get keyword arguments or set defaults cols = options.pop("cols", 0) @@ -152,8 +153,9 @@ def colify(elts, **options): console_cols = options.pop("width", None) if options: - raise TypeError("'%s' is an invalid keyword argument for this function." - % next(options.iterkeys())) + raise TypeError( + "'%s' is an invalid keyword argument for this function." 
+ % next(options.iterkeys())) # elts needs to be an array of strings so we can count the elements elts = [str(elt) for elt in elts] @@ -167,7 +169,8 @@ def colify(elts, **options): r, c = env_size.split('x') console_rows, console_cols = int(r), int(c) tty = True - except: pass + except: + pass # Use only one column if not a tty. if not tty: @@ -228,6 +231,7 @@ def colify_table(table, **options): raise ValueError("Table is empty in colify_table!") columns = len(table[0]) + def transpose(): for i in xrange(columns): for row in table: diff --git a/lib/spack/llnl/util/tty/color.py b/lib/spack/llnl/util/tty/color.py index 0abcb09b973..b0c00f15022 100644 --- a/lib/spack/llnl/util/tty/color.py +++ b/lib/spack/llnl/util/tty/color.py @@ -75,25 +75,27 @@ import re import sys + class ColorParseError(Exception): """Raised when a color format fails to parse.""" + def __init__(self, message): super(ColorParseError, self).__init__(message) # Text styles for ansi codes -styles = {'*' : '1', # bold - '_' : '4', # underline - None : '0' } # plain +styles = {'*': '1', # bold + '_': '4', # underline + None: '0'} # plain # Dim and bright ansi colors -colors = {'k' : 30, 'K' : 90, # black - 'r' : 31, 'R' : 91, # red - 'g' : 32, 'G' : 92, # green - 'y' : 33, 'Y' : 93, # yellow - 'b' : 34, 'B' : 94, # blue - 'm' : 35, 'M' : 95, # magenta - 'c' : 36, 'C' : 96, # cyan - 'w' : 37, 'W' : 97 } # white +colors = {'k': 30, 'K': 90, # black + 'r': 31, 'R': 91, # red + 'g': 32, 'G': 92, # green + 'y': 33, 'Y': 93, # yellow + 'b': 34, 'B': 94, # blue + 'm': 35, 'M': 95, # magenta + 'c': 36, 'C': 96, # cyan + 'w': 37, 'W': 97} # white # Regex to be used for color formatting color_re = r'@(?:@|\.|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)' @@ -104,6 +106,7 @@ def __init__(self, message): class match_to_ansi(object): + def __init__(self, color=True): self.color = color @@ -179,12 +182,14 @@ def cprint(string, stream=sys.stdout, color=None): """Same as cwrite, but writes a trailing newline to the stream.""" cwrite(string + "\n", stream, color) + def cescape(string): """Replace all @ with @@ in the string provided.""" return str(string).replace('@', '@@') class ColorStream(object): + def __init__(self, stream, color=None): self._stream = stream self._color = color @@ -196,7 +201,7 @@ def write(self, string, **kwargs): color = self._color if self._color is None: if raw: - color=True + color = True else: color = self._stream.isatty() or _force_color raw_write(colorize(string, color=color)) diff --git a/lib/spack/llnl/util/tty/log.py b/lib/spack/llnl/util/tty/log.py index ca82da7b174..b67edcf9ccf 100644 --- a/lib/spack/llnl/util/tty/log.py +++ b/lib/spack/llnl/util/tty/log.py @@ -36,6 +36,7 @@ # Use this to strip escape sequences _escape = re.compile(r'\x1b[^m]*m|\x1b\[?1034h') + def _strip(line): """Strip color and control characters from a line.""" return _escape.sub('', line) @@ -58,10 +59,10 @@ class keyboard_input(object): When the with block completes, this will restore settings before canonical and echo were disabled. """ + def __init__(self, stream): self.stream = stream - def __enter__(self): self.old_cfg = None @@ -86,10 +87,9 @@ def __enter__(self): # Apply new settings for terminal termios.tcsetattr(fd, termios.TCSADRAIN, self.new_cfg) - except Exception, e: + except Exception: pass # Some OS's do not support termios, so ignore. 
- def __exit__(self, exc_type, exception, traceback): # If termios was avaialble, restore old settings after the # with block @@ -114,6 +114,7 @@ class log_output(object): Closes the provided stream when done with the block. If echo is True, also prints the output to stdout. """ + def __init__(self, stream, echo=False, force_color=False, debug=False): self.stream = stream @@ -122,7 +123,7 @@ def __init__(self, stream, echo=False, force_color=False, debug=False): self.force_color = force_color self.debug = debug - # Default is to try file-descriptor reassignment unless the system + # Default is to try file-descriptor reassignment unless the system # out/err streams do not have an associated file descriptor self.directAssignment = False @@ -130,7 +131,6 @@ def trace(self, frame, event, arg): """Jumps to __exit__ on the child process.""" raise _SkipWithBlock() - def __enter__(self): """Redirect output from the with block to a file. @@ -154,7 +154,8 @@ def __enter__(self): with self.stream as log_file: with keyboard_input(sys.stdin): while True: - rlist, w, x = select.select([read_file, sys.stdin], [], []) + rlist, w, x = select.select( + [read_file, sys.stdin], [], []) if not rlist: break @@ -211,7 +212,6 @@ def __enter__(self): if self.debug: tty._debug = True - def __exit__(self, exc_type, exception, traceback): """Exits on child, handles skipping the with block on parent.""" # Child should just exit here. @@ -235,7 +235,7 @@ def __exit__(self, exc_type, exception, traceback): sys.stderr = self._stderr else: os.dup2(self._stdout, sys.stdout.fileno()) - os.dup2(self._stderr, sys.stderr.fileno()) + os.dup2(self._stderr, sys.stderr.fileno()) return False diff --git a/lib/spack/spack/abi.py b/lib/spack/spack/abi.py index 38cff62af49..064abb97824 100644 --- a/lib/spack/spack/abi.py +++ b/lib/spack/spack/abi.py @@ -30,15 +30,15 @@ from spack.util.executable import Executable, ProcessError from llnl.util.lang import memoized + class ABI(object): """This class provides methods to test ABI compatibility between specs. 
The current implementation is rather rough and could be improved.""" def architecture_compatible(self, parent, child): - """Returns true iff the parent and child specs have ABI compatible targets.""" - return not parent.architecture or not child.architecture \ - or parent.architecture == child.architecture - + """Return true if parent and child have ABI compatible targets.""" + return not parent.architecture or not child.architecture or \ + parent.architecture == child.architecture @memoized def _gcc_get_libstdcxx_version(self, version): @@ -61,8 +61,9 @@ def _gcc_get_libstdcxx_version(self, version): else: return None try: - output = rungcc("--print-file-name=%s" % libname, return_output=True) - except ProcessError, e: + output = rungcc("--print-file-name=%s" % libname, + return_output=True) + except ProcessError: return None if not output: return None @@ -71,7 +72,6 @@ def _gcc_get_libstdcxx_version(self, version): return None return os.path.basename(libpath) - @memoized def _gcc_compiler_compare(self, pversion, cversion): """Returns true iff the gcc version pversion and cversion @@ -82,7 +82,6 @@ def _gcc_compiler_compare(self, pversion, cversion): return False return plib == clib - def _intel_compiler_compare(self, pversion, cversion): """Returns true iff the intel version pversion and cversion are ABI compatible""" @@ -92,9 +91,8 @@ def _intel_compiler_compare(self, pversion, cversion): return False return pversion.version[:2] == cversion.version[:2] - def compiler_compatible(self, parent, child, **kwargs): - """Returns true iff the compilers for parent and child specs are ABI compatible""" + """Return true if compilers for parent and child are ABI compatible.""" if not parent.compiler or not child.compiler: return True @@ -109,8 +107,8 @@ def compiler_compatible(self, parent, child, **kwargs): # TODO: into compiler classes? for pversion in parent.compiler.versions: for cversion in child.compiler.versions: - # For a few compilers use specialized comparisons. Otherwise - # match on version match. + # For a few compilers use specialized comparisons. + # Otherwise match on version match. if pversion.satisfies(cversion): return True elif (parent.compiler.name == "gcc" and @@ -121,9 +119,8 @@ def compiler_compatible(self, parent, child, **kwargs): return True return False - def compatible(self, parent, child, **kwargs): """Returns true iff a parent and child spec are ABI compatible""" loosematch = kwargs.get('loose', False) return self.architecture_compatible(parent, child) and \ - self.compiler_compatible(parent, child, loose=loosematch) + self.compiler_compatible(parent, child, loose=loosematch) diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py index 886e170b1aa..0d210f97412 100644 --- a/lib/spack/spack/architecture.py +++ b/lib/spack/spack/architecture.py @@ -91,6 +91,7 @@ class NoPlatformError(serr.SpackError): + def __init__(self): super(NoPlatformError, self).__init__( "Could not determine a platform for this machine.") diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index 230115df50a..f69f434afdc 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -240,4 +240,4 @@ def fmt(s): else: raise ValueError( "Invalid mode for display_specs: %s. Must be one of (paths," - "deps, short)." % mode) # NOQA: ignore=E501 + "deps, short)." 
% mode) diff --git a/lib/spack/spack/cmd/activate.py b/lib/spack/spack/cmd/activate.py index 9867fa88355..797cdcb1367 100644 --- a/lib/spack/spack/cmd/activate.py +++ b/lib/spack/spack/cmd/activate.py @@ -29,12 +29,14 @@ description = "Activate a package extension." + def setup_parser(subparser): subparser.add_argument( '-f', '--force', action='store_true', help="Activate without first activating dependencies.") subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help="spec of package extension to activate.") + 'spec', nargs=argparse.REMAINDER, + help="spec of package extension to activate.") def activate(parser, args): diff --git a/lib/spack/spack/cmd/arch.py b/lib/spack/spack/cmd/arch.py index cf2f96fd21f..1badd40f7f4 100644 --- a/lib/spack/spack/cmd/arch.py +++ b/lib/spack/spack/cmd/arch.py @@ -22,10 +22,10 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import spack import spack.architecture as architecture description = "Print the architecture for this machine" + def arch(parser, args): print architecture.sys_type() diff --git a/lib/spack/spack/cmd/cd.py b/lib/spack/spack/cmd/cd.py index aa45f67ae1a..cf7232258c0 100644 --- a/lib/spack/spack/cmd/cd.py +++ b/lib/spack/spack/cmd/cd.py @@ -25,7 +25,8 @@ import spack.cmd.location import spack.modules -description="cd to spack directories in the shell." +description = "cd to spack directories in the shell." + def setup_parser(subparser): """This is for decoration -- spack cd is used through spack's diff --git a/lib/spack/spack/cmd/clean.py b/lib/spack/spack/cmd/clean.py index 514c5874ef2..dc62fbcaf66 100644 --- a/lib/spack/spack/cmd/clean.py +++ b/lib/spack/spack/cmd/clean.py @@ -31,6 +31,7 @@ description = "Remove build stage and source tarball for packages." + def setup_parser(subparser): subparser.add_argument('packages', nargs=argparse.REMAINDER, help="specs of packages to clean") diff --git a/lib/spack/spack/cmd/common/arguments.py b/lib/spack/spack/cmd/common/arguments.py index af041708243..afcba337149 100644 --- a/lib/spack/spack/cmd/common/arguments.py +++ b/lib/spack/spack/cmd/common/arguments.py @@ -35,7 +35,7 @@ def add_common_arguments(parser, list_of_arguments): for argument in list_of_arguments: if argument not in _arguments: - message = 'Trying to add the non existing argument "{0}" to a command' # NOQA: ignore=E501 + message = 'Trying to add non existing argument "{0}" to a command' raise KeyError(message.format(argument)) x = _arguments[argument] parser.add_argument(*x.flags, **x.kwargs) @@ -82,7 +82,7 @@ def __call__(self, parser, namespace, values, option_string=None): kwargs={ 'action': 'store_true', 'dest': 'yes_to_all', - 'help': 'Assume "yes" is the answer to every confirmation asked to the user.' # NOQA: ignore=E501 + 'help': 'Assume "yes" is the answer to every confirmation request.' 
}) _arguments['yes_to_all'] = parms diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py index c325628ebf8..ea91c714795 100644 --- a/lib/spack/spack/cmd/compiler.py +++ b/lib/spack/spack/cmd/compiler.py @@ -37,6 +37,7 @@ description = "Manage compilers" + def setup_parser(subparser): sp = subparser.add_subparsers( metavar='SUBCOMMAND', dest='compiler_command') @@ -44,48 +45,58 @@ def setup_parser(subparser): scopes = spack.config.config_scopes # Find - find_parser = sp.add_parser('find', aliases=['add'], help='Search the system for compilers to add to the Spack configuration.') + find_parser = sp.add_parser( + 'find', aliases=['add'], + help='Search the system for compilers to add to Spack configuration.') find_parser.add_argument('add_paths', nargs=argparse.REMAINDER) - find_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope, - help="Configuration scope to modify.") + find_parser.add_argument( + '--scope', choices=scopes, default=spack.cmd.default_modify_scope, + help="Configuration scope to modify.") # Remove - remove_parser = sp.add_parser('remove', aliases=['rm'], help='Remove compiler by spec.') + remove_parser = sp.add_parser( + 'remove', aliases=['rm'], help='Remove compiler by spec.') remove_parser.add_argument( - '-a', '--all', action='store_true', help='Remove ALL compilers that match spec.') + '-a', '--all', action='store_true', + help='Remove ALL compilers that match spec.') remove_parser.add_argument('compiler_spec') - remove_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope, - help="Configuration scope to modify.") + remove_parser.add_argument( + '--scope', choices=scopes, default=spack.cmd.default_modify_scope, + help="Configuration scope to modify.") # List list_parser = sp.add_parser('list', help='list available compilers') - list_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_list_scope, - help="Configuration scope to read from.") + list_parser.add_argument( + '--scope', choices=scopes, default=spack.cmd.default_list_scope, + help="Configuration scope to read from.") # Info info_parser = sp.add_parser('info', help='Show compiler paths.') info_parser.add_argument('compiler_spec') - info_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_list_scope, - help="Configuration scope to read from.") + info_parser.add_argument( + '--scope', choices=scopes, default=spack.cmd.default_list_scope, + help="Configuration scope to read from.") def compiler_find(args): - """Search either $PATH or a list of paths OR MODULES for compilers and add them - to Spack's configuration.""" + """Search either $PATH or a list of paths OR MODULES for compilers and + add them to Spack's configuration. + + """ paths = args.add_paths if not paths: paths = get_path('PATH') - # Don't initialize compilers config via compilers.get_compiler_config. - # Just let compiler_find do the + # Don't initialize compilers config via compilers.get_compiler_config. 
+ # Just let compiler_find do the # entire process and return an empty config from all_compilers # Default for any other process is init_config=True compilers = [c for c in spack.compilers.find_compilers(*paths) if c.spec not in spack.compilers.all_compilers( - scope=args.scope, init_config=False)] + scope=args.scope, init_config=False)] if compilers: spack.compilers.add_compilers_to_config(compilers, scope=args.scope, - init_config=False) + init_config=False) n = len(compilers) s = 's' if n > 1 else '' filename = spack.config.get_config_filename(args.scope, 'compilers') @@ -103,11 +114,12 @@ def compiler_remove(args): elif not args.all and len(compilers) > 1: tty.error("Multiple compilers match spec %s. Choose one:" % cspec) colify(reversed(sorted([c.spec for c in compilers])), indent=4) - tty.msg("Or, you can use `spack compiler remove -a` to remove all of them.") + tty.msg("Or, use `spack compiler remove -a` to remove all of them.") sys.exit(1) for compiler in compilers: - spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope) + spack.compilers.remove_compiler_from_config( + compiler.spec, scope=args.scope) tty.msg("Removed compiler %s" % compiler.spec) @@ -133,7 +145,8 @@ def compiler_list(args): tty.msg("Available compilers") index = index_by(spack.compilers.all_compilers(scope=args.scope), 'name') for i, (name, compilers) in enumerate(index.items()): - if i >= 1: print + if i >= 1: + print cname = "%s{%s}" % (spack.spec.compiler_color, name) tty.hline(colorize(cname), char='-') @@ -141,10 +154,10 @@ def compiler_list(args): def compiler(parser, args): - action = {'add' : compiler_find, - 'find' : compiler_find, - 'remove' : compiler_remove, - 'rm' : compiler_remove, - 'info' : compiler_info, - 'list' : compiler_list } + action = {'add': compiler_find, + 'find': compiler_find, + 'remove': compiler_remove, + 'rm': compiler_remove, + 'info': compiler_info, + 'list': compiler_list} action[args.compiler_command](args) diff --git a/lib/spack/spack/cmd/compilers.py b/lib/spack/spack/cmd/compilers.py index 9fbc2bb952f..b87f977e5a3 100644 --- a/lib/spack/spack/cmd/compilers.py +++ b/lib/spack/spack/cmd/compilers.py @@ -22,18 +22,16 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import llnl.util.tty as tty -from llnl.util.tty.colify import colify -from llnl.util.lang import index_by - import spack from spack.cmd.compiler import compiler_list description = "List available compilers. Same as 'spack compiler list'." + def setup_parser(subparser): subparser.add_argument('--scope', choices=spack.config.config_scopes, help="Configuration scope to read/modify.") + def compilers(parser, args): compiler_list(args) diff --git a/lib/spack/spack/cmd/config.py b/lib/spack/spack/cmd/config.py index d6f56c270d9..c189e37036b 100644 --- a/lib/spack/spack/cmd/config.py +++ b/lib/spack/spack/cmd/config.py @@ -22,15 +22,11 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import sys -import argparse - -import llnl.util.tty as tty - import spack.config description = "Get and set configuration options." 
+ def setup_parser(subparser): # User can only choose one scope_group = subparser.add_mutually_exclusive_group() @@ -64,6 +60,6 @@ def config_edit(args): def config(parser, args): - action = { 'get' : config_get, - 'edit' : config_edit } + action = {'get': config_get, + 'edit': config_edit} action[args.config_command](args) diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index 51bf17a44b8..52a82eb38fe 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -217,6 +217,7 @@ def setup_parser(subparser): class BuildSystemGuesser(object): + def __call__(self, stage, url): """Try to guess the type of build system used by a project based on the contents of its archive or the URL it was downloaded from.""" diff --git a/lib/spack/spack/cmd/deactivate.py b/lib/spack/spack/cmd/deactivate.py index 990309ee488..2b15a0331e3 100644 --- a/lib/spack/spack/cmd/deactivate.py +++ b/lib/spack/spack/cmd/deactivate.py @@ -31,6 +31,7 @@ description = "Deactivate a package extension." + def setup_parser(subparser): subparser.add_argument( '-f', '--force', action='store_true', @@ -40,7 +41,8 @@ def setup_parser(subparser): help="Deactivate all extensions of an extendable package, or " "deactivate an extension AND its dependencies.") subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help="spec of package extension to deactivate.") + 'spec', nargs=argparse.REMAINDER, + help="spec of package extension to deactivate.") def deactivate(parser, args): @@ -65,7 +67,8 @@ def deactivate(parser, args): if not args.force and not spec.package.activated: tty.die("%s is not activated." % pkg.spec.short_spec) - tty.msg("Deactivating %s and all dependencies." % pkg.spec.short_spec) + tty.msg("Deactivating %s and all dependencies." % + pkg.spec.short_spec) topo_order = topological_sort(spec) index = spec.index() @@ -79,7 +82,9 @@ def deactivate(parser, args): epkg.do_deactivate(force=args.force) else: - tty.die("spack deactivate --all requires an extendable package or an extension.") + tty.die( + "spack deactivate --all requires an extendable package " + "or an extension.") else: if not pkg.is_extension: diff --git a/lib/spack/spack/cmd/dependents.py b/lib/spack/spack/cmd/dependents.py index 78eb6847b84..7729105e62b 100644 --- a/lib/spack/spack/cmd/dependents.py +++ b/lib/spack/spack/cmd/dependents.py @@ -31,9 +31,11 @@ description = "Show installed packages that depend on another." + def setup_parser(subparser): subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help="specs to list dependencies of.") + 'spec', nargs=argparse.REMAINDER, + help="specs to list dependencies of.") def dependents(parser, args): @@ -42,5 +44,6 @@ def dependents(parser, args): tty.die("spack dependents takes only one spec.") fmt = '$_$@$%@$+$=$#' - deps = [d.format(fmt, color=True) for d in specs[0].package.installed_dependents] + deps = [d.format(fmt, color=True) + for d in specs[0].package.installed_dependents] tty.msg("Dependents of %s" % specs[0].format(fmt, color=True), *deps) diff --git a/lib/spack/spack/cmd/diy.py b/lib/spack/spack/cmd/diy.py index 643e6374b23..487654d261e 100644 --- a/lib/spack/spack/cmd/diy.py +++ b/lib/spack/spack/cmd/diy.py @@ -35,6 +35,7 @@ description = "Do-It-Yourself: build from an existing source directory." 
+ def setup_parser(subparser): subparser.add_argument( '-i', '--ignore-dependencies', action='store_true', dest='ignore_deps', @@ -76,14 +77,17 @@ def diy(self, args): return if not spec.versions.concrete: - tty.die("spack diy spec must have a single, concrete version. Did you forget a package version number?") + tty.die( + "spack diy spec must have a single, concrete version. " + "Did you forget a package version number?") spec.concretize() package = spack.repo.get(spec) if package.installed: tty.error("Already installed in %s" % package.prefix) - tty.msg("Uninstall or try adding a version suffix for this DIY build.") + tty.msg("Uninstall or try adding a version suffix for this " + "DIY build.") sys.exit(1) # Forces the build to run out of the current directory. diff --git a/lib/spack/spack/cmd/doc.py b/lib/spack/spack/cmd/doc.py index b3d0737d13d..291b17216fe 100644 --- a/lib/spack/spack/cmd/doc.py +++ b/lib/spack/spack/cmd/doc.py @@ -25,6 +25,7 @@ description = "Run pydoc from within spack." + def setup_parser(subparser): subparser.add_argument('entity', help="Run pydoc help on entity") diff --git a/lib/spack/spack/cmd/edit.py b/lib/spack/spack/cmd/edit.py index 49ab83867a0..286136dd672 100644 --- a/lib/spack/spack/cmd/edit.py +++ b/lib/spack/spack/cmd/edit.py @@ -68,7 +68,7 @@ def edit_package(name, repo_path, namespace, force=False): if os.path.exists(path): if not os.path.isfile(path): tty.die("Something's wrong. '%s' is not a file!" % path) - if not os.access(path, os.R_OK|os.W_OK): + if not os.access(path, os.R_OK | os.W_OK): tty.die("Insufficient permissions on '%s'!" % path) elif not force: tty.die("No package '%s'. Use spack create, or supply -f/--force " @@ -93,19 +93,23 @@ def setup_parser(subparser): # Various filetypes you can edit directly from the cmd line. excl_args.add_argument( '-c', '--command', dest='path', action='store_const', - const=spack.cmd.command_path, help="Edit the command with the supplied name.") + const=spack.cmd.command_path, + help="Edit the command with the supplied name.") excl_args.add_argument( '-t', '--test', dest='path', action='store_const', const=spack.test_path, help="Edit the test with the supplied name.") excl_args.add_argument( '-m', '--module', dest='path', action='store_const', - const=spack.module_path, help="Edit the main spack module with the supplied name.") + const=spack.module_path, + help="Edit the main spack module with the supplied name.") # Options for editing packages excl_args.add_argument( - '-r', '--repo', default=None, help="Path to repo to edit package in.") + '-r', '--repo', default=None, + help="Path to repo to edit package in.") excl_args.add_argument( - '-N', '--namespace', default=None, help="Namespace of package to edit.") + '-N', '--namespace', default=None, + help="Namespace of package to edit.") subparser.add_argument( 'name', nargs='?', default=None, help="name of package to edit") diff --git a/lib/spack/spack/cmd/env.py b/lib/spack/spack/cmd/env.py index 85d111e91e5..f3bad039d4c 100644 --- a/lib/spack/spack/cmd/env.py +++ b/lib/spack/spack/cmd/env.py @@ -28,11 +28,13 @@ import spack.cmd import spack.build_environment as build_env -description = "Run a command with the environment for a particular spec's install." +description = "Run a command with the install environment for a spec." 
+ def setup_parser(subparser): subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help="specs of package environment to emulate.") + 'spec', nargs=argparse.REMAINDER, + help="specs of package environment to emulate.") def env(parser, args): @@ -47,7 +49,7 @@ def env(parser, args): if sep in args.spec: s = args.spec.index(sep) spec = args.spec[:s] - cmd = args.spec[s+1:] + cmd = args.spec[s + 1:] else: spec = args.spec[0] cmd = args.spec[1:] diff --git a/lib/spack/spack/cmd/extensions.py b/lib/spack/spack/cmd/extensions.py index 11659e0c967..b5c484305f0 100644 --- a/lib/spack/spack/cmd/extensions.py +++ b/lib/spack/spack/cmd/extensions.py @@ -22,7 +22,6 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import sys import argparse import llnl.util.tty as tty @@ -34,6 +33,7 @@ description = "List extensions for package." + def setup_parser(subparser): format_group = subparser.add_mutually_exclusive_group() format_group.add_argument( @@ -47,7 +47,8 @@ def setup_parser(subparser): help='Show full dependency DAG of extensions') subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help='Spec of package to list extensions for') + 'spec', nargs=argparse.REMAINDER, + help='Spec of package to list extensions for') def extensions(parser, args): @@ -85,7 +86,8 @@ def extensions(parser, args): # # List specs of installed extensions. # - installed = [s.spec for s in spack.installed_db.installed_extensions_for(spec)] + installed = [ + s.spec for s in spack.installed_db.installed_extensions_for(spec)] print if not installed: tty.msg("None installed.") @@ -102,4 +104,5 @@ def extensions(parser, args): tty.msg("None activated.") return tty.msg("%d currently activated:" % len(activated)) - spack.cmd.find.display_specs(activated.values(), mode=args.mode, long=args.long) + spack.cmd.find.display_specs( + activated.values(), mode=args.mode, long=args.long) diff --git a/lib/spack/spack/cmd/fetch.py b/lib/spack/spack/cmd/fetch.py index 1afc51d9fae..c1ac2ed48d1 100644 --- a/lib/spack/spack/cmd/fetch.py +++ b/lib/spack/spack/cmd/fetch.py @@ -29,16 +29,21 @@ description = "Fetch archives for packages" + def setup_parser(subparser): subparser.add_argument( '-n', '--no-checksum', action='store_true', dest='no_checksum', help="Do not check packages against checksum") subparser.add_argument( - '-m', '--missing', action='store_true', help="Also fetch all missing dependencies") + '-m', '--missing', action='store_true', + help="Also fetch all missing dependencies") subparser.add_argument( - '-D', '--dependencies', action='store_true', help="Also fetch all dependencies") + '-D', '--dependencies', action='store_true', + help="Also fetch all dependencies") subparser.add_argument( - 'packages', nargs=argparse.REMAINDER, help="specs of packages to fetch") + 'packages', nargs=argparse.REMAINDER, + help="specs of packages to fetch") + def fetch(parser, args): if not args.packages: @@ -50,7 +55,6 @@ def fetch(parser, args): specs = spack.cmd.parse_specs(args.packages, concretize=True) for spec in specs: if args.missing or args.dependencies: - to_fetch = set() for s in spec.traverse(deptype_query=spack.alldeps): package = spack.repo.get(s) if args.missing and package.installed: diff --git a/lib/spack/spack/cmd/graph.py b/lib/spack/spack/cmd/graph.py index da651218364..8faabfbb7bb 100644 --- a/lib/spack/spack/cmd/graph.py +++ 
b/lib/spack/spack/cmd/graph.py @@ -30,6 +30,7 @@ description = "Generate graphs of package dependency relationships." + def setup_parser(subparser): setup_parser.parser = subparser @@ -42,10 +43,12 @@ def setup_parser(subparser): help="Generate graph in dot format and print to stdout.") subparser.add_argument( - '--concretize', action='store_true', help="Concretize specs before graphing.") + '--concretize', action='store_true', + help="Concretize specs before graphing.") subparser.add_argument( - 'specs', nargs=argparse.REMAINDER, help="specs of packages to graph.") + 'specs', nargs=argparse.REMAINDER, + help="specs of packages to graph.") def graph(parser, args): @@ -56,11 +59,11 @@ def graph(parser, args): setup_parser.parser.print_help() return 1 - if args.dot: # Dot graph only if asked for. + if args.dot: # Dot graph only if asked for. graph_dot(*specs) - elif specs: # ascii is default: user doesn't need to provide it explicitly + elif specs: # ascii is default: user doesn't need to provide it explicitly graph_ascii(specs[0], debug=spack.debug) for spec in specs[1:]: - print # extra line bt/w independent graphs + print # extra line bt/w independent graphs graph_ascii(spec, debug=spack.debug) diff --git a/lib/spack/spack/cmd/help.py b/lib/spack/spack/cmd/help.py index 1d231618391..5bc8fc3e743 100644 --- a/lib/spack/spack/cmd/help.py +++ b/lib/spack/spack/cmd/help.py @@ -22,14 +22,14 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import sys - description = "Get help on spack and its commands" + def setup_parser(subparser): subparser.add_argument('help_command', nargs='?', default=None, help='command to get help on') + def help(parser, args): if args.help_command: parser.parse_args([args.help_command, '-h']) diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py index 4c076322a9d..7663a97a284 100644 --- a/lib/spack/spack/cmd/install.py +++ b/lib/spack/spack/cmd/install.py @@ -31,6 +31,7 @@ description = "Build and install packages" + def setup_parser(subparser): subparser.add_argument( '-i', '--ignore-dependencies', action='store_true', dest='ignore_deps', @@ -52,18 +53,18 @@ def setup_parser(subparser): help="Display verbose build output while installing.") subparser.add_argument( '--fake', action='store_true', dest='fake', - help="Fake install. Just remove the prefix and touch a fake file in it.") + help="Fake install. Just remove prefix and create a fake file.") subparser.add_argument( '--dirty', action='store_true', dest='dirty', help="Install a package *without* cleaning the environment.") subparser.add_argument( - 'packages', nargs=argparse.REMAINDER, help="specs of packages to install") + 'packages', nargs=argparse.REMAINDER, + help="specs of packages to install") subparser.add_argument( '--run-tests', action='store_true', dest='run_tests', help="Run tests during installation of a package.") - def install(parser, args): if not args.packages: tty.die("install requires at least one package argument") diff --git a/lib/spack/spack/cmd/load.py b/lib/spack/spack/cmd/load.py index 205abbb6b37..85190a5d0b5 100644 --- a/lib/spack/spack/cmd/load.py +++ b/lib/spack/spack/cmd/load.py @@ -25,13 +25,16 @@ import argparse import spack.modules -description ="Add package to environment using modules." +description = "Add package to environment using modules." 
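Several of the parsers reflowed above declare their positional spec argument with nargs=argparse.REMAINDER, so everything after the subcommand is passed through to Spack's own spec parser untouched. A small self-contained sketch of that pattern (demo names only, not Spack code):

    import argparse

    parser = argparse.ArgumentParser(prog='demo')
    sub = parser.add_subparsers(dest='command')
    load = sub.add_parser('load')
    # REMAINDER swallows every remaining token, even ones that look like options.
    load.add_argument('spec', nargs=argparse.REMAINDER)

    args = parser.parse_args(['load', 'mpich', '@3.2', '%gcc'])
    # args.spec == ['mpich', '@3.2', '%gcc']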
+ def setup_parser(subparser): """Parser is only constructed so that this prints a nice help message with -h. """ subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help="Spec of package to load with modules. (If -, read specs from STDIN)") + 'spec', nargs=argparse.REMAINDER, + help="Spec of package to load with modules. " + "(If -, read specs from STDIN)") def load(parser, args): diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py index b0dbb1a550f..b9c8b5c3301 100644 --- a/lib/spack/spack/cmd/location.py +++ b/lib/spack/spack/cmd/location.py @@ -22,8 +22,6 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os -import sys import argparse import llnl.util.tty as tty @@ -32,16 +30,19 @@ import spack import spack.cmd -description="Print out locations of various directories used by Spack" +description = "Print out locations of various directories used by Spack" + def setup_parser(subparser): global directories directories = subparser.add_mutually_exclusive_group() directories.add_argument( - '-m', '--module-dir', action='store_true', help="Spack python module directory.") + '-m', '--module-dir', action='store_true', + help="Spack python module directory.") directories.add_argument( - '-r', '--spack-root', action='store_true', help="Spack installation root.") + '-r', '--spack-root', action='store_true', + help="Spack installation root.") directories.add_argument( '-i', '--install-dir', action='store_true', @@ -53,15 +54,19 @@ def setup_parser(subparser): '-P', '--packages', action='store_true', help="Top-level packages directory for Spack.") directories.add_argument( - '-s', '--stage-dir', action='store_true', help="Stage directory for a spec.") + '-s', '--stage-dir', action='store_true', + help="Stage directory for a spec.") directories.add_argument( - '-S', '--stages', action='store_true', help="Top level Stage directory.") + '-S', '--stages', action='store_true', + help="Top level Stage directory.") directories.add_argument( '-b', '--build-dir', action='store_true', - help="Checked out or expanded source directory for a spec (requires it to be staged first).") + help="Checked out or expanded source directory for a spec " + "(requires it to be staged first).") subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help="spec of package to fetch directory for.") + 'spec', nargs=argparse.REMAINDER, + help="spec of package to fetch directory for.") def location(parser, args): @@ -104,9 +109,9 @@ def location(parser, args): if args.stage_dir: print pkg.stage.path - else: # args.build_dir is the default. + else: # args.build_dir is the default. if not pkg.stage.source_path: - tty.die("Build directory does not exist yet. Run this to create it:", + tty.die("Build directory does not exist yet. " + "Run this to create it:", "spack stage " + " ".join(args.spec)) print pkg.stage.source_path - diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py index 0cf682fc4fd..585faaf5249 100644 --- a/lib/spack/spack/cmd/mirror.py +++ b/lib/spack/spack/cmd/mirror.py @@ -23,7 +23,6 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os -import sys from datetime import datetime import argparse @@ -40,6 +39,7 @@ description = "Manage mirrors." 
+ def setup_parser(subparser): subparser.add_argument( '-n', '--no-checksum', action='store_true', dest='no_checksum', @@ -61,8 +61,9 @@ def setup_parser(subparser): '-D', '--dependencies', action='store_true', help="Also fetch all dependencies") create_parser.add_argument( - '-o', '--one-version-per-spec', action='store_const', const=1, default=0, - help="Only fetch one 'preferred' version per spec, not all known versions.") + '-o', '--one-version-per-spec', action='store_const', + const=1, default=0, + help="Only fetch one 'preferred' version per spec, not all known.") scopes = spack.config.config_scopes @@ -70,7 +71,7 @@ def setup_parser(subparser): add_parser = sp.add_parser('add', help=mirror_add.__doc__) add_parser.add_argument('name', help="Mnemonic name for mirror.") add_parser.add_argument( - 'url', help="URL of mirror directory created by 'spack mirror create'.") + 'url', help="URL of mirror directory from 'spack mirror create'.") add_parser.add_argument( '--scope', choices=scopes, default=spack.cmd.default_modify_scope, help="Configuration scope to modify.") @@ -107,7 +108,7 @@ def mirror_add(args): tty.die("Mirror with url %s already exists." % url) # should only be one item per mirror dict. - items = [(n,u) for n,u in mirrors.items()] + items = [(n, u) for n, u in mirrors.items()] items.insert(0, (args.name, url)) mirrors = syaml_dict(items) spack.config.update_config('mirrors', mirrors, scope=args.scope) @@ -121,7 +122,7 @@ def mirror_remove(args): if not mirrors: mirrors = syaml_dict() - if not name in mirrors: + if name not in mirrors: tty.die("No mirror with name %s" % name) old_value = mirrors.pop(name) @@ -152,7 +153,7 @@ def _read_specs_from_file(filename): s.package specs.append(s) except SpackError, e: - tty.die("Parse error in %s, line %d:" % (args.file, i+1), + tty.die("Parse error in %s, line %d:" % (args.file, i + 1), ">>> " + string, str(e)) return specs @@ -214,10 +215,10 @@ def mirror_create(args): def mirror(parser, args): - action = { 'create' : mirror_create, - 'add' : mirror_add, - 'remove' : mirror_remove, - 'rm' : mirror_remove, - 'list' : mirror_list } + action = {'create': mirror_create, + 'add': mirror_add, + 'remove': mirror_remove, + 'rm': mirror_remove, + 'list': mirror_list} action[args.mirror_command](args) diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py index a10e36e077a..2d0b83fe00f 100644 --- a/lib/spack/spack/cmd/module.py +++ b/lib/spack/spack/cmd/module.py @@ -118,7 +118,8 @@ def loads(mtype, specs, args): seen_add = seen.add for spec in specs_from_user_constraint: specs.extend( - [item for item in spec.traverse(order='post', cover='nodes') if not (item in seen or seen_add(item))] # NOQA: ignore=E501 + [item for item in spec.traverse(order='post', cover='nodes') + if not (item in seen or seen_add(item))] ) module_cls = module_types[mtype] @@ -178,7 +179,9 @@ def rm(mtype, specs, args): # Ask for confirmation if not args.yes_to_all: - tty.msg('You are about to remove {0} module files the following specs:\n'.format(mtype)) # NOQA: ignore=E501 + tty.msg( + 'You are about to remove {0} module files the following specs:\n' + .format(mtype)) spack.cmd.display_specs(specs_with_modules, long=True) print('') spack.cmd.ask_for_confirmation('Do you want to proceed ? 
') @@ -197,7 +200,9 @@ def refresh(mtype, specs, args): return if not args.yes_to_all: - tty.msg('You are about to regenerate {name} module files for the following specs:\n'.format(name=mtype)) # NOQA: ignore=E501 + tty.msg( + 'You are about to regenerate {name} module files for:\n' + .format(name=mtype)) spack.cmd.display_specs(specs, long=True) print('') spack.cmd.ask_for_confirmation('Do you want to proceed ? ') @@ -245,11 +250,13 @@ def module(parser, args): try: callbacks[args.subparser_name](module_type, args.specs, args) except MultipleMatches: - message = 'the constraint \'{query}\' matches multiple packages, and this is not allowed in this context' # NOQA: ignore=E501 + message = ('the constraint \'{query}\' matches multiple packages, ' + 'and this is not allowed in this context') tty.error(message.format(query=constraint)) for s in args.specs: sys.stderr.write(s.format(color=True) + '\n') raise SystemExit(1) except NoMatch: - message = 'the constraint \'{query}\' match no package, and this is not allowed in this context' # NOQA: ignore=E501 + message = ('the constraint \'{query}\' match no package, ' + 'and this is not allowed in this context') tty.die(message.format(query=constraint)) diff --git a/lib/spack/spack/cmd/package-list.py b/lib/spack/spack/cmd/package-list.py index a27502d30e6..9ed42de823d 100644 --- a/lib/spack/spack/cmd/package-list.py +++ b/lib/spack/spack/cmd/package-list.py @@ -32,7 +32,7 @@ def github_url(pkg): """Link to a package file on github.""" - url = "https://github.com/llnl/spack/blob/master/var/spack/packages/%s/package.py" # NOQA: ignore=E501 + url = "https://github.com/llnl/spack/blob/master/var/spack/packages/%s/package.py" return (url % pkg.name) diff --git a/lib/spack/spack/cmd/patch.py b/lib/spack/spack/cmd/patch.py index a5507e42cfb..9c72da40b5d 100644 --- a/lib/spack/spack/cmd/patch.py +++ b/lib/spack/spack/cmd/patch.py @@ -29,14 +29,16 @@ import spack -description="Patch expanded archive sources in preparation for install" +description = "Patch expanded archive sources in preparation for install" + def setup_parser(subparser): subparser.add_argument( '-n', '--no-checksum', action='store_true', dest='no_checksum', help="Do not check downloaded packages against checksum") subparser.add_argument( - 'packages', nargs=argparse.REMAINDER, help="specs of packages to stage") + 'packages', nargs=argparse.REMAINDER, + help="specs of packages to stage") def patch(parser, args): diff --git a/lib/spack/spack/cmd/pkg.py b/lib/spack/spack/cmd/pkg.py index a24c2759fec..7791b93cf58 100644 --- a/lib/spack/spack/cmd/pkg.py +++ b/lib/spack/spack/cmd/pkg.py @@ -33,6 +33,7 @@ description = "Query packages associated with particular git revisions." 
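Several hunks above drop the old # NOQA: ignore=E501 markers and wrap the offending lines instead. Where a line genuinely cannot be wrapped, flake8's standard per-line suppression comment is the pattern to reach for; a purely illustrative example, not taken from this patch:

    # Illustrative only: silence the line-length check (E501) for one line.
    DOWNLOAD_URL = "http://www.example.com/a/deliberately/long/path/package-1.0.tar.gz"  # noqa: E501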
+ def setup_parser(subparser): sp = subparser.add_subparsers( metavar='SUBCOMMAND', dest='pkg_command') @@ -46,22 +47,28 @@ def setup_parser(subparser): help="Revision to list packages for.") diff_parser = sp.add_parser('diff', help=pkg_diff.__doc__) - diff_parser.add_argument('rev1', nargs='?', default='HEAD^', - help="Revision to compare against.") - diff_parser.add_argument('rev2', nargs='?', default='HEAD', - help="Revision to compare to rev1 (default is HEAD).") + diff_parser.add_argument( + 'rev1', nargs='?', default='HEAD^', + help="Revision to compare against.") + diff_parser.add_argument( + 'rev2', nargs='?', default='HEAD', + help="Revision to compare to rev1 (default is HEAD).") add_parser = sp.add_parser('added', help=pkg_added.__doc__) - add_parser.add_argument('rev1', nargs='?', default='HEAD^', - help="Revision to compare against.") - add_parser.add_argument('rev2', nargs='?', default='HEAD', - help="Revision to compare to rev1 (default is HEAD).") + add_parser.add_argument( + 'rev1', nargs='?', default='HEAD^', + help="Revision to compare against.") + add_parser.add_argument( + 'rev2', nargs='?', default='HEAD', + help="Revision to compare to rev1 (default is HEAD).") rm_parser = sp.add_parser('removed', help=pkg_removed.__doc__) - rm_parser.add_argument('rev1', nargs='?', default='HEAD^', - help="Revision to compare against.") - rm_parser.add_argument('rev2', nargs='?', default='HEAD', - help="Revision to compare to rev1 (default is HEAD).") + rm_parser.add_argument( + 'rev1', nargs='?', default='HEAD^', + help="Revision to compare against.") + rm_parser.add_argument( + 'rev2', nargs='?', default='HEAD', + help="Revision to compare to rev1 (default is HEAD).") def get_git(): @@ -88,7 +95,8 @@ def pkg_add(args): for pkg_name in args.packages: filename = spack.repo.filename_for_package_name(pkg_name) if not os.path.isfile(filename): - tty.die("No such package: %s. Path does not exist:" % pkg_name, filename) + tty.die("No such package: %s. 
Path does not exist:" % + pkg_name, filename) git = get_git() git('-C', spack.packages_path, 'add', filename) @@ -112,7 +120,8 @@ def pkg_diff(args): if u1: print "%s:" % args.rev1 colify(sorted(u1), indent=4) - if u1: print + if u1: + print if u2: print "%s:" % args.rev2 @@ -122,19 +131,21 @@ def pkg_diff(args): def pkg_removed(args): """Show packages removed since a commit.""" u1, u2 = diff_packages(args.rev1, args.rev2) - if u1: colify(sorted(u1)) + if u1: + colify(sorted(u1)) def pkg_added(args): """Show packages added since a commit.""" u1, u2 = diff_packages(args.rev1, args.rev2) - if u2: colify(sorted(u2)) + if u2: + colify(sorted(u2)) def pkg(parser, args): - action = { 'add' : pkg_add, - 'diff' : pkg_diff, - 'list' : pkg_list, - 'removed' : pkg_removed, - 'added' : pkg_added } + action = {'add': pkg_add, + 'diff': pkg_diff, + 'list': pkg_list, + 'removed': pkg_removed, + 'added': pkg_added} action[args.pkg_command](args) diff --git a/lib/spack/spack/cmd/providers.py b/lib/spack/spack/cmd/providers.py index e9007486d26..0f4a97cc4a7 100644 --- a/lib/spack/spack/cmd/providers.py +++ b/lib/spack/spack/cmd/providers.py @@ -22,7 +22,6 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os import argparse from llnl.util.tty.colify import colify @@ -30,11 +29,13 @@ import spack import spack.cmd -description ="List packages that provide a particular virtual package" +description = "List packages that provide a particular virtual package" + def setup_parser(subparser): - subparser.add_argument('vpkg_spec', metavar='VPACKAGE_SPEC', nargs=argparse.REMAINDER, - help='Find packages that provide this virtual package') + subparser.add_argument( + 'vpkg_spec', metavar='VPACKAGE_SPEC', nargs=argparse.REMAINDER, + help='Find packages that provide this virtual package') def providers(parser, args): diff --git a/lib/spack/spack/cmd/python.py b/lib/spack/spack/cmd/python.py index 59423271b95..12727cb5998 100644 --- a/lib/spack/spack/cmd/python.py +++ b/lib/spack/spack/cmd/python.py @@ -30,18 +30,22 @@ import spack + def setup_parser(subparser): subparser.add_argument( '-c', dest='python_command', help='Command to execute.') subparser.add_argument( - 'python_args', nargs=argparse.REMAINDER, help="File to run plus arguments.") + 'python_args', nargs=argparse.REMAINDER, + help="File to run plus arguments.") + description = "Launch an interpreter as spack would launch a command" + def python(parser, args): # Fake a main python shell by setting __name__ to __main__. - console = code.InteractiveConsole({'__name__' : '__main__', - 'spack' : spack}) + console = code.InteractiveConsole({'__name__': '__main__', + 'spack': spack}) if "PYTHONSTARTUP" in os.environ: startup_file = os.environ["PYTHONSTARTUP"] diff --git a/lib/spack/spack/cmd/reindex.py b/lib/spack/spack/cmd/reindex.py index 93eba7a0f1d..e37eebbd92a 100644 --- a/lib/spack/spack/cmd/reindex.py +++ b/lib/spack/spack/cmd/reindex.py @@ -22,10 +22,10 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import argparse import spack description = "Rebuild Spack's package database." 
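The pkg() entry point above, like compiler(), config(), and mirror() earlier in the patch, routes its subcommand through a plain dict instead of an if/elif chain. A minimal standalone sketch of the pattern (demo functions only):

    def do_list(args):
        return 'list: ' + ' '.join(args)

    def do_diff(args):
        return 'diff: ' + ' '.join(args)

    # Map subcommand names to handlers, then dispatch with a single lookup.
    action = {'list': do_list,
              'diff': do_diff}

    result = action['diff'](['HEAD^', 'HEAD'])   # 'diff: HEAD^ HEAD'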
+ def reindex(parser, args): spack.installed_db.reindex(spack.install_layout) diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py index cbd8f4784e9..5ab2ac08333 100644 --- a/lib/spack/spack/cmd/repo.py +++ b/lib/spack/spack/cmd/repo.py @@ -23,20 +23,16 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os -import re -import shutil -import argparse import llnl.util.tty as tty -from llnl.util.filesystem import join_path, mkdirp import spack.spec import spack.config -from spack.util.environment import get_path from spack.repository import * description = "Manage package source repositories." + def setup_parser(subparser): sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='repo_command') scopes = spack.config.config_scopes @@ -57,13 +53,15 @@ def setup_parser(subparser): # Add add_parser = sp.add_parser('add', help=repo_add.__doc__) - add_parser.add_argument('path', help="Path to a Spack package repository directory.") + add_parser.add_argument( + 'path', help="Path to a Spack package repository directory.") add_parser.add_argument( '--scope', choices=scopes, default=spack.cmd.default_modify_scope, help="Configuration scope to modify.") # Remove - remove_parser = sp.add_parser('remove', help=repo_remove.__doc__, aliases=['rm']) + remove_parser = sp.add_parser( + 'remove', help=repo_remove.__doc__, aliases=['rm']) remove_parser.add_argument( 'path_or_namespace', help="Path or namespace of a Spack package repository.") @@ -100,7 +98,8 @@ def repo_add(args): # If that succeeds, finally add it to the configuration. repos = spack.config.get_config('repos', args.scope) - if not repos: repos = [] + if not repos: + repos = [] if repo.root in repos or path in repos: tty.die("Repository is already registered with Spack: %s" % path) @@ -135,7 +134,7 @@ def repo_remove(args): tty.msg("Removed repository %s with namespace '%s'." % (repo.root, repo.namespace)) return - except RepoError as e: + except RepoError: continue tty.die("No repository with path or namespace: %s" @@ -149,7 +148,7 @@ def repo_list(args): for r in roots: try: repos.append(Repo(r)) - except RepoError as e: + except RepoError: continue msg = "%d package repositor" % len(repos) @@ -166,9 +165,9 @@ def repo_list(args): def repo(parser, args): - action = { 'create' : repo_create, - 'list' : repo_list, - 'add' : repo_add, - 'remove' : repo_remove, - 'rm' : repo_remove} + action = {'create': repo_create, + 'list': repo_list, + 'add': repo_add, + 'remove': repo_remove, + 'rm': repo_remove} action[args.repo_command](args) diff --git a/lib/spack/spack/cmd/restage.py b/lib/spack/spack/cmd/restage.py index 325d30662f4..969afe09bda 100644 --- a/lib/spack/spack/cmd/restage.py +++ b/lib/spack/spack/cmd/restage.py @@ -31,6 +31,7 @@ description = "Revert checked out package source code." + def setup_parser(subparser): subparser.add_argument('packages', nargs=argparse.REMAINDER, help="specs of packages to restage") diff --git a/lib/spack/spack/cmd/setup.py b/lib/spack/spack/cmd/setup.py index 04f3d663df5..b55e102c0e8 100644 --- a/lib/spack/spack/cmd/setup.py +++ b/lib/spack/spack/cmd/setup.py @@ -35,6 +35,7 @@ description = "Create a configuration script and module, but don't build." 
+ def setup_parser(subparser): subparser.add_argument( '-i', '--ignore-dependencies', action='store_true', dest='ignore_deps', @@ -70,7 +71,9 @@ def setup(self, args): return if not spec.versions.concrete: - tty.die("spack setup spec must have a single, concrete version. Did you forget a package version number?") + tty.die( + "spack setup spec must have a single, concrete version. " + "Did you forget a package version number?") spec.concretize() package = spack.repo.get(spec) @@ -84,8 +87,8 @@ def setup(self, args): spack.do_checksum = False package.do_install( - keep_prefix=True, # Don't remove install directory, even if you think you should + keep_prefix=True, # Don't remove install directory ignore_deps=args.ignore_deps, verbose=args.verbose, keep_stage=True, # don't remove source dir for SETUP. - install_phases = set(['setup', 'provenance'])) + install_phases=set(['setup', 'provenance'])) diff --git a/lib/spack/spack/cmd/spec.py b/lib/spack/spack/cmd/spec.py index 321e3e429b9..6e6d1c1277e 100644 --- a/lib/spack/spack/cmd/spec.py +++ b/lib/spack/spack/cmd/spec.py @@ -25,23 +25,22 @@ import argparse import spack.cmd -import llnl.util.tty as tty - import spack -import spack.url as url description = "print out abstract and concrete versions of a spec." + def setup_parser(subparser): subparser.add_argument('-i', '--ids', action='store_true', help="show numerical ids for dependencies.") - subparser.add_argument('specs', nargs=argparse.REMAINDER, help="specs of packages") + subparser.add_argument( + 'specs', nargs=argparse.REMAINDER, help="specs of packages") def spec(parser, args): - kwargs = { 'ids' : args.ids, - 'indent' : 2, - 'color' : True } + kwargs = {'ids': args.ids, + 'indent': 2, + 'color': True} for spec in spack.cmd.parse_specs(args.specs): print "Input spec" diff --git a/lib/spack/spack/cmd/stage.py b/lib/spack/spack/cmd/stage.py index 61e9c6d9fff..bfc2e5f4561 100644 --- a/lib/spack/spack/cmd/stage.py +++ b/lib/spack/spack/cmd/stage.py @@ -22,14 +22,14 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os import argparse import llnl.util.tty as tty import spack import spack.cmd -description="Expand downloaded archive in preparation for install" +description = "Expand downloaded archive in preparation for install" + def setup_parser(subparser): subparser.add_argument( diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py index 14c06d136d5..8e7173e9a2b 100644 --- a/lib/spack/spack/cmd/test-install.py +++ b/lib/spack/spack/cmd/test-install.py @@ -36,25 +36,25 @@ from spack.build_environment import InstallError from spack.fetch_strategy import FetchError -description = "Run package installation as a unit test, output formatted results." +description = "Run package install as a unit test, output formatted results." def setup_parser(subparser): - subparser.add_argument('-j', - '--jobs', - action='store', - type=int, - help="Explicitly set number of make jobs. Default is #cpus.") + subparser.add_argument( + '-j', '--jobs', action='store', type=int, + help="Explicitly set number of make jobs. 
Default is #cpus.") - subparser.add_argument('-n', - '--no-checksum', - action='store_true', - dest='no_checksum', - help="Do not check packages against checksum") + subparser.add_argument( + '-n', '--no-checksum', action='store_true', dest='no_checksum', + help="Do not check packages against checksum") - subparser.add_argument('-o', '--output', action='store', help="test output goes in this file") + subparser.add_argument( + '-o', '--output', action='store', + help="test output goes in this file") - subparser.add_argument('package', nargs=argparse.REMAINDER, help="spec of package to install") + subparser.add_argument( + 'package', nargs=argparse.REMAINDER, + help="spec of package to install") class TestResult(object): @@ -65,6 +65,7 @@ class TestResult(object): class TestSuite(object): + def __init__(self, filename): self.filename = filename self.root = ET.Element('testsuite') @@ -75,14 +76,17 @@ def __enter__(self): def append(self, item): if not isinstance(item, TestCase): - raise TypeError('only TestCase instances may be appended to a TestSuite instance') + raise TypeError( + 'only TestCase instances may be appended to TestSuite') self.tests.append(item) # Append the item to the list of tests def __exit__(self, exc_type, exc_val, exc_tb): # Prepare the header for the entire test suite - number_of_errors = sum(x.result_type == TestResult.ERRORED for x in self.tests) + number_of_errors = sum( + x.result_type == TestResult.ERRORED for x in self.tests) self.root.set('errors', str(number_of_errors)) - number_of_failures = sum(x.result_type == TestResult.FAILED for x in self.tests) + number_of_failures = sum( + x.result_type == TestResult.FAILED for x in self.tests) self.root.set('failures', str(number_of_failures)) self.root.set('tests', str(len(self.tests))) @@ -112,7 +116,8 @@ def __init__(self, classname, name, time=None): self.element.set('time', str(time)) self.result_type = None - def set_result(self, result_type, message=None, error_type=None, text=None): + def set_result(self, result_type, + message=None, error_type=None, text=None): self.result_type = result_type result = TestCase.results[self.result_type] if result is not None and result is not TestResult.PASSED: @@ -155,13 +160,19 @@ def install_single_spec(spec, number_of_jobs): # If it is already installed, skip the test if spack.repo.get(spec).installed: testcase = TestCase(package.name, package.spec.short_spec, time=0.0) - testcase.set_result(TestResult.SKIPPED, message='Skipped [already installed]', error_type='already_installed') + testcase.set_result( + TestResult.SKIPPED, + message='Skipped [already installed]', + error_type='already_installed') return testcase # If it relies on dependencies that did not install, skip if failed_dependencies(spec): testcase = TestCase(package.name, package.spec.short_spec, time=0.0) - testcase.set_result(TestResult.SKIPPED, message='Skipped [failed dependencies]', error_type='dep_failed') + testcase.set_result( + TestResult.SKIPPED, + message='Skipped [failed dependencies]', + error_type='dep_failed') return testcase # Otherwise try to install the spec @@ -177,26 +188,30 @@ def install_single_spec(spec, number_of_jobs): testcase = TestCase(package.name, package.spec.short_spec, duration) testcase.set_result(TestResult.PASSED) except InstallError: - # An InstallError is considered a failure (the recipe didn't work correctly) + # An InstallError is considered a failure (the recipe didn't work + # correctly) duration = time.time() - start_time # Try to get the log lines = 
fetch_log(package.build_log_path) text = '\n'.join(lines) testcase = TestCase(package.name, package.spec.short_spec, duration) - testcase.set_result(TestResult.FAILED, message='Installation failure', text=text) + testcase.set_result(TestResult.FAILED, + message='Installation failure', text=text) except FetchError: # A FetchError is considered an error (we didn't even start building) duration = time.time() - start_time testcase = TestCase(package.name, package.spec.short_spec, duration) - testcase.set_result(TestResult.ERRORED, message='Unable to fetch package') + testcase.set_result(TestResult.ERRORED, + message='Unable to fetch package') return testcase def get_filename(args, top_spec): if not args.output: - fname = 'test-{x.name}-{x.version}-{hash}.xml'.format(x=top_spec, hash=top_spec.dag_hash()) + fname = 'test-{x.name}-{x.version}-{hash}.xml'.format( + x=top_spec, hash=top_spec.dag_hash()) output_directory = join_path(os.getcwd(), 'test-output') if not os.path.exists(output_directory): os.mkdir(output_directory) diff --git a/lib/spack/spack/cmd/test.py b/lib/spack/spack/cmd/test.py index b9f2a449aee..bf7342f6065 100644 --- a/lib/spack/spack/cmd/test.py +++ b/lib/spack/spack/cmd/test.py @@ -52,6 +52,7 @@ def setup_parser(subparser): class MockCache(object): + def store(self, copyCmd, relativeDst): pass @@ -60,6 +61,7 @@ def fetcher(self, targetPath, digest): class MockCacheFetcher(object): + def set_stage(self, stage): pass diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py index dbe6cd65847..8957d1c908d 100644 --- a/lib/spack/spack/cmd/uninstall.py +++ b/lib/spack/spack/cmd/uninstall.py @@ -50,25 +50,27 @@ def setup_parser(subparser): subparser.add_argument( '-f', '--force', action='store_true', dest='force', help="Remove regardless of whether other packages depend on this one.") + subparser.add_argument( '-a', '--all', action='store_true', dest='all', - help="USE CAREFULLY. Remove ALL installed packages that match each " + - "supplied spec. i.e., if you say uninstall libelf, ALL versions of " + # NOQA: ignore=E501 - "libelf are uninstalled. This is both useful and dangerous, like rm -r.") # NOQA: ignore=E501 + help="USE CAREFULLY. Remove ALL installed packages that match each " + "supplied spec. i.e., if you say uninstall libelf, ALL versions " + "of libelf are uninstalled. This is both useful and dangerous, " + "like rm -r.") + subparser.add_argument( '-d', '--dependents', action='store_true', dest='dependents', - help='Also uninstall any packages that depend on the ones given via command line.' # NOQA: ignore=E501 - ) + help='Also uninstall any packages that depend on the ones given ' + 'via command line.') + subparser.add_argument( '-y', '--yes-to-all', action='store_true', dest='yes_to_all', - help='Assume "yes" is the answer to every confirmation asked to the user.' # NOQA: ignore=E501 + help='Assume "yes" is the answer to every confirmation requested') - ) subparser.add_argument( 'packages', nargs=argparse.REMAINDER, - help="specs of packages to uninstall" - ) + help="specs of packages to uninstall") def concretize_specs(specs, allow_multiple_matches=False, force=False): diff --git a/lib/spack/spack/cmd/unload.py b/lib/spack/spack/cmd/unload.py index 7bd15750ed2..b52bedb7b48 100644 --- a/lib/spack/spack/cmd/unload.py +++ b/lib/spack/spack/cmd/unload.py @@ -25,13 +25,15 @@ import argparse import spack.modules -description ="Remove package from environment using module." +description = "Remove package from environment using module." 
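The test-install.py hunks above build a JUnit-style XML report with xml.etree.ElementTree: a testsuite root carrying error and failure counts, and one testcase element per spec with an optional failure or error child. A rough standalone sketch of the same shape (the results data and filename are made up for illustration):

    import xml.etree.ElementTree as ET

    results = [('libelf', None), ('mpich', 'Installation failure')]

    root = ET.Element('testsuite')
    root.set('tests', str(len(results)))
    root.set('failures', str(sum(1 for _, msg in results if msg is not None)))

    for name, msg in results:
        case = ET.SubElement(root, 'testcase', classname=name, name=name)
        if msg is not None:
            ET.SubElement(case, 'failure', message=msg)

    # Write the report; the real command derives the filename from the spec hash.
    ET.ElementTree(root).write('test-output.xml')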
+ def setup_parser(subparser): """Parser is only constructed so that this prints a nice help message with -h. """ subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help='Spec of package to unload with modules.') + 'spec', nargs=argparse.REMAINDER, + help='Spec of package to unload with modules.') def unload(parser, args): diff --git a/lib/spack/spack/cmd/unuse.py b/lib/spack/spack/cmd/unuse.py index 789a690e9c4..6403cf61628 100644 --- a/lib/spack/spack/cmd/unuse.py +++ b/lib/spack/spack/cmd/unuse.py @@ -25,13 +25,15 @@ import argparse import spack.modules -description ="Remove package from environment using dotkit." +description = "Remove package from environment using dotkit." + def setup_parser(subparser): """Parser is only constructed so that this prints a nice help message with -h. """ subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help='Spec of package to unuse with dotkit.') + 'spec', nargs=argparse.REMAINDER, + help='Spec of package to unuse with dotkit.') def unuse(parser, args): diff --git a/lib/spack/spack/cmd/url-parse.py b/lib/spack/spack/cmd/url-parse.py index ce12a17d13d..b8c7c950409 100644 --- a/lib/spack/spack/cmd/url-parse.py +++ b/lib/spack/spack/cmd/url-parse.py @@ -22,28 +22,28 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import sys - import llnl.util.tty as tty import spack import spack.url from spack.util.web import find_versions_of_archive -description = "Show parsing of a URL, optionally spider web for other versions." +description = "Show parsing of a URL, optionally spider web for versions." + def setup_parser(subparser): subparser.add_argument('url', help="url of a package archive") subparser.add_argument( - '-s', '--spider', action='store_true', help="Spider the source page for versions.") + '-s', '--spider', action='store_true', + help="Spider the source page for versions.") def print_name_and_version(url): name, ns, nl, ntup, ver, vs, vl, vtup = spack.url.substitution_offsets(url) - underlines = [" "] * max(ns+nl, vs+vl) - for i in range(ns, ns+nl): + underlines = [" "] * max(ns + nl, vs + vl) + for i in range(ns, ns + nl): underlines[i] = '-' - for i in range(vs, vs+vl): + for i in range(vs, vs + vl): underlines[i] = '~' print " %s" % url diff --git a/lib/spack/spack/cmd/urls.py b/lib/spack/spack/cmd/urls.py index 2fe2019a22b..f151581d7db 100644 --- a/lib/spack/spack/cmd/urls.py +++ b/lib/spack/spack/cmd/urls.py @@ -22,12 +22,12 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import sys import spack import spack.url description = "Inspect urls used by packages in spack." 
+ def setup_parser(subparser): subparser.add_argument( '-c', '--color', action='store_true', @@ -53,6 +53,7 @@ def urls(parser, args): for url in sorted(urls): if args.color or args.extrapolation: - print spack.url.color_url(url, subs=args.extrapolation, errors=True) + print spack.url.color_url( + url, subs=args.extrapolation, errors=True) else: print url diff --git a/lib/spack/spack/cmd/use.py b/lib/spack/spack/cmd/use.py index bbb90fde1b9..e3612ace483 100644 --- a/lib/spack/spack/cmd/use.py +++ b/lib/spack/spack/cmd/use.py @@ -25,13 +25,15 @@ import argparse import spack.modules -description ="Add package to environment using dotkit." +description = "Add package to environment using dotkit." + def setup_parser(subparser): """Parser is only constructed so that this prints a nice help message with -h. """ subparser.add_argument( - 'spec', nargs=argparse.REMAINDER, help='Spec of package to use with dotkit.') + 'spec', nargs=argparse.REMAINDER, + help='Spec of package to use with dotkit.') def use(parser, args): diff --git a/lib/spack/spack/cmd/versions.py b/lib/spack/spack/cmd/versions.py index ec3a4b2e342..1e95225ab8c 100644 --- a/lib/spack/spack/cmd/versions.py +++ b/lib/spack/spack/cmd/versions.py @@ -22,15 +22,16 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os from llnl.util.tty.colify import colify import llnl.util.tty as tty import spack -description ="List available versions of a package" +description = "List available versions of a package" + def setup_parser(subparser): - subparser.add_argument('package', metavar='PACKAGE', help='Package to list versions for') + subparser.add_argument('package', metavar='PACKAGE', + help='Package to list versions for') def versions(parser, args): diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py index ce4555bc565..a77991e4dc6 100644 --- a/lib/spack/spack/compiler.py +++ b/lib/spack/spack/compiler.py @@ -25,10 +25,8 @@ import os import re import itertools -from datetime import datetime import llnl.util.tty as tty -from llnl.util.lang import memoized from llnl.util.filesystem import join_path import spack.error @@ -37,10 +35,10 @@ from spack.util.multiproc import parmap from spack.util.executable import * from spack.util.environment import get_path -from spack.version import Version __all__ = ['Compiler', 'get_compiler_version'] + def _verify_executables(*paths): for path in paths: if not os.path.isfile(path) and os.access(path, os.X_OK): @@ -49,8 +47,9 @@ def _verify_executables(*paths): _version_cache = {} + def get_compiler_version(compiler_path, version_arg, regex='(.*)'): - if not compiler_path in _version_cache: + if compiler_path not in _version_cache: compiler = Executable(compiler_path) output = compiler(version_arg, output=str, error=str) @@ -113,7 +112,7 @@ def fc_rpath_arg(self): # Name of module used to switch versions of this compiler PrgEnv_compiler = None - def __init__(self, cspec, operating_system, + def __init__(self, cspec, operating_system, paths, modules=[], alias=None, **kwargs): def check(exe): if exe is None: @@ -130,11 +129,6 @@ def check(exe): else: self.fc = check(paths[3]) - #self.cc = check(cc) - #self.cxx = check(cxx) - #self.f77 = check(f77) - #self.fc = check(fc) - # Unfortunately have to make sure these params are accepted # in the same order they are returned by sorted(flags) # in compilers/__init__.py @@ 
-158,31 +152,30 @@ def version(self): @property def openmp_flag(self): # If it is not overridden, assume it is not supported and warn the user - tty.die("The compiler you have chosen does not currently support OpenMP.", - "If you think it should, please edit the compiler subclass and", - "submit a pull request or issue.") - + tty.die( + "The compiler you have chosen does not currently support OpenMP.", + "If you think it should, please edit the compiler subclass and", + "submit a pull request or issue.") # This property should be overridden in the compiler subclass if # C++11 is supported by that compiler @property def cxx11_flag(self): # If it is not overridden, assume it is not supported and warn the user - tty.die("The compiler you have chosen does not currently support C++11.", - "If you think it should, please edit the compiler subclass and", - "submit a pull request or issue.") - + tty.die( + "The compiler you have chosen does not currently support C++11.", + "If you think it should, please edit the compiler subclass and", + "submit a pull request or issue.") # This property should be overridden in the compiler subclass if # C++14 is supported by that compiler @property def cxx14_flag(self): # If it is not overridden, assume it is not supported and warn the user - tty.die("The compiler you have chosen does not currently support C++14.", - "If you think it should, please edit the compiler subclass and", - "submit a pull request or issue.") - - + tty.die( + "The compiler you have chosen does not currently support C++14.", + "If you think it should, please edit the compiler subclass and", + "submit a pull request or issue.") # # Compiler classes have methods for querying the version of @@ -191,7 +184,6 @@ def cxx14_flag(self): # Compiler *instances* are just data objects, and can only be # constructed from an actual set of executables. # - @classmethod def default_version(cls, cc): """Override just this to override all compiler version functions.""" @@ -258,16 +250,19 @@ def check(key): version = detect_version(full_path) return (version, prefix, suffix, full_path) except ProcessError, e: - tty.debug("Couldn't get version for compiler %s" % full_path, e) + tty.debug( + "Couldn't get version for compiler %s" % full_path, e) return None except Exception, e: # Catching "Exception" here is fine because it just # means something went wrong running a candidate executable. - tty.debug("Error while executing candidate compiler %s" % full_path, - "%s: %s" %(e.__class__.__name__, e)) + tty.debug("Error while executing candidate compiler %s" + % full_path, + "%s: %s" % (e.__class__.__name__, e)) return None - successful = [key for key in parmap(check, checks) if key is not None] + successful = [k for k in parmap(check, checks) if k is not None] + # The 'successful' list is ordered like the input paths. # Reverse it here so that the dict creation (last insert wins) # does not spoil the intented precedence. 
@@ -278,20 +273,23 @@ def __repr__(self): """Return a string representation of the compiler toolchain.""" return self.__str__() - def __str__(self): """Return a string representation of the compiler toolchain.""" return "%s(%s)" % ( - self.name, '\n '.join((str(s) for s in (self.cc, self.cxx, self.f77, self.fc, self.modules, str(self.operating_system))))) + self.name, '\n '.join((str(s) for s in ( + self.cc, self.cxx, self.f77, self.fc, self.modules, + str(self.operating_system))))) class CompilerAccessError(spack.error.SpackError): + def __init__(self, path): super(CompilerAccessError, self).__init__( "'%s' is not a valid compiler." % path) class InvalidCompilerError(spack.error.SpackError): + def __init__(self): super(InvalidCompilerError, self).__init__( "Compiler has no executables.") diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py index 0ba94741daa..eb866c8bbb0 100644 --- a/lib/spack/spack/compilers/__init__.py +++ b/lib/spack/spack/compilers/__init__.py @@ -26,15 +26,9 @@ system and configuring Spack to use multiple compilers. """ import imp -import os import platform -import copy -import hashlib -import base64 -import yaml -import sys -from llnl.util.lang import memoized, list_modules +from llnl.util.lang import list_modules from llnl.util.filesystem import join_path import spack @@ -43,11 +37,7 @@ import spack.config import spack.architecture -from spack.util.multiproc import parmap -from spack.compiler import Compiler -from spack.util.executable import which from spack.util.naming import mod_to_class -from spack.util.environment import get_path _imported_compilers_module = 'spack.compilers' _path_instance_vars = ['cc', 'cxx', 'f77', 'fc'] @@ -73,7 +63,8 @@ def _to_dict(compiler): """Return a dict version of compiler suitable to insert in YAML.""" d = {} d['spec'] = str(compiler.spec) - d['paths'] = dict( (attr, getattr(compiler, attr, None)) for attr in _path_instance_vars ) + d['paths'] = dict((attr, getattr(compiler, attr, None)) + for attr in _path_instance_vars) d['operating_system'] = str(compiler.operating_system) d['modules'] = compiler.modules if compiler.modules else [] @@ -140,15 +131,19 @@ def remove_compiler_from_config(compiler_spec, scope=None): - compiler_specs: a list of CompilerSpec objects. - scope: configuration scope to modify. """ + # Need a better way for this + global _cache_config_file + compiler_config = get_compiler_config(scope) config_length = len(compiler_config) - filtered_compiler_config = [comp for comp in compiler_config - if spack.spec.CompilerSpec(comp['compiler']['spec']) != compiler_spec] - # Need a better way for this - global _cache_config_file - _cache_config_file = filtered_compiler_config # Update the cache for changes - if len(filtered_compiler_config) == config_length: # No items removed + filtered_compiler_config = [ + comp for comp in compiler_config + if spack.spec.CompilerSpec(comp['compiler']['spec']) != compiler_spec] + + # Update the cache for changes + _cache_config_file = filtered_compiler_config + if len(filtered_compiler_config) == config_length: # No items removed CompilerSpecInsufficientlySpecificError(compiler_spec) spack.config.update_config('compilers', filtered_compiler_config, scope) @@ -158,7 +153,8 @@ def all_compilers_config(scope=None, init_config=True): available to build with. These are instances of CompilerSpec. """ # Get compilers for this architecture. - global _cache_config_file #Create a cache of the config file so we don't load all the time. 
+ # Create a cache of the config file so we don't load all the time. + global _cache_config_file if not _cache_config_file: _cache_config_file = get_compiler_config(scope, init_config) return _cache_config_file @@ -236,7 +232,8 @@ def get_compilers(cspec): continue items = items['compiler'] - if not ('paths' in items and all(n in items['paths'] for n in _path_instance_vars)): + if not ('paths' in items and + all(n in items['paths'] for n in _path_instance_vars)): raise InvalidCompilerConfigurationError(cspec) cls = class_for_compiler_name(cspec.name) @@ -254,10 +251,10 @@ def get_compilers(cspec): mods = [] if 'operating_system' in items: - operating_system = spack.architecture._operating_system_from_dict(items['operating_system'], platform) + os = spack.architecture._operating_system_from_dict( + items['operating_system'], platform) else: - operating_system = None - + os = None alias = items['alias'] if 'alias' in items else None @@ -266,7 +263,8 @@ def get_compilers(cspec): if f in items: flags[f] = items[f] - compilers.append(cls(cspec, operating_system, compiler_paths, mods, alias, **flags)) + compilers.append( + cls(cspec, os, compiler_paths, mods, alias, **flags)) return compilers @@ -275,7 +273,6 @@ def get_compilers(cspec): for cspec in matches: compilers.extend(get_compilers(cspec)) return compilers -# return [get_compilers(cspec) for cspec in matches] @_auto_compiler_spec @@ -285,8 +282,9 @@ def compiler_for_spec(compiler_spec, arch): operating_system = arch.platform_os assert(compiler_spec.concrete) - compilers = [c for c in compilers_for_spec(compiler_spec, platform=arch.platform) - if c.operating_system == operating_system] + compilers = [ + c for c in compilers_for_spec(compiler_spec, platform=arch.platform) + if c.operating_system == operating_system] if len(compilers) < 1: raise NoCompilerForSpecError(compiler_spec, operating_system) if len(compilers) > 1: @@ -321,11 +319,13 @@ def all_os_classes(): return classes + def all_compiler_types(): return [class_for_compiler_name(c) for c in supported_compilers()] class InvalidCompilerConfigurationError(spack.error.SpackError): + def __init__(self, compiler_spec): super(InvalidCompilerConfigurationError, self).__init__( "Invalid configuration for [compiler \"%s\"]: " % compiler_spec, @@ -335,14 +335,18 @@ def __init__(self, compiler_spec): class NoCompilersError(spack.error.SpackError): def __init__(self): - super(NoCompilersError, self).__init__("Spack could not find any compilers!") + super(NoCompilersError, self).__init__( + "Spack could not find any compilers!") + class NoCompilerForSpecError(spack.error.SpackError): def __init__(self, compiler_spec, target): - super(NoCompilerForSpecError, self).__init__("No compilers for operating system %s satisfy spec %s" % ( - target, compiler_spec)) + super(NoCompilerForSpecError, self).__init__( + "No compilers for operating system %s satisfy spec %s" + % (target, compiler_spec)) + class CompilerSpecInsufficientlySpecificError(spack.error.SpackError): def __init__(self, compiler_spec): - super(CompilerSpecInsufficientlySpecificError, self).__init__("Multiple compilers satisfy spec %s", - compiler_spec) + super(CompilerSpecInsufficientlySpecificError, self).__init__( + "Multiple compilers satisfy spec %s" % compiler_spec) diff --git a/lib/spack/spack/compilers/clang.py b/lib/spack/spack/compilers/clang.py index 00b406d8200..4cf65222aea 100644 --- a/lib/spack/spack/compilers/clang.py +++ b/lib/spack/spack/compilers/clang.py @@ -29,6 +29,7 @@ import llnl.util.tty as tty from spack.version 
import ver + class Clang(Compiler): # Subclasses use possible names of C compiler cc_names = ['clang'] @@ -43,11 +44,12 @@ class Clang(Compiler): fc_names = [] # Named wrapper links within spack.build_env_path - link_paths = { 'cc' : 'clang/clang', - 'cxx' : 'clang/clang++', - # Use default wrappers for fortran, in case provided in compilers.yaml - 'f77' : 'f77', - 'fc' : 'f90' } + link_paths = {'cc': 'clang/clang', + 'cxx': 'clang/clang++', + # Use default wrappers for fortran, in case provided in + # compilers.yaml + 'f77': 'f77', + 'fc': 'f90'} @property def is_apple(self): diff --git a/lib/spack/spack/compilers/craype.py b/lib/spack/spack/compilers/craype.py index 4ba8b110ecc..c92e5c131a8 100644 --- a/lib/spack/spack/compilers/craype.py +++ b/lib/spack/spack/compilers/craype.py @@ -1,34 +1,33 @@ -##############################################################################} -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # LLNL-CODE-647188 # -# For details, see https://scalability-llnl.github.io/spack +# For details, see https://github.com/llnl/spack # Please also see the LICENSE file for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. +# conditions of the GNU Lesser General Public License for more details. 
# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import llnl.util.tty as tty - -#from spack.build_environment import load_module from spack.compiler import * -#from spack.version import ver + class Craype(Compiler): + """Cray programming environment compiler.""" + # Subclasses use possible names of C compiler cc_names = ['cc'] @@ -47,12 +46,11 @@ class Craype(Compiler): PrgEnv = 'PrgEnv-cray' PrgEnv_compiler = 'craype' - link_paths = { 'cc' : 'cc', - 'cxx' : 'c++', - 'f77' : 'f77', - 'fc' : 'fc'} - + link_paths = {'cc': 'cc', + 'cxx': 'c++', + 'f77': 'f77', + 'fc': 'fc'} + @classmethod def default_version(cls, comp): return get_compiler_version(comp, r'([Vv]ersion).*(\d+(\.\d+)+)') - diff --git a/lib/spack/spack/compilers/gcc.py b/lib/spack/spack/compilers/gcc.py index 2fae6688db9..a556f346d73 100644 --- a/lib/spack/spack/compilers/gcc.py +++ b/lib/spack/spack/compilers/gcc.py @@ -26,6 +26,7 @@ from spack.compiler import * from spack.version import ver + class Gcc(Compiler): # Subclasses use possible names of C compiler cc_names = ['gcc'] @@ -44,10 +45,10 @@ class Gcc(Compiler): suffixes = [r'-mp-\d\.\d', r'-\d\.\d', r'-\d'] # Named wrapper links within spack.build_env_path - link_paths = {'cc' : 'gcc/gcc', - 'cxx' : 'gcc/g++', - 'f77' : 'gcc/gfortran', - 'fc' : 'gcc/gfortran' } + link_paths = {'cc': 'gcc/gcc', + 'cxx': 'gcc/g++', + 'f77': 'gcc/gfortran', + 'fc': 'gcc/gfortran'} PrgEnv = 'PrgEnv-gnu' PrgEnv_compiler = 'gcc' @@ -79,7 +80,6 @@ def fc_version(cls, fc): # older gfortran versions don't have simple dumpversion output. 
r'(?:GNU Fortran \(GCC\))?(\d+\.\d+(?:\.\d+)?)') - @classmethod def f77_version(cls, f77): return cls.fc_version(f77) diff --git a/lib/spack/spack/compilers/intel.py b/lib/spack/spack/compilers/intel.py index 6cad03ff47d..8531ecd19ab 100644 --- a/lib/spack/spack/compilers/intel.py +++ b/lib/spack/spack/compilers/intel.py @@ -26,6 +26,7 @@ import llnl.util.tty as tty from spack.version import ver + class Intel(Compiler): # Subclasses use possible names of C compiler cc_names = ['icc'] @@ -40,10 +41,10 @@ class Intel(Compiler): fc_names = ['ifort'] # Named wrapper links within spack.build_env_path - link_paths = { 'cc' : 'intel/icc', - 'cxx' : 'intel/icpc', - 'f77' : 'intel/ifort', - 'fc' : 'intel/ifort' } + link_paths = {'cc': 'intel/icc', + 'cxx': 'intel/icpc', + 'f77': 'intel/ifort', + 'fc': 'intel/ifort'} PrgEnv = 'PrgEnv-intel' PrgEnv_compiler = 'intel' @@ -64,7 +65,6 @@ def cxx11_flag(self): else: return "-std=c++11" - @classmethod def default_version(cls, comp): """The '--version' option seems to be the most consistent one diff --git a/lib/spack/spack/compilers/nag.py b/lib/spack/spack/compilers/nag.py index cee11bc97a1..fdfc078b5e5 100644 --- a/lib/spack/spack/compilers/nag.py +++ b/lib/spack/spack/compilers/nag.py @@ -23,7 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack.compiler import * -import llnl.util.tty as tty + class Nag(Compiler): # Subclasses use possible names of C compiler @@ -39,11 +39,12 @@ class Nag(Compiler): fc_names = ['nagfor'] # Named wrapper links within spack.build_env_path - link_paths = { # Use default wrappers for C and C++, in case provided in compilers.yaml - 'cc' : 'cc', - 'cxx' : 'c++', - 'f77' : 'nag/nagfor', - 'fc' : 'nag/nagfor' } + # Use default wrappers for C and C++, in case provided in compilers.yaml + link_paths = { + 'cc': 'cc', + 'cxx': 'c++', + 'f77': 'nag/nagfor', + 'fc': 'nag/nagfor'} @property def openmp_flag(self): @@ -71,9 +72,8 @@ def default_version(self, comp): """The '-V' option works for nag compilers. Output looks like this:: - NAG Fortran Compiler Release 6.0(Hibiya) Build 1037 - Product NPL6A60NA for x86-64 Linux - Copyright 1990-2015 The Numerical Algorithms Group Ltd., Oxford, U.K. 
+ NAG Fortran Compiler Release 6.0(Hibiya) Build 1037 + Product NPL6A60NA for x86-64 Linux """ return get_compiler_version( comp, '-V', r'NAG Fortran Compiler Release ([0-9.]+)') diff --git a/lib/spack/spack/compilers/pgi.py b/lib/spack/spack/compilers/pgi.py index 6d36d8bfa6d..0e4be6e9ba6 100644 --- a/lib/spack/spack/compilers/pgi.py +++ b/lib/spack/spack/compilers/pgi.py @@ -23,7 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack.compiler import * -import llnl.util.tty as tty + class Pgi(Compiler): # Subclasses use possible names of C compiler @@ -39,17 +39,14 @@ class Pgi(Compiler): fc_names = ['pgfortran', 'pgf95', 'pgf90'] # Named wrapper links within spack.build_env_path - link_paths = { 'cc' : 'pgi/pgcc', - 'cxx' : 'pgi/pgc++', - 'f77' : 'pgi/pgfortran', - 'fc' : 'pgi/pgfortran' } - - + link_paths = {'cc': 'pgi/pgcc', + 'cxx': 'pgi/pgc++', + 'f77': 'pgi/pgfortran', + 'fc': 'pgi/pgfortran'} PrgEnv = 'PrgEnv-pgi' PrgEnv_compiler = 'pgi' - @property def openmp_flag(self): return "-mp" diff --git a/lib/spack/spack/compilers/xl.py b/lib/spack/spack/compilers/xl.py index b1431436add..5c83209781b 100644 --- a/lib/spack/spack/compilers/xl.py +++ b/lib/spack/spack/compilers/xl.py @@ -26,24 +26,26 @@ import llnl.util.tty as tty from spack.version import ver + class Xl(Compiler): # Subclasses use possible names of C compiler - cc_names = ['xlc','xlc_r'] + cc_names = ['xlc', 'xlc_r'] # Subclasses use possible names of C++ compiler - cxx_names = ['xlC','xlC_r','xlc++','xlc++_r'] + cxx_names = ['xlC', 'xlC_r', 'xlc++', 'xlc++_r'] # Subclasses use possible names of Fortran 77 compiler - f77_names = ['xlf','xlf_r'] + f77_names = ['xlf', 'xlf_r'] # Subclasses use possible names of Fortran 90 compiler - fc_names = ['xlf90','xlf90_r','xlf95','xlf95_r','xlf2003','xlf2003_r','xlf2008','xlf2008_r'] + fc_names = ['xlf90', 'xlf90_r', 'xlf95', 'xlf95_r', + 'xlf2003', 'xlf2003_r', 'xlf2008', 'xlf2008_r'] # Named wrapper links within spack.build_env_path - link_paths = { 'cc' : 'xl/xlc', - 'cxx' : 'xl/xlc++', - 'f77' : 'xl/xlf', - 'fc' : 'xl/xlf90' } + link_paths = {'cc': 'xl/xlc', + 'cxx': 'xl/xlc++', + 'f77': 'xl/xlf', + 'fc': 'xl/xlf90'} @property def openmp_flag(self): @@ -56,7 +58,6 @@ def cxx11_flag(self): else: return "-qlanglvl=extended0x" - @classmethod def default_version(cls, comp): """The '-qversion' is the standard option fo XL compilers. @@ -82,29 +83,28 @@ def default_version(cls, comp): """ return get_compiler_version( - comp, '-qversion',r'([0-9]?[0-9]\.[0-9])') - + comp, '-qversion', r'([0-9]?[0-9]\.[0-9])') @classmethod def fc_version(cls, fc): - """The fortran and C/C++ versions of the XL compiler are always two units apart. - By this we mean that the fortran release that goes with XL C/C++ 11.1 is 13.1. - Having such a difference in version number is confusing spack quite a lot. - Most notably if you keep the versions as is the default xl compiler will only - have fortran and no C/C++. - So we associate the Fortran compiler with the version associated to the C/C++ - compiler. - One last stumble. Version numbers over 10 have at least a .1 those under 10 - a .0. There is no xlf 9.x or under currently available. BG/P and BG/L can - such a compiler mix and possibly older version of AIX and linux on power. + """The fortran and C/C++ versions of the XL compiler are always + two units apart. By this we mean that the fortran release that + goes with XL C/C++ 11.1 is 13.1. 
Having such a difference in + version number is confusing spack quite a lot. Most notably + if you keep the versions as is the default xl compiler will + only have fortran and no C/C++. So we associate the Fortran + compiler with the version associated to the C/C++ compiler. + One last stumble. Version numbers over 10 have at least a .1 + those under 10 a .0. There is no xlf 9.x or under currently + available. BG/P and BG/L can such a compiler mix and possibly + older version of AIX and linux on power. """ - fver = get_compiler_version(fc, '-qversion',r'([0-9]?[0-9]\.[0-9])') + fver = get_compiler_version(fc, '-qversion', r'([0-9]?[0-9]\.[0-9])') cver = float(fver) - 2 - if cver < 10 : - cver = cver - 0.1 + if cver < 10: + cver = cver - 0.1 return str(cver) - @classmethod def f77_version(cls, f77): return cls.fc_version(f77) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 6f11c86ce87..726dee62e3a 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -61,7 +61,9 @@ def _valid_virtuals_and_externals(self, spec): if not providers: raise UnsatisfiableProviderSpecError(providers[0], spec) spec_w_preferred_providers = find_spec( - spec, lambda x: spack.pkgsort.spec_has_preferred_provider(x.name, spec.name)) # NOQA: ignore=E501 + spec, + lambda x: spack.pkgsort.spec_has_preferred_provider( + x.name, spec.name)) if not spec_w_preferred_providers: spec_w_preferred_providers = spec provider_cmp = partial(spack.pkgsort.provider_compare, @@ -495,7 +497,8 @@ class UnavailableCompilerVersionError(spack.error.SpackError): def __init__(self, compiler_spec, operating_system): super(UnavailableCompilerVersionError, self).__init__( - "No available compiler version matches '%s' on operating_system %s" % (compiler_spec, operating_system), # NOQA: ignore=E501 + "No available compiler version matches '%s' on operating_system %s" + % (compiler_spec, operating_system), "Run 'spack compilers' to see available compiler Options.") @@ -506,14 +509,15 @@ class NoValidVersionError(spack.error.SpackError): def __init__(self, spec): super(NoValidVersionError, self).__init__( - "There are no valid versions for %s that match '%s'" % (spec.name, spec.versions)) # NOQA: ignore=E501 + "There are no valid versions for %s that match '%s'" + % (spec.name, spec.versions)) class NoBuildError(spack.error.SpackError): - """Raised when a package is configured with the buildable option False, but no satisfactory external versions can be found""" def __init__(self, spec): - super(NoBuildError, self).__init__( - "The spec '%s' is configured as not buildable,and no matching external installs were found" % spec.name) # NOQA: ignore=E501 + msg = ("The spec '%s' is configured as not buildable, " + "and no matching external installs were found") + super(NoBuildError, self).__init__(msg % spec.name) diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index a4e274893c0..a4a4f5411ee 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -158,35 +158,35 @@ 'required': ['cc', 'cxx', 'f77', 'fc'], 'additionalProperties': False, 'properties': { - 'cc': { 'anyOf': [ {'type' : 'string' }, - {'type' : 'null' }]}, - 'cxx': { 'anyOf': [ {'type' : 'string' }, - {'type' : 'null' }]}, - 'f77': { 'anyOf': [ {'type' : 'string' }, - {'type' : 'null' }]}, - 'fc': { 'anyOf': [ {'type' : 'string' }, - {'type' : 'null' }]}, - 'cflags': { 'anyOf': [ {'type' : 'string' }, - {'type' : 'null' }]}, - 'cxxflags': { 'anyOf': [ {'type' : 'string' }, - {'type' : 'null' }]}, - 
'fflags': { 'anyOf': [ {'type' : 'string' }, - {'type' : 'null' }]}, - 'cppflags': { 'anyOf': [ {'type' : 'string' }, - {'type' : 'null' }]}, - 'ldflags': { 'anyOf': [ {'type' : 'string' }, - {'type' : 'null' }]}, - 'ldlibs': { 'anyOf': [ {'type' : 'string' }, - {'type' : 'null' }]}}}, - 'spec': { 'type': 'string'}, - 'operating_system': { 'type': 'string'}, - 'alias': { 'anyOf': [ {'type' : 'string'}, - {'type' : 'null' }]}, - 'modules': { 'anyOf': [ {'type' : 'string'}, - {'type' : 'null' }, - {'type': 'array'}, - ]} - },},},},},}, + 'cc': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'cxx': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'f77': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'fc': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'cflags': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'cxxflags': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'fflags': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'cppflags': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'ldflags': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'ldlibs': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}}}, + 'spec': {'type': 'string'}, + 'operating_system': {'type': 'string'}, + 'alias': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'modules': {'anyOf': [{'type': 'string'}, + {'type': 'null'}, + {'type': 'array'}, + ]} + }, }, }, }, }, }, 'mirrors': { '$schema': 'http://json-schema.org/schema#', 'title': 'Spack mirror configuration file schema', @@ -199,7 +199,7 @@ 'additionalProperties': False, 'patternProperties': { r'\w[\w-]*': { - 'type': 'string'},},},},}, + 'type': 'string'}, }, }, }, }, 'repos': { '$schema': 'http://json-schema.org/schema#', @@ -211,7 +211,7 @@ 'type': 'array', 'default': [], 'items': { - 'type': 'string'},},},}, + 'type': 'string'}, }, }, }, 'packages': { '$schema': 'http://json-schema.org/schema#', 'title': 'Spack package configuration file schema', @@ -223,48 +223,48 @@ 'default': {}, 'additionalProperties': False, 'patternProperties': { - r'\w[\w-]*': { # package name + r'\w[\w-]*': { # package name 'type': 'object', 'default': {}, 'additionalProperties': False, 'properties': { 'version': { - 'type' : 'array', - 'default' : [], - 'items' : { 'anyOf' : [ { 'type' : 'string' }, - { 'type' : 'number'}]}}, #version strings + 'type': 'array', + 'default': [], + 'items': {'anyOf': [{'type': 'string'}, + {'type': 'number'}]}}, # version strings 'compiler': { - 'type' : 'array', - 'default' : [], - 'items' : { 'type' : 'string' } }, #compiler specs + 'type': 'array', + 'default': [], + 'items': {'type': 'string'}}, # compiler specs 'buildable': { 'type': 'boolean', 'default': True, - }, + }, 'modules': { - 'type' : 'object', - 'default' : {}, - }, + 'type': 'object', + 'default': {}, + }, 'providers': { 'type': 'object', 'default': {}, 'additionalProperties': False, 'patternProperties': { r'\w[\w-]*': { - 'type' : 'array', - 'default' : [], - 'items' : { 'type' : 'string' },},},}, + 'type': 'array', + 'default': [], + 'items': {'type': 'string'}, }, }, }, 'paths': { - 'type' : 'object', - 'default' : {}, - }, + 'type': 'object', + 'default': {}, + }, 'variants': { - 'oneOf' : [ - { 'type' : 'string' }, - { 'type' : 'array', - 'items' : { 'type' : 'string' } }, - ], }, - },},},},},}, + 'oneOf': [ + {'type': 'string'}, + {'type': 'array', + 'items': {'type': 'string'}}, + ], }, + }, }, }, }, }, }, 'targets': { '$schema': 'http://json-schema.org/schema#', @@ -277,8 +277,8 @@ 'default': {}, 
'additionalProperties': False, 'patternProperties': { - r'\w[\w-]*': { # target name - 'type': 'string' ,},},},},}, + r'\w[\w-]*': { # target name + 'type': 'string', }, }, }, }, }, 'modules': { '$schema': 'http://json-schema.org/schema#', 'title': 'Spack module file configuration file schema', @@ -389,13 +389,15 @@ }, 'tcl': { 'allOf': [ - {'$ref': '#/definitions/module_type_configuration'}, # Base configuration + # Base configuration + {'$ref': '#/definitions/module_type_configuration'}, {} # Specific tcl extensions ] }, 'dotkit': { 'allOf': [ - {'$ref': '#/definitions/module_type_configuration'}, # Base configuration + # Base configuration + {'$ref': '#/definitions/module_type_configuration'}, {} # Specific dotkit extensions ] }, @@ -428,7 +430,8 @@ def extend_with_default(validator_class): """ validate_properties = validator_class.VALIDATORS["properties"] - validate_pattern_properties = validator_class.VALIDATORS["patternProperties"] + validate_pattern_properties = validator_class.VALIDATORS[ + "patternProperties"] def set_defaults(validator, properties, instance, schema): for property, subschema in properties.iteritems(): @@ -510,7 +513,8 @@ def write_section(self, section): except jsonschema.ValidationError as e: raise ConfigSanityError(e, data) except (yaml.YAMLError, IOError) as e: - raise ConfigFileError("Error writing to config file: '%s'" % str(e)) + raise ConfigFileError( + "Error writing to config file: '%s'" % str(e)) def clear(self): """Empty cached config information.""" @@ -739,7 +743,8 @@ def spec_externals(spec): path = get_path_from_module(module) - external_spec = spack.spec.Spec(external_spec, external=path, external_module=module) + external_spec = spack.spec.Spec( + external_spec, external=path, external_module=module) if external_spec.satisfies(spec): external_specs.append(external_spec) @@ -773,6 +778,7 @@ def get_path(path, data): class ConfigFormatError(ConfigError): """Raised when a configuration format does not match its schema.""" + def __init__(self, validation_error, data): # Try to get line number from erroneous instance and its parent instance_mark = getattr(validation_error.instance, '_start_mark', None) diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index 16814429dcd..f3dcdef0a98 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -119,6 +119,7 @@ def from_dict(cls, spec, dictionary): class Database(object): + def __init__(self, root, db_dir=None): """Create a Database for Spack installations under ``root``. @@ -600,6 +601,7 @@ def missing(self, spec): class CorruptDatabaseError(SpackError): + def __init__(self, path, msg=''): super(CorruptDatabaseError, self).__init__( "Spack database is corrupt: %s. %s." % (path, msg), @@ -607,6 +609,7 @@ def __init__(self, path, msg=''): class InvalidDatabaseVersionError(SpackError): + def __init__(self, expected, found): super(InvalidDatabaseVersionError, self).__init__( "Expected database version %s but found version %s." diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py index e92dd6fb679..313bf48f0de 100644 --- a/lib/spack/spack/directives.py +++ b/lib/spack/spack/directives.py @@ -349,9 +349,10 @@ def __init__(self, directive, package): class UnknownDependencyTypeError(DirectiveError): """This is raised when a dependency is of an unknown type.""" + def __init__(self, directive, package, deptype): super(UnknownDependencyTypeError, self).__init__( directive, - "Package '%s' cannot depend on a package via %s." 
% - (package, deptype)) + "Package '%s' cannot depend on a package via %s." + % (package, deptype)) self.package = package diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index 8150a6da2bb..0ae6f765f4a 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -22,16 +22,13 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import re import os import exceptions -import hashlib import shutil import glob import tempfile import yaml -import llnl.util.tty as tty from llnl.util.filesystem import join_path, mkdirp import spack @@ -51,10 +48,10 @@ class DirectoryLayout(object): install, and they can use this to customize the nesting structure of spack installs. """ + def __init__(self, root): self.root = root - @property def hidden_file_paths(self): """Return a list of hidden files used by the directory layout. @@ -67,25 +64,21 @@ def hidden_file_paths(self): """ raise NotImplementedError() - def all_specs(self): """To be implemented by subclasses to traverse all specs for which there is a directory within the root. """ raise NotImplementedError() - def relative_path_for_spec(self, spec): """Implemented by subclasses to return a relative path from the install root to a unique location for the provided spec.""" raise NotImplementedError() - def create_install_directory(self, spec): """Creates the installation directory for a spec.""" raise NotImplementedError() - def check_installed(self, spec): """Checks whether a spec is installed. @@ -95,7 +88,6 @@ def check_installed(self, spec): """ raise NotImplementedError() - def extension_map(self, spec): """Get a dict of currently installed extension packages for a spec. @@ -104,7 +96,6 @@ def extension_map(self, spec): """ raise NotImplementedError() - def check_extension_conflict(self, spec, ext_spec): """Ensure that ext_spec can be activated in spec. @@ -113,7 +104,6 @@ def check_extension_conflict(self, spec, ext_spec): """ raise NotImplementedError() - def check_activated(self, spec, ext_spec): """Ensure that ext_spec can be removed from spec. @@ -121,26 +111,22 @@ def check_activated(self, spec, ext_spec): """ raise NotImplementedError() - def add_extension(self, spec, ext_spec): """Add to the list of currently installed extensions.""" raise NotImplementedError() - def remove_extension(self, spec, ext_spec): """Remove from the list of currently installed extensions.""" raise NotImplementedError() - def path_for_spec(self, spec): - """Return an absolute path from the root to a directory for the spec.""" + """Return absolute path from the root to a directory for the spec.""" _check_concrete(spec) path = self.relative_path_for_spec(spec) assert(not path.startswith(self.root)) return os.path.join(self.root, path) - def remove_install_directory(self, spec): """Removes a prefix and any empty parent directories from the root. Raised RemoveFailedError if something goes wrong. @@ -177,6 +163,7 @@ class YamlDirectoryLayout(DirectoryLayout): only enabled variants are included in the install path. Disabled variants are omitted. """ + def __init__(self, root, **kwargs): super(YamlDirectoryLayout, self).__init__(root) self.metadata_dir = kwargs.get('metadata_dir', '.spack') @@ -191,12 +178,10 @@ def __init__(self, root, **kwargs): # Cache of already written/read extension maps. 
self._extension_maps = {} - @property def hidden_file_paths(self): return (self.metadata_dir,) - def relative_path_for_spec(self, spec): _check_concrete(spec) @@ -208,20 +193,19 @@ def relative_path_for_spec(self, spec): spec.version, spec.dag_hash(self.hash_len)) - path = join_path(spec.architecture, + path = join_path( + spec.architecture, "%s-%s" % (spec.compiler.name, spec.compiler.version), dir_name) return path - def write_spec(self, spec, path): """Write a spec out to a file.""" _check_concrete(spec) with open(path, 'w') as f: spec.to_yaml(f) - def read_spec(self, path): """Read the contents of a file and parse them as a spec""" try: @@ -237,32 +221,26 @@ def read_spec(self, path): spec._mark_concrete() return spec - def spec_file_path(self, spec): """Gets full path to spec file""" _check_concrete(spec) return join_path(self.metadata_path(spec), self.spec_file_name) - def metadata_path(self, spec): return join_path(self.path_for_spec(spec), self.metadata_dir) - def build_log_path(self, spec): return join_path(self.path_for_spec(spec), self.metadata_dir, self.build_log_name) - def build_env_path(self, spec): return join_path(self.path_for_spec(spec), self.metadata_dir, self.build_env_name) - def build_packages_path(self, spec): return join_path(self.path_for_spec(spec), self.metadata_dir, self.packages_dir) - def create_install_directory(self, spec): _check_concrete(spec) @@ -273,7 +251,6 @@ def create_install_directory(self, spec): mkdirp(self.metadata_path(spec)) self.write_spec(spec, self.spec_file_path(spec)) - def check_installed(self, spec): _check_concrete(spec) path = self.path_for_spec(spec) @@ -284,7 +261,7 @@ def check_installed(self, spec): if not os.path.isfile(spec_file_path): raise InconsistentInstallDirectoryError( - 'Inconsistent state: install prefix exists but contains no spec.yaml:', + 'Install prefix exists but contains no spec.yaml:', " " + path) installed_spec = self.read_spec(spec_file_path) @@ -297,7 +274,6 @@ def check_installed(self, spec): raise InconsistentInstallDirectoryError( 'Spec file in %s does not match hash!' % spec_file_path) - def all_specs(self): if not os.path.isdir(self.root): return [] @@ -307,20 +283,17 @@ def all_specs(self): spec_files = glob.glob(pattern) return [self.read_spec(s) for s in spec_files] - def specs_by_hash(self): by_hash = {} for spec in self.all_specs(): by_hash[spec.dag_hash()] = spec return by_hash - def extension_file_path(self, spec): """Gets full path to an installed package's extension file""" _check_concrete(spec) return join_path(self.metadata_path(spec), self.extension_file_name) - def _write_extensions(self, spec, extensions): path = self.extension_file_path(spec) @@ -332,23 +305,22 @@ def _write_extensions(self, spec, extensions): # write tmp file with tmp: yaml.dump({ - 'extensions' : [ - { ext.name : { - 'hash' : ext.dag_hash(), - 'path' : str(ext.prefix) + 'extensions': [ + {ext.name: { + 'hash': ext.dag_hash(), + 'path': str(ext.prefix) }} for ext in sorted(extensions.values())] }, tmp, default_flow_style=False) # Atomic update by moving tmpfile on top of old one. 
os.rename(tmp.name, path) - def _extension_map(self, spec): """Get a dict spec> for all extensions currently installed for this package.""" _check_concrete(spec) - if not spec in self._extension_maps: + if spec not in self._extension_maps: path = self.extension_file_path(spec) if not os.path.exists(path): self._extension_maps[spec] = {} @@ -363,14 +335,14 @@ def _extension_map(self, spec): dag_hash = entry[name]['hash'] prefix = entry[name]['path'] - if not dag_hash in by_hash: + if dag_hash not in by_hash: raise InvalidExtensionSpecError( "Spec %s not found in %s" % (dag_hash, prefix)) ext_spec = by_hash[dag_hash] - if not prefix == ext_spec.prefix: + if prefix != ext_spec.prefix: raise InvalidExtensionSpecError( - "Prefix %s does not match spec with hash %s: %s" + "Prefix %s does not match spec hash %s: %s" % (prefix, dag_hash, ext_spec)) exts[ext_spec.name] = ext_spec @@ -378,13 +350,11 @@ def _extension_map(self, spec): return self._extension_maps[spec] - def extension_map(self, spec): """Defensive copying version of _extension_map() for external API.""" _check_concrete(spec) return self._extension_map(spec).copy() - def check_extension_conflict(self, spec, ext_spec): exts = self._extension_map(spec) if ext_spec.name in exts: @@ -394,13 +364,11 @@ def check_extension_conflict(self, spec, ext_spec): else: raise ExtensionConflictError(spec, ext_spec, installed_spec) - def check_activated(self, spec, ext_spec): exts = self._extension_map(spec) - if (not ext_spec.name in exts) or (ext_spec != exts[ext_spec.name]): + if (ext_spec.name not in exts) or (ext_spec != exts[ext_spec.name]): raise NoSuchExtensionError(spec, ext_spec) - def add_extension(self, spec, ext_spec): _check_concrete(spec) _check_concrete(ext_spec) @@ -413,7 +381,6 @@ def add_extension(self, spec, ext_spec): exts[ext_spec.name] = ext_spec self._write_extensions(spec, exts) - def remove_extension(self, spec, ext_spec): _check_concrete(spec) _check_concrete(ext_spec) @@ -429,12 +396,14 @@ def remove_extension(self, spec, ext_spec): class DirectoryLayoutError(SpackError): """Superclass for directory layout errors.""" + def __init__(self, message, long_msg=None): super(DirectoryLayoutError, self).__init__(message, long_msg) class SpecHashCollisionError(DirectoryLayoutError): """Raised when there is a hash collision in an install layout.""" + def __init__(self, installed_spec, new_spec): super(SpecHashCollisionError, self).__init__( 'Specs %s and %s have the same SHA-1 prefix!' 
@@ -443,6 +412,7 @@ def __init__(self, installed_spec, new_spec): class RemoveFailedError(DirectoryLayoutError): """Raised when a DirectoryLayout cannot remove an install prefix.""" + def __init__(self, installed_spec, prefix, error): super(RemoveFailedError, self).__init__( 'Could not remove prefix %s for %s : %s' @@ -452,12 +422,15 @@ def __init__(self, installed_spec, prefix, error): class InconsistentInstallDirectoryError(DirectoryLayoutError): """Raised when a package seems to be installed to the wrong place.""" + def __init__(self, message, long_msg=None): - super(InconsistentInstallDirectoryError, self).__init__(message, long_msg) + super(InconsistentInstallDirectoryError, self).__init__( + message, long_msg) class InstallDirectoryAlreadyExistsError(DirectoryLayoutError): """Raised when create_install_directory is called unnecessarily.""" + def __init__(self, path): super(InstallDirectoryAlreadyExistsError, self).__init__( "Install path %s already exists!") @@ -473,22 +446,26 @@ class InvalidExtensionSpecError(DirectoryLayoutError): class ExtensionAlreadyInstalledError(DirectoryLayoutError): """Raised when an extension is added to a package that already has it.""" + def __init__(self, spec, ext_spec): super(ExtensionAlreadyInstalledError, self).__init__( - "%s is already installed in %s" % (ext_spec.short_spec, spec.short_spec)) + "%s is already installed in %s" + % (ext_spec.short_spec, spec.short_spec)) class ExtensionConflictError(DirectoryLayoutError): """Raised when an extension is added to a package that already has it.""" + def __init__(self, spec, ext_spec, conflict): super(ExtensionConflictError, self).__init__( - "%s cannot be installed in %s because it conflicts with %s"% ( - ext_spec.short_spec, spec.short_spec, conflict.short_spec)) + "%s cannot be installed in %s because it conflicts with %s" + % (ext_spec.short_spec, spec.short_spec, conflict.short_spec)) class NoSuchExtensionError(DirectoryLayoutError): """Raised when an extension isn't there on deactivate.""" + def __init__(self, spec, ext_spec): super(NoSuchExtensionError, self).__init__( - "%s cannot be removed from %s because it's not activated."% ( - ext_spec.short_spec, spec.short_spec)) + "%s cannot be removed from %s because it's not activated." + % (ext_spec.short_spec, spec.short_spec)) diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py index 41136ab2eb5..613ece2f454 100644 --- a/lib/spack/spack/environment.py +++ b/lib/spack/spack/environment.py @@ -1,4 +1,4 @@ -# +############################################################################## # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. 
# @@ -21,7 +21,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -# +############################################################################## import collections import inspect import json @@ -287,7 +287,10 @@ def from_sourcing_files(*args, **kwargs): shell = '{shell}'.format(**info) shell_options = '{shell_options}'.format(**info) source_file = '{source_command} {file} {concatenate_on_success}' - dump_environment = 'python -c "import os, json; print json.dumps(dict(os.environ))"' # NOQA: ignore=E501 + + dump_cmd = "import os, json; print json.dumps(dict(os.environ))" + dump_environment = 'python -c "%s"' % dump_cmd + # Construct the command that will be executed command = [source_file.format(file=file, **info) for file in args] command.append(dump_environment) @@ -326,8 +329,10 @@ def from_sourcing_files(*args, **kwargs): for x in unset_variables: env.unset(x) # Variables that have been modified - common_variables = set(this_environment).intersection(set(after_source_env)) # NOQA: ignore=E501 - modified_variables = [x for x in common_variables if this_environment[x] != after_source_env[x]] # NOQA: ignore=E501 + common_variables = set( + this_environment).intersection(set(after_source_env)) + modified_variables = [x for x in common_variables + if this_environment[x] != after_source_env[x]] def return_separator_if_any(first_value, second_value): separators = ':', ';' @@ -405,7 +410,7 @@ def set_or_unset_not_first(variable, changes, errstream): if indexes: good = '\t \t{context} at {filename}:{lineno}' nogood = '\t--->\t{context} at {filename}:{lineno}' - message = 'Suspicious requests to set or unset the variable \'{var}\' found' # NOQA: ignore=E501 + message = "Suspicious requests to set or unset '{var}' found" errstream(message.format(var=variable)) for ii, item in enumerate(changes): print_format = nogood if ii in indexes else good diff --git a/lib/spack/spack/error.py b/lib/spack/spack/error.py index 85ad2fe249a..c94875e91a7 100644 --- a/lib/spack/spack/error.py +++ b/lib/spack/spack/error.py @@ -27,21 +27,21 @@ import llnl.util.tty as tty import spack + class SpackError(Exception): """This is the superclass for all Spack errors. Subclasses can be found in the modules they have to do with. 
""" + def __init__(self, message, long_message=None): super(SpackError, self).__init__() self.message = message self._long_message = long_message - @property def long_message(self): return self._long_message - def die(self): if spack.debug: sys.excepthook(*sys.exc_info()) @@ -52,21 +52,23 @@ def die(self): print self.long_message os._exit(1) - def __str__(self): msg = self.message if self._long_message: msg += "\n %s" % self._long_message return msg + class UnsupportedPlatformError(SpackError): """Raised by packages when a platform is not supported""" + def __init__(self, message): super(UnsupportedPlatformError, self).__init__(message) class NoNetworkConnectionError(SpackError): """Raised when an operation needs an internet connection.""" + def __init__(self, message, url): super(NoNetworkConnectionError, self).__init__( "No network connection: " + str(message), diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index bcb33bd0e69..c69a23033c8 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -356,6 +356,7 @@ def __str__(self): class CacheURLFetchStrategy(URLFetchStrategy): """The resource associated with a cache URL may be out of date.""" + def __init__(self, *args, **kwargs): super(CacheURLFetchStrategy, self).__init__(*args, **kwargs) @@ -836,6 +837,7 @@ def for_package_version(pkg, version): class FsCache(object): + def __init__(self, root): self.root = os.path.abspath(root) diff --git a/lib/spack/spack/file_cache.py b/lib/spack/spack/file_cache.py index fb9ccf46b8f..0a66166fd8f 100644 --- a/lib/spack/spack/file_cache.py +++ b/lib/spack/spack/file_cache.py @@ -41,6 +41,7 @@ class FileCache(object): client code need not manage locks for cache entries. """ + def __init__(self, root): """Create a file cache object. @@ -131,6 +132,7 @@ def write_transaction(self, key): """ class WriteContextManager(object): + def __enter__(cm): cm.orig_filename = self.cache_path(key) cm.orig_file = None diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py index 80d1199ef5f..b875e9da994 100644 --- a/lib/spack/spack/graph.py +++ b/lib/spack/spack/graph.py @@ -136,6 +136,7 @@ def find(seq, predicate): class AsciiGraph(object): + def __init__(self): # These can be set after initialization or after a call to # graph() to change behavior. 
@@ -288,22 +289,22 @@ def advance(to_pos, edges): self._indent() for p in prev_ends: - advance(p, lambda: [("| ", self._pos)]) # NOQA: ignore=E272 - advance(p + 1, lambda: [("|/", self._pos)]) # NOQA: ignore=E272 + advance(p, lambda: [("| ", self._pos)]) + advance(p + 1, lambda: [("|/", self._pos)]) if end >= 0: - advance(end + 1, lambda: [("| ", self._pos)]) # NOQA: ignore=E272 - advance(start - 1, lambda: [("|", self._pos), ("_", end)]) # NOQA: ignore=E272 + advance(end + 1, lambda: [("| ", self._pos)]) + advance(start - 1, lambda: [("|", self._pos), ("_", end)]) else: - advance(start - 1, lambda: [("| ", self._pos)]) # NOQA: ignore=E272 + advance(start - 1, lambda: [("| ", self._pos)]) if start >= 0: - advance(start, lambda: [("|", self._pos), ("/", end)]) # NOQA: ignore=E272 + advance(start, lambda: [("|", self._pos), ("/", end)]) if collapse: - advance(flen, lambda: [(" /", self._pos)]) # NOQA: ignore=E272 + advance(flen, lambda: [(" /", self._pos)]) else: - advance(flen, lambda: [("| ", self._pos)]) # NOQA: ignore=E272 + advance(flen, lambda: [("| ", self._pos)]) self._set_state(BACK_EDGE, end, label) self._out.write("\n") @@ -438,8 +439,8 @@ def write(self, spec, **kwargs): # Expand forward after doing all back connections if (i + 1 < len(self._frontier) and - len(self._frontier[i + 1]) == 1 and - self._frontier[i + 1][0] in self._frontier[i]): + len(self._frontier[i + 1]) == 1 and + self._frontier[i + 1][0] in self._frontier[i]): # We need to connect to the element to the right. # Keep lines straight by connecting directly and # avoiding unnecessary expand/contract. diff --git a/lib/spack/spack/hooks/__init__.py b/lib/spack/spack/hooks/__init__.py index 902e488ecae..c7c84defa02 100644 --- a/lib/spack/spack/hooks/__init__.py +++ b/lib/spack/spack/hooks/__init__.py @@ -45,6 +45,7 @@ from llnl.util.filesystem import join_path import spack + @memoized def all_hook_modules(): modules = [] @@ -58,6 +59,7 @@ def all_hook_modules(): class HookRunner(object): + def __init__(self, hook_name): self.hook_name = hook_name diff --git a/lib/spack/spack/hooks/extensions.py b/lib/spack/spack/hooks/extensions.py index bcbd68dfa0b..070b309a431 100644 --- a/lib/spack/spack/hooks/extensions.py +++ b/lib/spack/spack/hooks/extensions.py @@ -23,8 +23,6 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import spack - def pre_uninstall(pkg): assert(pkg.spec.concrete) diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py index 0bbcfba6b4f..f053e4405f4 100644 --- a/lib/spack/spack/mirror.py +++ b/lib/spack/spack/mirror.py @@ -40,9 +40,8 @@ import spack.url as url import spack.fetch_strategy as fs from spack.spec import Spec -from spack.stage import Stage from spack.version import * -from spack.util.compression import extension, allowed_archive +from spack.util.compression import allowed_archive def mirror_archive_filename(spec, fetcher): @@ -52,10 +51,10 @@ def mirror_archive_filename(spec, fetcher): if isinstance(fetcher, fs.URLFetchStrategy): if fetcher.expand_archive: - # If we fetch this version with a URLFetchStrategy, use URL's archive type + # If we fetch with a URLFetchStrategy, use URL's archive type ext = url.downloaded_file_extension(fetcher.url) else: - # If the archive shouldn't be expanded, don't check for its extension. + # If the archive shouldn't be expanded, don't check extension. 
ext = None else: # Otherwise we'll make a .tar.gz ourselves @@ -106,7 +105,9 @@ def get_matching_versions(specs, **kwargs): def suggest_archive_basename(resource): """ - Return a tentative basename for an archive. Raise an exception if the name is among the allowed archive types. + Return a tentative basename for an archive. + + Raises an exception if the name is not an allowed archive type. :param fetcher: :return: @@ -170,7 +171,7 @@ def create(path, specs, **kwargs): 'error': [] } - # Iterate through packages and download all the safe tarballs for each of them + # Iterate through packages and download all safe tarballs for each for spec in version_specs: add_single_spec(spec, mirror_root, categories, **kwargs) @@ -190,12 +191,15 @@ def add_single_spec(spec, mirror_root, categories, **kwargs): fetcher = stage.fetcher if ii == 0: # create a subdirectory for the current package@version - archive_path = os.path.abspath(join_path(mirror_root, mirror_archive_path(spec, fetcher))) + archive_path = os.path.abspath(join_path( + mirror_root, mirror_archive_path(spec, fetcher))) name = spec.format("$_$@") else: resource = stage.resource - archive_path = join_path(subdir, suggest_archive_basename(resource)) - name = "{resource} ({pkg}).".format(resource=resource.name, pkg=spec.format("$_$@")) + archive_path = join_path( + subdir, suggest_archive_basename(resource)) + name = "{resource} ({pkg}).".format( + resource=resource.name, pkg=spec.format("$_$@")) subdir = os.path.dirname(archive_path) mkdirp(subdir) @@ -217,15 +221,18 @@ def add_single_spec(spec, mirror_root, categories, **kwargs): categories['present'].append(spec) else: categories['mirrored'].append(spec) + except Exception as e: if spack.debug: sys.excepthook(*sys.exc_info()) else: - tty.warn("Error while fetching %s" % spec.format('$_$@'), e.message) + tty.warn("Error while fetching %s" + % spec.format('$_$@'), e.message) categories['error'].append(spec) class MirrorError(spack.error.SpackError): """Superclass of all mirror-creation related errors.""" + def __init__(self, msg, long_msg=None): super(MirrorError, self).__init__(msg, long_msg) diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py index 8ac6a77d13e..debc6752b46 100644 --- a/lib/spack/spack/modules.py +++ b/lib/spack/spack/modules.py @@ -459,7 +459,8 @@ def process_environment_command(self, env): yield self.environment_modifications_formats[type( command)].format(**command.args) except KeyError: - message = 'Cannot handle command of type {command} : skipping request' # NOQA: ignore=E501 + message = ('Cannot handle command of type {command}: ' + 'skipping request') details = '{context} at {filename}:{lineno}' tty.warn(message.format(command=type(command))) tty.warn(details.format(**command.args)) @@ -494,7 +495,8 @@ class Dotkit(EnvModule): autoload_format = 'dk_op {module_file}\n' - default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}' # NOQA: ignore=E501 + default_naming_format = \ + '{name}-{version}-{compiler.name}-{compiler.version}' @property def file_name(self): @@ -543,7 +545,8 @@ class TclModule(EnvModule): prerequisite_format = 'prereq {module_file}\n' - default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}' # NOQA: ignore=E501 + default_naming_format = \ + '{name}-{version}-{compiler.name}-{compiler.version}' @property def file_name(self): @@ -554,7 +557,7 @@ def header(self): timestamp = datetime.datetime.now() # TCL Modulefile header header = '#%Module1.0\n' - header += '## Module file created by spack 
(https://github.com/LLNL/spack) on %s\n' % timestamp # NOQA: ignore=E501 + header += '## Module file created by spack (https://github.com/LLNL/spack) on %s\n' % timestamp header += '##\n' header += '## %s\n' % self.spec.short_spec header += '##\n' @@ -584,10 +587,12 @@ def module_specific_content(self, configuration): for naming_dir, conflict_dir in zip( self.naming_scheme.split('/'), item.split('/')): if naming_dir != conflict_dir: - message = 'conflict scheme does not match naming scheme [{spec}]\n\n' # NOQA: ignore=E501 + message = 'conflict scheme does not match naming ' + message += 'scheme [{spec}]\n\n' message += 'naming scheme : "{nformat}"\n' message += 'conflict scheme : "{cformat}"\n\n' - message += '** You may want to check your `modules.yaml` configuration file **\n' # NOQA: ignore=E501 + message += '** You may want to check your ' + message += '`modules.yaml` configuration file **\n' tty.error(message.format(spec=self.spec, nformat=self.naming_scheme, cformat=item)) diff --git a/lib/spack/spack/multimethod.py b/lib/spack/spack/multimethod.py index 0818f9092fa..d1d1f324457 100644 --- a/lib/spack/spack/multimethod.py +++ b/lib/spack/spack/multimethod.py @@ -43,15 +43,13 @@ depending on the scenario, regular old conditionals might be clearer, so package authors should use their judgement. """ -import sys import functools -import collections from llnl.util.lang import * import spack.architecture import spack.error -from spack.spec import parse_anonymous_spec, Spec +from spack.spec import parse_anonymous_spec class SpecMultiMethod(object): @@ -89,13 +87,13 @@ class SpecMultiMethod(object): See the docs for decorators below for more details. """ + def __init__(self, default=None): self.method_list = [] self.default = default if default: functools.update_wrapper(self, default) - def register(self, spec, method): """Register a version of a method for a particular sys_type.""" self.method_list.append((spec, method)) @@ -105,12 +103,10 @@ def register(self, spec, method): else: assert(self.__name__ == method.__name__) - def __get__(self, obj, objtype): """This makes __call__ support instance methods.""" return functools.partial(self.__call__, obj) - def __call__(self, package_self, *args, **kwargs): """Find the first method with a spec that matches the package's spec. If none is found, call the default @@ -127,7 +123,6 @@ def __call__(self, package_self, *args, **kwargs): type(package_self), self.__name__, spec, [m[0] for m in self.method_list]) - def __str__(self): return "SpecMultiMethod {\n\tdefault: %s,\n\tspecs: %s\n}" % ( self.default, self.method_list) @@ -195,11 +190,13 @@ def install(self, prefix): platform-specific versions. There's not much we can do to get around this because of the way decorators work. 
""" + def __init__(self, spec): pkg = get_calling_module_name() if spec is True: spec = pkg - self.spec = parse_anonymous_spec(spec, pkg) if spec is not False else None + self.spec = (parse_anonymous_spec(spec, pkg) + if spec is not False else None) def __call__(self, method): # Get the first definition of the method in the calling scope @@ -218,12 +215,14 @@ def __call__(self, method): class MultiMethodError(spack.error.SpackError): """Superclass for multimethod dispatch errors""" + def __init__(self, message): super(MultiMethodError, self).__init__(message) class NoSuchMethodError(spack.error.SpackError): """Raised when we can't find a version of a multi-method.""" + def __init__(self, cls, method_name, spec, possible_specs): super(NoSuchMethodError, self).__init__( "Package %s does not support %s called with %s. Options are: %s" diff --git a/lib/spack/spack/operating_systems/cnl.py b/lib/spack/spack/operating_systems/cnl.py index dbd27758617..78807865b36 100644 --- a/lib/spack/spack/operating_systems/cnl.py +++ b/lib/spack/spack/operating_systems/cnl.py @@ -15,6 +15,7 @@ class Cnl(OperatingSystem): modules. If updated, user must make sure that version and name are updated to indicate that OS has been upgraded (or downgraded) """ + def __init__(self): name = 'CNL' version = '10' diff --git a/lib/spack/spack/operating_systems/linux_distro.py b/lib/spack/spack/operating_systems/linux_distro.py index 2e3c72719b2..6d70ae80b63 100644 --- a/lib/spack/spack/operating_systems/linux_distro.py +++ b/lib/spack/spack/operating_systems/linux_distro.py @@ -2,6 +2,7 @@ import platform as py_platform from spack.architecture import OperatingSystem + class LinuxDistro(OperatingSystem): """ This class will represent the autodetected operating system for a Linux System. Since there are many different flavors of @@ -9,6 +10,7 @@ class LinuxDistro(OperatingSystem): autodetection using the python module platform and the method platform.dist() """ + def __init__(self): distname, version, _ = py_platform.linux_distribution( full_distribution_name=False) diff --git a/lib/spack/spack/operating_systems/mac_os.py b/lib/spack/spack/operating_systems/mac_os.py index f35b3ca5771..3e5ab9b2e93 100644 --- a/lib/spack/spack/operating_systems/mac_os.py +++ b/lib/spack/spack/operating_systems/mac_os.py @@ -1,6 +1,7 @@ import platform as py_platform from spack.architecture import OperatingSystem + class MacOs(OperatingSystem): """This class represents the macOS operating system. This will be auto detected using the python platform.mac_ver. The macOS diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 25e07541d07..ff8c8e96bc0 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -34,6 +34,7 @@ README. """ import os +import sys import re import textwrap import time @@ -178,12 +179,10 @@ def install(self, spec, prefix): Most software comes in nicely packaged tarballs, like this one: http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz - Taking a page from homebrew, spack deduces pretty much everything it needs to know from the URL above. If you simply type this: spack create http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz - Spack will download the tarball, generate an md5 hash, figure out the version and the name of the package from the URL, and create a new package file for you with all the names and attributes set correctly. 
@@ -705,13 +704,13 @@ def do_fetch(self, mirror_only=False): # Ask the user whether to skip the checksum if we're # interactive, but just fail if non-interactive. - checksum_msg = "Add a checksum or use --no-checksum to skip this check." # NOQA: ignore=E501 + ck_msg = "Add a checksum or use --no-checksum to skip this check." ignore_checksum = False if sys.stdout.isatty(): ignore_checksum = tty.get_yes_or_no(" Fetch anyway?", default=False) if ignore_checksum: - tty.msg("Fetching with no checksum.", checksum_msg) + tty.msg("Fetching with no checksum.", ck_msg) if not ignore_checksum: raise FetchError("Will not fetch %s" % @@ -1305,9 +1304,10 @@ def do_deactivate(self, **kwargs): continue for dep in aspec.traverse(deptype='run'): if self.spec == dep: + msg = ("Cannot deactivate %s because %s is activated " + "and depends on it.") raise ActivationError( - "Cannot deactivate %s because %s is activated and depends on it." # NOQA: ignore=E501 - % (self.spec.short_spec, aspec.short_spec)) + msg % (self.spec.short_spec, aspec.short_spec)) self.extendee_spec.package.deactivate(self, **self.extendee_args) @@ -1564,6 +1564,7 @@ def make_executable(path): class CMakePackage(StagedPackage): + def make_make(self): import multiprocessing # number of jobs spack will to build with. @@ -1740,12 +1741,14 @@ class ExtensionError(PackageError): class ExtensionConflictError(ExtensionError): + def __init__(self, path): super(ExtensionConflictError, self).__init__( "Extension blocked by file: %s" % path) class ActivationError(ExtensionError): + def __init__(self, msg, long_msg=None): super(ActivationError, self).__init__(msg, long_msg) diff --git a/lib/spack/spack/parse.py b/lib/spack/spack/parse.py index 8adf957e7f2..1b88db2d7c5 100644 --- a/lib/spack/spack/parse.py +++ b/lib/spack/spack/parse.py @@ -29,6 +29,7 @@ class Token: """Represents tokens; generated from input by lexer and fed to parse().""" + def __init__(self, type, value='', start=0, end=0): self.type = type self.value = value @@ -51,11 +52,13 @@ def __cmp__(self, other): class Lexer(object): """Base class for Lexers that keep track of line numbers.""" + def __init__(self, lexicon): self.scanner = re.Scanner(lexicon) def token(self, type, value=''): - return Token(type, value, self.scanner.match.start(0), self.scanner.match.end(0)) + return Token(type, value, + self.scanner.match.start(0), self.scanner.match.end(0)) def lex(self, text): tokens, remainder = self.scanner.scan(text) @@ -66,10 +69,11 @@ def lex(self, text): class Parser(object): """Base class for simple recursive descent parsers.""" + def __init__(self, lexer): - self.tokens = iter([]) # iterators over tokens, handled in order. Starts empty. - self.token = Token(None) # last accepted token starts at beginning of file - self.next = None # next token + self.tokens = iter([]) # iterators over tokens, handled in order. + self.token = Token(None) # last accepted token + self.next = None # next token self.lexer = lexer self.text = None @@ -82,11 +86,12 @@ def gettok(self): def push_tokens(self, iterable): """Adds all tokens in some iterable to the token stream.""" - self.tokens = itertools.chain(iter(iterable), iter([self.next]), self.tokens) + self.tokens = itertools.chain( + iter(iterable), iter([self.next]), self.tokens) self.gettok() def accept(self, id): - """Puts the next symbol in self.token if we like it. 
Then calls gettok()""" + """Put the next symbol in self.token if accepted, then call gettok()""" if self.next and self.next.is_a(id): self.token = self.next self.gettok() @@ -124,9 +129,9 @@ def parse(self, text): return self.do_parse() - class ParseError(spack.error.SpackError): """Raised when we don't hit an error while parsing.""" + def __init__(self, message, string, pos): super(ParseError, self).__init__(message) self.string = string @@ -135,5 +140,6 @@ def __init__(self, message, string, pos): class LexError(ParseError): """Raised when we don't know how to lex something.""" + def __init__(self, message, string, pos): super(LexError, self).__init__(message, string, pos) diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py index c2e181be2fe..0bd9f5d29d3 100644 --- a/lib/spack/spack/patch.py +++ b/lib/spack/spack/patch.py @@ -24,7 +24,6 @@ ############################################################################## import os -import llnl.util.tty as tty from llnl.util.filesystem import join_path import spack @@ -59,7 +58,6 @@ def __init__(self, pkg, path_or_url, level): if not os.path.isfile(self.path): raise NoSuchPatchFileError(pkg_name, self.path) - def apply(self, stage): """Fetch this patch, if necessary, and apply it to the source code in the supplied stage. @@ -84,9 +82,9 @@ def apply(self, stage): patch_stage.destroy() - class NoSuchPatchFileError(spack.error.SpackError): """Raised when user specifies a patch file that doesn't exist.""" + def __init__(self, package, path): super(NoSuchPatchFileError, self).__init__( "No such patch file for package %s: %s" % (package, path)) diff --git a/lib/spack/spack/platforms/bgq.py b/lib/spack/spack/platforms/bgq.py index e0eb76f3363..91afdd04db6 100644 --- a/lib/spack/spack/platforms/bgq.py +++ b/lib/spack/spack/platforms/bgq.py @@ -1,6 +1,7 @@ import os from spack.architecture import Platform, Target + class Bgq(Platform): priority = 30 front_end = 'power7' @@ -15,4 +16,3 @@ def __init__(self): @classmethod def detect(self): return os.path.exists('/bgsys') - diff --git a/lib/spack/spack/platforms/darwin.py b/lib/spack/spack/platforms/darwin.py index d47dd640f99..974ce3a3f9e 100644 --- a/lib/spack/spack/platforms/darwin.py +++ b/lib/spack/spack/platforms/darwin.py @@ -2,6 +2,7 @@ from spack.architecture import Platform, Target from spack.operating_systems.mac_os import MacOs + class Darwin(Platform): priority = 89 front_end = 'x86_64' @@ -21,6 +22,6 @@ def __init__(self): @classmethod def detect(self): - platform = subprocess.Popen(['uname', '-a'], stdout = subprocess.PIPE) + platform = subprocess.Popen(['uname', '-a'], stdout=subprocess.PIPE) platform, _ = platform.communicate() return 'darwin' in platform.strip().lower() diff --git a/lib/spack/spack/platforms/linux.py b/lib/spack/spack/platforms/linux.py index 4d3f59c320d..38d2cdbfecf 100644 --- a/lib/spack/spack/platforms/linux.py +++ b/lib/spack/spack/platforms/linux.py @@ -3,6 +3,7 @@ from spack.architecture import Platform, Target from spack.operating_systems.linux_distro import LinuxDistro + class Linux(Platform): priority = 90 @@ -26,6 +27,6 @@ def __init__(self): @classmethod def detect(self): - platform = subprocess.Popen(['uname', '-a'], stdout = subprocess.PIPE) + platform = subprocess.Popen(['uname', '-a'], stdout=subprocess.PIPE) platform, _ = platform.communicate() return 'linux' in platform.strip().lower() diff --git a/lib/spack/spack/platforms/test.py b/lib/spack/spack/platforms/test.py index 8fa2585a7a1..c918211555f 100644 --- 
a/lib/spack/spack/platforms/test.py +++ b/lib/spack/spack/platforms/test.py @@ -1,4 +1,27 @@ -import subprocess +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## from spack.architecture import Platform, Target from spack.operating_systems.linux_distro import LinuxDistro from spack.operating_systems.cnl import Cnl @@ -9,7 +32,7 @@ class Test(Platform): front_end = 'x86_32' back_end = 'x86_64' default = 'x86_64' - + back_os = 'CNL10' default_os = 'CNL10' diff --git a/lib/spack/spack/preferred_packages.py b/lib/spack/spack/preferred_packages.py index f079c1ef8b0..45a41c8e2b9 100644 --- a/lib/spack/spack/preferred_packages.py +++ b/lib/spack/spack/preferred_packages.py @@ -156,7 +156,7 @@ def spec_has_preferred_provider(self, pkgname, provider_str): """Return True iff the named package has a list of preferred providers""" return bool(self._order_for_package(pkgname, 'providers', - provider_str, False)) + provider_str, False)) def spec_preferred_variants(self, pkgname): """Return a VariantMap of preferred variants and their values""" diff --git a/lib/spack/spack/provider_index.py b/lib/spack/spack/provider_index.py index b5fbb67c6ea..3f9cd285e71 100644 --- a/lib/spack/spack/provider_index.py +++ b/lib/spack/spack/provider_index.py @@ -52,6 +52,7 @@ class ProviderIndex(object): matching implementation of MPI. """ + def __init__(self, specs=None, restrict=False): """Create a new ProviderIndex. diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index d751a98b359..2d8dc39648e 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -68,6 +68,7 @@ def _autospec(function): """Decorator that automatically converts the argument of a single-arg function to a Spec.""" + def converter(self, spec_like, *args, **kwargs): if not isinstance(spec_like, spack.spec.Spec): spec_like = spack.spec.Spec(spec_like) @@ -77,6 +78,7 @@ def converter(self, spec_like, *args, **kwargs): class SpackNamespace(ModuleType): """ Allow lazy loading of modules.""" + def __init__(self, namespace): super(SpackNamespace, self).__init__(namespace) self.__file__ = "(spack namespace)" @@ -112,6 +114,7 @@ class RepoPath(object): combined results of the Repos in its list instead of on a single package repository. 
""" + def __init__(self, *repo_dirs, **kwargs): # super-namespace for all packages in the RepoPath self.super_namespace = kwargs.get('namespace', repo_namespace) @@ -360,6 +363,7 @@ class Repo(object): A Python namespace where the repository's packages should live. """ + def __init__(self, root, namespace=repo_namespace): """Instantiate a package repository from a filesystem path. @@ -923,6 +927,7 @@ class PackageLoadError(spack.error.SpackError): class UnknownPackageError(PackageLoadError): """Raised when we encounter a package spack doesn't have.""" + def __init__(self, name, repo=None): msg = None if repo: @@ -935,6 +940,7 @@ def __init__(self, name, repo=None): class UnknownNamespaceError(PackageLoadError): """Raised when we encounter an unknown namespace""" + def __init__(self, namespace): super(UnknownNamespaceError, self).__init__( "Unknown namespace: %s" % namespace) @@ -942,6 +948,7 @@ def __init__(self, namespace): class FailedConstructorError(PackageLoadError): """Raised when a package's class constructor fails.""" + def __init__(self, name, exc_type, exc_obj, exc_tb): super(FailedConstructorError, self).__init__( "Class constructor failed for package '%s'." % name, diff --git a/lib/spack/spack/resource.py b/lib/spack/spack/resource.py index 24b675f8da1..1d4d448298e 100644 --- a/lib/spack/spack/resource.py +++ b/lib/spack/spack/resource.py @@ -31,9 +31,11 @@ class Resource(object): + """Represents an optional resource to be fetched by a package. + + Aggregates a name, a fetcher, a destination and a placement. """ - Represents an optional resource. Aggregates a name, a fetcher, a destination and a placement - """ + def __init__(self, name, fetcher, destination, placement): self.name = name self.fetcher = fetcher diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index a37b39be671..0d72d454c62 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -166,6 +166,7 @@ def colorize_spec(spec): """Returns a spec colorized according to the colors specified in color_formats.""" class insert_color: + def __init__(self): self.last = None @@ -186,6 +187,7 @@ class CompilerSpec(object): """The CompilerSpec field represents the compiler or range of compiler versions that a package should be built with. CompilerSpecs have a name and a version list. """ + def __init__(self, *args): nargs = len(args) if nargs == 1: @@ -296,6 +298,7 @@ class DependencySpec(object): - spec: the spack.spec.Spec description of a dependency. - deptypes: strings representing the type of dependency this is. """ + def __init__(self, spec, deptypes): self.spec = spec self.deptypes = deptypes @@ -317,6 +320,7 @@ class VariantSpec(object): on the particular package being built, and each named variant can be enabled or disabled. 
""" + def __init__(self, name, value): self.name = name self.value = value @@ -447,9 +451,9 @@ def __str__(self): sorted_keys = filter( lambda flag: self[flag] != [], sorted(self.keys())) cond_symbol = ' ' if len(sorted_keys) > 0 else '' - return cond_symbol + ' '.join(str(key) + '=\"' + ' '.join(str(f) - for f in self[key]) + '\"' - for key in sorted_keys) + return cond_symbol + ' '.join( + str(key) + '=\"' + ' '.join( + str(f) for f in self[key]) + '\"' for key in sorted_keys) class DependencyMap(HashableMap): @@ -910,7 +914,7 @@ def to_node_dict(self): params = dict((name, v.value) for name, v in self.variants.items()) params.update(dict((name, value) - for name, value in self.compiler_flags.items())) + for name, value in self.compiler_flags.items())) if params: d['parameters'] = params @@ -1598,8 +1602,8 @@ def constrain(self, other, deps=True): raise UnsatisfiableSpecNameError(self.name, other.name) if (other.namespace is not None and - self.namespace is not None and - other.namespace != self.namespace): + self.namespace is not None and + other.namespace != self.namespace): raise UnsatisfiableSpecNameError(self.fullname, other.fullname) if not self.versions.overlaps(other.versions): @@ -1753,8 +1757,8 @@ def satisfies(self, other, deps=True, strict=False): # namespaces either match, or other doesn't require one. if (other.namespace is not None and - self.namespace is not None and - self.namespace != other.namespace): + self.namespace is not None and + self.namespace != other.namespace): return False if self.versions and other.versions: if not self.versions.satisfies(other.versions, strict=strict): @@ -1849,7 +1853,7 @@ def satisfies_dependencies(self, other, strict=False): # compatible with mpich2) for spec in self.virtual_dependencies(): if (spec.name in other_index and - not other_index.providers_for(spec)): + not other_index.providers_for(spec)): return False for spec in other.virtual_dependencies(): @@ -2345,6 +2349,7 @@ def __init__(self): class SpecParser(spack.parse.Parser): + def __init__(self): super(SpecParser, self).__init__(_lexer) self.previous = None diff --git a/lib/spack/spack/test/architecture.py b/lib/spack/spack/test/architecture.py index b8441bdac42..22ddd4c97ef 100644 --- a/lib/spack/spack/test/architecture.py +++ b/lib/spack/spack/test/architecture.py @@ -40,6 +40,7 @@ class ArchitectureTest(MockPackagesTest): + def setUp(self): super(ArchitectureTest, self).setUp() self.platform = spack.architecture.platform() diff --git a/lib/spack/spack/test/cc.py b/lib/spack/spack/test/cc.py index ea2b1644621..f3e4bb31d2d 100644 --- a/lib/spack/spack/test/cc.py +++ b/lib/spack/spack/test/cc.py @@ -45,7 +45,8 @@ '-llib1', '-llib2', 'arg4', '-Wl,--end-group', - '-Xlinker', '-rpath', '-Xlinker', '/third/rpath', '-Xlinker', '-rpath', '-Xlinker', '/fourth/rpath', + '-Xlinker', '-rpath', '-Xlinker', '/third/rpath', '-Xlinker', + '-rpath', '-Xlinker', '/fourth/rpath', '-llib3', '-llib4', 'arg5', 'arg6'] @@ -67,7 +68,7 @@ def setUp(self): os.environ['SPACK_FC'] = self.realcc os.environ['SPACK_PREFIX'] = self.prefix - os.environ['SPACK_ENV_PATH']="test" + os.environ['SPACK_ENV_PATH'] = "test" os.environ['SPACK_DEBUG_LOG_DIR'] = "." 
os.environ['SPACK_COMPILER_SPEC'] = "gcc@4.4.7" os.environ['SPACK_SHORT_SPEC'] = "foo@1.2" @@ -97,16 +98,13 @@ def setUp(self): if 'SPACK_DEPENDENCIES' in os.environ: del os.environ['SPACK_DEPENDENCIES'] - def tearDown(self): shutil.rmtree(self.tmp_deps, True) - def check_cc(self, command, args, expected): os.environ['SPACK_TEST_COMMAND'] = command self.assertEqual(self.cc(*args, output=str).strip(), expected) - def check_cxx(self, command, args, expected): os.environ['SPACK_TEST_COMMAND'] = command self.assertEqual(self.cxx(*args, output=str).strip(), expected) @@ -115,46 +113,46 @@ def check_fc(self, command, args, expected): os.environ['SPACK_TEST_COMMAND'] = command self.assertEqual(self.fc(*args, output=str).strip(), expected) - def check_ld(self, command, args, expected): os.environ['SPACK_TEST_COMMAND'] = command self.assertEqual(self.ld(*args, output=str).strip(), expected) - def check_cpp(self, command, args, expected): os.environ['SPACK_TEST_COMMAND'] = command self.assertEqual(self.cpp(*args, output=str).strip(), expected) - def test_vcheck_mode(self): self.check_cc('dump-mode', ['-I/include', '--version'], "vcheck") self.check_cc('dump-mode', ['-I/include', '-V'], "vcheck") self.check_cc('dump-mode', ['-I/include', '-v'], "vcheck") self.check_cc('dump-mode', ['-I/include', '-dumpversion'], "vcheck") self.check_cc('dump-mode', ['-I/include', '--version', '-c'], "vcheck") - self.check_cc('dump-mode', ['-I/include', '-V', '-o', 'output'], "vcheck") - + self.check_cc('dump-mode', ['-I/include', + '-V', '-o', 'output'], "vcheck") def test_cpp_mode(self): self.check_cc('dump-mode', ['-E'], "cpp") self.check_cpp('dump-mode', [], "cpp") - def test_as_mode(self): self.check_cc('dump-mode', ['-S'], "as") - def test_ccld_mode(self): self.check_cc('dump-mode', [], "ccld") self.check_cc('dump-mode', ['foo.c', '-o', 'foo'], "ccld") - self.check_cc('dump-mode', ['foo.c', '-o', 'foo', '-Wl,-rpath,foo'], "ccld") - self.check_cc('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'], "ccld") - + self.check_cc('dump-mode', ['foo.c', '-o', + 'foo', '-Wl,-rpath,foo'], "ccld") + self.check_cc( + 'dump-mode', + ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'], + "ccld") def test_ld_mode(self): self.check_ld('dump-mode', [], "ld") - self.check_ld('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'], "ld") - + self.check_ld( + 'dump-mode', + ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'], + "ld") def test_flags(self): os.environ['SPACK_LDFLAGS'] = '-L foo' @@ -176,10 +174,11 @@ def test_flags(self): # Test cppflags added properly in cpp mode self.check_cpp('dump-args', test_command, "cpp " + - '-g -O1 ' + - ' '.join(test_command)) + '-g -O1 ' + + ' '.join(test_command)) - # Test ldflags, cppflags, and language specific flags are added in proper order + # Test ldflags, cppflags, and language specific flags are added in + # proper order self.check_cc('dump-args', test_command, self.realcc + ' ' + '-Wl,-rpath,' + self.prefix + '/lib ' + @@ -191,14 +190,14 @@ def test_flags(self): '-lfoo') self.check_cxx('dump-args', test_command, - self.realcc + ' ' + - '-Wl,-rpath,' + self.prefix + '/lib ' + - '-Wl,-rpath,' + self.prefix + '/lib64 ' + - '-g -O1 ' + - '-Werror ' + - '-L foo ' + - ' '.join(test_command) + ' ' + - '-lfoo') + self.realcc + ' ' + + '-Wl,-rpath,' + self.prefix + '/lib ' + + '-Wl,-rpath,' + self.prefix + '/lib64 ' + + '-g -O1 ' + + '-Werror ' + + '-L foo ' + + ' '.join(test_command) + ' ' + + '-lfoo') self.check_fc('dump-args', 
test_command, self.realcc + ' ' + @@ -210,9 +209,8 @@ def test_flags(self): ' '.join(test_command) + ' ' + '-lfoo') - os.environ['SPACK_LDFLAGS']='' - os.environ['SPACK_LDLIBS']='' - + os.environ['SPACK_LDFLAGS'] = '' + os.environ['SPACK_LDLIBS'] = '' def test_dep_rpath(self): """Ensure RPATHs for root package are added.""" @@ -222,7 +220,6 @@ def test_dep_rpath(self): '-Wl,-rpath,' + self.prefix + '/lib64 ' + ' '.join(test_command)) - def test_dep_include(self): """Ensure a single dependency include directory is added.""" os.environ['SPACK_DEPENDENCIES'] = self.dep4 @@ -233,7 +230,6 @@ def test_dep_include(self): '-I' + self.dep4 + '/include ' + ' '.join(test_command)) - def test_dep_lib(self): """Ensure a single dependency RPATH is added.""" os.environ['SPACK_DEPENDENCIES'] = self.dep2 @@ -245,7 +241,6 @@ def test_dep_lib(self): '-Wl,-rpath,' + self.dep2 + '/lib64 ' + ' '.join(test_command)) - def test_all_deps(self): """Ensure includes and RPATHs for all deps are added. """ os.environ['SPACK_DEPENDENCIES'] = ':'.join([ @@ -274,7 +269,6 @@ def test_all_deps(self): ' '.join(test_command)) - def test_ld_deps(self): """Ensure no (extra) -I args or -Wl, are passed in ld mode.""" os.environ['SPACK_DEPENDENCIES'] = ':'.join([ diff --git a/lib/spack/spack/test/cmd/module.py b/lib/spack/spack/test/cmd/module.py index 36a4a73fe64..3a0ce32e6cb 100644 --- a/lib/spack/spack/test/cmd/module.py +++ b/lib/spack/spack/test/cmd/module.py @@ -33,16 +33,17 @@ class TestModule(spack.test.mock_database.MockDatabase): def _get_module_files(self, args): - return [ - modules.module_types[args.module_type](spec).file_name for spec in args.specs # NOQA: ignore=E501 - ] + return [modules.module_types[args.module_type](spec).file_name + for spec in args.specs] def test_module_common_operations(self): parser = argparse.ArgumentParser() module.setup_parser(parser) + # Try to remove a non existing module [tcl] args = parser.parse_args(['rm', 'doesnotexist']) self.assertRaises(SystemExit, module.module, parser, args) + # Remove existing modules [tcl] args = parser.parse_args(['rm', '-y', 'mpileaks']) module_files = self._get_module_files(args) @@ -51,22 +52,28 @@ def test_module_common_operations(self): module.module(parser, args) for item in module_files: self.assertFalse(os.path.exists(item)) + # Add them back [tcl] args = parser.parse_args(['refresh', '-y', 'mpileaks']) module.module(parser, args) for item in module_files: self.assertTrue(os.path.exists(item)) + # TODO : test the --delete-tree option # TODO : this requires having a separate directory for test modules + # Try to find a module with multiple matches args = parser.parse_args(['find', 'mpileaks']) self.assertRaises(SystemExit, module.module, parser, args) + # Try to find a module with no matches args = parser.parse_args(['find', 'doesnotexist']) self.assertRaises(SystemExit, module.module, parser, args) + # Try to find a module args = parser.parse_args(['find', 'libelf']) module.module(parser, args) + # Remove existing modules [dotkit] args = parser.parse_args(['rm', '-y', '-m', 'dotkit', 'mpileaks']) module_files = self._get_module_files(args) @@ -75,6 +82,7 @@ def test_module_common_operations(self): module.module(parser, args) for item in module_files: self.assertFalse(os.path.exists(item)) + # Add them back [dotkit] args = parser.parse_args(['refresh', '-y', '-m', 'dotkit', 'mpileaks']) module.module(parser, args) diff --git a/lib/spack/spack/test/cmd/test_compiler_cmd.py b/lib/spack/spack/test/cmd/test_compiler_cmd.py index d89814154b6..fa806ee6f42 
100644 --- a/lib/spack/spack/test/cmd/test_compiler_cmd.py +++ b/lib/spack/spack/test/cmd/test_compiler_cmd.py @@ -12,7 +12,9 @@ test_version = '4.5-spacktest' + class MockArgs(object): + def __init__(self, add_paths=[], scope=None, compiler_spec=None, all=None): self.add_paths = add_paths self.scope = scope @@ -52,14 +54,12 @@ def make_mock_compiler(): class CompilerCmdTest(MockPackagesTest): """ Test compiler commands for add and remove """ - def test_compiler_remove(self): args = MockArgs(all=True, compiler_spec='gcc@4.5.0') spack.cmd.compiler.compiler_remove(args) compilers = spack.compilers.all_compilers() self.assertTrue(spack.spec.CompilerSpec("gcc@4.5.0") not in compilers) - def test_compiler_add(self): # compilers available by default. old_compilers = set(spack.compilers.all_compilers()) @@ -75,7 +75,8 @@ def test_compiler_add(self): new_compilers = set(spack.compilers.all_compilers()) new_compiler = new_compilers - old_compilers self.assertTrue(new_compiler) - self.assertTrue(new_compiler.pop().version == Version(test_version)) + self.assertTrue(new_compiler.pop().version == + Version(test_version)) finally: shutil.rmtree(compiler_dir, ignore_errors=True) diff --git a/lib/spack/spack/test/cmd/uninstall.py b/lib/spack/spack/test/cmd/uninstall.py index 9fffaace402..4ccb9ddbf48 100644 --- a/lib/spack/spack/test/cmd/uninstall.py +++ b/lib/spack/spack/test/cmd/uninstall.py @@ -28,6 +28,7 @@ class MockArgs(object): + def __init__(self, packages, all=False, force=False, dependents=False): self.packages = packages self.all = all @@ -37,6 +38,7 @@ def __init__(self, packages, all=False, force=False, dependents=False): class TestUninstall(spack.test.mock_database.MockDatabase): + def test_uninstall(self): parser = None # Multiple matches diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py index 252d77e66be..0822e44db8f 100644 --- a/lib/spack/spack/test/config.py +++ b/lib/spack/spack/test/config.py @@ -32,79 +32,80 @@ from spack.test.mock_packages_test import * # Some sample compiler config data -a_comps = [ +a_comps = [ {'compiler': { 'paths': { - "cc" : "/gcc473", + "cc": "/gcc473", "cxx": "/g++473", "f77": None, - "fc" : None - }, + "fc": None + }, 'modules': None, 'spec': 'gcc@4.7.3', 'operating_system': 'CNL10' - }}, + }}, {'compiler': { 'paths': { - "cc" : "/gcc450", + "cc": "/gcc450", "cxx": "/g++450", "f77": 'gfortran', - "fc" : 'gfortran' - }, + "fc": 'gfortran' + }, 'modules': None, 'spec': 'gcc@4.5.0', 'operating_system': 'CNL10' - }}, + }}, {'compiler': { 'paths': { - "cc" : "", + "cc": "", "cxx": "", "f77": '', - "fc" : '' }, + "fc": ''}, 'modules': None, 'spec': 'clang@3.3', 'operating_system': 'CNL10' - }} + }} ] b_comps = [ {'compiler': { 'paths': { - "cc" : "/icc100", + "cc": "/icc100", "cxx": "/icp100", "f77": None, - "fc" : None - }, + "fc": None + }, 'modules': None, 'spec': 'icc@10.0', 'operating_system': 'CNL10' - }}, + }}, {'compiler': { 'paths': { - "cc" : "/icc111", + "cc": "/icc111", "cxx": "/icp111", "f77": 'ifort', - "fc" : 'ifort' - }, + "fc": 'ifort' + }, 'modules': None, 'spec': 'icc@11.1', 'operating_system': 'CNL10' - }}, + }}, {'compiler': { 'paths': { - "cc" : "", + "cc": "", "cxx": "", "f77": '', - "fc" : '' }, + "fc": ''}, 'modules': None, 'spec': 'clang@3.3', 'operating_system': 'CNL10' - }} + }} ] # Some Sample repo data -repos_low = [ "/some/path" ] -repos_high = [ "/some/other/path" ] +repos_low = ["/some/path"] +repos_high = ["/some/other/path"] + class ConfigTest(MockPackagesTest): @@ -112,14 +113,15 @@ def setUp(self): 
super(ConfigTest, self).setUp() self.tmp_dir = mkdtemp('.tmp', 'spack-config-test-') spack.config.config_scopes = OrderedDict() - spack.config.ConfigScope('test_low_priority', os.path.join(self.tmp_dir, 'low')) - spack.config.ConfigScope('test_high_priority', os.path.join(self.tmp_dir, 'high')) + spack.config.ConfigScope( + 'test_low_priority', os.path.join(self.tmp_dir, 'low')) + spack.config.ConfigScope('test_high_priority', + os.path.join(self.tmp_dir, 'high')) def tearDown(self): super(ConfigTest, self).tearDown() shutil.rmtree(self.tmp_dir, True) - def check_config(self, comps, *compiler_names): """Check that named compilers in comps match Spack's config.""" config = spack.config.get_config('compilers') @@ -146,7 +148,7 @@ def test_write_list_in_memory(self): spack.config.update_config('repos', repos_low, 'test_low_priority') spack.config.update_config('repos', repos_high, 'test_high_priority') config = spack.config.get_config('repos') - self.assertEqual(config, repos_high+repos_low) + self.assertEqual(config, repos_high + repos_low) def test_write_key_in_memory(self): # Write b_comps "on top of" a_comps. @@ -157,7 +159,6 @@ def test_write_key_in_memory(self): self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0') self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3') - def test_write_key_to_disk(self): # Write b_comps "on top of" a_comps. spack.config.update_config('compilers', a_comps, 'test_low_priority') diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index 0d44a27b7e8..22b1f17890e 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -71,6 +71,7 @@ def add_rec(spec): class DatabaseTest(MockDatabase): + def test_005_db_exists(self): """Make sure db cache file exists after creating.""" index_file = join_path(self.install_path, '.spack-db', 'index.yaml') diff --git a/lib/spack/spack/test/directory_layout.py b/lib/spack/spack/test/directory_layout.py index 74669fe8a23..2d0565acaec 100644 --- a/lib/spack/spack/test/directory_layout.py +++ b/lib/spack/spack/test/directory_layout.py @@ -49,13 +49,11 @@ def setUp(self): self.tmpdir = tempfile.mkdtemp() self.layout = YamlDirectoryLayout(self.tmpdir) - def tearDown(self): super(DirectoryLayoutTest, self).tearDown() shutil.rmtree(self.tmpdir, ignore_errors=True) self.layout = None - def test_read_and_write_spec(self): """This goes through each package in spack and creates a directory for it. It then ensures that the spec for the directory's @@ -67,8 +65,8 @@ def test_read_and_write_spec(self): for pkg in packages: if pkg.name.startswith('external'): - #External package tests cannot be installed - continue + # External package tests cannot be installed + continue spec = pkg.spec # If a spec fails to concretize, just skip it. 
If it is a @@ -115,7 +113,6 @@ def test_read_and_write_spec(self): self.assertFalse(os.path.isdir(install_dir)) self.assertFalse(os.path.exists(install_dir)) - def test_handle_unknown_package(self): """This test ensures that spack can at least do *some* operations with packages that are installed but that it @@ -166,7 +163,6 @@ def test_handle_unknown_package(self): spack.repo.swap(mock_db) - def test_find(self): """Test that finding specs within an install layout works.""" packages = list(spack.repo.all_packages())[:max_packages] @@ -175,13 +171,14 @@ def test_find(self): installed_specs = {} for pkg in packages: if pkg.name.startswith('external'): - #External package tests cannot be installed + # External package tests cannot be installed continue spec = pkg.spec.concretized() installed_specs[spec.name] = spec self.layout.create_install_directory(spec) - # Make sure all the installed specs appear in DirectoryLayout.all_specs() + # Make sure all the installed specs appear in + # DirectoryLayout.all_specs() found_specs = dict((s.name, s) for s in self.layout.all_specs()) for name, spec in found_specs.items(): self.assertTrue(name in found_specs) diff --git a/lib/spack/spack/test/environment.py b/lib/spack/spack/test/environment.py index 23969618880..9b5d75f2734 100644 --- a/lib/spack/spack/test/environment.py +++ b/lib/spack/spack/test/environment.py @@ -38,7 +38,8 @@ def setUp(self): os.environ['UNSET_ME'] = 'foo' os.environ['EMPTY_PATH_LIST'] = '' os.environ['PATH_LIST'] = '/path/second:/path/third' - os.environ['REMOVE_PATH_LIST'] = '/a/b:/duplicate:/a/c:/remove/this:/a/d:/duplicate/:/f/g' # NOQA: ignore=E501 + os.environ['REMOVE_PATH_LIST'] = \ + '/a/b:/duplicate:/a/c:/remove/this:/a/d:/duplicate/:/f/g' def tearDown(self): pass diff --git a/lib/spack/spack/test/git_fetch.py b/lib/spack/spack/test/git_fetch.py index 4de65760d70..0d1a8fe9498 100644 --- a/lib/spack/spack/test/git_fetch.py +++ b/lib/spack/spack/test/git_fetch.py @@ -87,33 +87,29 @@ def try_fetch(self, rev, test_file, args): self.assert_rev(rev) - def test_fetch_master(self): """Test a default git checkout with no commit or tag specified.""" self.try_fetch('master', self.repo.r0_file, { - 'git' : self.repo.path + 'git': self.repo.path }) - def test_fetch_branch(self): """Test fetching a branch.""" self.try_fetch(self.repo.branch, self.repo.branch_file, { - 'git' : self.repo.path, - 'branch' : self.repo.branch + 'git': self.repo.path, + 'branch': self.repo.branch }) - def test_fetch_tag(self): """Test fetching a tag.""" self.try_fetch(self.repo.tag, self.repo.tag_file, { - 'git' : self.repo.path, - 'tag' : self.repo.tag + 'git': self.repo.path, + 'tag': self.repo.tag }) - def test_fetch_commit(self): """Test fetching a particular commit.""" self.try_fetch(self.repo.r1, self.repo.r1_file, { - 'git' : self.repo.path, - 'commit' : self.repo.r1 + 'git': self.repo.path, + 'commit': self.repo.r1 }) diff --git a/lib/spack/spack/test/hg_fetch.py b/lib/spack/spack/test/hg_fetch.py index 292ffba9493..44af6730a1d 100644 --- a/lib/spack/spack/test/hg_fetch.py +++ b/lib/spack/spack/test/hg_fetch.py @@ -83,17 +83,15 @@ def try_fetch(self, rev, test_file, args): self.assertEqual(self.repo.get_rev(), rev) - def test_fetch_default(self): """Test a default hg checkout with no commit or tag specified.""" self.try_fetch(self.repo.r1, self.repo.r1_file, { - 'hg' : self.repo.path + 'hg': self.repo.path }) - def test_fetch_rev0(self): """Test fetching a branch.""" self.try_fetch(self.repo.r0, self.repo.r0_file, { - 'hg' : self.repo.path, - 
'revision' : self.repo.r0 + 'hg': self.repo.path, + 'revision': self.repo.r0 }) diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py index 390ec096a99..232d5aeeaff 100644 --- a/lib/spack/spack/test/install.py +++ b/lib/spack/spack/test/install.py @@ -55,7 +55,6 @@ def setUp(self): spack.install_layout = YamlDirectoryLayout(self.tmpdir) spack.installed_db = Database(self.tmpdir) - def tearDown(self): super(InstallTest, self).tearDown() self.repo.destroy() @@ -68,14 +67,12 @@ def tearDown(self): spack.installed_db = self.orig_db shutil.rmtree(self.tmpdir, ignore_errors=True) - def fake_fetchify(self, pkg): """Fake the URL for a package so it downloads from a file.""" fetcher = FetchStrategyComposite() fetcher.append(URLFetchStrategy(self.repo.url)) pkg.fetcher = fetcher - def test_install_and_uninstall(self): # Get a basic concrete spec for the trivial install package. spec = Spec('trivial_install_test_package') @@ -90,11 +87,10 @@ def test_install_and_uninstall(self): try: pkg.do_install() pkg.do_uninstall() - except Exception, e: + except Exception: pkg.remove_prefix() raise - def test_install_environment(self): spec = Spec('cmake-client').concretized() @@ -104,6 +100,6 @@ def test_install_environment(self): pkg = spec.package try: pkg.do_install() - except Exception, e: + except Exception: pkg.remove_prefix() raise diff --git a/lib/spack/spack/test/link_tree.py b/lib/spack/spack/test/link_tree.py index de40991b575..5d0a7430b6e 100644 --- a/lib/spack/spack/test/link_tree.py +++ b/lib/spack/spack/test/link_tree.py @@ -53,16 +53,13 @@ def setUp(self): def tearDown(self): self.stage.destroy() - def check_file_link(self, filename): self.assertTrue(os.path.isfile(filename)) self.assertTrue(os.path.islink(filename)) - def check_dir(self, filename): self.assertTrue(os.path.isdir(filename)) - def test_merge_to_new_directory(self): with working_dir(self.stage.path): self.link_tree.merge('dest') @@ -79,7 +76,6 @@ def test_merge_to_new_directory(self): self.assertFalse(os.path.exists('dest')) - def test_merge_to_existing_directory(self): with working_dir(self.stage.path): @@ -112,7 +108,6 @@ def test_merge_to_existing_directory(self): self.assertFalse(os.path.isfile('dest/c/d/6')) self.assertFalse(os.path.isfile('dest/c/d/e/7')) - def test_merge_with_empty_directories(self): with working_dir(self.stage.path): mkdirp('dest/f/g') @@ -132,7 +127,6 @@ def test_merge_with_empty_directories(self): self.assertTrue(os.path.isdir('dest/a/b/h')) self.assertTrue(os.path.isdir('dest/f/g')) - def test_ignore(self): with working_dir(self.stage.path): touchp('source/.spec') diff --git a/lib/spack/spack/test/lock.py b/lib/spack/spack/test/lock.py index b24050aa749..fb96539897a 100644 --- a/lib/spack/spack/test/lock.py +++ b/lib/spack/spack/test/lock.py @@ -329,6 +329,7 @@ def do_write_with_exception(): def test_transaction_with_context_manager(self): class TestContextManager(object): + def __enter__(self): vals['entered'] = True @@ -388,6 +389,7 @@ def exit_fn(t, v, tb): def test_transaction_with_context_manager_and_exception(self): class TestContextManager(object): + def __enter__(self): vals['entered'] = True diff --git a/lib/spack/spack/test/make_executable.py b/lib/spack/spack/test/make_executable.py index b7a45a3f729..87a43a529a9 100644 --- a/lib/spack/spack/test/make_executable.py +++ b/lib/spack/spack/test/make_executable.py @@ -38,6 +38,7 @@ class MakeExecutableTest(unittest.TestCase): + def setUp(self): self.tmpdir = tempfile.mkdtemp() @@ -49,34 +50,30 @@ def setUp(self): 
path_put_first('PATH', [self.tmpdir]) - def tearDown(self): shutil.rmtree(self.tmpdir) - def test_make_normal(self): make = MakeExecutable('make', 8) self.assertEqual(make(output=str).strip(), '-j8') self.assertEqual(make('install', output=str).strip(), '-j8 install') - def test_make_explicit(self): make = MakeExecutable('make', 8) self.assertEqual(make(parallel=True, output=str).strip(), '-j8') - self.assertEqual(make('install', parallel=True, output=str).strip(), '-j8 install') - + self.assertEqual(make('install', parallel=True, + output=str).strip(), '-j8 install') def test_make_one_job(self): make = MakeExecutable('make', 1) self.assertEqual(make(output=str).strip(), '') self.assertEqual(make('install', output=str).strip(), 'install') - def test_make_parallel_false(self): make = MakeExecutable('make', 8) self.assertEqual(make(parallel=False, output=str).strip(), '') - self.assertEqual(make('install', parallel=False, output=str).strip(), 'install') - + self.assertEqual(make('install', parallel=False, + output=str).strip(), 'install') def test_make_parallel_disabled(self): make = MakeExecutable('make', 8) @@ -100,26 +97,29 @@ def test_make_parallel_disabled(self): del os.environ['SPACK_NO_PARALLEL_MAKE'] - def test_make_parallel_precedence(self): make = MakeExecutable('make', 8) # These should work os.environ['SPACK_NO_PARALLEL_MAKE'] = 'true' self.assertEqual(make(parallel=True, output=str).strip(), '') - self.assertEqual(make('install', parallel=True, output=str).strip(), 'install') + self.assertEqual(make('install', parallel=True, + output=str).strip(), 'install') os.environ['SPACK_NO_PARALLEL_MAKE'] = '1' self.assertEqual(make(parallel=True, output=str).strip(), '') - self.assertEqual(make('install', parallel=True, output=str).strip(), 'install') + self.assertEqual(make('install', parallel=True, + output=str).strip(), 'install') # These don't disable (false and random string) os.environ['SPACK_NO_PARALLEL_MAKE'] = 'false' self.assertEqual(make(parallel=True, output=str).strip(), '-j8') - self.assertEqual(make('install', parallel=True, output=str).strip(), '-j8 install') + self.assertEqual(make('install', parallel=True, + output=str).strip(), '-j8 install') os.environ['SPACK_NO_PARALLEL_MAKE'] = 'foobar' self.assertEqual(make(parallel=True, output=str).strip(), '-j8') - self.assertEqual(make('install', parallel=True, output=str).strip(), '-j8 install') + self.assertEqual(make('install', parallel=True, + output=str).strip(), '-j8 install') del os.environ['SPACK_NO_PARALLEL_MAKE'] diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py index b682d4e0970..d6d7b30b7c7 100644 --- a/lib/spack/spack/test/mirror.py +++ b/lib/spack/spack/test/mirror.py @@ -35,6 +35,7 @@ class MirrorTest(MockPackagesTest): + def setUp(self): """Sets up a mock package and a mock repo for each fetch strategy, to ensure that the mirror can create archives for each of them. @@ -42,7 +43,6 @@ def setUp(self): super(MirrorTest, self).setUp() self.repos = {} - def tearDown(self): """Destroy all the stages created by the repos in setup.""" super(MirrorTest, self).tearDown() @@ -50,7 +50,6 @@ def tearDown(self): repo.destroy() self.repos.clear() - def set_up_package(self, name, MockRepoClass, url_attr): """Set up a mock package to be mirrored. 
Each package needs us to: @@ -71,16 +70,14 @@ def set_up_package(self, name, MockRepoClass, url_attr): v = next(iter(pkg.versions)) pkg.versions[v][url_attr] = repo.url - def check_mirror(self): with Stage('spack-mirror-test') as stage: mirror_root = join_path(stage.path, 'test-mirror') # register mirror with spack config - mirrors = { 'spack-mirror-test' : 'file://' + mirror_root } + mirrors = {'spack-mirror-test': 'file://' + mirror_root} spack.config.update_config('mirrors', mirrors) - os.chdir(stage.path) spack.mirror.create( mirror_root, self.repos, no_checksum=True) @@ -110,16 +107,18 @@ def check_mirror(self): original_path = mock_repo.path if 'svn' in name: # have to check out the svn repo to compare. - original_path = join_path(mock_repo.path, 'checked_out') + original_path = join_path( + mock_repo.path, 'checked_out') svn('checkout', mock_repo.url, original_path) dcmp = dircmp(original_path, pkg.stage.source_path) - # make sure there are no new files in the expanded tarball + # make sure there are no new files in the expanded + # tarball self.assertFalse(dcmp.right_only) # and that all original files are present. - self.assertTrue(all(l in exclude for l in dcmp.left_only)) + self.assertTrue( + all(l in exclude for l in dcmp.left_only)) spack.do_checksum = saved_checksum_setting - def test_git_mirror(self): self.set_up_package('git-test', MockGitRepo, 'git') self.check_mirror() diff --git a/lib/spack/spack/test/mock_database.py b/lib/spack/spack/test/mock_database.py index da01e82bfad..d5867f06ec2 100644 --- a/lib/spack/spack/test/mock_database.py +++ b/lib/spack/spack/test/mock_database.py @@ -33,6 +33,7 @@ class MockDatabase(MockPackagesTest): + def _mock_install(self, spec): s = Spec(spec) s.concretize() diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py index 9d96622a6e3..82c2712b0ea 100644 --- a/lib/spack/spack/test/mock_packages_test.py +++ b/lib/spack/spack/test/mock_packages_test.py @@ -155,7 +155,9 @@ externalmodule@1.0%gcc@4.5.0: external-module """ + class MockPackagesTest(unittest.TestCase): + def initmock(self): # Use the mock packages database for these tests. 
This allows # us to set up contrived packages that don't interfere with @@ -172,7 +174,8 @@ def initmock(self): self.mock_user_config = os.path.join(self.temp_config, 'user') mkdirp(self.mock_site_config) mkdirp(self.mock_user_config) - for confs in [('compilers.yaml', mock_compiler_config), ('packages.yaml', mock_packages_config)]: + for confs in [('compilers.yaml', mock_compiler_config), + ('packages.yaml', mock_packages_config)]: conf_yaml = os.path.join(self.mock_site_config, confs[0]) with open(conf_yaml, 'w') as f: f.write(confs[1]) @@ -209,7 +212,6 @@ def set_pkg_dep(self, pkg_name, spec, deptypes=spack.alldeps): pkg.dependencies[spec.name] = {Spec(pkg_name): spec} pkg._deptypes[spec.name] = set(deptypes) - def cleanmock(self): """Restore the real packages path after any test.""" spack.repo.swap(self.db) @@ -226,10 +228,8 @@ def cleanmock(self): shutil.rmtree(spack.share_path, ignore_errors=True) spack.share_path = self.real_share_path - def setUp(self): self.initmock() - def tearDown(self): self.cleanmock() diff --git a/lib/spack/spack/test/mock_repo.py b/lib/spack/spack/test/mock_repo.py index 386af282e71..0ae7dbd5161 100644 --- a/lib/spack/spack/test/mock_repo.py +++ b/lib/spack/spack/test/mock_repo.py @@ -40,6 +40,7 @@ class MockRepo(object): + def __init__(self, stage_name, repo_name): """This creates a stage where some archive/repo files can be staged for testing spack's fetch strategies.""" @@ -50,7 +51,6 @@ def __init__(self, stage_name, repo_name): self.path = join_path(self.stage.path, repo_name) mkdirp(self.path) - def destroy(self): """Destroy resources associated with this mock repo.""" if self.stage: @@ -90,6 +90,7 @@ def __init__(self): class MockVCSRepo(MockRepo): + def __init__(self, stage_name, repo_name): """This creates a stage and a repo directory within the stage.""" super(MockVCSRepo, self).__init__(stage_name, repo_name) @@ -100,6 +101,7 @@ def __init__(self, stage_name, repo_name): class MockGitRepo(MockVCSRepo): + def __init__(self): super(MockGitRepo, self).__init__('mock-git-stage', 'mock-git-repo') @@ -147,6 +149,7 @@ def rev_hash(self, rev): class MockSvnRepo(MockVCSRepo): + def __init__(self): super(MockSvnRepo, self).__init__('mock-svn-stage', 'mock-svn-repo') @@ -176,6 +179,7 @@ def __init__(self): class MockHgRepo(MockVCSRepo): + def __init__(self): super(MockHgRepo, self).__init__('mock-hg-stage', 'mock-hg-repo') self.url = 'file://' + self.path diff --git a/lib/spack/spack/test/multimethod.py b/lib/spack/spack/test/multimethod.py index c233ea4fd66..a8853740805 100644 --- a/lib/spack/spack/test/multimethod.py +++ b/lib/spack/spack/test/multimethod.py @@ -25,15 +25,10 @@ """ Test for multi_method dispatch. 
""" -import unittest - import spack from spack.multimethod import * from spack.version import * -from spack.spec import Spec -from spack.multimethod import when from spack.test.mock_packages_test import * -from spack.version import * class MultiMethodTest(MockPackagesTest): @@ -42,7 +37,6 @@ def test_no_version_match(self): pkg = spack.repo.get('multimethod@2.0') self.assertRaises(NoSuchMethodError, pkg.no_version_2) - def test_one_version_match(self): pkg = spack.repo.get('multimethod@1.0') self.assertEqual(pkg.no_version_2(), 1) @@ -53,7 +47,6 @@ def test_one_version_match(self): pkg = spack.repo.get('multimethod@4.0') self.assertEqual(pkg.no_version_2(), 4) - def test_version_overlap(self): pkg = spack.repo.get('multimethod@2.0') self.assertEqual(pkg.version_overlap(), 1) @@ -61,7 +54,6 @@ def test_version_overlap(self): pkg = spack.repo.get('multimethod@5.0') self.assertEqual(pkg.version_overlap(), 2) - def test_mpi_version(self): pkg = spack.repo.get('multimethod^mpich@3.0.4') self.assertEqual(pkg.mpi_version(), 3) @@ -72,7 +64,6 @@ def test_mpi_version(self): pkg = spack.repo.get('multimethod^mpich@1.0') self.assertEqual(pkg.mpi_version(), 1) - def test_undefined_mpi_version(self): pkg = spack.repo.get('multimethod^mpich@0.4') self.assertEqual(pkg.mpi_version(), 1) @@ -80,7 +71,6 @@ def test_undefined_mpi_version(self): pkg = spack.repo.get('multimethod^mpich@1.4') self.assertEqual(pkg.mpi_version(), 1) - def test_default_works(self): pkg = spack.repo.get('multimethod%gcc') self.assertEqual(pkg.has_a_default(), 'gcc') @@ -91,21 +81,19 @@ def test_default_works(self): pkg = spack.repo.get('multimethod%pgi') self.assertEqual(pkg.has_a_default(), 'default') - def test_target_match(self): platform = spack.architecture.platform() targets = platform.targets.values() for target in targets[:-1]: - pkg = spack.repo.get('multimethod target='+target.name) + pkg = spack.repo.get('multimethod target=' + target.name) self.assertEqual(pkg.different_by_target(), target.name) - pkg = spack.repo.get('multimethod target='+targets[-1].name) + pkg = spack.repo.get('multimethod target=' + targets[-1].name) if len(targets) == 1: self.assertEqual(pkg.different_by_target(), targets[-1].name) else: self.assertRaises(NoSuchMethodError, pkg.different_by_target) - def test_dependency_match(self): pkg = spack.repo.get('multimethod^zmpi') self.assertEqual(pkg.different_by_dep(), 'zmpi') @@ -118,7 +106,6 @@ def test_dependency_match(self): pkg = spack.repo.get('multimethod^foobar') self.assertEqual(pkg.different_by_dep(), 'mpich') - def test_virtual_dep_match(self): pkg = spack.repo.get('multimethod^mpich2') self.assertEqual(pkg.different_by_virtual_dep(), 2) diff --git a/lib/spack/spack/test/namespace_trie.py b/lib/spack/spack/test/namespace_trie.py index b38ecd6179e..7927fc8e604 100644 --- a/lib/spack/spack/test/namespace_trie.py +++ b/lib/spack/spack/test/namespace_trie.py @@ -32,7 +32,6 @@ class NamespaceTrieTest(unittest.TestCase): def setUp(self): self.trie = NamespaceTrie() - def test_add_single(self): self.trie['foo'] = 'bar' @@ -40,7 +39,6 @@ def test_add_single(self): self.assertTrue(self.trie.has_value('foo')) self.assertEqual(self.trie['foo'], 'bar') - def test_add_multiple(self): self.trie['foo.bar'] = 'baz' @@ -54,7 +52,6 @@ def test_add_multiple(self): self.assertFalse(self.trie.is_prefix('foo.bar.baz')) self.assertFalse(self.trie.has_value('foo.bar.baz')) - def test_add_three(self): # add a three-level namespace self.trie['foo.bar.baz'] = 'quux' @@ -89,7 +86,6 @@ def test_add_three(self): 
self.assertFalse(self.trie.is_prefix('foo.bar.baz.quux')) self.assertFalse(self.trie.has_value('foo.bar.baz.quux')) - def test_add_none_single(self): self.trie['foo'] = None self.assertTrue(self.trie.is_prefix('foo')) @@ -99,8 +95,6 @@ def test_add_none_single(self): self.assertFalse(self.trie.is_prefix('foo.bar')) self.assertFalse(self.trie.has_value('foo.bar')) - - def test_add_none_multiple(self): self.trie['foo.bar'] = None diff --git a/lib/spack/spack/test/operating_system.py b/lib/spack/spack/test/operating_system.py index ed5f6ff8adc..8723f7244d2 100644 --- a/lib/spack/spack/test/operating_system.py +++ b/lib/spack/spack/test/operating_system.py @@ -1,18 +1,39 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## """ Test checks if the operating_system class is created correctly and that the functions are using the correct operating_system. Also checks whether the operating_system correctly uses the compiler_strategy """ - import unittest -import os -import platform from spack.platforms.cray_xc import CrayXc from spack.platforms.linux import Linux from spack.platforms.darwin import Darwin from spack.operating_system.linux_distro import LinuxDistro -from spack.operating_system.mac_os import MacOs from spack.operating_system.cnl import ComputeNodeLinux + class TestOperatingSystem(unittest.TestCase): def setUp(self): @@ -32,7 +53,7 @@ def test_cray_front_end_compiler_strategy(self): self.assertEquals(self.cray_operating_sys.compiler_strategy, "PATH") def test_cray_back_end_operating_system(self): - self.assertIsInstance(self.cray_back_os,ComputeNodeLinux) + self.assertIsInstance(self.cray_back_os, ComputeNodeLinux) def test_cray_back_end_compiler_strategy(self): self.assertEquals(self.cray_back_os.compiler_strategy, "MODULES") @@ -43,7 +64,6 @@ def test_linux_operating_system(self): def test_linux_compiler_strategy(self): self.assertEquals(self.linux_operating_sys.compiler_strategy, "PATH") - def test_cray_front_end_compiler_list(self): """ Operating systems will now be in charge of finding compilers. 
So, depending on which operating system you want to build for diff --git a/lib/spack/spack/test/optional_deps.py b/lib/spack/spack/test/optional_deps.py index b5ba0ecf356..a9a2b9abf52 100644 --- a/lib/spack/spack/test/optional_deps.py +++ b/lib/spack/spack/test/optional_deps.py @@ -22,10 +22,10 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## - from spack.spec import Spec from spack.test.mock_packages_test import * + class ConcretizeTest(MockPackagesTest): def check_normalize(self, spec_string, expected): @@ -34,10 +34,10 @@ def check_normalize(self, spec_string, expected): self.assertEqual(spec, expected) self.assertTrue(spec.eq_dag(expected)) - def test_normalize_simple_conditionals(self): self.check_normalize('optional-dep-test', Spec('optional-dep-test')) - self.check_normalize('optional-dep-test~a', Spec('optional-dep-test~a')) + self.check_normalize('optional-dep-test~a', + Spec('optional-dep-test~a')) self.check_normalize('optional-dep-test+a', Spec('optional-dep-test+a', Spec('a'))) @@ -45,7 +45,6 @@ def test_normalize_simple_conditionals(self): self.check_normalize('optional-dep-test a=true', Spec('optional-dep-test a=true', Spec('a'))) - self.check_normalize('optional-dep-test a=true', Spec('optional-dep-test+a', Spec('a'))) @@ -55,25 +54,29 @@ def test_normalize_simple_conditionals(self): self.check_normalize('optional-dep-test%intel', Spec('optional-dep-test%intel', Spec('c'))) - self.check_normalize('optional-dep-test%intel@64.1', - Spec('optional-dep-test%intel@64.1', Spec('c'), Spec('d'))) + self.check_normalize( + 'optional-dep-test%intel@64.1', + Spec('optional-dep-test%intel@64.1', Spec('c'), Spec('d'))) - self.check_normalize('optional-dep-test%intel@64.1.2', - Spec('optional-dep-test%intel@64.1.2', Spec('c'), Spec('d'))) + self.check_normalize( + 'optional-dep-test%intel@64.1.2', + Spec('optional-dep-test%intel@64.1.2', Spec('c'), Spec('d'))) self.check_normalize('optional-dep-test%clang@35', Spec('optional-dep-test%clang@35', Spec('e'))) - def test_multiple_conditionals(self): - self.check_normalize('optional-dep-test+a@1.1', - Spec('optional-dep-test+a@1.1', Spec('a'), Spec('b'))) + self.check_normalize( + 'optional-dep-test+a@1.1', + Spec('optional-dep-test+a@1.1', Spec('a'), Spec('b'))) - self.check_normalize('optional-dep-test+a%intel', - Spec('optional-dep-test+a%intel', Spec('a'), Spec('c'))) + self.check_normalize( + 'optional-dep-test+a%intel', + Spec('optional-dep-test+a%intel', Spec('a'), Spec('c'))) - self.check_normalize('optional-dep-test@1.1%intel', - Spec('optional-dep-test@1.1%intel', Spec('b'), Spec('c'))) + self.check_normalize( + 'optional-dep-test@1.1%intel', + Spec('optional-dep-test@1.1%intel', Spec('b'), Spec('c'))) self.check_normalize('optional-dep-test@1.1%intel@64.1.2+a', Spec('optional-dep-test@1.1%intel@64.1.2+a', @@ -83,14 +86,12 @@ def test_multiple_conditionals(self): Spec('optional-dep-test@1.1%clang@36.5+a', Spec('b'), Spec('a'), Spec('e'))) - def test_chained_mpi(self): self.check_normalize('optional-dep-test-2+mpi', Spec('optional-dep-test-2+mpi', Spec('optional-dep-test+mpi', Spec('mpi')))) - def test_default_variant(self): spec = Spec('optional-dep-test-3') spec.concretize() @@ -104,7 +105,6 @@ def test_default_variant(self): spec.concretize() self.assertTrue('b' in spec) - def test_transitive_chain(self): # Each of these dependencies comes from a 
conditional # dependency on another. This requires iterating to evaluate diff --git a/lib/spack/spack/test/package_sanity.py b/lib/spack/spack/test/package_sanity.py index 9198986f5da..c3c3923855c 100644 --- a/lib/spack/spack/test/package_sanity.py +++ b/lib/spack/spack/test/package_sanity.py @@ -38,12 +38,10 @@ def check_db(self): for name in spack.repo.all_package_names(): spack.repo.get(name) - def test_get_all_packages(self): """Get all packages once and make sure that works.""" self.check_db() - def test_get_all_mock_packages(self): """Get the mock packages once each too.""" db = RepoPath(spack.mock_packages_path) @@ -51,7 +49,6 @@ def test_get_all_mock_packages(self): self.check_db() spack.repo.swap(db) - def test_url_versions(self): """Check URLs for regular packages, if they are explicitly defined.""" for pkg in spack.repo.all_packages(): diff --git a/lib/spack/spack/test/packages.py b/lib/spack/spack/test/packages.py index bea42bb33a3..fdd079a8f76 100644 --- a/lib/spack/spack/test/packages.py +++ b/lib/spack/spack/test/packages.py @@ -22,7 +22,6 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## - import spack from llnl.util.filesystem import join_path from spack.repository import Repo @@ -33,33 +32,26 @@ class PackagesTest(MockPackagesTest): def test_load_package(self): - pkg = spack.repo.get('mpich') - + spack.repo.get('mpich') def test_package_name(self): pkg = spack.repo.get('mpich') self.assertEqual(pkg.name, 'mpich') - def test_package_filename(self): repo = Repo(spack.mock_packages_path) filename = repo.filename_for_package_name('mpich') self.assertEqual(filename, - join_path(spack.mock_packages_path, 'packages', 'mpich', 'package.py')) - - - def test_package_name(self): - pkg = spack.repo.get('mpich') - self.assertEqual(pkg.name, 'mpich') - + join_path(spack.mock_packages_path, + 'packages', 'mpich', 'package.py')) def test_nonexisting_package_filename(self): repo = Repo(spack.mock_packages_path) filename = repo.filename_for_package_name('some-nonexisting-package') self.assertEqual( filename, - join_path(spack.mock_packages_path, 'packages', 'some-nonexisting-package', 'package.py')) - + join_path(spack.mock_packages_path, + 'packages', 'some-nonexisting-package', 'package.py')) def test_package_class_names(self): self.assertEqual('Mpich', mod_to_class('mpich')) @@ -68,37 +60,32 @@ def test_package_class_names(self): self.assertEqual('Pmgrcollective', mod_to_class('PmgrCollective')) self.assertEqual('_3db', mod_to_class('3db')) - # # Below tests target direct imports of spack packages from the # spack.pkg namespace # def test_import_package(self): - import spack.pkg.builtin.mock.mpich - + import spack.pkg.builtin.mock.mpich # noqa def test_import_package_as(self): - import spack.pkg.builtin.mock.mpich as mp - + import spack.pkg.builtin.mock.mpich as mp # noqa def test_import_class_from_package(self): - from spack.pkg.builtin.mock.mpich import Mpich - + from spack.pkg.builtin.mock.mpich import Mpich # noqa def test_import_module_from_package(self): - from spack.pkg.builtin.mock import mpich - + from spack.pkg.builtin.mock import mpich # noqa def test_import_namespace_container_modules(self): - import spack.pkg - import spack.pkg as p - from spack import pkg + import spack.pkg # noqa + import spack.pkg as p # noqa + from spack import pkg # noqa - import spack.pkg.builtin - import spack.pkg.builtin as b 
- from spack.pkg import builtin + import spack.pkg.builtin # noqa + import spack.pkg.builtin as b # noqa + from spack.pkg import builtin # noqa - import spack.pkg.builtin.mock - import spack.pkg.builtin.mock as m - from spack.pkg.builtin import mock + import spack.pkg.builtin.mock # noqa + import spack.pkg.builtin.mock as m # noqa + from spack.pkg.builtin import mock # noqa diff --git a/lib/spack/spack/test/pattern.py b/lib/spack/spack/test/pattern.py index 3419d600b8b..0c772a0d2d1 100644 --- a/lib/spack/spack/test/pattern.py +++ b/lib/spack/spack/test/pattern.py @@ -41,6 +41,7 @@ def subtract(self): raise NotImplemented('subtract not implemented') class One(Base): + def add(self): Base.counter += 1 @@ -48,6 +49,7 @@ def subtract(self): Base.counter -= 1 class Two(Base): + def add(self): Base.counter += 2 diff --git a/lib/spack/spack/test/python_version.py b/lib/spack/spack/test/python_version.py index 6c09effc56d..5af55bdc5fd 100644 --- a/lib/spack/spack/test/python_version.py +++ b/lib/spack/spack/test/python_version.py @@ -36,7 +36,8 @@ import pyqver2 import spack -spack_max_version = (2,6) +spack_max_version = (2, 6) + class PythonVersionTest(unittest.TestCase): @@ -51,12 +52,10 @@ def pyfiles(self, *search_paths): if re.match(r'^[^.#].*\.py$', filename): yield os.path.join(root, filename) - def package_py_files(self): for name in spack.repo.all_package_names(): yield spack.repo.filename_for_package_name(name) - def check_python_versions(self, *files): # dict version -> filename -> reasons all_issues = {} @@ -66,7 +65,7 @@ def check_python_versions(self, *files): versions = pyqver2.get_versions(pyfile.read()) for ver, reasons in versions.items(): if ver > spack_max_version: - if not ver in all_issues: + if ver not in all_issues: all_issues[ver] = {} all_issues[ver][fn] = reasons @@ -87,7 +86,7 @@ def check_python_versions(self, *files): tty.error("These files require version %d.%d:" % v) maxlen = max(len(f) for f, prob in msgs) - fmt = "%%-%ds%%s" % (maxlen+3) + fmt = "%%-%ds%%s" % (maxlen + 3) print fmt % ('File', 'Reason') print fmt % ('-' * (maxlen), '-' * 20) for msg in msgs: @@ -95,10 +94,8 @@ def check_python_versions(self, *files): self.assertTrue(len(all_issues) == 0) - def test_core_module_compatibility(self): self.check_python_versions(*self.pyfiles(spack.lib_path)) - def test_package_module_compatibility(self): self.check_python_versions(*self.pyfiles(spack.packages_path)) diff --git a/lib/spack/spack/test/sbang.py b/lib/spack/spack/test/sbang.py index 4ce854a1d80..12abce7b35a 100644 --- a/lib/spack/spack/test/sbang.py +++ b/lib/spack/spack/test/sbang.py @@ -44,6 +44,7 @@ class SbangTest(unittest.TestCase): + def setUp(self): self.tempdir = tempfile.mkdtemp() diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py index 8522431fbbe..8f61c7ac76c 100644 --- a/lib/spack/spack/test/spec_dag.py +++ b/lib/spack/spack/test/spec_dag.py @@ -455,6 +455,7 @@ def test_copy_concretized(self): run3 -b-> build3 """ + def test_deptype_traversal(self): dag = Spec('dtuse') dag.normalize() diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py index b174e5305cf..79ffc99298e 100644 --- a/lib/spack/spack/test/spec_semantics.py +++ b/lib/spack/spack/test/spec_semantics.py @@ -22,18 +22,18 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import unittest import 
spack.architecture from spack.spec import * from spack.test.mock_packages_test import * + class SpecSematicsTest(MockPackagesTest): """This tests satisfies(), constrain() and other semantic operations on specs.""" - # ================================================================================ + # ======================================================================== # Utility functions to set everything up. - # ================================================================================ + # ======================================================================== def check_satisfies(self, spec, anon_spec, concrete=False): left = Spec(spec, concrete=concrete) try: @@ -49,7 +49,6 @@ def check_satisfies(self, spec, anon_spec, concrete=False): # right by left. Reverse is not always true. right.copy().constrain(left) - def check_unsatisfiable(self, spec, anon_spec, concrete=False): left = Spec(spec, concrete=concrete) try: @@ -62,7 +61,6 @@ def check_unsatisfiable(self, spec, anon_spec, concrete=False): self.assertRaises(UnsatisfiableSpecError, right.copy().constrain, left) - def check_constrain(self, expected, spec, constraint): exp = Spec(expected) spec = Spec(spec) @@ -70,53 +68,48 @@ def check_constrain(self, expected, spec, constraint): spec.constrain(constraint) self.assertEqual(exp, spec) - def check_constrain_changed(self, spec, constraint): spec = Spec(spec) self.assertTrue(spec.constrain(constraint)) - def check_constrain_not_changed(self, spec, constraint): spec = Spec(spec) self.assertFalse(spec.constrain(constraint)) - def check_invalid_constraint(self, spec, constraint): spec = Spec(spec) constraint = Spec(constraint) self.assertRaises(UnsatisfiableSpecError, spec.constrain, constraint) - - # ================================================================================ + # ======================================================================== # Satisfiability - # ================================================================================ + # ======================================================================== def test_satisfies(self): self.check_satisfies('libelf@0.8.13', '@0:1') self.check_satisfies('libdwarf^libelf@0.8.13', '^libelf@0:1') - def test_satisfies_namespace(self): self.check_satisfies('builtin.mpich', 'mpich') self.check_satisfies('builtin.mock.mpich', 'mpich') - # TODO: only works for deps now, but shouldn't we allow this for root spec? + # TODO: only works for deps now, but shouldn't we allow for root spec? 
# self.check_satisfies('builtin.mock.mpich', 'mpi') self.check_satisfies('builtin.mock.mpich', 'builtin.mock.mpich') self.check_unsatisfiable('builtin.mock.mpich', 'builtin.mpich') - def test_satisfies_namespaced_dep(self): """Ensure spec from same or unspecified namespace satisfies namespace constraint.""" self.check_satisfies('mpileaks ^builtin.mock.mpich', '^mpich') self.check_satisfies('mpileaks ^builtin.mock.mpich', '^mpi') - self.check_satisfies('mpileaks ^builtin.mock.mpich', '^builtin.mock.mpich') - - self.check_unsatisfiable('mpileaks ^builtin.mock.mpich', '^builtin.mpich') + self.check_satisfies( + 'mpileaks ^builtin.mock.mpich', '^builtin.mock.mpich') + self.check_unsatisfiable( + 'mpileaks ^builtin.mock.mpich', '^builtin.mpich') def test_satisfies_compiler(self): self.check_satisfies('foo%gcc', '%gcc') @@ -124,7 +117,6 @@ def test_satisfies_compiler(self): self.check_unsatisfiable('foo%intel', '%gcc') self.check_unsatisfiable('foo%intel', '%pgi') - def test_satisfies_compiler_version(self): self.check_satisfies('foo%gcc', '%gcc@4.7.2') self.check_satisfies('foo%intel', '%intel@4.7.2') @@ -139,7 +131,6 @@ def test_satisfies_compiler_version(self): self.check_satisfies('foo %gcc@4.7.3', '%gcc@4.7') self.check_unsatisfiable('foo %gcc@4.7', '%gcc@4.7.3') - def test_satisfies_architecture(self): self.check_satisfies( 'foo platform=test target=frontend os=frontend', @@ -151,7 +142,6 @@ def test_satisfies_architecture(self): 'foo platform=test target=default_target os=default_os', 'platform=test target=default_target os=default_os') - def test_satisfies_dependencies(self): self.check_satisfies('mpileaks^mpich', '^mpich') self.check_satisfies('mpileaks^zmpi', '^zmpi') @@ -159,7 +149,6 @@ def test_satisfies_dependencies(self): self.check_unsatisfiable('mpileaks^mpich', '^zmpi') self.check_unsatisfiable('mpileaks^zmpi', '^mpich') - def test_satisfies_dependency_versions(self): self.check_satisfies('mpileaks^mpich@2.0', '^mpich@1:3') self.check_unsatisfiable('mpileaks^mpich@1.2', '^mpich@2.0') @@ -173,7 +162,6 @@ def test_satisfies_dependency_versions(self): self.check_unsatisfiable( 'mpileaks^mpich@4.0^callpath@1.7', '^mpich@1:3^callpath@1.4:1.6') - def test_satisfies_virtual_dependencies(self): self.check_satisfies('mpileaks^mpi', '^mpi') self.check_satisfies('mpileaks^mpi', '^mpich') @@ -181,7 +169,6 @@ def test_satisfies_virtual_dependencies(self): self.check_satisfies('mpileaks^mpi', '^zmpi') self.check_unsatisfiable('mpileaks^mpich', '^zmpi') - def test_satisfies_virtual_dependency_versions(self): self.check_satisfies('mpileaks^mpi@1.5', '^mpi@1.2:1.6') self.check_unsatisfiable('mpileaks^mpi@3', '^mpi@1.2:1.6') @@ -197,26 +184,23 @@ def test_satisfies_virtual_dependency_versions(self): self.check_unsatisfiable('mpileaks^mpi@3:', '^mpich2') self.check_unsatisfiable('mpileaks^mpi@3:', '^mpich@1.0') - def test_satisfies_matching_variant(self): self.check_satisfies('mpich+foo', 'mpich+foo') self.check_satisfies('mpich~foo', 'mpich~foo') self.check_satisfies('mpich foo=1', 'mpich foo=1') - #confirm that synonymous syntax works correctly + # confirm that synonymous syntax works correctly self.check_satisfies('mpich+foo', 'mpich foo=True') self.check_satisfies('mpich foo=true', 'mpich+foo') self.check_satisfies('mpich~foo', 'mpich foo=FALSE') self.check_satisfies('mpich foo=False', 'mpich~foo') - def test_satisfies_unconstrained_variant(self): # only asked for mpich, no constraints. Either will do. 
self.check_satisfies('mpich+foo', 'mpich') self.check_satisfies('mpich~foo', 'mpich') self.check_satisfies('mpich foo=1', 'mpich') - def test_unsatisfiable_variants(self): # This case is different depending on whether the specs are concrete. @@ -230,24 +214,21 @@ def test_unsatisfiable_variants(self): self.check_unsatisfiable('mpich', 'mpich~foo', True) self.check_unsatisfiable('mpich', 'mpich foo=1', True) - def test_unsatisfiable_variant_mismatch(self): # No matchi in specs self.check_unsatisfiable('mpich~foo', 'mpich+foo') self.check_unsatisfiable('mpich+foo', 'mpich~foo') self.check_unsatisfiable('mpich foo=1', 'mpich foo=2') - def test_satisfies_matching_compiler_flag(self): self.check_satisfies('mpich cppflags="-O3"', 'mpich cppflags="-O3"') - self.check_satisfies('mpich cppflags="-O3 -Wall"', 'mpich cppflags="-O3 -Wall"') - + self.check_satisfies('mpich cppflags="-O3 -Wall"', + 'mpich cppflags="-O3 -Wall"') def test_satisfies_unconstrained_compiler_flag(self): # only asked for mpich, no constraints. Any will do. self.check_satisfies('mpich cppflags="-O3"', 'mpich') - def test_unsatisfiable_compiler_flag(self): # This case is different depending on whether the specs are concrete. @@ -257,11 +238,10 @@ def test_unsatisfiable_compiler_flag(self): # 'mpich' is concrete: self.check_unsatisfiable('mpich', 'mpich cppflags="-O3"', True) - def test_unsatisfiable_compiler_flag_mismatch(self): # No matchi in specs - self.check_unsatisfiable('mpich cppflags="-O3"', 'mpich cppflags="-O2"') - + self.check_unsatisfiable( + 'mpich cppflags="-O3"', 'mpich cppflags="-O2"') def test_satisfies_virtual(self): # Don't use check_satisfies: it checks constrain() too, and @@ -270,25 +250,30 @@ def test_satisfies_virtual(self): self.assertTrue(Spec('mpich2').satisfies(Spec('mpi'))) self.assertTrue(Spec('zmpi').satisfies(Spec('mpi'))) - def test_satisfies_virtual_dep_with_virtual_constraint(self): """Ensure we can satisfy virtual constraints when there are multiple vdep providers in the specs.""" - self.assertTrue(Spec('netlib-lapack ^openblas').satisfies('netlib-lapack ^openblas')) - self.assertFalse(Spec('netlib-lapack ^netlib-blas').satisfies('netlib-lapack ^openblas')) + self.assertTrue( + Spec('netlib-lapack ^openblas').satisfies( + 'netlib-lapack ^openblas')) + self.assertFalse( + Spec('netlib-lapack ^netlib-blas').satisfies( + 'netlib-lapack ^openblas')) - self.assertFalse(Spec('netlib-lapack ^openblas').satisfies('netlib-lapack ^netlib-blas')) - self.assertTrue(Spec('netlib-lapack ^netlib-blas').satisfies('netlib-lapack ^netlib-blas')) + self.assertFalse( + Spec('netlib-lapack ^openblas').satisfies( + 'netlib-lapack ^netlib-blas')) + self.assertTrue( + Spec('netlib-lapack ^netlib-blas').satisfies( + 'netlib-lapack ^netlib-blas')) - - # ================================================================================ + # ======================================================================== # Indexing specs - # ================================================================================ + # ======================================================================== def test_self_index(self): s = Spec('callpath') self.assertTrue(s['callpath'] == s) - def test_dep_index(self): s = Spec('callpath') s.normalize() @@ -304,7 +289,6 @@ def test_dep_index(self): self.assertTrue(s['libelf'].name == 'libelf') self.assertTrue(s['mpi'].name == 'mpi') - def test_spec_contains_deps(self): s = Spec('callpath') s.normalize() @@ -313,7 +297,6 @@ def test_spec_contains_deps(self): self.assertTrue('libelf' in s) 
self.assertTrue('mpi' in s) - def test_virtual_index(self): s = Spec('callpath') s.concretize() @@ -327,7 +310,6 @@ def test_virtual_index(self): s_zmpi = Spec('callpath ^zmpi') s_zmpi.concretize() - self.assertTrue(s['mpi'].name != 'mpi') self.assertTrue(s_mpich['mpi'].name == 'mpich') self.assertTrue(s_mpich2['mpi'].name == 'mpich2') @@ -336,29 +318,34 @@ def test_virtual_index(self): for spec in [s, s_mpich, s_mpich2, s_zmpi]: self.assertTrue('mpi' in spec) - - # ================================================================================ + # ======================================================================== # Constraints - # ================================================================================ + # ======================================================================== def test_constrain_variants(self): self.check_constrain('libelf@2.1:2.5', 'libelf@0:2.5', 'libelf@2.1:3') self.check_constrain('libelf@2.1:2.5%gcc@4.5:4.6', - 'libelf@0:2.5%gcc@2:4.6', 'libelf@2.1:3%gcc@4.5:4.7') + 'libelf@0:2.5%gcc@2:4.6', + 'libelf@2.1:3%gcc@4.5:4.7') self.check_constrain('libelf+debug+foo', 'libelf+debug', 'libelf+foo') - self.check_constrain('libelf+debug+foo', 'libelf+debug', 'libelf+debug+foo') + self.check_constrain('libelf+debug+foo', + 'libelf+debug', 'libelf+debug+foo') - self.check_constrain('libelf debug=2 foo=1', 'libelf debug=2', 'libelf foo=1') - self.check_constrain('libelf debug=2 foo=1', 'libelf debug=2', 'libelf debug=2 foo=1') + self.check_constrain('libelf debug=2 foo=1', + 'libelf debug=2', 'libelf foo=1') + self.check_constrain('libelf debug=2 foo=1', + 'libelf debug=2', 'libelf debug=2 foo=1') self.check_constrain('libelf+debug~foo', 'libelf+debug', 'libelf~foo') - self.check_constrain('libelf+debug~foo', 'libelf+debug', 'libelf+debug~foo') - + self.check_constrain('libelf+debug~foo', + 'libelf+debug', 'libelf+debug~foo') def test_constrain_compiler_flags(self): - self.check_constrain('libelf cflags="-O3" cppflags="-Wall"', 'libelf cflags="-O3"', 'libelf cppflags="-Wall"') - self.check_constrain('libelf cflags="-O3" cppflags="-Wall"', 'libelf cflags="-O3"', 'libelf cflags="-O3" cppflags="-Wall"') - + self.check_constrain('libelf cflags="-O3" cppflags="-Wall"', + 'libelf cflags="-O3"', 'libelf cppflags="-Wall"') + self.check_constrain('libelf cflags="-O3" cppflags="-Wall"', + 'libelf cflags="-O3"', + 'libelf cflags="-O3" cppflags="-Wall"') def test_constrain_architecture(self): self.check_constrain('libelf target=default_target os=default_os', @@ -369,21 +356,24 @@ def test_constrain_architecture(self): 'libelf target=default_target os=default_os') def test_constrain_compiler(self): - self.check_constrain('libelf %gcc@4.4.7', 'libelf %gcc@4.4.7', 'libelf %gcc@4.4.7') - self.check_constrain('libelf %gcc@4.4.7', 'libelf', 'libelf %gcc@4.4.7') - + self.check_constrain('libelf %gcc@4.4.7', + 'libelf %gcc@4.4.7', 'libelf %gcc@4.4.7') + self.check_constrain('libelf %gcc@4.4.7', + 'libelf', 'libelf %gcc@4.4.7') def test_invalid_constraint(self): self.check_invalid_constraint('libelf@0:2.0', 'libelf@2.1:3') - self.check_invalid_constraint('libelf@0:2.5%gcc@4.8:4.9', 'libelf@2.1:3%gcc@4.5:4.7') + self.check_invalid_constraint( + 'libelf@0:2.5%gcc@4.8:4.9', 'libelf@2.1:3%gcc@4.5:4.7') self.check_invalid_constraint('libelf+debug', 'libelf~debug') self.check_invalid_constraint('libelf+debug~foo', 'libelf+debug+foo') self.check_invalid_constraint('libelf debug=2', 'libelf debug=1') - self.check_invalid_constraint('libelf cppflags="-O3"', 'libelf cppflags="-O2"') + 
self.check_invalid_constraint( + 'libelf cppflags="-O3"', 'libelf cppflags="-O2"') self.check_invalid_constraint('libelf platform=test target=be os=be', - 'libelf target=fe os=fe') + 'libelf target=fe os=fe') def test_constrain_changed(self): self.check_constrain_changed('libelf', '@1.0') @@ -396,9 +386,10 @@ def test_constrain_changed(self): self.check_constrain_changed('libelf', 'cppflags="-O3"') platform = spack.architecture.platform() - self.check_constrain_changed('libelf', 'target='+platform.target('default_target').name) - self.check_constrain_changed('libelf', 'os='+platform.operating_system('default_os').name) - + self.check_constrain_changed( + 'libelf', 'target=' + platform.target('default_target').name) + self.check_constrain_changed( + 'libelf', 'os=' + platform.operating_system('default_os').name) def test_constrain_not_changed(self): self.check_constrain_not_changed('libelf', 'libelf') @@ -409,12 +400,13 @@ def test_constrain_not_changed(self): self.check_constrain_not_changed('libelf+debug', '+debug') self.check_constrain_not_changed('libelf~debug', '~debug') self.check_constrain_not_changed('libelf debug=2', 'debug=2') - self.check_constrain_not_changed('libelf cppflags="-O3"', 'cppflags="-O3"') + self.check_constrain_not_changed( + 'libelf cppflags="-O3"', 'cppflags="-O3"') platform = spack.architecture.platform() default_target = platform.target('default_target').name - self.check_constrain_not_changed('libelf target='+default_target, 'target='+default_target) - + self.check_constrain_not_changed( + 'libelf target=' + default_target, 'target=' + default_target) def test_constrain_dependency_changed(self): self.check_constrain_changed('libelf^foo', 'libelf^foo@1.0') @@ -426,18 +418,25 @@ def test_constrain_dependency_changed(self): platform = spack.architecture.platform() default_target = platform.target('default_target').name - self.check_constrain_changed('libelf^foo', 'libelf^foo target='+default_target) - + self.check_constrain_changed( + 'libelf^foo', 'libelf^foo target=' + default_target) def test_constrain_dependency_not_changed(self): self.check_constrain_not_changed('libelf^foo@1.0', 'libelf^foo@1.0') - self.check_constrain_not_changed('libelf^foo@1.0:5.0', 'libelf^foo@1.0:5.0') + self.check_constrain_not_changed( + 'libelf^foo@1.0:5.0', 'libelf^foo@1.0:5.0') self.check_constrain_not_changed('libelf^foo%gcc', 'libelf^foo%gcc') - self.check_constrain_not_changed('libelf^foo%gcc@4.5', 'libelf^foo%gcc@4.5') - self.check_constrain_not_changed('libelf^foo+debug', 'libelf^foo+debug') - self.check_constrain_not_changed('libelf^foo~debug', 'libelf^foo~debug') - self.check_constrain_not_changed('libelf^foo cppflags="-O3"', 'libelf^foo cppflags="-O3"') + self.check_constrain_not_changed( + 'libelf^foo%gcc@4.5', 'libelf^foo%gcc@4.5') + self.check_constrain_not_changed( + 'libelf^foo+debug', 'libelf^foo+debug') + self.check_constrain_not_changed( + 'libelf^foo~debug', 'libelf^foo~debug') + self.check_constrain_not_changed( + 'libelf^foo cppflags="-O3"', 'libelf^foo cppflags="-O3"') platform = spack.architecture.platform() default_target = platform.target('default_target').name - self.check_constrain_not_changed('libelf^foo target='+default_target, 'libelf^foo target='+default_target) + self.check_constrain_not_changed( + 'libelf^foo target=' + default_target, + 'libelf^foo target=' + default_target) diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py index 4a534d7b5ce..3079288c77c 100644 --- a/lib/spack/spack/test/spec_syntax.py +++ 
b/lib/spack/spack/test/spec_syntax.py @@ -55,17 +55,22 @@ class SpecSyntaxTest(unittest.TestCase): - # ================================================================================ + # ======================================================================== # Parse checks - # ================================================================================ + # ======================================================================== + def check_parse(self, expected, spec=None, remove_arch=True): """Assert that the provided spec is able to be parsed. - If this is called with one argument, it assumes that the string is - canonical (i.e., no spaces and ~ instead of - for variants) and that it - will convert back to the string it came from. - If this is called with two arguments, the first argument is the expected - canonical form and the second is a non-canonical input to be parsed. + If this is called with one argument, it assumes that the + string is canonical (i.e., no spaces and ~ instead of - for + variants) and that it will convert back to the string it came + from. + + If this is called with two arguments, the first argument is + the expected canonical form and the second is a non-canonical + input to be parsed. + """ if spec is None: spec = expected @@ -74,9 +79,8 @@ def check_parse(self, expected, spec=None, remove_arch=True): parsed = (" ".join(str(spec) for spec in output)) self.assertEqual(expected, parsed) - def check_lex(self, tokens, spec): - """Check that the provided spec parses to the provided list of tokens.""" + """Check that the provided spec parses to the provided token list.""" lex_output = SpecLexer().lex(spec) for tok, spec_tok in zip(tokens, lex_output): if tok.type == ID: @@ -85,9 +89,9 @@ def check_lex(self, tokens, spec): # Only check the type for non-identifiers. 
self.assertEqual(tok.type, spec_tok.type) - # ================================================================================ + # ======================================================================== # Parse checks - # =============================================================================== + # ======================================================================== def test_package_names(self): self.check_parse("mvapich") self.check_parse("mvapich_foo") @@ -104,18 +108,37 @@ def test_dependencies_with_versions(self): self.check_parse("openmpi^hwloc@1.2e6:1.4b7-rc3") def test_full_specs(self): - self.check_parse("mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1+debug~qt_4^stackwalker@8.1_1e") - self.check_parse("mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1 debug=2~qt_4^stackwalker@8.1_1e") - self.check_parse('mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1 cppflags="-O3"+debug~qt_4^stackwalker@8.1_1e') + self.check_parse( + "mvapich_foo" + "^_openmpi@1.2:1.4,1.6%intel@12.1+debug~qt_4" + "^stackwalker@8.1_1e") + self.check_parse( + "mvapich_foo" + "^_openmpi@1.2:1.4,1.6%intel@12.1 debug=2~qt_4" + "^stackwalker@8.1_1e") + self.check_parse( + 'mvapich_foo' + '^_openmpi@1.2:1.4,1.6%intel@12.1 cppflags="-O3"+debug~qt_4' + '^stackwalker@8.1_1e') def test_canonicalize(self): self.check_parse( - "mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4^stackwalker@8.1_1e", - "mvapich_foo ^_openmpi@1.6,1.2:1.4%intel@12.1:12.6+debug~qt_4 ^stackwalker@8.1_1e") + "mvapich_foo" + "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4" + "^stackwalker@8.1_1e", + + "mvapich_foo " + "^_openmpi@1.6,1.2:1.4%intel@12.1:12.6+debug~qt_4 " + "^stackwalker@8.1_1e") self.check_parse( - "mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4^stackwalker@8.1_1e", - "mvapich_foo ^stackwalker@8.1_1e ^_openmpi@1.6,1.2:1.4%intel@12.1:12.6~qt_4+debug") + "mvapich_foo" + "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4" + "^stackwalker@8.1_1e", + + "mvapich_foo " + "^stackwalker@8.1_1e " + "^_openmpi@1.6,1.2:1.4%intel@12.1:12.6~qt_4+debug") self.check_parse( "x^y@1,2:3,4%intel@1,2,3,4+a~b+c~d+e~f", @@ -130,58 +153,81 @@ def test_parse_errors(self): self.assertRaises(SpecParseError, self.check_parse, "x::") def test_duplicate_variant(self): - self.assertRaises(DuplicateVariantError, self.check_parse, "x@1.2+debug+debug") - self.assertRaises(DuplicateVariantError, self.check_parse, "x ^y@1.2+debug debug=true") - self.assertRaises(DuplicateVariantError, self.check_parse, "x ^y@1.2 debug=false debug=true") - self.assertRaises(DuplicateVariantError, self.check_parse, "x ^y@1.2 debug=false~debug") - + self.assertRaises(DuplicateVariantError, + self.check_parse, "x@1.2+debug+debug") + self.assertRaises(DuplicateVariantError, + self.check_parse, "x ^y@1.2+debug debug=true") + self.assertRaises(DuplicateVariantError, self.check_parse, + "x ^y@1.2 debug=false debug=true") + self.assertRaises(DuplicateVariantError, + self.check_parse, "x ^y@1.2 debug=false~debug") def test_duplicate_depdendence(self): - self.assertRaises(DuplicateDependencyError, self.check_parse, "x ^y ^y") + self.assertRaises(DuplicateDependencyError, + self.check_parse, "x ^y ^y") def test_duplicate_compiler(self): - self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x%intel%intel") - self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x%intel%gcc") - self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x%gcc%intel") - self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x ^y%intel%intel") - 
self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x ^y%intel%gcc") - self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x ^y%gcc%intel") + self.assertRaises(DuplicateCompilerSpecError, + self.check_parse, "x%intel%intel") + self.assertRaises(DuplicateCompilerSpecError, + self.check_parse, "x%intel%gcc") + self.assertRaises(DuplicateCompilerSpecError, + self.check_parse, "x%gcc%intel") + self.assertRaises(DuplicateCompilerSpecError, + self.check_parse, "x ^y%intel%intel") + self.assertRaises(DuplicateCompilerSpecError, + self.check_parse, "x ^y%intel%gcc") + self.assertRaises(DuplicateCompilerSpecError, + self.check_parse, "x ^y%gcc%intel") - - # ================================================================================ + # ======================================================================== # Lex checks - # ================================================================================ + # ======================================================================== def test_ambiguous(self): # This first one is ambiguous because - can be in an identifier AND # indicate disabling an option. self.assertRaises( AssertionError, self.check_lex, complex_lex, - "mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug-qt_4^stackwalker@8.1_1e") + "mvapich_foo" + "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug-qt_4" + "^stackwalker@8.1_1e") - # The following lexes are non-ambiguous (add a space before -qt_4) and should all - # result in the tokens in complex_lex + # The following lexes are non-ambiguous (add a space before -qt_4) + # and should all result in the tokens in complex_lex def test_minimal_spaces(self): self.check_lex( complex_lex, - "mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug -qt_4^stackwalker@8.1_1e") + "mvapich_foo" + "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug -qt_4" + "^stackwalker@8.1_1e") self.check_lex( complex_lex, - "mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4^stackwalker@8.1_1e") + "mvapich_foo" + "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4" + "^stackwalker@8.1_1e") def test_spaces_between_dependences(self): self.check_lex( complex_lex, - "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug -qt_4 ^stackwalker @ 8.1_1e") + "mvapich_foo " + "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug -qt_4 " + "^stackwalker @ 8.1_1e") self.check_lex( complex_lex, - "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4 ^stackwalker @ 8.1_1e") + "mvapich_foo " + "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4 " + "^stackwalker @ 8.1_1e") def test_spaces_between_options(self): self.check_lex( complex_lex, - "mvapich_foo ^_openmpi @1.2:1.4,1.6 %intel @12.1:12.6 +debug -qt_4 ^stackwalker @8.1_1e") + "mvapich_foo " + "^_openmpi @1.2:1.4,1.6 %intel @12.1:12.6 +debug -qt_4 " + "^stackwalker @8.1_1e") def test_way_too_many_spaces(self): self.check_lex( complex_lex, - "mvapich_foo ^ _openmpi @ 1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 ^ stackwalker @ 8.1_1e") + "mvapich_foo " + "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 " + "^ stackwalker @ 8.1_1e") diff --git a/lib/spack/spack/test/stage.py b/lib/spack/spack/test/stage.py index d3e3bf1383d..ec661bfe50e 100644 --- a/lib/spack/spack/test/stage.py +++ b/lib/spack/spack/test/stage.py @@ -62,6 +62,7 @@ def use_tmp(use_tmp): class StageTest(unittest.TestCase): + def setUp(self): """This sets up a mock archive to fetch, and a mock temp space for use by the Stage class. 
It doesn't actually create the Stage -- that @@ -89,7 +90,6 @@ def setUp(self): # be removed. self.working_dir = os.getcwd() - def tearDown(self): """Blows away the test environment directory.""" shutil.rmtree(test_files_dir) @@ -100,7 +100,6 @@ def tearDown(self): # restore spack's original tmp environment spack.tmp_dirs = self.old_tmp_dirs - def get_stage_path(self, stage, stage_name): """Figure out where a stage should be living. This depends on whether it's named. @@ -114,7 +113,6 @@ def get_stage_path(self, stage, stage_name): self.assertTrue(stage.path.startswith(spack.stage_path)) return stage.path - def check_setup(self, stage, stage_name): """Figure out whether a stage was set up correctly.""" stage_path = self.get_stage_path(stage, stage_name) @@ -139,14 +137,12 @@ def check_setup(self, stage, stage_name): # Make sure the stage path is NOT a link for a non-tmp stage self.assertFalse(os.path.islink(stage_path)) - def check_fetch(self, stage, stage_name): stage_path = self.get_stage_path(stage, stage_name) self.assertTrue(archive_name in os.listdir(stage_path)) self.assertEqual(join_path(stage_path, archive_name), stage.fetcher.archive_file) - def check_expand_archive(self, stage, stage_name): stage_path = self.get_stage_path(stage, stage_name) self.assertTrue(archive_name in os.listdir(stage_path)) @@ -162,19 +158,16 @@ def check_expand_archive(self, stage, stage_name): with open(readme) as file: self.assertEqual(readme_text, file.read()) - def check_chdir(self, stage, stage_name): stage_path = self.get_stage_path(stage, stage_name) self.assertEqual(os.path.realpath(stage_path), os.getcwd()) - def check_chdir_to_source(self, stage, stage_name): stage_path = self.get_stage_path(stage, stage_name) self.assertEqual( join_path(os.path.realpath(stage_path), archive_dir), os.getcwd()) - def check_destroy(self, stage, stage_name): """Figure out whether a stage was destroyed correctly.""" stage_path = self.get_stage_path(stage, stage_name) @@ -187,35 +180,30 @@ def check_destroy(self, stage, stage_name): target = os.path.realpath(stage_path) self.assertFalse(os.path.exists(target)) - def test_setup_and_destroy_name_with_tmp(self): with use_tmp(True): with Stage(archive_url, name=stage_name) as stage: self.check_setup(stage, stage_name) self.check_destroy(stage, stage_name) - def test_setup_and_destroy_name_without_tmp(self): with use_tmp(False): with Stage(archive_url, name=stage_name) as stage: self.check_setup(stage, stage_name) self.check_destroy(stage, stage_name) - def test_setup_and_destroy_no_name_with_tmp(self): with use_tmp(True): with Stage(archive_url) as stage: self.check_setup(stage, None) self.check_destroy(stage, None) - def test_setup_and_destroy_no_name_without_tmp(self): with use_tmp(False): with Stage(archive_url) as stage: self.check_setup(stage, None) self.check_destroy(stage, None) - def test_chdir(self): with Stage(archive_url, name=stage_name) as stage: stage.chdir() @@ -223,7 +211,6 @@ def test_chdir(self): self.check_chdir(stage, stage_name) self.check_destroy(stage, stage_name) - def test_fetch(self): with Stage(archive_url, name=stage_name) as stage: stage.fetch() @@ -232,7 +219,6 @@ def test_fetch(self): self.check_fetch(stage, stage_name) self.check_destroy(stage, stage_name) - def test_expand_archive(self): with Stage(archive_url, name=stage_name) as stage: stage.fetch() @@ -242,8 +228,7 @@ def test_expand_archive(self): self.check_expand_archive(stage, stage_name) self.check_destroy(stage, stage_name) - - def test_expand_archive(self): + def 
test_expand_archive_with_chdir(self): with Stage(archive_url, name=stage_name) as stage: stage.fetch() self.check_setup(stage, stage_name) @@ -254,7 +239,6 @@ def test_expand_archive(self): self.check_chdir_to_source(stage, stage_name) self.check_destroy(stage, stage_name) - def test_restage(self): with Stage(archive_url, name=stage_name) as stage: stage.fetch() @@ -278,20 +262,17 @@ def test_restage(self): self.assertFalse('foobar' in os.listdir(stage.source_path)) self.check_destroy(stage, stage_name) - def test_no_keep_without_exceptions(self): with Stage(archive_url, name=stage_name, keep=False) as stage: pass self.check_destroy(stage, stage_name) - def test_keep_without_exceptions(self): with Stage(archive_url, name=stage_name, keep=True) as stage: pass path = self.get_stage_path(stage, stage_name) self.assertTrue(os.path.isdir(path)) - def test_no_keep_with_exceptions(self): try: with Stage(archive_url, name=stage_name, keep=False) as stage: @@ -300,8 +281,7 @@ def test_no_keep_with_exceptions(self): path = self.get_stage_path(stage, stage_name) self.assertTrue(os.path.isdir(path)) except: - pass # ignore here. - + pass # ignore here. def test_keep_exceptions(self): try: @@ -311,4 +291,4 @@ def test_keep_exceptions(self): path = self.get_stage_path(stage, stage_name) self.assertTrue(os.path.isdir(path)) except: - pass # ignore here. + pass # ignore here. diff --git a/lib/spack/spack/test/svn_fetch.py b/lib/spack/spack/test/svn_fetch.py index 0a745a090bb..9ef7593ed11 100644 --- a/lib/spack/spack/test/svn_fetch.py +++ b/lib/spack/spack/test/svn_fetch.py @@ -94,17 +94,15 @@ def try_fetch(self, rev, test_file, args): self.assert_rev(rev) - def test_fetch_default(self): """Test a default checkout and make sure it's on rev 1""" self.try_fetch(self.repo.r1, self.repo.r1_file, { - 'svn' : self.repo.url + 'svn': self.repo.url }) - def test_fetch_r1(self): """Test fetching an older revision (0).""" self.try_fetch(self.repo.r0, self.repo.r0_file, { - 'svn' : self.repo.url, - 'revision' : self.repo.r0 + 'svn': self.repo.url, + 'revision': self.repo.r0 }) diff --git a/lib/spack/spack/test/tally_plugin.py b/lib/spack/spack/test/tally_plugin.py index 96af1c9b21f..808694d186c 100644 --- a/lib/spack/spack/test/tally_plugin.py +++ b/lib/spack/spack/test/tally_plugin.py @@ -26,6 +26,7 @@ from nose.plugins import Plugin + class Tally(Plugin): name = 'tally' diff --git a/lib/spack/spack/test/url_extrapolate.py b/lib/spack/spack/test/url_extrapolate.py index ffd4230f718..ca14dab958f 100644 --- a/lib/spack/spack/test/url_extrapolate.py +++ b/lib/spack/spack/test/url_extrapolate.py @@ -34,20 +34,21 @@ class UrlExtrapolateTest(unittest.TestCase): def check_url(self, base, version, new_url): self.assertEqual(url.substitute_version(base, version), new_url) - def test_libelf_version(self): base = "http://www.mr511.de/software/libelf-0.8.13.tar.gz" self.check_url(base, '0.8.13', base) - self.check_url(base, '0.8.12', "http://www.mr511.de/software/libelf-0.8.12.tar.gz") - self.check_url(base, '0.3.1', "http://www.mr511.de/software/libelf-0.3.1.tar.gz") - self.check_url(base, '1.3.1b', "http://www.mr511.de/software/libelf-1.3.1b.tar.gz") - + self.check_url( + base, '0.8.12', "http://www.mr511.de/software/libelf-0.8.12.tar.gz") + self.check_url( + base, '0.3.1', "http://www.mr511.de/software/libelf-0.3.1.tar.gz") + self.check_url( + base, '1.3.1b', "http://www.mr511.de/software/libelf-1.3.1b.tar.gz") def test_libdwarf_version(self): base = "http://www.prevanders.net/libdwarf-20130729.tar.gz" self.check_url(base, 
'20130729', base) - self.check_url(base, '8.12', "http://www.prevanders.net/libdwarf-8.12.tar.gz") - + self.check_url( + base, '8.12', "http://www.prevanders.net/libdwarf-8.12.tar.gz") def test_dyninst_version(self): # Dyninst has a version twice in the URL. @@ -58,7 +59,6 @@ def test_dyninst_version(self): self.check_url(base, '8.3.1', "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.3.1/DyninstAPI-8.3.1.tgz") - def test_partial_version_prefix(self): # Test now with a partial prefix earlier in the URL -- this is # hard to figure out so Spack only substitutes the last @@ -72,7 +72,6 @@ def test_partial_version_prefix(self): self.check_url(base, '8.3.1', "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.3.1.tgz") - def test_scalasca_partial_version(self): # Note that this probably doesn't actually work, but sites are # inconsistent about their directory structure, so it's not @@ -84,19 +83,16 @@ def test_scalasca_partial_version(self): self.check_url('http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz', '8.3.1', 'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-8.3.1.tar.gz') - def test_mpileaks_version(self): self.check_url('https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz', '2.1.3', 'https://github.com/hpc/mpileaks/releases/download/v2.1.3/mpileaks-2.1.3.tar.gz') - def test_gcc(self): self.check_url('http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2', '4.7', 'http://open-source-box.org/gcc/gcc-4.7/gcc-4.7.tar.bz2') self.check_url('http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2', '4.4.7', 'http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2') - def test_github_raw(self): self.check_url('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true', '2.0.7', 'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true') diff --git a/lib/spack/spack/test/url_parse.py b/lib/spack/spack/test/url_parse.py index 648996aaaa6..6c944a3e7a8 100644 --- a/lib/spack/spack/test/url_parse.py +++ b/lib/spack/spack/test/url_parse.py @@ -32,11 +32,11 @@ class UrlParseTest(unittest.TestCase): + def assert_not_detected(self, string): self.assertRaises( url.UndetectableVersionError, url.parse_name_and_version, string) - def check(self, name, v, string, **kwargs): # Make sure correct name and version are extracted. parsed_name, parsed_v = url.parse_name_and_version(string) @@ -52,7 +52,6 @@ def check(self, name, v, string, **kwargs): # build one with a specific version. 
self.assertEqual(string, url.substitute_version(string, v)) - def test_wwwoffle_version(self): self.check( 'wwwoffle', '2.9h', @@ -72,7 +71,7 @@ def test_no_version(self): def test_version_all_dots(self): self.check( - 'foo.bar.la', '1.14','http://example.com/foo.bar.la.1.14.zip') + 'foo.bar.la', '1.14', 'http://example.com/foo.bar.la.1.14.zip') def test_version_underscore_separator(self): self.check( @@ -286,7 +285,7 @@ def test_mvapich2_19_version(self): 'mvapich2', '1.9', 'http://mvapich.cse.ohio-state.edu/download/mvapich2/mv2/mvapich2-1.9.tgz') - def test_mvapich2_19_version(self): + def test_mvapich2_20_version(self): self.check( 'mvapich2', '2.0', 'http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.0.tar.gz') diff --git a/lib/spack/spack/test/url_substitution.py b/lib/spack/spack/test/url_substitution.py index 9cc04834b68..ea6374e3d28 100644 --- a/lib/spack/spack/test/url_substitution.py +++ b/lib/spack/spack/test/url_substitution.py @@ -26,37 +26,31 @@ This test does sanity checks on substituting new versions into URLs """ import unittest - import spack.url as url -class PackageSanityTest(unittest.TestCase): - def test_hypre_url_substitution(self): - base = "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-2.9.0b.tar.gz" +base = "https://comp.llnl.gov/linear_solvers/download/hypre-2.9.0b.tar.gz" +stem = "https://comp.llnl.gov/linear_solvers/download/hypre-" + +class PackageSanityTest(unittest.TestCase): + + def test_hypre_url_substitution(self): self.assertEqual(url.substitute_version(base, '2.9.0b'), base) self.assertEqual( - url.substitute_version(base, '2.8.0b'), - "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-2.8.0b.tar.gz") + url.substitute_version(base, '2.8.0b'), stem + "2.8.0b.tar.gz") self.assertEqual( - url.substitute_version(base, '2.7.0b'), - "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-2.7.0b.tar.gz") + url.substitute_version(base, '2.7.0b'), stem + "2.7.0b.tar.gz") self.assertEqual( - url.substitute_version(base, '2.6.0b'), - "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-2.6.0b.tar.gz") + url.substitute_version(base, '2.6.0b'), stem + "2.6.0b.tar.gz") self.assertEqual( - url.substitute_version(base, '1.14.0b'), - "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-1.14.0b.tar.gz") + url.substitute_version(base, '1.14.0b'), stem + "1.14.0b.tar.gz") self.assertEqual( - url.substitute_version(base, '1.13.0b'), - "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-1.13.0b.tar.gz") + url.substitute_version(base, '1.13.0b'), stem + "1.13.0b.tar.gz") self.assertEqual( - url.substitute_version(base, '2.0.0'), - "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-2.0.0.tar.gz") + url.substitute_version(base, '2.0.0'), stem + "2.0.0.tar.gz") self.assertEqual( - url.substitute_version(base, '1.6.0'), - "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-1.6.0.tar.gz") - + url.substitute_version(base, '1.6.0'), stem + "1.6.0.tar.gz") def test_otf2_url_substitution(self): base = "http://www.vi-hps.org/upload/packages/otf2/otf2-1.4.tar.gz" diff --git a/lib/spack/spack/test/yaml.py b/lib/spack/spack/test/yaml.py index f1b83e7b717..dedbd15d106 100644 --- a/lib/spack/spack/test/yaml.py +++ b/lib/spack/spack/test/yaml.py @@ -45,26 +45,25 @@ """ test_data = { - 'config_file' : syaml.syaml_dict([ + 'config_file': syaml.syaml_dict([ ('x86_64', syaml.syaml_dict([ ('foo', '/path/to/foo'), ('bar', '/path/to/bar'), - ('baz', '/path/to/baz' )])), - ('some_list', [ 
'item 1', 'item 2', 'item 3' ]), - ('another_list', [ 1, 2, 3 ]), + ('baz', '/path/to/baz')])), + ('some_list', ['item 1', 'item 2', 'item 3']), + ('another_list', [1, 2, 3]), ('some_key', 'some_string') ])} + class YamlTest(unittest.TestCase): def setUp(self): self.data = syaml.load(test_file) - def test_parse(self): self.assertEqual(test_data, self.data) - def test_dict_order(self): self.assertEqual( ['x86_64', 'some_list', 'another_list', 'some_key'], @@ -74,7 +73,6 @@ def test_dict_order(self): ['foo', 'bar', 'baz'], self.data['config_file']['x86_64'].keys()) - def test_line_numbers(self): def check(obj, start_line, end_line): self.assertEqual(obj._start_mark.line, start_line) diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py index f678a2dca92..02c9c04380b 100644 --- a/lib/spack/spack/url.py +++ b/lib/spack/spack/url.py @@ -56,12 +56,12 @@ import spack.util.compression as comp from spack.version import Version + # # Note: We call the input to most of these functions a "path" but the functions # work on paths and URLs. There's not a good word for both of these, but # "path" seemed like the most generic term. # - def find_list_url(url): """Finds a good list URL for the supplied URL. This depends on the site. By default, just assumes that a good list URL is the @@ -71,8 +71,8 @@ def find_list_url(url): url_types = [ # e.g. https://github.com/llnl/callpath/archive/v1.0.1.tar.gz - (r'^(https://github.com/[^/]+/[^/]+)/archive/', lambda m: m.group(1) + '/releases') - ] + (r'^(https://github.com/[^/]+/[^/]+)/archive/', + lambda m: m.group(1) + '/releases')] for pattern, fun in url_types: match = re.search(pattern, url) @@ -89,8 +89,10 @@ def strip_query_and_fragment(path): query, frag = components[3:5] suffix = '' - if query: suffix += '?' + query - if frag: suffix += '#' + frag + if query: + suffix += '?' + query + if frag: + suffix += '#' + frag return (urlunsplit(stripped), suffix) @@ -152,8 +154,10 @@ def downloaded_file_extension(path): """ match = re.search(r'github.com/.+/(zip|tar)ball/', path) if match: - if match.group(1) == 'zip': return 'zip' - elif match.group(1) == 'tar': return 'tar.gz' + if match.group(1) == 'zip': + return 'zip' + elif match.group(1) == 'tar': + return 'tar.gz' prefix, ext, suffix = split_url_extension(path) if not ext: @@ -193,7 +197,8 @@ def parse_version_offset(path): (r'[-_](R\d+[AB]\d*(-\d+)?)', path), # e.g., https://github.com/hpc/libcircle/releases/download/0.2.1-rc.1/libcircle-0.2.1-rc.1.tar.gz - # e.g., https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz + # e.g., + # https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz (r'github.com/[^/]+/[^/]+/releases/download/v?([^/]+)/.*$', path), # e.g. boost_1_39_0 @@ -201,7 +206,7 @@ def parse_version_offset(path): # e.g. foobar-4.5.1-1 # e.g. ruby-1.9.1-p243 - (r'-((\d+\.)*\d\.\d+-(p|rc|RC)?\d+)(?:[-._](?:bin|dist|stable|src|sources))?$', stem), + (r'-((\d+\.)*\d\.\d+-(p|rc|RC)?\d+)(?:[-._](?:bin|dist|stable|src|sources))?$', stem), # noqa # e.g. 
lame-398-1 (r'-((\d)+-\d)', stem), @@ -275,7 +280,8 @@ def parse_name_offset(path, v=None): name_types = [ (r'/sourceforge/([^/]+)/', path), - (r'github.com/[^/]+/[^/]+/releases/download/%s/(.*)-%s$' % (v, v), path), + (r'github.com/[^/]+/[^/]+/releases/download/%s/(.*)-%s$' % + (v, v), path), (r'/([^/]+)/(tarball|zipball)/', path), (r'/([^/]+)[_.-](bin|dist|stable|src|sources)[_.-]%s' % v, path), (r'github.com/[^/]+/([^/]+)/archive', path), @@ -283,7 +289,8 @@ def parse_name_offset(path, v=None): (r'([^/]+)[_.-]v?%s' % v, stem), # prefer the stem (r'([^/]+)%s' % v, stem), - (r'/([^/]+)[_.-]v?%s' % v, path), # accept the path if name is not in stem. + # accept the path if name is not in stem. + (r'/([^/]+)[_.-]v?%s' % v, path), (r'/([^/]+)%s' % v, path), (r'^([^/]+)[_.-]v?%s' % v, path), @@ -326,7 +333,7 @@ def to_ins(match): return re.sub(r'([a-zA-Z])', to_ins, string) -def cumsum(elts, init=0, fn=lambda x:x): +def cumsum(elts, init=0, fn=lambda x: x): """Return cumulative sum of result of fn on each element in elts.""" sums = [] s = init @@ -337,21 +344,20 @@ def cumsum(elts, init=0, fn=lambda x:x): def substitution_offsets(path): - """This returns offsets for substituting versions and names in the provided path. - It is a helper for substitute_version(). + """This returns offsets for substituting versions and names in the + provided path. It is a helper for substitute_version(). """ # Get name and version offsets try: ver, vs, vl = parse_version_offset(path) name, ns, nl = parse_name_offset(path, ver) - except UndetectableNameError, e: + except UndetectableNameError: return (None, -1, -1, (), ver, vs, vl, (vs,)) - except UndetectableVersionError, e: + except UndetectableVersionError: return (None, -1, -1, (), None, -1, -1, ()) # protect extensions like bz2 from getting inadvertently # considered versions. - ext = comp.extension(path) path = comp.strip_extension(path) # Construct a case-insensitive regular expression for the package name. @@ -449,7 +455,7 @@ def color_url(path, **kwargs): Cyan: The version found by parse_version_offset(). Red: The name found by parse_name_offset(). - Green: Instances of version string substituted by substitute_version(). + Green: Instances of version string from substitute_version(). Magenta: Instances of the name (protected from substitution). 
Optional args: @@ -469,31 +475,46 @@ def color_url(path, **kwargs): nerr = verr = 0 out = StringIO() for i in range(len(path)): - if i == vs: out.write('@c'); verr += 1 - elif i == ns: out.write('@r'); nerr += 1 + if i == vs: + out.write('@c') + verr += 1 + elif i == ns: + out.write('@r') + nerr += 1 elif subs: - if i in voffs: out.write('@g') - elif i in noffs: out.write('@m') + if i in voffs: + out.write('@g') + elif i in noffs: + out.write('@m') out.write(path[i]) - if i == vs + vl - 1: out.write('@.'); verr += 1 - elif i == ns + nl - 1: out.write('@.'); nerr += 1 + if i == vs + vl - 1: + out.write('@.') + verr += 1 + elif i == ns + nl - 1: + out.write('@.') + nerr += 1 elif subs: if i in vends or i in nends: out.write('@.') if errors: - if nerr == 0: out.write(" @r{[no name]}") - if verr == 0: out.write(" @r{[no version]}") - if nerr == 1: out.write(" @r{[incomplete name]}") - if verr == 1: out.write(" @r{[incomplete version]}") + if nerr == 0: + out.write(" @r{[no name]}") + if verr == 0: + out.write(" @r{[no version]}") + if nerr == 1: + out.write(" @r{[incomplete name]}") + if verr == 1: + out.write(" @r{[incomplete version]}") return colorize(out.getvalue()) class UrlParseError(spack.error.SpackError): """Raised when the URL module can't parse something correctly.""" + def __init__(self, msg, path): super(UrlParseError, self).__init__(msg) self.path = path @@ -501,6 +522,7 @@ def __init__(self, msg, path): class UndetectableVersionError(UrlParseError): """Raised when we can't parse a version from a string.""" + def __init__(self, path): super(UndetectableVersionError, self).__init__( "Couldn't detect version in: " + path, path) @@ -508,6 +530,7 @@ def __init__(self, path): class UndetectableNameError(UrlParseError): """Raised when we can't parse a package name from a string.""" + def __init__(self, path): super(UndetectableNameError, self).__init__( "Couldn't parse package name in: " + path, path) diff --git a/lib/spack/spack/util/compression.py b/lib/spack/spack/util/compression.py index dc1188eb0f8..64554ab2f71 100644 --- a/lib/spack/spack/util/compression.py +++ b/lib/spack/spack/util/compression.py @@ -32,7 +32,9 @@ EXTS = ["gz", "bz2", "xz", "Z", "zip", "tgz"] # Add PRE_EXTS and EXTS last so that .tar.gz is matched *before* .tar or .gz -ALLOWED_ARCHIVE_TYPES = [".".join(l) for l in product(PRE_EXTS, EXTS)] + PRE_EXTS + EXTS +ALLOWED_ARCHIVE_TYPES = [".".join(l) for l in product( + PRE_EXTS, EXTS)] + PRE_EXTS + EXTS + def allowed_archive(path): return any(path.endswith(t) for t in ALLOWED_ARCHIVE_TYPES) diff --git a/lib/spack/spack/util/crypto.py b/lib/spack/spack/util/crypto.py index 1ae97935188..22777fdb689 100644 --- a/lib/spack/spack/util/crypto.py +++ b/lib/spack/spack/util/crypto.py @@ -31,7 +31,7 @@ hashlib.sha224, hashlib.sha256, hashlib.sha384, - hashlib.sha512 ] + hashlib.sha512] """Index for looking up hasher for a digest.""" _size_to_hash = dict((h().digest_size, h) for h in _acceptable_hashes) @@ -52,7 +52,6 @@ def checksum(hashlib_algo, filename, **kwargs): return hasher.hexdigest() - class Checker(object): """A checker checks files against one particular hex digest. It will automatically determine what hashing algorithm @@ -74,25 +73,25 @@ class Checker(object): adjusting the block_size optional arg. By default it's a 1MB (2**20 bytes) buffer. 
""" + def __init__(self, hexdigest, **kwargs): self.block_size = kwargs.get('block_size', 2**20) self.hexdigest = hexdigest self.sum = None bytes = len(hexdigest) / 2 - if not bytes in _size_to_hash: + if bytes not in _size_to_hash: raise ValueError( - 'Spack knows no hash algorithm for this digest: %s' % hexdigest) + 'Spack knows no hash algorithm for this digest: %s' + % hexdigest) self.hash_fun = _size_to_hash[bytes] - @property def hash_name(self): """Get the name of the hash function this Checker is using.""" return self.hash_fun().name - def check(self, filename): """Read the file with the specified name and check its checksum against self.hexdigest. Return True if they match, False diff --git a/lib/spack/spack/util/debug.py b/lib/spack/spack/util/debug.py index e8a0595416e..cf485a611d1 100644 --- a/lib/spack/spack/util/debug.py +++ b/lib/spack/spack/util/debug.py @@ -33,10 +33,11 @@ import traceback import signal + def debug_handler(sig, frame): """Interrupt running process, and provide a python prompt for interactive debugging.""" - d = {'_frame':frame} # Allow access to frame object. + d = {'_frame': frame} # Allow access to frame object. d.update(frame.f_globals) # Unless shadowed by global d.update(frame.f_locals) @@ -48,5 +49,5 @@ def debug_handler(sig, frame): def register_interrupt_handler(): - """Register a handler to print a stack trace and enter an interpreter on Ctrl-C""" + """Print traceback and enter an interpreter on Ctrl-C""" signal.signal(signal.SIGINT, debug_handler) diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py index 14b56e8d6c3..5c27b92df5f 100644 --- a/lib/spack/spack/util/executable.py +++ b/lib/spack/spack/util/executable.py @@ -165,7 +165,6 @@ def streamify(arg, mode): raise ProcessError("Command exited with status %d:" % proc.returncode, cmd_line) - if output is str or error is str: result = '' if output is str: @@ -227,6 +226,7 @@ def which(name, **kwargs): class ProcessError(spack.error.SpackError): + def __init__(self, msg, long_message=None): # These are used for detailed debugging information for # package builds. They're built up gradually as the exception diff --git a/lib/spack/spack/util/multiproc.py b/lib/spack/spack/util/multiproc.py index 038cd90121e..6a25c45713e 100644 --- a/lib/spack/spack/util/multiproc.py +++ b/lib/spack/spack/util/multiproc.py @@ -32,18 +32,21 @@ __all__ = ['spawn', 'parmap', 'Barrier'] + def spawn(f): - def fun(pipe,x): + def fun(pipe, x): pipe.send(f(x)) pipe.close() return fun -def parmap(f,X): - pipe=[Pipe() for x in X] - proc=[Process(target=spawn(f),args=(c,x)) for x,(p,c) in izip(X,pipe)] + +def parmap(f, X): + pipe = [Pipe() for x in X] + proc = [Process(target=spawn(f), args=(c, x)) + for x, (p, c) in izip(X, pipe)] [p.start() for p in proc] [p.join() for p in proc] - return [p.recv() for (p,c) in pipe] + return [p.recv() for (p, c) in pipe] class Barrier: @@ -53,6 +56,7 @@ class Barrier: See http://greenteapress.com/semaphores/downey08semaphores.pdf, p. 41. 
""" + def __init__(self, n, timeout=None): self.n = n self.to = timeout @@ -61,7 +65,6 @@ def __init__(self, n, timeout=None): self.turnstile1 = Semaphore(0) self.turnstile2 = Semaphore(1) - def wait(self): if not self.mutex.acquire(timeout=self.to): raise BarrierTimeoutError() @@ -90,4 +93,5 @@ def wait(self): self.turnstile2.release() -class BarrierTimeoutError: pass +class BarrierTimeoutError: + pass diff --git a/lib/spack/spack/util/naming.py b/lib/spack/spack/util/naming.py index 2d9762942d0..9a5cdee4112 100644 --- a/lib/spack/spack/util/naming.py +++ b/lib/spack/spack/util/naming.py @@ -31,9 +31,15 @@ import spack -__all__ = ['mod_to_class', 'spack_module_to_python_module', 'valid_module_name', - 'valid_fully_qualified_module_name', 'validate_fully_qualified_module_name', - 'validate_module_name', 'possible_spack_module_names', 'NamespaceTrie'] +__all__ = [ + 'mod_to_class', + 'spack_module_to_python_module', + 'valid_module_name', + 'valid_fully_qualified_module_name', + 'validate_fully_qualified_module_name', + 'validate_module_name', + 'possible_spack_module_names', + 'NamespaceTrie'] # Valid module names can contain '-' but can't start with it. _valid_module_re = r'^\w[\w-]*$' @@ -67,8 +73,8 @@ def mod_to_class(mod_name): class_name = string.capwords(class_name, '-') class_name = class_name.replace('-', '') - # If a class starts with a number, prefix it with Number_ to make it a valid - # Python class name. + # If a class starts with a number, prefix it with Number_ to make it + # a valid Python class name. if re.match(r'^[0-9]', class_name): class_name = "_%s" % class_name @@ -126,6 +132,7 @@ def validate_fully_qualified_module_name(mod_name): class InvalidModuleNameError(spack.error.SpackError): """Raised when we encounter a bad module name.""" + def __init__(self, name): super(InvalidModuleNameError, self).__init__( "Invalid module name: " + name) @@ -134,6 +141,7 @@ def __init__(self, name): class InvalidFullyQualifiedModuleNameError(spack.error.SpackError): """Raised when we encounter a bad full package name.""" + def __init__(self, name): super(InvalidFullyQualifiedModuleNameError, self).__init__( "Invalid fully qualified package name: " + name) @@ -141,17 +149,17 @@ def __init__(self, name): class NamespaceTrie(object): + class Element(object): + def __init__(self, value): self.value = value - def __init__(self, separator='.'): self._subspaces = {} self._value = None self._sep = separator - def __setitem__(self, namespace, value): first, sep, rest = namespace.partition(self._sep) @@ -164,7 +172,6 @@ def __setitem__(self, namespace, value): self._subspaces[first][rest] = value - def _get_helper(self, namespace, full_name): first, sep, rest = namespace.partition(self._sep) if not first: @@ -176,13 +183,12 @@ def _get_helper(self, namespace, full_name): else: return self._subspaces[first]._get_helper(rest, full_name) - def __getitem__(self, namespace): return self._get_helper(namespace, namespace) - def is_prefix(self, namespace): - """True if the namespace has a value, or if it's the prefix of one that does.""" + """True if the namespace has a value, or if it's the prefix of one that + does.""" first, sep, rest = namespace.partition(self._sep) if not first: return True @@ -191,7 +197,6 @@ def is_prefix(self, namespace): else: return self._subspaces[first].is_prefix(rest) - def is_leaf(self, namespace): """True if this namespace has no children in the trie.""" first, sep, rest = namespace.partition(self._sep) @@ -202,7 +207,6 @@ def is_leaf(self, namespace): else: return 
self._subspaces[first].is_leaf(rest) - def has_value(self, namespace): """True if there is a value set for the given namespace.""" first, sep, rest = namespace.partition(self._sep) @@ -213,20 +217,17 @@ def has_value(self, namespace): else: return self._subspaces[first].has_value(rest) - def __contains__(self, namespace): """Returns whether a value has been set for the namespace.""" return self.has_value(namespace) - def _str_helper(self, stream, level=0): indent = (level * ' ') for name in sorted(self._subspaces): stream.write(indent + name + '\n') if self._value: stream.write(indent + ' ' + repr(self._value.value)) - stream.write(self._subspaces[name]._str_helper(stream, level+1)) - + stream.write(self._subspaces[name]._str_helper(stream, level + 1)) def __str__(self): stream = StringIO() diff --git a/lib/spack/spack/util/pattern.py b/lib/spack/spack/util/pattern.py index bc5e9d2ffef..c36445193c0 100644 --- a/lib/spack/spack/util/pattern.py +++ b/lib/spack/spack/util/pattern.py @@ -53,7 +53,9 @@ def composite(interface=None, method_list=None, container=list): # Check if at least one of the 'interface' or the 'method_list' arguments # are defined if interface is None and method_list is None: - raise TypeError("Either 'interface' or 'method_list' must be defined on a call to composite") # NOQA : ignore=E501 + raise TypeError( + "Either 'interface' or 'method_list' must be defined on a call " + "to composite") def cls_decorator(cls): # Retrieve the base class of the composite. Inspect its methods and @@ -102,7 +104,8 @@ def getter(*args, **kwargs): # python@2.7: interface_methods = {name: method for name, method in # inspect.getmembers(interface, predicate=no_special_no_private)} interface_methods = {} - for name, method in inspect.getmembers(interface, predicate=no_special_no_private): # NOQA: ignore=E501 + for name, method in inspect.getmembers( + interface, predicate=no_special_no_private): interface_methods[name] = method ########## # python@2.7: interface_methods_dict = {name: IterateOver(name, @@ -118,7 +121,8 @@ def getter(*args, **kwargs): # python@2.7: cls_method = {name: method for name, method in # inspect.getmembers(cls, predicate=inspect.ismethod)} cls_method = {} - for name, method in inspect.getmembers(cls, predicate=inspect.ismethod): # NOQA: ignore=E501 + for name, method in inspect.getmembers( + cls, predicate=inspect.ismethod): cls_method[name] = method ########## dictionary_for_type_call.update(cls_method) diff --git a/lib/spack/spack/util/prefix.py b/lib/spack/spack/util/prefix.py index e1a0f2958bd..985d8622693 100644 --- a/lib/spack/spack/util/prefix.py +++ b/lib/spack/spack/util/prefix.py @@ -27,6 +27,7 @@ """ from llnl.util.filesystem import join_path + class Prefix(str): """This class represents an installation prefix, but provides useful attributes for referring to directories inside the prefix. diff --git a/lib/spack/spack/util/spack_yaml.py b/lib/spack/spack/util/spack_yaml.py index 909f9a57a8b..7bcdf2d61f0 100644 --- a/lib/spack/spack/util/spack_yaml.py +++ b/lib/spack/spack/util/spack_yaml.py @@ -34,7 +34,6 @@ import yaml from yaml.nodes import * from yaml.constructor import ConstructorError -from yaml.representer import SafeRepresenter from ordereddict_backport import OrderedDict # Only export load and dump @@ -42,15 +41,23 @@ # Make new classes so we can add custom attributes. # Also, use OrderedDict instead of just dict. 
+ + class syaml_dict(OrderedDict): + def __repr__(self): - mappings = ('%r: %r' % (k,v) for k,v in self.items()) + mappings = ('%r: %r' % (k, v) for k, v in self.items()) return '{%s}' % ', '.join(mappings) + + class syaml_list(list): __repr__ = list.__repr__ + + class syaml_str(str): __repr__ = str.__repr__ + def mark(obj, node): """Add start and end markers to an object.""" obj._start_mark = node.start_mark @@ -73,6 +80,7 @@ class OrderedLineLoader(yaml.Loader): # The standard YAML constructors return empty instances and fill # in with mappings later. We preserve this behavior. # + def construct_yaml_str(self, node): value = self.construct_scalar(node) try: @@ -83,14 +91,12 @@ def construct_yaml_str(self, node): mark(value, node) return value - def construct_yaml_seq(self, node): data = syaml_list() mark(data, node) yield data data.extend(self.construct_sequence(node)) - def construct_yaml_map(self, node): data = syaml_dict() mark(data, node) @@ -104,22 +110,23 @@ def construct_yaml_map(self, node): # def construct_sequence(self, node, deep=False): if not isinstance(node, SequenceNode): - raise ConstructorError(None, None, - "expected a sequence node, but found %s" % node.id, - node.start_mark) - value = syaml_list(self.construct_object(child, deep=deep) - for child in node.value) + raise ConstructorError( + None, None, + "expected a sequence node, but found %s" % node.id, + node.start_mark) + value = syaml_list(self.construct_object(child, deep=deep) + for child in node.value) mark(value, node) return value - def construct_mapping(self, node, deep=False): """Store mappings as OrderedDicts instead of as regular python dictionaries to preserve file ordering.""" if not isinstance(node, MappingNode): - raise ConstructorError(None, None, - "expected a mapping node, but found %s" % node.id, - node.start_mark) + raise ConstructorError( + None, None, + "expected a mapping node, but found %s" % node.id, + node.start_mark) mapping = syaml_dict() for key_node, value_node in node.value: @@ -127,22 +134,26 @@ def construct_mapping(self, node, deep=False): try: hash(key) except TypeError, exc: - raise ConstructorError("while constructing a mapping", node.start_mark, - "found unacceptable key (%s)" % exc, key_node.start_mark) + raise ConstructorError( + "while constructing a mapping", node.start_mark, + "found unacceptable key (%s)" % exc, key_node.start_mark) value = self.construct_object(value_node, deep=deep) if key in mapping: - raise ConstructorError("while constructing a mapping", node.start_mark, - "found already in-use key (%s)" % key, key_node.start_mark) + raise ConstructorError( + "while constructing a mapping", node.start_mark, + "found already in-use key (%s)" % key, key_node.start_mark) mapping[key] = value mark(mapping, node) return mapping # register above new constructors -OrderedLineLoader.add_constructor(u'tag:yaml.org,2002:map', OrderedLineLoader.construct_yaml_map) -OrderedLineLoader.add_constructor(u'tag:yaml.org,2002:seq', OrderedLineLoader.construct_yaml_seq) -OrderedLineLoader.add_constructor(u'tag:yaml.org,2002:str', OrderedLineLoader.construct_yaml_str) - +OrderedLineLoader.add_constructor( + u'tag:yaml.org,2002:map', OrderedLineLoader.construct_yaml_map) +OrderedLineLoader.add_constructor( + u'tag:yaml.org,2002:seq', OrderedLineLoader.construct_yaml_seq) +OrderedLineLoader.add_constructor( + u'tag:yaml.org,2002:str', OrderedLineLoader.construct_yaml_str) class OrderedLineDumper(yaml.Dumper): @@ -154,6 +165,7 @@ class OrderedLineDumper(yaml.Dumper): regular Python 
equivalents, instead of ugly YAML pyobjects. """ + def represent_mapping(self, tag, mapping, flow_style=None): value = [] node = MappingNode(tag, value, flow_style=flow_style) @@ -173,7 +185,8 @@ def represent_mapping(self, tag, mapping, flow_style=None): node_value = self.represent_data(item_value) if not (isinstance(node_key, ScalarNode) and not node_key.style): best_style = False - if not (isinstance(node_value, ScalarNode) and not node_value.style): + if not (isinstance(node_value, ScalarNode) and + not node_value.style): best_style = False value.append((node_key, node_value)) if flow_style is None: diff --git a/lib/spack/spack/util/string.py b/lib/spack/spack/util/string.py index ce017b81260..dae7afbf46a 100644 --- a/lib/spack/spack/util/string.py +++ b/lib/spack/spack/util/string.py @@ -23,6 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## + def comma_list(sequence, article=''): if type(sequence) != list: sequence = list(sequence) @@ -32,7 +33,7 @@ def comma_list(sequence, article=''): elif len(sequence) == 1: return sequence[0] else: - out = ', '.join(str(s) for s in sequence[:-1]) + out = ', '.join(str(s) for s in sequence[:-1]) if len(sequence) != 2: out += ',' # oxford comma out += ' ' @@ -41,6 +42,7 @@ def comma_list(sequence, article=''): out += str(sequence[-1]) return out + def comma_or(sequence): return comma_list(sequence, 'or') diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py index cac783a3688..25f1e605d6e 100644 --- a/lib/spack/spack/util/web.py +++ b/lib/spack/spack/util/web.py @@ -43,6 +43,7 @@ class LinkParser(HTMLParser): """This parser just takes an HTML page and strips out the hrefs on the links. Good enough for a really simple spider. """ + def __init__(self): HTMLParser.__init__(self) self.links = [] diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py index ad875f5ef5c..b2c1a734891 100644 --- a/lib/spack/spack/variant.py +++ b/lib/spack/spack/variant.py @@ -29,8 +29,10 @@ """ + class Variant(object): """Represents a variant on a build. 
Can be either on or off.""" + def __init__(self, default, description): self.default = default self.description = str(description) diff --git a/lib/spack/spack/version.py b/lib/spack/spack/version.py index 6f6c83b3d8f..e1311eb0d9a 100644 --- a/lib/spack/spack/version.py +++ b/lib/spack/spack/version.py @@ -50,6 +50,8 @@ from functools_backport import total_ordering +__all__ = ['Version', 'VersionRange', 'VersionList', 'ver'] + # Valid version characters VALID_VERSION = r'[A-Za-z0-9_.-]' @@ -346,8 +348,8 @@ def __lt__(self, other): s, o = self, other if s.start != o.start: - return s.start is None or (o.start is not None and s.start < o.start) # NOQA: ignore=E501 - + return s.start is None or ( + o.start is not None and s.start < o.start) return (s.end != o.end and o.end is None or (s.end is not None and s.end < o.end)) diff --git a/lib/spack/spack/yaml_version_check.py b/lib/spack/spack/yaml_version_check.py index c2d084d6c3b..2c5b511d7f8 100644 --- a/lib/spack/spack/yaml_version_check.py +++ b/lib/spack/spack/yaml_version_check.py @@ -34,6 +34,7 @@ def check_yaml_versions(): check_compiler_yaml_version() + def check_compiler_yaml_version(): config_scopes = spack.config.config_scopes for scope in config_scopes.values(): @@ -46,7 +47,8 @@ def check_compiler_yaml_version(): if data: compilers = data['compilers'] if len(compilers) > 0: - if (not isinstance(compilers, list)) or 'operating_system' not in compilers[0]['compiler']: + if (not isinstance(compilers, list) or + 'operating_system' not in compilers[0]['compiler']): new_file = os.path.join(scope.path, '_old_compilers.yaml') tty.warn('%s in out of date compilers format. ' 'Moved to %s. Spack automatically generate ' diff --git a/share/spack/qa/run-flake8 b/share/spack/qa/run-flake8 index 595df417ecd..c59bfc9490a 100755 --- a/share/spack/qa/run-flake8 +++ b/share/spack/qa/run-flake8 @@ -22,9 +22,13 @@ changed=$(git diff --name-only --find-renames develop... | grep '.py$') # Add approved style exemptions to the changed packages. for file in $changed; do - if [[ $file = *package.py ]]; then - cp "$file" "$file~" + # Make a backup to restore later + cp "$file" "$file.sbak~" + # + # Exemptions for package.py files + # + if [[ $file = *package.py ]]; then # Exempt lines with urls and descriptions from overlong line errors. perl -i -pe 's/^(\s*homepage\s*=.*)$/\1 # NOQA: ignore=E501/' $file perl -i -pe 's/^(\s*url\s*=.*)$/\1 # NOQA: ignore=E501/' $file @@ -36,6 +40,11 @@ for file in $changed; do # Exempt '@when' decorated functions from redefinition errors. perl -i -pe 's/^(\s*\@when\(.*\).*)$/\1 # NOQA: ignore=F811/' $file fi + + # + # Exemptions for all files + # + perl -i -pe 's/^(.*(https?|file)\:.*)$/\1 # NOQA: ignore=E501/' $file done return_code=0 @@ -58,8 +67,8 @@ fi # Restore original package files after modifying them. for file in $changed; do - if [[ $file = *package.py ]]; then - mv "${file}~" "${file}" + if [[ -e "${file}.sbak~" ]]; then + mv "${file}.sbak~" "${file}" fi done From 867121ca68e436e95fe338b4ca80ab154dd6388a Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 10 Aug 2016 00:21:48 -0700 Subject: [PATCH 281/284] Move jsonschema files out of `config.py` to their own package. 
--- lib/spack/spack/config.py | 300 ++-------------------------- lib/spack/spack/schema/__init__.py | 33 +++ lib/spack/spack/schema/compilers.py | 80 ++++++++ lib/spack/spack/schema/mirrors.py | 44 ++++ lib/spack/spack/schema/modules.py | 158 +++++++++++++++ lib/spack/spack/schema/packages.py | 86 ++++++++ lib/spack/spack/schema/repos.py | 41 ++++ lib/spack/spack/schema/targets.py | 45 +++++ 8 files changed, 509 insertions(+), 278 deletions(-) create mode 100644 lib/spack/spack/schema/__init__.py create mode 100644 lib/spack/spack/schema/compilers.py create mode 100644 lib/spack/spack/schema/mirrors.py create mode 100644 lib/spack/spack/schema/modules.py create mode 100644 lib/spack/spack/schema/packages.py create mode 100644 lib/spack/spack/schema/repos.py create mode 100644 lib/spack/spack/schema/targets.py diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index a4a4f5411ee..c90eff42292 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -1,4 +1,3 @@ -# flake8: noqa ############################################################################## # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. @@ -123,15 +122,18 @@ import re import sys -import jsonschema -import llnl.util.tty as tty -import spack import yaml -from jsonschema import Draft4Validator, validators -from llnl.util.filesystem import mkdirp -from ordereddict_backport import OrderedDict -from spack.error import SpackError +import jsonschema from yaml.error import MarkedYAMLError +from jsonschema import Draft4Validator, validators +from ordereddict_backport import OrderedDict + +import llnl.util.tty as tty +from llnl.util.filesystem import mkdirp + +import spack +from spack.error import SpackError +import spack.schema # Hacked yaml for configuration files preserves line numbers. import spack.util.spack_yaml as syaml @@ -139,272 +141,12 @@ """Dict from section names -> schema for that section.""" section_schemas = { - 'compilers': { - '$schema': 'http://json-schema.org/schema#', - 'title': 'Spack compiler configuration file schema', - 'type': 'object', - 'additionalProperties': False, - 'patternProperties': { - 'compilers:?': { # optional colon for overriding site config. 
- 'type': 'array', - 'items': { - 'compiler': { - 'type': 'object', - 'additionalProperties': False, - 'required': ['paths', 'spec', 'modules', 'operating_system'], - 'properties': { - 'paths': { - 'type': 'object', - 'required': ['cc', 'cxx', 'f77', 'fc'], - 'additionalProperties': False, - 'properties': { - 'cc': {'anyOf': [{'type': 'string'}, - {'type': 'null'}]}, - 'cxx': {'anyOf': [{'type': 'string'}, - {'type': 'null'}]}, - 'f77': {'anyOf': [{'type': 'string'}, - {'type': 'null'}]}, - 'fc': {'anyOf': [{'type': 'string'}, - {'type': 'null'}]}, - 'cflags': {'anyOf': [{'type': 'string'}, - {'type': 'null'}]}, - 'cxxflags': {'anyOf': [{'type': 'string'}, - {'type': 'null'}]}, - 'fflags': {'anyOf': [{'type': 'string'}, - {'type': 'null'}]}, - 'cppflags': {'anyOf': [{'type': 'string'}, - {'type': 'null'}]}, - 'ldflags': {'anyOf': [{'type': 'string'}, - {'type': 'null'}]}, - 'ldlibs': {'anyOf': [{'type': 'string'}, - {'type': 'null'}]}}}, - 'spec': {'type': 'string'}, - 'operating_system': {'type': 'string'}, - 'alias': {'anyOf': [{'type': 'string'}, - {'type': 'null'}]}, - 'modules': {'anyOf': [{'type': 'string'}, - {'type': 'null'}, - {'type': 'array'}, - ]} - }, }, }, }, }, }, - 'mirrors': { - '$schema': 'http://json-schema.org/schema#', - 'title': 'Spack mirror configuration file schema', - 'type': 'object', - 'additionalProperties': False, - 'patternProperties': { - r'mirrors:?': { - 'type': 'object', - 'default': {}, - 'additionalProperties': False, - 'patternProperties': { - r'\w[\w-]*': { - 'type': 'string'}, }, }, }, }, - - 'repos': { - '$schema': 'http://json-schema.org/schema#', - 'title': 'Spack repository configuration file schema', - 'type': 'object', - 'additionalProperties': False, - 'patternProperties': { - r'repos:?': { - 'type': 'array', - 'default': [], - 'items': { - 'type': 'string'}, }, }, }, - 'packages': { - '$schema': 'http://json-schema.org/schema#', - 'title': 'Spack package configuration file schema', - 'type': 'object', - 'additionalProperties': False, - 'patternProperties': { - r'packages:?': { - 'type': 'object', - 'default': {}, - 'additionalProperties': False, - 'patternProperties': { - r'\w[\w-]*': { # package name - 'type': 'object', - 'default': {}, - 'additionalProperties': False, - 'properties': { - 'version': { - 'type': 'array', - 'default': [], - 'items': {'anyOf': [{'type': 'string'}, - {'type': 'number'}]}}, # version strings - 'compiler': { - 'type': 'array', - 'default': [], - 'items': {'type': 'string'}}, # compiler specs - 'buildable': { - 'type': 'boolean', - 'default': True, - }, - 'modules': { - 'type': 'object', - 'default': {}, - }, - 'providers': { - 'type': 'object', - 'default': {}, - 'additionalProperties': False, - 'patternProperties': { - r'\w[\w-]*': { - 'type': 'array', - 'default': [], - 'items': {'type': 'string'}, }, }, }, - 'paths': { - 'type': 'object', - 'default': {}, - }, - 'variants': { - 'oneOf': [ - {'type': 'string'}, - {'type': 'array', - 'items': {'type': 'string'}}, - ], }, - }, }, }, }, }, }, - - 'targets': { - '$schema': 'http://json-schema.org/schema#', - 'title': 'Spack target configuration file schema', - 'type': 'object', - 'additionalProperties': False, - 'patternProperties': { - r'targets:?': { - 'type': 'object', - 'default': {}, - 'additionalProperties': False, - 'patternProperties': { - r'\w[\w-]*': { # target name - 'type': 'string', }, }, }, }, }, - 'modules': { - '$schema': 'http://json-schema.org/schema#', - 'title': 'Spack module file configuration file schema', - 'type': 'object', - 
'additionalProperties': False, - 'definitions': { - 'array_of_strings': { - 'type': 'array', - 'default': [], - 'items': { - 'type': 'string' - } - }, - 'dictionary_of_strings': { - 'type': 'object', - 'patternProperties': { - r'\w[\w-]*': { # key - 'type': 'string' - } - } - }, - 'dependency_selection': { - 'type': 'string', - 'enum': ['none', 'direct', 'all'] - }, - 'module_file_configuration': { - 'type': 'object', - 'default': {}, - 'additionalProperties': False, - 'properties': { - 'filter': { - 'type': 'object', - 'default': {}, - 'additionalProperties': False, - 'properties': { - 'environment_blacklist': { - 'type': 'array', - 'default': [], - 'items': { - 'type': 'string' - } - } - } - }, - 'autoload': {'$ref': '#/definitions/dependency_selection'}, - 'prerequisites': {'$ref': '#/definitions/dependency_selection'}, - 'conflict': {'$ref': '#/definitions/array_of_strings'}, - 'load': {'$ref': '#/definitions/array_of_strings'}, - 'suffixes': {'$ref': '#/definitions/dictionary_of_strings'}, - 'environment': { - 'type': 'object', - 'default': {}, - 'additionalProperties': False, - 'properties': { - 'set': {'$ref': '#/definitions/dictionary_of_strings'}, - 'unset': {'$ref': '#/definitions/array_of_strings'}, - 'prepend_path': {'$ref': '#/definitions/dictionary_of_strings'}, - 'append_path': {'$ref': '#/definitions/dictionary_of_strings'} - } - } - } - }, - 'module_type_configuration': { - 'type': 'object', - 'default': {}, - 'anyOf': [ - { - 'properties': { - 'hash_length': { - 'type': 'integer', - 'minimum': 0, - 'default': 7 - }, - 'whitelist': {'$ref': '#/definitions/array_of_strings'}, - 'blacklist': {'$ref': '#/definitions/array_of_strings'}, - 'naming_scheme': { - 'type': 'string' # Can we be more specific here? - } - } - }, - { - 'patternProperties': {r'\w[\w-]*': {'$ref': '#/definitions/module_file_configuration'}} - } - ] - } - }, - 'patternProperties': { - r'modules:?': { - 'type': 'object', - 'default': {}, - 'additionalProperties': False, - 'properties': { - 'prefix_inspections': { - 'type': 'object', - 'patternProperties': { - r'\w[\w-]*': { # path to be inspected for existence (relative to prefix) - '$ref': '#/definitions/array_of_strings' - } - } - }, - 'enable': { - 'type': 'array', - 'default': [], - 'items': { - 'type': 'string', - 'enum': ['tcl', 'dotkit'] - } - }, - 'tcl': { - 'allOf': [ - # Base configuration - {'$ref': '#/definitions/module_type_configuration'}, - {} # Specific tcl extensions - ] - }, - 'dotkit': { - 'allOf': [ - # Base configuration - {'$ref': '#/definitions/module_type_configuration'}, - {} # Specific dotkit extensions - ] - }, - } - }, - }, - }, + 'compilers': spack.schema.compilers.schema, + 'mirrors': spack.schema.mirrors.schema, + 'repos': spack.schema.repos.schema, + 'packages': spack.schema.packages.schema, + 'targets': spack.schema.targets.schema, + 'modules': spack.schema.modules.schema, } """OrderedDict of config scopes keyed by name. @@ -421,7 +163,7 @@ def validate_section_name(section): def extend_with_default(validator_class): - """Add support for the 'default' attribute for properties and patternProperties. + """Add support for the 'default' attr for properties and patternProperties. jsonschema does not handle this out of the box -- it only validates. 
This allows us to set default values for configs @@ -437,7 +179,8 @@ def set_defaults(validator, properties, instance, schema): for property, subschema in properties.iteritems(): if "default" in subschema: instance.setdefault(property, subschema["default"]) - for err in validate_properties(validator, properties, instance, schema): + for err in validate_properties( + validator, properties, instance, schema): yield err def set_pp_defaults(validator, properties, instance, schema): @@ -448,7 +191,8 @@ def set_pp_defaults(validator, properties, instance, schema): if re.match(property, key) and val is None: instance[key] = subschema["default"] - for err in validate_pattern_properties(validator, properties, instance, schema): + for err in validate_pattern_properties( + validator, properties, instance, schema): yield err return validators.extend(validator_class, { @@ -712,7 +456,7 @@ def print_section(section): data = syaml.syaml_dict() data[section] = get_config(section) syaml.dump(data, stream=sys.stdout, default_flow_style=False) - except (yaml.YAMLError, IOError) as e: + except (yaml.YAMLError, IOError): raise ConfigError("Error reading configuration: %s" % section) diff --git a/lib/spack/spack/schema/__init__.py b/lib/spack/spack/schema/__init__.py new file mode 100644 index 00000000000..de45ea921f4 --- /dev/null +++ b/lib/spack/spack/schema/__init__.py @@ -0,0 +1,33 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +"""This module contains jsonschema files for all of Spack's YAML formats. +""" +from llnl.util.lang import list_modules + +# Automatically bring in all sub-modules +__all__ = [] +for mod in list_modules(__path__[0]): + __import__('%s.%s' % (__name__, mod)) + __all__.append(mod) diff --git a/lib/spack/spack/schema/compilers.py b/lib/spack/spack/schema/compilers.py new file mode 100644 index 00000000000..2ffac03a66d --- /dev/null +++ b/lib/spack/spack/schema/compilers.py @@ -0,0 +1,80 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. 
+# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +"""Schema for compiler configuration files.""" + + +schema = { + '$schema': 'http://json-schema.org/schema#', + 'title': 'Spack compiler configuration file schema', + 'type': 'object', + 'additionalProperties': False, + 'patternProperties': { + 'compilers:?': { # optional colon for overriding site config. + 'type': 'array', + 'items': { + 'compiler': { + 'type': 'object', + 'additionalProperties': False, + 'required': [ + 'paths', 'spec', 'modules', 'operating_system'], + 'properties': { + 'paths': { + 'type': 'object', + 'required': ['cc', 'cxx', 'f77', 'fc'], + 'additionalProperties': False, + 'properties': { + 'cc': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'cxx': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'f77': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'fc': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'cflags': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'cxxflags': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'fflags': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'cppflags': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'ldflags': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'ldlibs': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}}}, + 'spec': {'type': 'string'}, + 'operating_system': {'type': 'string'}, + 'alias': {'anyOf': [{'type': 'string'}, + {'type': 'null'}]}, + 'modules': {'anyOf': [{'type': 'string'}, + {'type': 'null'}, + {'type': 'array'}]} + }, + }, + }, + }, + }, +} diff --git a/lib/spack/spack/schema/mirrors.py b/lib/spack/spack/schema/mirrors.py new file mode 100644 index 00000000000..ff599b9c7d4 --- /dev/null +++ b/lib/spack/spack/schema/mirrors.py @@ -0,0 +1,44 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +"""Schema for mirror configuration files.""" + + +schema = { + '$schema': 'http://json-schema.org/schema#', + 'title': 'Spack mirror configuration file schema', + 'type': 'object', + 'additionalProperties': False, + 'patternProperties': { + r'mirrors:?': { + 'type': 'object', + 'default': {}, + 'additionalProperties': False, + 'patternProperties': { + r'\w[\w-]*': { + 'type': 'string'}, + }, + }, + }, +} diff --git a/lib/spack/spack/schema/modules.py b/lib/spack/spack/schema/modules.py new file mode 100644 index 00000000000..f8066919f15 --- /dev/null +++ b/lib/spack/spack/schema/modules.py @@ -0,0 +1,158 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +"""Schema for mirror configuration files.""" + + +schema = { + '$schema': 'http://json-schema.org/schema#', + 'title': 'Spack module file configuration file schema', + 'type': 'object', + 'additionalProperties': False, + 'definitions': { + 'array_of_strings': { + 'type': 'array', + 'default': [], + 'items': { + 'type': 'string' + } + }, + 'dictionary_of_strings': { + 'type': 'object', + 'patternProperties': { + r'\w[\w-]*': { # key + 'type': 'string' + } + } + }, + 'dependency_selection': { + 'type': 'string', + 'enum': ['none', 'direct', 'all'] + }, + 'module_file_configuration': { + 'type': 'object', + 'default': {}, + 'additionalProperties': False, + 'properties': { + 'filter': { + 'type': 'object', + 'default': {}, + 'additionalProperties': False, + 'properties': { + 'environment_blacklist': { + 'type': 'array', + 'default': [], + 'items': { + 'type': 'string' + } + } + } + }, + 'autoload': { + '$ref': '#/definitions/dependency_selection'}, + 'prerequisites': { + '$ref': '#/definitions/dependency_selection'}, + 'conflict': { + '$ref': '#/definitions/array_of_strings'}, + 'load': { + '$ref': '#/definitions/array_of_strings'}, + 'suffixes': { + '$ref': '#/definitions/dictionary_of_strings'}, + 'environment': { + 'type': 'object', + 'default': {}, + 'additionalProperties': False, + 'properties': { + 'set': { + '$ref': '#/definitions/dictionary_of_strings'}, + 'unset': { + '$ref': '#/definitions/array_of_strings'}, + 'prepend_path': { + '$ref': '#/definitions/dictionary_of_strings'}, + 'append_path': { + '$ref': '#/definitions/dictionary_of_strings'} + } + } + } + }, + 'module_type_configuration': { + 'type': 'object', + 'default': {}, + 'anyOf': [ + {'properties': { + 'hash_length': { + 'type': 'integer', + 'minimum': 0, + 'default': 7 + }, + 'whitelist': { + '$ref': '#/definitions/array_of_strings'}, + 'blacklist': { + '$ref': '#/definitions/array_of_strings'}, + 'naming_scheme': { + 'type': 'string' # Can we be more specific here? + } + }}, + {'patternProperties': { + r'\w[\w-]*': { + '$ref': '#/definitions/module_file_configuration' + } + }} + ] + } + }, + 'patternProperties': { + r'modules:?': { + 'type': 'object', + 'default': {}, + 'additionalProperties': False, + 'properties': { + 'prefix_inspections': { + 'type': 'object', + 'patternProperties': { + # prefix-relative path to be inspected for existence + r'\w[\w-]*': { + '$ref': '#/definitions/array_of_strings'}}}, + 'enable': { + 'type': 'array', + 'default': [], + 'items': { + 'type': 'string', + 'enum': ['tcl', 'dotkit']}}, + 'tcl': { + 'allOf': [ + # Base configuration + {'$ref': '#/definitions/module_type_configuration'}, + {} # Specific tcl extensions + ]}, + 'dotkit': { + 'allOf': [ + # Base configuration + {'$ref': '#/definitions/module_type_configuration'}, + {} # Specific dotkit extensions + ]}, + } + }, + }, +} diff --git a/lib/spack/spack/schema/packages.py b/lib/spack/spack/schema/packages.py new file mode 100644 index 00000000000..e19f3f533b7 --- /dev/null +++ b/lib/spack/spack/schema/packages.py @@ -0,0 +1,86 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. 
+# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +"""Schema for packages.yaml configuration files.""" + + +schema = { + '$schema': 'http://json-schema.org/schema#', + 'title': 'Spack package configuration file schema', + 'type': 'object', + 'additionalProperties': False, + 'patternProperties': { + r'packages:?': { + 'type': 'object', + 'default': {}, + 'additionalProperties': False, + 'patternProperties': { + r'\w[\w-]*': { # package name + 'type': 'object', + 'default': {}, + 'additionalProperties': False, + 'properties': { + 'version': { + 'type': 'array', + 'default': [], + # version strings + 'items': {'anyOf': [{'type': 'string'}, + {'type': 'number'}]}}, + 'compiler': { + 'type': 'array', + 'default': [], + 'items': {'type': 'string'}}, # compiler specs + 'buildable': { + 'type': 'boolean', + 'default': True, + }, + 'modules': { + 'type': 'object', + 'default': {}, + }, + 'providers': { + 'type': 'object', + 'default': {}, + 'additionalProperties': False, + 'patternProperties': { + r'\w[\w-]*': { + 'type': 'array', + 'default': [], + 'items': {'type': 'string'}, }, }, }, + 'paths': { + 'type': 'object', + 'default': {}, + }, + 'variants': { + 'oneOf': [ + {'type': 'string'}, + {'type': 'array', + 'items': {'type': 'string'}}], + }, + }, + }, + }, + }, + }, +} diff --git a/lib/spack/spack/schema/repos.py b/lib/spack/spack/schema/repos.py new file mode 100644 index 00000000000..9f01942422a --- /dev/null +++ b/lib/spack/spack/schema/repos.py @@ -0,0 +1,41 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +"""Schema for repository configuration files.""" + + +schema = { + '$schema': 'http://json-schema.org/schema#', + 'title': 'Spack repository configuration file schema', + 'type': 'object', + 'additionalProperties': False, + 'patternProperties': { + r'repos:?': { + 'type': 'array', + 'default': [], + 'items': { + 'type': 'string'}, + }, + }, +} diff --git a/lib/spack/spack/schema/targets.py b/lib/spack/spack/schema/targets.py new file mode 100644 index 00000000000..312474cab4e --- /dev/null +++ b/lib/spack/spack/schema/targets.py @@ -0,0 +1,45 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +"""Schema for target configuration files.""" + + +schema = { + '$schema': 'http://json-schema.org/schema#', + 'title': 'Spack target configuration file schema', + 'type': 'object', + 'additionalProperties': False, + 'patternProperties': { + r'targets:?': { + 'type': 'object', + 'default': {}, + 'additionalProperties': False, + 'patternProperties': { + r'\w[\w-]*': { # target name + 'type': 'string', + }, + }, + }, + }, +} From 240f1fd2239552f37d0f8a8841496414b163ef10 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 10 Aug 2016 01:50:00 -0700 Subject: [PATCH 282/284] Spack packages now PEP8 compliant. 
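
As a hedged illustration of the style this commit enforces (the package below
is hypothetical and not taken from the diff): lines are kept under the
79-character flake8 limit where practical (the QA script separately exempts
long homepage/url lines), and blank-line spacing around classes and methods
follows PEP8.

    from spack import *


    class Examplepkg(Package):
        """Hypothetical package showing the post-cleanup layout."""

        homepage = "http://example.com/examplepkg"
        url = "http://example.com/examplepkg-1.0.tar.gz"

        version('1.0', '00000000000000000000000000000000')

        def install(self, spec, prefix):
            configure('--prefix=%s' % prefix)
            make()
            make('install')
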
--- .../repos/builtin.mock/packages/a/package.py | 1 + .../repos/builtin.mock/packages/b/package.py | 1 + .../repos/builtin.mock/packages/c/package.py | 1 + .../builtin.mock/packages/callpath/package.py | 1 + .../packages/cmake-client/package.py | 15 +- .../builtin.mock/packages/cmake/package.py | 4 +- .../packages/direct_mpich/package.py | 1 + .../builtin.mock/packages/dyninst/package.py | 1 + .../repos/builtin.mock/packages/e/package.py | 1 + .../packages/externalmodule/package.py | 1 + .../packages/externalprereq/package.py | 1 + .../packages/externaltest/package.py | 1 + .../packages/externaltool/package.py | 1 + .../packages/externalvirtual/package.py | 1 + .../builtin.mock/packages/fake/package.py | 1 + .../builtin.mock/packages/git-test/package.py | 1 + .../builtin.mock/packages/hg-test/package.py | 1 + .../builtin.mock/packages/hypre/package.py | 1 + .../packages/indirect_mpich/package.py | 1 + .../builtin.mock/packages/libdwarf/package.py | 2 +- .../builtin.mock/packages/libelf/package.py | 1 + .../builtin.mock/packages/mpich/package.py | 1 + .../builtin.mock/packages/mpich2/package.py | 1 + .../packages/multimethod/package.py | 15 +- .../packages/netlib-blas/package.py | 1 + .../packages/netlib-lapack/package.py | 1 + .../packages/openblas-with-lapack/package.py | 1 + .../builtin.mock/packages/openblas/package.py | 1 + .../packages/optional-dep-test-2/package.py | 1 + .../packages/optional-dep-test-3/package.py | 1 + .../packages/optional-dep-test/package.py | 1 + .../builtin.mock/packages/python/package.py | 2 +- .../builtin.mock/packages/svn-test/package.py | 1 + .../trivial_install_test_package/package.py | 1 + .../builtin.mock/packages/zmpi/package.py | 1 + .../repos/builtin/packages/LuaJIT/package.py | 1 + .../repos/builtin/packages/SAMRAI/package.py | 10 +- .../builtin/packages/activeharmony/package.py | 7 +- .../builtin/packages/adept-utils/package.py | 1 + .../repos/builtin/packages/adol-c/package.py | 58 ++- .../repos/builtin/packages/antlr/package.py | 16 +- .../repos/builtin/packages/apex/package.py | 17 +- .../builtin/packages/apr-util/package.py | 1 + .../repos/builtin/packages/apr/package.py | 1 + .../builtin/packages/arpack-ng/package.py | 3 +- .../builtin/packages/asciidoc/package.py | 1 + .../repos/builtin/packages/atop/package.py | 3 +- .../builtin/packages/automaded/package.py | 1 + .../repos/builtin/packages/bash/package.py | 1 + .../repos/builtin/packages/bbcp/package.py | 10 +- .../repos/builtin/packages/bdw-gc/package.py | 3 +- .../repos/builtin/packages/bear/package.py | 4 +- .../builtin/packages/bib2xhtml/package.py | 3 +- .../repos/builtin/packages/bison/package.py | 5 +- .../repos/builtin/packages/blitz/package.py | 1 + .../repos/builtin/packages/boost/package.py | 22 +- .../repos/builtin/packages/bowtie2/package.py | 8 +- .../repos/builtin/packages/boxlib/package.py | 4 +- .../repos/builtin/packages/c-blosc/package.py | 1 + .../repos/builtin/packages/caliper/package.py | 9 +- .../builtin/packages/callpath/package.py | 1 + .../repos/builtin/packages/cantera/package.py | 15 +- .../repos/builtin/packages/cblas/package.py | 11 +- .../packages/cbtf-argonavis/package.py | 104 ++-- .../builtin/packages/cbtf-krell/package.py | 227 ++++---- .../builtin/packages/cbtf-lanl/package.py | 79 +-- .../repos/builtin/packages/cbtf/package.py | 129 ++--- .../repos/builtin/packages/cereal/package.py | 10 +- .../repos/builtin/packages/cfitsio/package.py | 1 + .../repos/builtin/packages/cgal/package.py | 22 +- .../repos/builtin/packages/cgm/package.py | 4 +- 
.../builtin/packages/cityhash/package.py | 8 +- .../builtin/packages/cleverleaf/package.py | 15 +- .../repos/builtin/packages/cloog/package.py | 1 + .../repos/builtin/packages/cmake/package.py | 13 +- .../repos/builtin/packages/cmocka/package.py | 7 +- .../repos/builtin/packages/cnmem/package.py | 7 +- .../builtin/packages/coreutils/package.py | 1 + .../builtin/packages/cppcheck/package.py | 1 + .../repos/builtin/packages/cram/package.py | 1 + .../builtin/packages/cryptopp/package.py | 11 +- .../repos/builtin/packages/cscope/package.py | 1 + .../repos/builtin/packages/cuda/package.py | 38 +- .../repos/builtin/packages/czmq/package.py | 20 +- .../repos/builtin/packages/dakota/package.py | 38 +- .../builtin/packages/damselfly/package.py | 10 +- .../repos/builtin/packages/dbus/package.py | 1 + .../repos/builtin/packages/dealii/package.py | 46 +- .../repos/builtin/packages/dia/package.py | 3 +- .../repos/builtin/packages/doxygen/package.py | 5 +- .../builtin/packages/dri2proto/package.py | 1 + .../repos/builtin/packages/dtcmp/package.py | 2 +- .../repos/builtin/packages/dyninst/package.py | 11 +- .../repos/builtin/packages/eigen/package.py | 13 +- .../builtin/packages/elfutils/package.py | 2 +- .../repos/builtin/packages/elpa/package.py | 3 +- .../repos/builtin/packages/emacs/package.py | 3 +- .../packages/environment-modules/package.py | 9 +- .../builtin/packages/exodusii/package.py | 26 +- .../repos/builtin/packages/expat/package.py | 2 +- .../repos/builtin/packages/extrae/package.py | 38 +- .../packages/exuberant-ctags/package.py | 1 + .../repos/builtin/packages/fenics/package.py | 18 +- .../repos/builtin/packages/fftw/package.py | 34 +- .../repos/builtin/packages/fish/package.py | 1 + .../repos/builtin/packages/fltk/package.py | 18 +- .../repos/builtin/packages/flux/package.py | 11 +- .../builtin/packages/foam-extend/package.py | 26 +- .../builtin/packages/fontconfig/package.py | 3 +- .../repos/builtin/packages/gasnet/package.py | 29 +- .../repos/builtin/packages/gdal/package.py | 1 + .../repos/builtin/packages/gdb/package.py | 7 +- .../builtin/packages/gdk-pixbuf/package.py | 1 + .../repos/builtin/packages/geos/package.py | 2 +- .../repos/builtin/packages/gflags/package.py | 2 +- .../repos/builtin/packages/gl2ps/package.py | 1 + .../repos/builtin/packages/glm/package.py | 7 +- .../repos/builtin/packages/glog/package.py | 2 +- .../repos/builtin/packages/glpk/package.py | 13 +- .../repos/builtin/packages/gmsh/package.py | 18 +- .../builtin/packages/gnu-prolog/package.py | 1 + .../repos/builtin/packages/gnuplot/package.py | 17 +- .../repos/builtin/packages/gnutls/package.py | 1 + .../builtin/packages/googletest/package.py | 8 +- .../repos/builtin/packages/gperf/package.py | 1 + .../builtin/packages/gperftools/package.py | 14 +- .../builtin/packages/graphlib/package.py | 1 + .../repos/builtin/packages/gsl/package.py | 16 +- .../repos/builtin/packages/gtkplus/package.py | 9 +- .../repos/builtin/packages/hdf/package.py | 10 +- .../builtin/packages/hdf5-blosc/package.py | 27 +- .../repos/builtin/packages/hdf5/package.py | 9 +- .../builtin/packages/hoomd-blue/package.py | 7 +- .../repos/builtin/packages/hpx5/package.py | 1 + .../repos/builtin/packages/hwloc/package.py | 1 + .../repos/builtin/packages/hydra/package.py | 2 +- .../repos/builtin/packages/hypre/package.py | 6 +- .../repos/builtin/packages/ibmisc/package.py | 25 +- .../repos/builtin/packages/icu/package.py | 3 +- .../repos/builtin/packages/icu4c/package.py | 7 +- .../packages/intel-parallel-studio/package.py | 73 +-- 
.../builtin/packages/intltool/package.py | 8 +- .../repos/builtin/packages/ior/package.py | 2 +- .../repos/builtin/packages/ipopt/package.py | 11 +- .../repos/builtin/packages/isl/package.py | 1 + .../repos/builtin/packages/jdk/package.py | 4 +- .../builtin/packages/jemalloc/package.py | 6 +- .../repos/builtin/packages/jpeg/package.py | 1 + .../repos/builtin/packages/judy/package.py | 3 +- .../repos/builtin/packages/kealib/package.py | 10 +- .../repos/builtin/packages/kripke/package.py | 1 + .../repos/builtin/packages/lcms/package.py | 1 + .../repos/builtin/packages/leveldb/package.py | 2 +- .../repos/builtin/packages/libNBC/package.py | 1 + .../builtin/packages/libarchive/package.py | 1 + .../repos/builtin/packages/libcerf/package.py | 7 +- .../builtin/packages/libcircle/package.py | 4 +- .../repos/builtin/packages/libdrm/package.py | 3 +- .../repos/builtin/packages/libedit/package.py | 4 +- .../repos/builtin/packages/libelf/package.py | 1 + .../builtin/packages/libevent/package.py | 11 +- .../repos/builtin/packages/libffi/package.py | 11 +- .../builtin/packages/libgcrypt/package.py | 1 + .../repos/builtin/packages/libgd/package.py | 13 +- .../builtin/packages/libgpg-error/package.py | 1 + .../builtin/packages/libjpeg-turbo/package.py | 1 + .../repos/builtin/packages/libmng/package.py | 4 +- .../builtin/packages/libmonitor/package.py | 6 +- .../builtin/packages/libpciaccess/package.py | 2 +- .../repos/builtin/packages/libpng/package.py | 1 + .../packages/libpthread-stubs/package.py | 1 + .../builtin/packages/libsigsegv/package.py | 1 + .../builtin/packages/libsodium/package.py | 1 + .../builtin/packages/libtermkey/package.py | 10 +- .../repos/builtin/packages/libtiff/package.py | 1 + .../builtin/packages/libunwind/package.py | 1 + .../repos/builtin/packages/libuuid/package.py | 1 + .../repos/builtin/packages/libuv/package.py | 1 + .../builtin/packages/libvterm/package.py | 1 + .../repos/builtin/packages/libxc/package.py | 2 +- .../repos/builtin/packages/libxcb/package.py | 10 +- .../repos/builtin/packages/libxml2/package.py | 4 +- .../builtin/packages/libxshmfence/package.py | 3 +- .../repos/builtin/packages/libxslt/package.py | 1 + .../builtin/packages/llvm-lld/package.py | 1 + .../repos/builtin/packages/llvm/package.py | 351 +++++++------ .../repos/builtin/packages/lmdb/package.py | 2 +- .../repos/builtin/packages/lwgrp/package.py | 2 +- .../repos/builtin/packages/lwm2/package.py | 1 + .../repos/builtin/packages/m4/package.py | 7 +- .../repos/builtin/packages/mbedtls/package.py | 16 +- .../repos/builtin/packages/memaxes/package.py | 1 + .../repos/builtin/packages/mesa/package.py | 3 +- .../repos/builtin/packages/metis/package.py | 12 +- .../repos/builtin/packages/mfem/package.py | 60 ++- .../repos/builtin/packages/mpc/package.py | 1 + .../repos/builtin/packages/mpe2/package.py | 3 +- .../repos/builtin/packages/mpfr/package.py | 1 + .../repos/builtin/packages/mpibash/package.py | 2 +- .../builtin/packages/mpileaks/package.py | 4 +- .../repos/builtin/packages/mrnet/package.py | 24 +- .../builtin/packages/msgpack-c/package.py | 1 + .../repos/builtin/packages/mumps/package.py | 39 +- .../repos/builtin/packages/munge/package.py | 5 +- .../builtin/packages/muparser/package.py | 1 + .../repos/builtin/packages/muster/package.py | 1 + .../builtin/packages/mvapich2/package.py | 39 +- .../repos/builtin/packages/mxml/package.py | 24 + .../repos/builtin/packages/nag/package.py | 4 +- .../repos/builtin/packages/nasm/package.py | 1 + .../repos/builtin/packages/nccmp/package.py | 1 + 
.../repos/builtin/packages/ncdu/package.py | 9 +- .../repos/builtin/packages/nco/package.py | 6 +- .../repos/builtin/packages/ncurses/package.py | 11 +- .../repos/builtin/packages/ncview/package.py | 1 + .../repos/builtin/packages/ndiff/package.py | 12 +- .../builtin/packages/netcdf-cxx/package.py | 1 + .../repos/builtin/packages/netcdf/package.py | 27 +- .../builtin/packages/netgauge/package.py | 1 + .../builtin/packages/netlib-lapack/package.py | 50 +- .../packages/netlib-scalapack/package.py | 3 +- .../repos/builtin/packages/nettle/package.py | 1 + .../repos/builtin/packages/ninja/package.py | 2 +- .../repos/builtin/packages/numdiff/package.py | 3 +- .../repos/builtin/packages/oce/package.py | 3 +- .../repos/builtin/packages/ompss/package.py | 37 +- .../builtin/packages/ompt-openmp/package.py | 14 +- .../builtin/packages/openspeedshop/package.py | 493 ++++++++++-------- .../packages/osu-micro-benchmarks/package.py | 2 +- .../repos/builtin/packages/otf/package.py | 1 + .../repos/builtin/packages/otf2/package.py | 12 +- .../repos/builtin/packages/papi/package.py | 3 +- .../builtin/packages/paradiseo/package.py | 71 ++- .../packages/parallel-netcdf/package.py | 9 +- .../repos/builtin/packages/paraver/package.py | 19 +- .../builtin/packages/paraview/package.py | 66 ++- .../builtin/packages/parmetis/package.py | 6 +- .../repos/builtin/packages/parpack/package.py | 5 +- .../builtin/packages/patchelf/package.py | 4 +- .../repos/builtin/packages/pcre2/package.py | 1 + .../repos/builtin/packages/pdt/package.py | 12 +- .../repos/builtin/packages/petsc/package.py | 21 +- .../repos/builtin/packages/pidx/package.py | 1 + .../repos/builtin/packages/pixman/package.py | 1 + .../packages/pmgr_collective/package.py | 1 + .../repos/builtin/packages/ppl/package.py | 1 + .../repos/builtin/packages/proj/package.py | 1 + .../builtin/packages/protobuf/package.py | 2 +- .../repos/builtin/packages/psi4/package.py | 10 +- .../builtin/packages/py-SQLAlchemy/package.py | 1 + .../packages/py-argcomplete/package.py | 1 + .../builtin/packages/py-astroid/package.py | 1 - .../builtin/packages/py-autopep8/package.py | 2 +- .../builtin/packages/py-biopython/package.py | 7 +- .../builtin/packages/py-blessings/package.py | 1 + .../builtin/packages/py-bottleneck/package.py | 3 +- .../repos/builtin/packages/py-cffi/package.py | 1 + .../builtin/packages/py-coverage/package.py | 1 + .../builtin/packages/py-csvkit/package.py | 1 + .../builtin/packages/py-cython/package.py | 1 + .../repos/builtin/packages/py-dask/package.py | 1 + .../builtin/packages/py-dateutil/package.py | 1 + .../repos/builtin/packages/py-dbf/package.py | 1 + .../builtin/packages/py-decorator/package.py | 5 +- .../builtin/packages/py-epydoc/package.py | 4 +- .../builtin/packages/py-funcsigs/package.py | 5 +- .../builtin/packages/py-genders/package.py | 11 +- .../builtin/packages/py-gnuplot/package.py | 4 +- .../builtin/packages/py-ipython/package.py | 4 +- .../builtin/packages/py-jdcal/package.py | 1 + .../builtin/packages/py-jinja2/package.py | 1 - .../builtin/packages/py-lockfile/package.py | 1 + .../packages/py-logilab-common/package.py | 1 - .../repos/builtin/packages/py-mako/package.py | 7 +- .../builtin/packages/py-markupsafe/package.py | 1 - .../builtin/packages/py-matplotlib/package.py | 5 +- .../repos/builtin/packages/py-mock/package.py | 1 + .../builtin/packages/py-mpi4py/package.py | 8 +- .../builtin/packages/py-mpmath/package.py | 1 + .../repos/builtin/packages/py-mx/package.py | 8 +- .../builtin/packages/py-mysqldb1/package.py | 2 +- 
.../builtin/packages/py-netcdf/package.py | 1 + .../builtin/packages/py-numexpr/package.py | 2 +- .../builtin/packages/py-openpyxl/package.py | 1 + .../builtin/packages/py-pandas/package.py | 12 +- .../repos/builtin/packages/py-pbr/package.py | 7 +- .../repos/builtin/packages/py-pep8/package.py | 2 +- .../packages/py-periodictable/package.py | 1 + .../builtin/packages/py-pexpect/package.py | 1 + .../builtin/packages/py-phonopy/package.py | 1 + .../builtin/packages/py-pillow/package.py | 3 +- .../repos/builtin/packages/py-pmw/package.py | 4 +- .../builtin/packages/py-py2neo/package.py | 1 - .../builtin/packages/py-pychecker/package.py | 1 + .../builtin/packages/py-pycparser/package.py | 4 +- .../builtin/packages/py-pyelftools/package.py | 4 +- .../builtin/packages/py-pygments/package.py | 1 + .../builtin/packages/py-pylint/package.py | 2 +- .../builtin/packages/py-pypar/package.py | 8 +- .../builtin/packages/py-pyparsing/package.py | 1 + .../repos/builtin/packages/py-pyqt/package.py | 1 + .../builtin/packages/py-pyside/package.py | 5 +- .../packages/py-python-daemon/package.py | 2 +- .../repos/builtin/packages/py-pytz/package.py | 1 + .../builtin/packages/py-pyyaml/package.py | 1 + .../repos/builtin/packages/py-rpy2/package.py | 10 +- .../packages/py-scientificpython/package.py | 1 + .../packages/py-scikit-learn/package.py | 1 + .../builtin/packages/py-scipy/package.py | 4 +- .../builtin/packages/py-shiboken/package.py | 7 +- .../repos/builtin/packages/py-sip/package.py | 4 +- .../repos/builtin/packages/py-six/package.py | 1 + .../builtin/packages/py-sphinx/package.py | 1 + .../builtin/packages/py-sympy/package.py | 1 + .../builtin/packages/py-tappy/package.py | 1 + .../builtin/packages/py-tuiview/package.py | 1 + .../builtin/packages/py-twisted/package.py | 1 + .../builtin/packages/py-urwid/package.py | 2 +- .../builtin/packages/py-virtualenv/package.py | 2 +- .../builtin/packages/py-wheel/package.py | 1 + .../repos/builtin/packages/py-xlrd/package.py | 1 + .../repos/builtin/packages/py-yapf/package.py | 1 + .../repos/builtin/packages/python/package.py | 3 +- .../repos/builtin/packages/qhull/package.py | 3 +- .../builtin/packages/qrupdate/package.py | 1 + .../repos/builtin/packages/qt/package.py | 47 +- .../builtin/packages/qthreads/package.py | 1 + .../repos/builtin/packages/raja/package.py | 7 +- .../repos/builtin/packages/ravel/package.py | 1 + .../builtin/packages/readline/package.py | 1 + .../repos/builtin/packages/rose/package.py | 9 +- .../repos/builtin/packages/rsync/package.py | 3 +- .../repos/builtin/packages/rust/package.py | 1 + .../builtin/packages/scalasca/package.py | 15 +- .../repos/builtin/packages/scons/package.py | 1 + .../repos/builtin/packages/scotch/package.py | 15 +- .../repos/builtin/packages/scr/package.py | 7 +- .../repos/builtin/packages/silo/package.py | 9 +- .../repos/builtin/packages/snappy/package.py | 2 +- .../builtin/packages/sparsehash/package.py | 1 + .../repos/builtin/packages/spindle/package.py | 1 + .../repos/builtin/packages/spot/package.py | 7 +- .../repos/builtin/packages/sqlite/package.py | 1 + .../repos/builtin/packages/stat/package.py | 6 +- .../builtin/packages/subversion/package.py | 27 +- .../builtin/packages/sundials/package.py | 12 +- .../builtin/packages/superlu-mt/package.py | 6 +- .../builtin/packages/swiftsim/package.py | 6 +- .../repos/builtin/packages/szip/package.py | 9 +- .../repos/builtin/packages/tar/package.py | 2 - .../repos/builtin/packages/task/package.py | 1 + .../repos/builtin/packages/taskd/package.py | 1 + 
.../repos/builtin/packages/tau/package.py | 36 +- .../repos/builtin/packages/tetgen/package.py | 3 +- .../repos/builtin/packages/texinfo/package.py | 11 +- .../repos/builtin/packages/texlive/package.py | 3 +- .../repos/builtin/packages/thrift/package.py | 23 +- .../builtin/packages/tmuxinator/package.py | 9 +- .../builtin/packages/triangle/package.py | 1 + .../builtin/packages/trilinos/package.py | 21 +- .../builtin/packages/udunits2/package.py | 1 + .../builtin/packages/uncrustify/package.py | 3 +- .../builtin/packages/unibilium/package.py | 5 +- .../builtin/packages/util-linux/package.py | 8 +- .../builtin/packages/valgrind/package.py | 16 +- .../repos/builtin/packages/vim/package.py | 69 +-- .../repos/builtin/packages/visit/package.py | 18 +- .../repos/builtin/packages/vtk/package.py | 15 +- .../repos/builtin/packages/wget/package.py | 11 +- .../repos/builtin/packages/wx/package.py | 5 +- .../builtin/packages/wxpropgrid/package.py | 5 +- .../builtin/packages/xcb-proto/package.py | 1 + .../packages/xorg-util-macros/package.py | 5 +- .../repos/builtin/packages/xproto/package.py | 1 + .../repos/builtin/packages/xz/package.py | 2 +- .../repos/builtin/packages/yasm/package.py | 1 + .../repos/builtin/packages/zeromq/package.py | 3 +- .../repos/builtin/packages/zfp/package.py | 13 +- .../repos/builtin/packages/zlib/package.py | 7 +- .../repos/builtin/packages/zoltan/package.py | 48 +- .../repos/builtin/packages/zsh/package.py | 1 + 381 files changed, 2457 insertions(+), 1617 deletions(-) diff --git a/var/spack/repos/builtin.mock/packages/a/package.py b/var/spack/repos/builtin.mock/packages/a/package.py index 40b92240fc8..0d75ee1256a 100644 --- a/var/spack/repos/builtin.mock/packages/a/package.py +++ b/var/spack/repos/builtin.mock/packages/a/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class A(Package): """Simple package with no dependencies""" diff --git a/var/spack/repos/builtin.mock/packages/b/package.py b/var/spack/repos/builtin.mock/packages/b/package.py index c447a56b485..5729f24e79b 100644 --- a/var/spack/repos/builtin.mock/packages/b/package.py +++ b/var/spack/repos/builtin.mock/packages/b/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class B(Package): """Simple package with no dependencies""" diff --git a/var/spack/repos/builtin.mock/packages/c/package.py b/var/spack/repos/builtin.mock/packages/c/package.py index 5b6079c4e3c..80777a05bb1 100644 --- a/var/spack/repos/builtin.mock/packages/c/package.py +++ b/var/spack/repos/builtin.mock/packages/c/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class C(Package): """Simple package with no dependencies""" diff --git a/var/spack/repos/builtin.mock/packages/callpath/package.py b/var/spack/repos/builtin.mock/packages/callpath/package.py index c297a123b85..56b969df98a 100644 --- a/var/spack/repos/builtin.mock/packages/callpath/package.py +++ b/var/spack/repos/builtin.mock/packages/callpath/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Callpath(Package): homepage = "https://github.com/tgamblin/callpath" url = "http://github.com/tgamblin/callpath-1.0.tar.gz" diff --git a/var/spack/repos/builtin.mock/packages/cmake-client/package.py b/var/spack/repos/builtin.mock/packages/cmake-client/package.py index 
c58430317ad..0c78f0bf688 100644 --- a/var/spack/repos/builtin.mock/packages/cmake-client/package.py +++ b/var/spack/repos/builtin.mock/packages/cmake-client/package.py @@ -25,6 +25,7 @@ from spack import * import os + def check(condition, msg): """Raise an install error if condition is False.""" if not condition: @@ -40,15 +41,14 @@ class CmakeClient(Package): depends_on('cmake', type='build') - def setup_environment(self, spack_env, run_env): spack_cc # Ensure spack module-scope variable is avaiabl check(from_cmake == "from_cmake", "setup_environment couldn't read global set by cmake.") check(self.spec['cmake'].link_arg == "test link arg", - "link arg on dependency spec not readable from setup_environment.") - + "link arg on dependency spec not readable from " + "setup_environment.") def setup_dependent_environment(self, spack_env, run_env, dspec): spack_cc # Ensure spack module-scope variable is avaiable @@ -56,8 +56,8 @@ def setup_dependent_environment(self, spack_env, run_env, dspec): "setup_dependent_environment couldn't read global set by cmake.") check(self.spec['cmake'].link_arg == "test link arg", - "link arg on dependency spec not readable from setup_dependent_environment.") - + "link arg on dependency spec not readable from " + "setup_dependent_environment.") def setup_dependent_package(self, module, dspec): spack_cc # Ensure spack module-scope variable is avaiable @@ -65,9 +65,8 @@ def setup_dependent_package(self, module, dspec): "setup_dependent_package couldn't read global set by cmake.") check(self.spec['cmake'].link_arg == "test link arg", - "link arg on dependency spec not readable from setup_dependent_package.") - - + "link arg on dependency spec not readable from " + "setup_dependent_package.") def install(self, spec, prefix): # check that cmake is in the global scope. 
diff --git a/var/spack/repos/builtin.mock/packages/cmake/package.py b/var/spack/repos/builtin.mock/packages/cmake/package.py index 0356cf8afb4..c8b6464e69c 100644 --- a/var/spack/repos/builtin.mock/packages/cmake/package.py +++ b/var/spack/repos/builtin.mock/packages/cmake/package.py @@ -25,6 +25,7 @@ from spack import * import os + def check(condition, msg): """Raise an install error if condition is False.""" if not condition: @@ -39,7 +40,6 @@ class Cmake(Package): version('3.4.3', '4cb3ff35b2472aae70f542116d616e63', url='https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz') - def setup_environment(self, spack_env, run_env): spack_cc # Ensure spack module-scope variable is avaiable spack_env.set('for_install', 'for_install') @@ -48,7 +48,6 @@ def setup_dependent_environment(self, spack_env, run_env, dspec): spack_cc # Ensure spack module-scope variable is avaiable spack_env.set('from_cmake', 'from_cmake') - def setup_dependent_package(self, module, dspec): spack_cc # Ensure spack module-scope variable is avaiable @@ -57,7 +56,6 @@ def setup_dependent_package(self, module, dspec): self.spec.link_arg = "test link arg" - def install(self, spec, prefix): mkdirp(prefix.bin) diff --git a/var/spack/repos/builtin.mock/packages/direct_mpich/package.py b/var/spack/repos/builtin.mock/packages/direct_mpich/package.py index 663908d56c7..f38589ad4d6 100644 --- a/var/spack/repos/builtin.mock/packages/direct_mpich/package.py +++ b/var/spack/repos/builtin.mock/packages/direct_mpich/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class DirectMpich(Package): homepage = "http://www.example.com" url = "http://www.example.com/direct_mpich-1.0.tar.gz" diff --git a/var/spack/repos/builtin.mock/packages/dyninst/package.py b/var/spack/repos/builtin.mock/packages/dyninst/package.py index ad486011e2d..daf1b82ec67 100644 --- a/var/spack/repos/builtin.mock/packages/dyninst/package.py +++ b/var/spack/repos/builtin.mock/packages/dyninst/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Dyninst(Package): homepage = "https://paradyn.org" url = "http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.tgz" diff --git a/var/spack/repos/builtin.mock/packages/e/package.py b/var/spack/repos/builtin.mock/packages/e/package.py index b951a3eaa6e..c7640075631 100644 --- a/var/spack/repos/builtin.mock/packages/e/package.py +++ b/var/spack/repos/builtin.mock/packages/e/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class E(Package): """Simple package with no dependencies""" diff --git a/var/spack/repos/builtin.mock/packages/externalmodule/package.py b/var/spack/repos/builtin.mock/packages/externalmodule/package.py index f7b0da3fd91..f7c9b056a45 100644 --- a/var/spack/repos/builtin.mock/packages/externalmodule/package.py +++ b/var/spack/repos/builtin.mock/packages/externalmodule/package.py @@ -25,6 +25,7 @@ ############################################################################## from spack import * + class Externalmodule(Package): homepage = "http://somewhere.com" url = "http://somewhere.com/module-1.0.tar.gz" diff --git a/var/spack/repos/builtin.mock/packages/externalprereq/package.py b/var/spack/repos/builtin.mock/packages/externalprereq/package.py index bd3c4348bfc..226742f2cb1 100644 --- a/var/spack/repos/builtin.mock/packages/externalprereq/package.py +++ 
b/var/spack/repos/builtin.mock/packages/externalprereq/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Externalprereq(Package): homepage = "http://somewhere.com" url = "http://somewhere.com/prereq-1.0.tar.gz" diff --git a/var/spack/repos/builtin.mock/packages/externaltest/package.py b/var/spack/repos/builtin.mock/packages/externaltest/package.py index 2318887aec0..252c42556ee 100644 --- a/var/spack/repos/builtin.mock/packages/externaltest/package.py +++ b/var/spack/repos/builtin.mock/packages/externaltest/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Externaltest(Package): homepage = "http://somewhere.com" url = "http://somewhere.com/test-1.0.tar.gz" diff --git a/var/spack/repos/builtin.mock/packages/externaltool/package.py b/var/spack/repos/builtin.mock/packages/externaltool/package.py index 9ff2396f361..d2daddd350b 100644 --- a/var/spack/repos/builtin.mock/packages/externaltool/package.py +++ b/var/spack/repos/builtin.mock/packages/externaltool/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Externaltool(Package): homepage = "http://somewhere.com" url = "http://somewhere.com/tool-1.0.tar.gz" diff --git a/var/spack/repos/builtin.mock/packages/externalvirtual/package.py b/var/spack/repos/builtin.mock/packages/externalvirtual/package.py index e19ef332f01..1f3553014b2 100644 --- a/var/spack/repos/builtin.mock/packages/externalvirtual/package.py +++ b/var/spack/repos/builtin.mock/packages/externalvirtual/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Externalvirtual(Package): homepage = "http://somewhere.com" url = "http://somewhere.com/stuff-1.0.tar.gz" diff --git a/var/spack/repos/builtin.mock/packages/fake/package.py b/var/spack/repos/builtin.mock/packages/fake/package.py index 15aabf11016..b83eec74703 100644 --- a/var/spack/repos/builtin.mock/packages/fake/package.py +++ b/var/spack/repos/builtin.mock/packages/fake/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Fake(Package): homepage = "http://www.fake-spack-example.org" url = "http://www.fake-spack-example.org/downloads/fake-1.0.tar.gz" diff --git a/var/spack/repos/builtin.mock/packages/git-test/package.py b/var/spack/repos/builtin.mock/packages/git-test/package.py index aeea41146fd..730e71ac6b3 100644 --- a/var/spack/repos/builtin.mock/packages/git-test/package.py +++ b/var/spack/repos/builtin.mock/packages/git-test/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class GitTest(Package): """Mock package that uses git for fetching.""" homepage = "http://www.git-fetch-example.com" diff --git a/var/spack/repos/builtin.mock/packages/hg-test/package.py b/var/spack/repos/builtin.mock/packages/hg-test/package.py index 64719eb53c9..70a9b7f2c76 100644 --- a/var/spack/repos/builtin.mock/packages/hg-test/package.py +++ b/var/spack/repos/builtin.mock/packages/hg-test/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class HgTest(Package): """Test package that does fetching with mercurial.""" homepage = "http://www.hg-fetch-example.com" diff --git 
a/var/spack/repos/builtin.mock/packages/hypre/package.py b/var/spack/repos/builtin.mock/packages/hypre/package.py index 3aedea9bf2b..b9e31b09dc0 100644 --- a/var/spack/repos/builtin.mock/packages/hypre/package.py +++ b/var/spack/repos/builtin.mock/packages/hypre/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Hypre(Package): """Hypre is included here as an example of a package that depends on both LAPACK and BLAS.""" diff --git a/var/spack/repos/builtin.mock/packages/indirect_mpich/package.py b/var/spack/repos/builtin.mock/packages/indirect_mpich/package.py index 6ed779889bc..bbbf196129b 100644 --- a/var/spack/repos/builtin.mock/packages/indirect_mpich/package.py +++ b/var/spack/repos/builtin.mock/packages/indirect_mpich/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class IndirectMpich(Package): """Test case for a package that depends on MPI and one of its dependencies requires a *particular version* of MPI. diff --git a/var/spack/repos/builtin.mock/packages/libdwarf/package.py b/var/spack/repos/builtin.mock/packages/libdwarf/package.py index b53e295e23d..0fcbe4a62e8 100644 --- a/var/spack/repos/builtin.mock/packages/libdwarf/package.py +++ b/var/spack/repos/builtin.mock/packages/libdwarf/package.py @@ -23,11 +23,11 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import os # Only build certain parts of dwarf because the other ones break. dwarf_dirs = ['libdwarf', 'dwarfdump2'] + class Libdwarf(Package): homepage = "http://www.prevanders.net/dwarf.html" url = "http://www.prevanders.net/libdwarf-20130729.tar.gz" diff --git a/var/spack/repos/builtin.mock/packages/libelf/package.py b/var/spack/repos/builtin.mock/packages/libelf/package.py index f52d8cefe1d..90d00ad339f 100644 --- a/var/spack/repos/builtin.mock/packages/libelf/package.py +++ b/var/spack/repos/builtin.mock/packages/libelf/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Libelf(Package): homepage = "http://www.mr511.de/software/english.html" url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz" diff --git a/var/spack/repos/builtin.mock/packages/mpich/package.py b/var/spack/repos/builtin.mock/packages/mpich/package.py index f278f26b8be..936127398c5 100644 --- a/var/spack/repos/builtin.mock/packages/mpich/package.py +++ b/var/spack/repos/builtin.mock/packages/mpich/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Mpich(Package): homepage = "http://www.mpich.org" url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz" diff --git a/var/spack/repos/builtin.mock/packages/mpich2/package.py b/var/spack/repos/builtin.mock/packages/mpich2/package.py index e6b68d2490f..c92b4ba43a8 100644 --- a/var/spack/repos/builtin.mock/packages/mpich2/package.py +++ b/var/spack/repos/builtin.mock/packages/mpich2/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Mpich2(Package): homepage = "http://www.mpich.org" url = "http://www.mpich.org/static/downloads/1.5/mpich2-1.5.tar.gz" diff --git a/var/spack/repos/builtin.mock/packages/multimethod/package.py 
b/var/spack/repos/builtin.mock/packages/multimethod/package.py index ca991632dd8..fa3f8151350 100644 --- a/var/spack/repos/builtin.mock/packages/multimethod/package.py +++ b/var/spack/repos/builtin.mock/packages/multimethod/package.py @@ -22,12 +22,10 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import imp -from llnl.util.filesystem import join_path -from spack.util.naming import mod_to_class from spack import * import spack.architecture + class Multimethod(Package): """This package is designed for use with Spack's multimethod test. It has a bunch of test cases for the @when decorator that the @@ -52,7 +50,6 @@ def no_version_2(self): def no_version_2(self): return 4 - # # These functions overlap, so there is ambiguity, but we'll take # the first one. @@ -65,7 +62,6 @@ def version_overlap(self): def version_overlap(self): return 2 - # # More complicated case with cascading versions. # @@ -84,7 +80,6 @@ def mpi_version(self): def mpi_version(self): return 1 - # # Use these to test whether the default method is called when no # match is found. This also tests whether we can switch methods @@ -101,8 +96,6 @@ def has_a_default(self): def has_a_default(self): return 'intel' - - # # Make sure we can switch methods on different target # @@ -112,15 +105,16 @@ def has_a_default(self): targets = targets[:-1] for target in targets: - @when('target='+target.name) + @when('target=' + target.name) def different_by_target(self): - if isinstance(self.spec.architecture.target,basestring): + if isinstance(self.spec.architecture.target, basestring): return self.spec.architecture.target else: return self.spec.architecture.target.name # # Make sure we can switch methods on different dependencies # + @when('^mpich') def different_by_dep(self): return 'mpich' @@ -129,7 +123,6 @@ def different_by_dep(self): def different_by_dep(self): return 'zmpi' - # # Make sure we can switch on virtual dependencies # diff --git a/var/spack/repos/builtin.mock/packages/netlib-blas/package.py b/var/spack/repos/builtin.mock/packages/netlib-blas/package.py index 9d567f2e9b2..0a5b1d0e6a8 100644 --- a/var/spack/repos/builtin.mock/packages/netlib-blas/package.py +++ b/var/spack/repos/builtin.mock/packages/netlib-blas/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class NetlibBlas(Package): homepage = "http://www.netlib.org/lapack/" url = "http://www.netlib.org/lapack/lapack-3.5.0.tgz" diff --git a/var/spack/repos/builtin.mock/packages/netlib-lapack/package.py b/var/spack/repos/builtin.mock/packages/netlib-lapack/package.py index 46d6ae43dc6..755d3001a4a 100644 --- a/var/spack/repos/builtin.mock/packages/netlib-lapack/package.py +++ b/var/spack/repos/builtin.mock/packages/netlib-lapack/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class NetlibLapack(Package): homepage = "http://www.netlib.org/lapack/" url = "http://www.netlib.org/lapack/lapack-3.5.0.tgz" diff --git a/var/spack/repos/builtin.mock/packages/openblas-with-lapack/package.py b/var/spack/repos/builtin.mock/packages/openblas-with-lapack/package.py index b36237c1e2a..0f14fbaa61e 100644 --- a/var/spack/repos/builtin.mock/packages/openblas-with-lapack/package.py +++ b/var/spack/repos/builtin.mock/packages/openblas-with-lapack/package.py 
@@ -24,6 +24,7 @@ ############################################################################## from spack import * + class OpenblasWithLapack(Package): """Dummy version of OpenBLAS that also provides LAPACK, for testing.""" homepage = "http://www.openblas.net" diff --git a/var/spack/repos/builtin.mock/packages/openblas/package.py b/var/spack/repos/builtin.mock/packages/openblas/package.py index 5b39447e832..f6cdeeea499 100644 --- a/var/spack/repos/builtin.mock/packages/openblas/package.py +++ b/var/spack/repos/builtin.mock/packages/openblas/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Openblas(Package): """OpenBLAS: An optimized BLAS library""" homepage = "http://www.openblas.net" diff --git a/var/spack/repos/builtin.mock/packages/optional-dep-test-2/package.py b/var/spack/repos/builtin.mock/packages/optional-dep-test-2/package.py index f97959c7638..337f54e24e0 100644 --- a/var/spack/repos/builtin.mock/packages/optional-dep-test-2/package.py +++ b/var/spack/repos/builtin.mock/packages/optional-dep-test-2/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class OptionalDepTest2(Package): """Depends on the optional-dep-test package""" diff --git a/var/spack/repos/builtin.mock/packages/optional-dep-test-3/package.py b/var/spack/repos/builtin.mock/packages/optional-dep-test-3/package.py index d8fe33c3dab..2904b3782d8 100644 --- a/var/spack/repos/builtin.mock/packages/optional-dep-test-3/package.py +++ b/var/spack/repos/builtin.mock/packages/optional-dep-test-3/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class OptionalDepTest3(Package): """Depends on the optional-dep-test package""" diff --git a/var/spack/repos/builtin.mock/packages/optional-dep-test/package.py b/var/spack/repos/builtin.mock/packages/optional-dep-test/package.py index 80c1da55f8f..2c07e61769e 100644 --- a/var/spack/repos/builtin.mock/packages/optional-dep-test/package.py +++ b/var/spack/repos/builtin.mock/packages/optional-dep-test/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class OptionalDepTest(Package): """Description""" diff --git a/var/spack/repos/builtin.mock/packages/python/package.py b/var/spack/repos/builtin.mock/packages/python/package.py index dc21b475e50..a5290161ad9 100644 --- a/var/spack/repos/builtin.mock/packages/python/package.py +++ b/var/spack/repos/builtin.mock/packages/python/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Python(Package): """Dummy Python package to demonstrate preferred versions.""" homepage = "http://www.python.org" @@ -40,4 +41,3 @@ class Python(Package): def install(self, spec, prefix): pass - diff --git a/var/spack/repos/builtin.mock/packages/svn-test/package.py b/var/spack/repos/builtin.mock/packages/svn-test/package.py index 2f197593e01..01d0929c284 100644 --- a/var/spack/repos/builtin.mock/packages/svn-test/package.py +++ b/var/spack/repos/builtin.mock/packages/svn-test/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class SvnTest(Package): """Mock package that uses svn for fetching.""" url = "http://www.example.com/svn-test-1.0.tar.gz" diff --git 
a/var/spack/repos/builtin.mock/packages/trivial_install_test_package/package.py b/var/spack/repos/builtin.mock/packages/trivial_install_test_package/package.py index 7c65909ad2c..2129d9788bb 100644 --- a/var/spack/repos/builtin.mock/packages/trivial_install_test_package/package.py +++ b/var/spack/repos/builtin.mock/packages/trivial_install_test_package/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class TrivialInstallTestPackage(Package): """This package is a stub with a trivial install method. It allows us to test the install and uninstall logic of spack.""" diff --git a/var/spack/repos/builtin.mock/packages/zmpi/package.py b/var/spack/repos/builtin.mock/packages/zmpi/package.py index fcd3afe93b1..b6a5b33011c 100644 --- a/var/spack/repos/builtin.mock/packages/zmpi/package.py +++ b/var/spack/repos/builtin.mock/packages/zmpi/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Zmpi(Package): """This is a fake MPI package used to demonstrate virtual package providers with dependencies.""" diff --git a/var/spack/repos/builtin/packages/LuaJIT/package.py b/var/spack/repos/builtin/packages/LuaJIT/package.py index db6f7d3cad0..244c63c8fb6 100644 --- a/var/spack/repos/builtin/packages/LuaJIT/package.py +++ b/var/spack/repos/builtin/packages/LuaJIT/package.py @@ -25,6 +25,7 @@ import os from spack import * + class Luajit(Package): """Flast flexible JITed lua""" homepage = "http://www.luajit.org" diff --git a/var/spack/repos/builtin/packages/SAMRAI/package.py b/var/spack/repos/builtin/packages/SAMRAI/package.py index 73c51ced237..e0648290d60 100644 --- a/var/spack/repos/builtin/packages/SAMRAI/package.py +++ b/var/spack/repos/builtin/packages/SAMRAI/package.py @@ -24,12 +24,14 @@ ############################################################################## from spack import * + class Samrai(Package): """SAMRAI (Structured Adaptive Mesh Refinement Application Infrastructure) - is an object-oriented C++ software library enables exploration of numerical, - algorithmic, parallel computing, and software issues associated with applying - structured adaptive mesh refinement (SAMR) technology in large-scale parallel - application development. + is an object-oriented C++ software library enables exploration of + numerical, algorithmic, parallel computing, and software issues + associated with applying structured adaptive mesh refinement + (SAMR) technology in large-scale parallel application development. 
+ """ homepage = "https://computation.llnl.gov/project/SAMRAI/" url = "https://computation.llnl.gov/project/SAMRAI/download/SAMRAI-v3.9.1.tar.gz" diff --git a/var/spack/repos/builtin/packages/activeharmony/package.py b/var/spack/repos/builtin/packages/activeharmony/package.py index 9d15bd71d96..6a4e67a1ca6 100644 --- a/var/spack/repos/builtin/packages/activeharmony/package.py +++ b/var/spack/repos/builtin/packages/activeharmony/package.py @@ -24,8 +24,10 @@ ############################################################################## from spack import * + class Activeharmony(Package): - """Active Harmony: a framework for auto-tuning (the automated search for values to improve the performance of a target application).""" + """Active Harmony: a framework for auto-tuning (the automated search for + values to improve the performance of a target application).""" homepage = "http://www.dyninst.org/harmony" url = "http://www.dyninst.org/sites/default/files/downloads/harmony/ah-4.5.tar.gz" @@ -34,6 +36,3 @@ class Activeharmony(Package): def install(self, spec, prefix): make("CFLAGS=-O3") make("install", 'PREFIX=%s' % prefix) - -from spack import * - diff --git a/var/spack/repos/builtin/packages/adept-utils/package.py b/var/spack/repos/builtin/packages/adept-utils/package.py index 592681bb822..1a6998fd964 100644 --- a/var/spack/repos/builtin/packages/adept-utils/package.py +++ b/var/spack/repos/builtin/packages/adept-utils/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class AdeptUtils(Package): """Utility libraries for LLNL performance tools.""" diff --git a/var/spack/repos/builtin/packages/adol-c/package.py b/var/spack/repos/builtin/packages/adol-c/package.py index a6052ad7bde..c5d53de2305 100644 --- a/var/spack/repos/builtin/packages/adol-c/package.py +++ b/var/spack/repos/builtin/packages/adol-c/package.py @@ -23,21 +23,24 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import sys + class AdolC(Package): - """A package for the automatic differentiation of first and higher derivatives of vector functions in C and C++ programs by operator overloading.""" + """A package for the automatic differentiation of first and higher + derivatives of vector functions in C and C++ programs by operator + overloading.""" homepage = "https://projects.coin-or.org/ADOL-C" url = "http://www.coin-or.org/download/source/ADOL-C/ADOL-C-2.6.1.tgz" version('head', svn='https://projects.coin-or.org/svn/ADOL-C/trunk/') version('2.6.1', '1032b28427d6e399af4610e78c0f087b') - + variant('doc', default=True, description='Install documentation') variant('openmp', default=False, description='Enable OpenMP support') variant('sparse', default=False, description='Enable sparse drivers') - variant('tests', default=True, description='Build all included examples as a test case') - + variant('tests', default=True, + description='Build all included examples as a test case') + patch('openmp_exam.patch') def install(self, spec, prefix): @@ -49,10 +52,14 @@ def install(self, spec, prefix): if '+openmp' in spec: if spec.satisfies('%gcc'): make_args.extend([ - '--with-openmp-flag=-fopenmp' # FIXME: Is this required? -I -L + # FIXME: Is this required? 
-I -L + '--with-openmp-flag=-fopenmp' ]) else: - raise InstallError("OpenMP flags for compilers other than GCC are not implemented.") + raise InstallError( + "OpenMP flags for compilers other than GCC " + "are not implemented.") if '+sparse' in spec: make_args.extend([ @@ -63,7 +70,7 @@ def install(self, spec, prefix): # whether Adol-C works as expected if '+tests' in spec: make_args.extend([ - '--enable-docexa', # Documeted examples + '--enable-docexa', # Documeted examples '--enable-addexa' # Additional examples ]) if '+openmp' in spec: @@ -74,31 +81,36 @@ def install(self, spec, prefix): configure(*make_args) make() make("install") - + # Copy the config.h file, as some packages might require it source_directory = self.stage.source_path - config_h = join_path(source_directory,'ADOL-C','src','config.h') - install(config_h, join_path(prefix.include,'adolc')) - + config_h = join_path(source_directory, 'ADOL-C', 'src', 'config.h') + install(config_h, join_path(prefix.include, 'adolc')) + # Install documentation to {prefix}/share if '+doc' in spec: - install_tree(join_path('ADOL-C','doc'), - join_path(prefix.share,'doc')) - + install_tree(join_path('ADOL-C', 'doc'), + join_path(prefix.share, 'doc')) + # Install examples to {prefix}/share if '+tests' in spec: - install_tree(join_path('ADOL-C','examples'), - join_path(prefix.share,'examples')) - + install_tree(join_path('ADOL-C', 'examples'), + join_path(prefix.share, 'examples')) + # Run some examples that don't require user input # TODO: Check that bundled examples produce the correct results - with working_dir(join_path(source_directory,'ADOL-C','examples')): + with working_dir(join_path( + source_directory, 'ADOL-C', 'examples')): Executable('./tapeless_scalar')() Executable('./tapeless_vector')() - - with working_dir(join_path(source_directory,'ADOL-C','examples','additional_examples')): + + with working_dir(join_path( + source_directory, + 'ADOL-C', 'examples', 'additional_examples')): Executable('./checkpointing/checkpointing')() - + if '+openmp' in spec: - with working_dir(join_path(source_directory,'ADOL-C','examples','additional_examples')): + with working_dir(join_path( + source_directory, + 'ADOL-C', 'examples', 'additional_examples')): Executable('./checkpointing/checkpointing')() diff --git a/var/spack/repos/builtin/packages/antlr/package.py b/var/spack/repos/builtin/packages/antlr/package.py index eb67facaf39..891061c62f4 100644 --- a/var/spack/repos/builtin/packages/antlr/package.py +++ b/var/spack/repos/builtin/packages/antlr/package.py @@ -24,8 +24,9 @@ ############################################################################## from spack import * + class Antlr(Package): - + homepage = "http://www.antlr.org" url = "https://github.com/antlr/antlr/tarball/v2.7.7" @@ -41,22 +42,23 @@ class Antlr(Package): # CharScanner.hpp must include this line: #include or else # ncap2 will not compile (this tarball is already patched). 
version('2.7.7', '914865e853fe8e1e61a9f23d045cb4ab', - # Patched version as described above - url='http://dust.ess.uci.edu/tmp/antlr-2.7.7.tar.gz') - # Unpatched version - # url='http://dust.ess.uci.edu/nco/antlr-2.7.7.tar.gz') + # Patched version as described above + url='http://dust.ess.uci.edu/tmp/antlr-2.7.7.tar.gz') + # Unpatched version + # url='http://dust.ess.uci.edu/nco/antlr-2.7.7.tar.gz') variant('cxx', default=False, description='Enable ANTLR for C++') variant('java', default=False, description='Enable ANTLR for Java') variant('python', default=False, description='Enable ANTLR for Python') variant('csharp', default=False, description='Enable ANTLR for Csharp') - def install(self, spec, prefix): # Check for future enabling of variants for v in ('+java', '+python', '+csharp'): if v in spec: - raise Error('Illegal variant %s; for now, Spack only knows how to build antlr or antlr+cxx') + raise Error( + ('Illegal variant %s; ' % v) + 'for now, ' + 'Spack only knows how to build antlr or antlr+cxx') config_args = [ '--prefix=%s' % prefix, diff --git a/var/spack/repos/builtin/packages/apex/package.py b/var/spack/repos/builtin/packages/apex/package.py index 59fbe50c93f..832e10a1ec7 100644 --- a/var/spack/repos/builtin/packages/apex/package.py +++ b/var/spack/repos/builtin/packages/apex/package.py @@ -25,6 +25,7 @@ from spack import * from spack.util.environment import * + class Apex(Package): homepage = "http://github.com/khuck/xpress-apex" url = "http://github.com/khuck/xpress-apex/archive/v0.1.tar.gz" @@ -39,17 +40,17 @@ class Apex(Package): def install(self, spec, prefix): - path=get_path("PATH") + path = get_path("PATH") path.remove(spec["binutils"].prefix.bin) path_set("PATH", path) with working_dir("build", create=True): cmake('-DBOOST_ROOT=%s' % spec['boost'].prefix, - '-DUSE_BFD=TRUE', - '-DBFD_ROOT=%s' % spec['binutils'].prefix, - '-DUSE_ACTIVEHARMONY=TRUE', - '-DACTIVEHARMONY_ROOT=%s' % spec['activeharmony'].prefix, - '-DUSE_OMPT=TRUE', - '-DOMPT_ROOT=%s' % spec['ompt-openmp'].prefix, - '..', *std_cmake_args) + '-DUSE_BFD=TRUE', + '-DBFD_ROOT=%s' % spec['binutils'].prefix, + '-DUSE_ACTIVEHARMONY=TRUE', + '-DACTIVEHARMONY_ROOT=%s' % spec['activeharmony'].prefix, + '-DUSE_OMPT=TRUE', + '-DOMPT_ROOT=%s' % spec['ompt-openmp'].prefix, + '..', *std_cmake_args) make() make("install") diff --git a/var/spack/repos/builtin/packages/apr-util/package.py b/var/spack/repos/builtin/packages/apr-util/package.py index 05dc670aed9..8e01d3bbdd3 100644 --- a/var/spack/repos/builtin/packages/apr-util/package.py +++ b/var/spack/repos/builtin/packages/apr-util/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class AprUtil(Package): """Apache Portable Runtime Utility""" homepage = 'https://apr.apache.org/' diff --git a/var/spack/repos/builtin/packages/apr/package.py b/var/spack/repos/builtin/packages/apr/package.py index 398e1c323d7..0cd51f52e36 100644 --- a/var/spack/repos/builtin/packages/apr/package.py +++ b/var/spack/repos/builtin/packages/apr/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Apr(Package): """Apache portable runtime.""" homepage = 'https://apr.apache.org/' diff --git a/var/spack/repos/builtin/packages/arpack-ng/package.py b/var/spack/repos/builtin/packages/arpack-ng/package.py index 2874930cdda..728c2345f2e 100644 --- a/var/spack/repos/builtin/packages/arpack-ng/package.py +++ 
b/var/spack/repos/builtin/packages/arpack-ng/package.py @@ -59,7 +59,8 @@ class ArpackNg(Package): version('3.4.0', 'ae9ca13f2143a7ea280cb0e2fd4bfae4') version('3.3.0', 'ed3648a23f0a868a43ef44c97a21bad5') - variant('shared', default=True, description='Enables the build of shared libraries') + variant('shared', default=True, + description='Enables the build of shared libraries') variant('mpi', default=False, description='Activates MPI support') # The function pdlamch10 does not set the return variable. diff --git a/var/spack/repos/builtin/packages/asciidoc/package.py b/var/spack/repos/builtin/packages/asciidoc/package.py index a846e0ba658..be8db684f5d 100644 --- a/var/spack/repos/builtin/packages/asciidoc/package.py +++ b/var/spack/repos/builtin/packages/asciidoc/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Asciidoc(Package): """ A presentable text document format for writing articles, UNIX man pages and other small to medium sized documents.""" diff --git a/var/spack/repos/builtin/packages/atop/package.py b/var/spack/repos/builtin/packages/atop/package.py index 9cacafc634a..e3a9d464a95 100644 --- a/var/spack/repos/builtin/packages/atop/package.py +++ b/var/spack/repos/builtin/packages/atop/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Atop(Package): """Atop is an ASCII full-screen performance monitor for Linux""" homepage = "http://www.atoptool.nl/index.php" @@ -37,4 +38,4 @@ def install(self, spec, prefix): install("atop", join_path(prefix.bin, "atop")) mkdirp(join_path(prefix.man, "man1")) install(join_path("man", "atop.1"), - join_path(prefix.man, "man1", "atop.1")) + join_path(prefix.man, "man1", "atop.1")) diff --git a/var/spack/repos/builtin/packages/automaded/package.py b/var/spack/repos/builtin/packages/automaded/package.py index 2b4681778f0..7e586b29918 100644 --- a/var/spack/repos/builtin/packages/automaded/package.py +++ b/var/spack/repos/builtin/packages/automaded/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Automaded(Package): """AutomaDeD (Automata-based Debugging for Dissimilar parallel tasks) is a tool for automatic diagnosis of performance and diff --git a/var/spack/repos/builtin/packages/bash/package.py b/var/spack/repos/builtin/packages/bash/package.py index 5820595be90..e0cd114635e 100644 --- a/var/spack/repos/builtin/packages/bash/package.py +++ b/var/spack/repos/builtin/packages/bash/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Bash(Package): """The GNU Project's Bourne Again SHell.""" diff --git a/var/spack/repos/builtin/packages/bbcp/package.py b/var/spack/repos/builtin/packages/bbcp/package.py index 09e897f34ec..f69ed395a3d 100644 --- a/var/spack/repos/builtin/packages/bbcp/package.py +++ b/var/spack/repos/builtin/packages/bbcp/package.py @@ -24,18 +24,22 @@ ############################################################################## from spack import * + class Bbcp(Package): """Securely and quickly copy data from source to target""" homepage = "http://www.slac.stanford.edu/~abh/bbcp/" - version('git', git='http://www.slac.stanford.edu/~abh/bbcp/bbcp.git', branch="master") + version('git', git='http://www.slac.stanford.edu/~abh/bbcp/bbcp.git', + branch="master") def install(self, spec, prefix): cd("src") 
make() - # BBCP wants to build the executable in a directory whose name depends on the system type + # BBCP wants to build the executable in a directory whose name depends + # on the system type makesname = Executable("../MakeSname") - bbcp_executable_path = "../bin/%s/bbcp" % makesname(output=str).rstrip("\n") + bbcp_executable_path = "../bin/%s/bbcp" % makesname( + output=str).rstrip("\n") destination_path = "%s/bin/" % prefix mkdirp(destination_path) install(bbcp_executable_path, destination_path) diff --git a/var/spack/repos/builtin/packages/bdw-gc/package.py b/var/spack/repos/builtin/packages/bdw-gc/package.py index 2c61c21b43c..51202663196 100644 --- a/var/spack/repos/builtin/packages/bdw-gc/package.py +++ b/var/spack/repos/builtin/packages/bdw-gc/package.py @@ -34,7 +34,8 @@ class BdwGc(Package): version('7.4.4', '96d18b0448a841c88d56e4ab3d180297') - variant('libatomic-ops', default=True, description='Use external libatomic-ops') + variant('libatomic-ops', default=True, + description='Use external libatomic-ops') depends_on('libatomic-ops', when='+libatomic-ops') diff --git a/var/spack/repos/builtin/packages/bear/package.py b/var/spack/repos/builtin/packages/bear/package.py index a137fded088..f52050d7b9d 100644 --- a/var/spack/repos/builtin/packages/bear/package.py +++ b/var/spack/repos/builtin/packages/bear/package.py @@ -24,8 +24,10 @@ ############################################################################## from spack import * + class Bear(Package): - """Bear is a tool that generates a compilation database for clang tooling from non-cmake build systems.""" + """Bear is a tool that generates a compilation database for clang tooling + from non-cmake build systems.""" homepage = "https://github.com/rizsotto/Bear" url = "https://github.com/rizsotto/Bear/archive/2.0.4.tar.gz" diff --git a/var/spack/repos/builtin/packages/bib2xhtml/package.py b/var/spack/repos/builtin/packages/bib2xhtml/package.py index a9cbd204b63..b3560381800 100644 --- a/var/spack/repos/builtin/packages/bib2xhtml/package.py +++ b/var/spack/repos/builtin/packages/bib2xhtml/package.py @@ -25,10 +25,11 @@ from spack import * from glob import glob + class Bib2xhtml(Package): """bib2xhtml is a program that converts BibTeX files into HTML.""" homepage = "http://www.spinellis.gr/sw/textproc/bib2xhtml/" - url='http://www.spinellis.gr/sw/textproc/bib2xhtml/bib2xhtml-v3.0-15-gf506.tar.gz' + url = 'http://www.spinellis.gr/sw/textproc/bib2xhtml/bib2xhtml-v3.0-15-gf506.tar.gz' version('3.0-15-gf506', 'a26ba02fe0053bbbf2277bdf0acf8645') diff --git a/var/spack/repos/builtin/packages/bison/package.py b/var/spack/repos/builtin/packages/bison/package.py index c7a125df15b..70795f05ccf 100644 --- a/var/spack/repos/builtin/packages/bison/package.py +++ b/var/spack/repos/builtin/packages/bison/package.py @@ -24,9 +24,10 @@ ############################################################################## from spack import * + class Bison(Package): - """Bison is a general-purpose parser generator that converts - an annotated context-free grammar into a deterministic LR or + """Bison is a general-purpose parser generator that converts + an annotated context-free grammar into a deterministic LR or generalized LR (GLR) parser employing LALR(1) parser tables.""" homepage = "http://www.gnu.org/software/bison/" diff --git a/var/spack/repos/builtin/packages/blitz/package.py b/var/spack/repos/builtin/packages/blitz/package.py index acc6ddcd07d..c92e49a7324 100644 --- a/var/spack/repos/builtin/packages/blitz/package.py +++ 
b/var/spack/repos/builtin/packages/blitz/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Blitz(Package): """N-dimensional arrays for C++""" homepage = "http://github.com/blitzpp/blitz" diff --git a/var/spack/repos/builtin/packages/boost/package.py b/var/spack/repos/builtin/packages/boost/package.py index 67294fb9a1e..690a05a1508 100644 --- a/var/spack/repos/builtin/packages/boost/package.py +++ b/var/spack/repos/builtin/packages/boost/package.py @@ -102,13 +102,18 @@ class Boost(Package): for lib in all_libs: variant(lib, default=(lib not in default_noinstall_libs), - description="Compile with {0} library".format(lib)) + description="Compile with {0} library".format(lib)) - variant('debug', default=False, description='Switch to the debug version of Boost') - variant('shared', default=True, description="Additionally build shared libraries") - variant('multithreaded', default=True, description="Build multi-threaded versions of libraries") - variant('singlethreaded', default=True, description="Build single-threaded versions of libraries") - variant('icu_support', default=False, description="Include ICU support (for regex/locale libraries)") + variant('debug', default=False, + description='Switch to the debug version of Boost') + variant('shared', default=True, + description="Additionally build shared libraries") + variant('multithreaded', default=True, + description="Build multi-threaded versions of libraries") + variant('singlethreaded', default=True, + description="Build single-threaded versions of libraries") + variant('icu_support', default=False, + description="Include ICU support (for regex/locale libraries)") variant('graph', default=False, description="Build the Boost Graph library") depends_on('icu', when='+icu_support') @@ -128,8 +133,7 @@ def url_for_version(self, version): parts = [str(p) for p in Version(version)] dots = ".".join(parts) underscores = "_".join(parts) - return "http://downloads.sourceforge.net/project/boost" \ - "/boost/%s/boost_%s.tar.bz2" % (dots, underscores) + return "http://downloads.sourceforge.net/project/boost/boost/%s/boost_%s.tar.bz2" % (dots, underscores) def determine_toolset(self, spec): if spec.satisfies("platform=darwin"): @@ -158,7 +162,7 @@ def determine_bootstrap_options(self, spec, withLibs, options): with open('user-config.jam', 'w') as f: compiler_wrapper = join_path(spack.build_env_path, 'c++') f.write("using {0} : : {1} ;\n".format(boostToolsetId, - compiler_wrapper)) + compiler_wrapper)) if '+mpi' in spec: f.write('using mpi : %s ;\n' % diff --git a/var/spack/repos/builtin/packages/bowtie2/package.py b/var/spack/repos/builtin/packages/bowtie2/package.py index 6d641da49bf..a8a1a34ed42 100644 --- a/var/spack/repos/builtin/packages/bowtie2/package.py +++ b/var/spack/repos/builtin/packages/bowtie2/package.py @@ -24,12 +24,15 @@ ############################################################################## from spack import * from glob import glob + + class Bowtie2(Package): """Description""" homepage = "bowtie-bio.sourceforge.net/bowtie2/index.shtml" - version('2.2.5','51fa97a862d248d7ee660efc1147c75f', url = "http://downloads.sourceforge.net/project/bowtie-bio/bowtie2/2.2.5/bowtie2-2.2.5-source.zip") + version('2.2.5', '51fa97a862d248d7ee660efc1147c75f', + url="http://downloads.sourceforge.net/project/bowtie-bio/bowtie2/2.2.5/bowtie2-2.2.5-source.zip") - patch('bowtie2-2.5.patch',when='@2.2.5', level=0) + patch('bowtie2-2.5.patch', when='@2.2.5', level=0) 
def install(self, spec, prefix): make() @@ -45,4 +48,3 @@ def install(self, spec, prefix): # install('bowtie2-inspect',prefix.bin) # install('bowtie2-inspect-l',prefix.bin) # install('bowtie2-inspect-s',prefix.bin) - diff --git a/var/spack/repos/builtin/packages/boxlib/package.py b/var/spack/repos/builtin/packages/boxlib/package.py index 216ae1ec122..4f41aba6c6a 100644 --- a/var/spack/repos/builtin/packages/boxlib/package.py +++ b/var/spack/repos/builtin/packages/boxlib/package.py @@ -24,12 +24,13 @@ ############################################################################## from spack import * + class Boxlib(Package): """BoxLib, a software framework for massively parallel block-structured adaptive mesh refinement (AMR) codes.""" homepage = "https://ccse.lbl.gov/BoxLib/" - url = "https://ccse.lbl.gov/pub/Downloads/BoxLib.git"; + url = "https://ccse.lbl.gov/pub/Downloads/BoxLib.git" # TODO: figure out how best to version this. No tags in the repo! version('master', git='https://ccse.lbl.gov/pub/Downloads/BoxLib.git') @@ -47,4 +48,3 @@ def install(self, spec, prefix): cmake('.', *args) make() make("install") - diff --git a/var/spack/repos/builtin/packages/c-blosc/package.py b/var/spack/repos/builtin/packages/c-blosc/package.py index 49f98611268..4ebf3811a58 100644 --- a/var/spack/repos/builtin/packages/c-blosc/package.py +++ b/var/spack/repos/builtin/packages/c-blosc/package.py @@ -27,6 +27,7 @@ from spack import * + class CBlosc(Package): """Blosc, an extremely fast, multi-threaded, meta-compressor library""" homepage = "http://www.blosc.org" diff --git a/var/spack/repos/builtin/packages/caliper/package.py b/var/spack/repos/builtin/packages/caliper/package.py index 4a0fc54acc7..e5aed5ed65a 100644 --- a/var/spack/repos/builtin/packages/caliper/package.py +++ b/var/spack/repos/builtin/packages/caliper/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Caliper(Package): """ Caliper is a generic context annotation system. 
It gives programmers the @@ -44,7 +45,7 @@ class Caliper(Package): depends_on('cmake', type='build') def install(self, spec, prefix): - with working_dir('build', create=True): - cmake('..', *std_cmake_args) - make() - make("install") + with working_dir('build', create=True): + cmake('..', *std_cmake_args) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/callpath/package.py b/var/spack/repos/builtin/packages/callpath/package.py index 2f171fb9ce6..f8227fa49e7 100644 --- a/var/spack/repos/builtin/packages/callpath/package.py +++ b/var/spack/repos/builtin/packages/callpath/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Callpath(Package): """Library for representing callpaths consistently in distributed-memory performance tools.""" diff --git a/var/spack/repos/builtin/packages/cantera/package.py b/var/spack/repos/builtin/packages/cantera/package.py index 8e4f23046b4..9c9c0353254 100644 --- a/var/spack/repos/builtin/packages/cantera/package.py +++ b/var/spack/repos/builtin/packages/cantera/package.py @@ -35,11 +35,16 @@ class Cantera(Package): version('2.2.1', '9d1919bdef39ddec54485fc8a741a3aa') - variant('lapack', default=True, description='Build with external BLAS/LAPACK libraries') - variant('threadsafe', default=True, description='Build threadsafe, requires Boost') - variant('sundials', default=True, description='Build with external Sundials') - variant('python', default=False, description='Build the Cantera Python module') - variant('matlab', default=False, description='Build the Cantera Matlab toolbox') + variant('lapack', default=True, + description='Build with external BLAS/LAPACK libraries') + variant('threadsafe', default=True, + description='Build threadsafe, requires Boost') + variant('sundials', default=True, + description='Build with external Sundials') + variant('python', default=False, + description='Build the Cantera Python module') + variant('matlab', default=False, + description='Build the Cantera Matlab toolbox') # Required dependencies depends_on('scons', type='build') diff --git a/var/spack/repos/builtin/packages/cblas/package.py b/var/spack/repos/builtin/packages/cblas/package.py index 0b85c5842ae..08281413078 100644 --- a/var/spack/repos/builtin/packages/cblas/package.py +++ b/var/spack/repos/builtin/packages/cblas/package.py @@ -23,7 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import os + class Cblas(Package): """The BLAS (Basic Linear Algebra Subprograms) are routines that @@ -42,11 +42,11 @@ class Cblas(Package): def patch(self): mf = FileFilter('Makefile.in') - mf.filter('^BLLIB =.*', 'BLLIB = %s/libblas.a' % self.spec['blas'].prefix.lib) + mf.filter('^BLLIB =.*', 'BLLIB = %s/libblas.a' % + self.spec['blas'].prefix.lib) mf.filter('^CC =.*', 'CC = cc') mf.filter('^FC =.*', 'FC = f90') - def install(self, spec, prefix): make('all') mkdirp(prefix.lib) @@ -54,6 +54,5 @@ def install(self, spec, prefix): # Rename the generated lib file to libcblas.a install('./lib/cblas_LINUX.a', '%s/libcblas.a' % prefix.lib) - install('./include/cblas.h','%s' % prefix.include) - install('./include/cblas_f77.h','%s' % prefix.include) - + install('./include/cblas.h', '%s' % prefix.include) + install('./include/cblas_f77.h', '%s' % prefix.include) diff --git a/var/spack/repos/builtin/packages/cbtf-argonavis/package.py 
b/var/spack/repos/builtin/packages/cbtf-argonavis/package.py index 321d02b5568..cbf36c3b61f 100644 --- a/var/spack/repos/builtin/packages/cbtf-argonavis/package.py +++ b/var/spack/repos/builtin/packages/cbtf-argonavis/package.py @@ -22,7 +22,7 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -################################################################################ +########################################################################## # Copyright (c) 2015-2016 Krell Institute. All Rights Reserved. # # This program is free software; you can redistribute it and/or modify it under @@ -38,20 +38,24 @@ # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 59 Temple # Place, Suite 330, Boston, MA 02111-1307 USA -################################################################################ +########################################################################## from spack import * + class CbtfArgonavis(Package): - """CBTF Argo Navis project contains the CUDA collector and supporting - libraries that was done as a result of a DOE SBIR grant.""" + """CBTF Argo Navis project contains the CUDA collector and supporting + libraries that was done as a result of a DOE SBIR grant. + + """ homepage = "http://sourceforge.net/p/cbtf/wiki/Home/" # Mirror access template example - #url = "file:/home/jeg/OpenSpeedShop_ROOT/SOURCES/cbtf-argonavis-1.6.tar.gz" - #version('1.6', '0fafa0008478405c2c2319450f174ed4') + # url = "file:/home/jeg/OpenSpeedShop_ROOT/SOURCES/cbtf-argonavis-1.6.tar.gz" + # version('1.6', '0fafa0008478405c2c2319450f174ed4') - version('1.6', branch='master', git='https://github.com/OpenSpeedShop/cbtf-argonavis.git') + version('1.6', branch='master', + git='https://github.com/OpenSpeedShop/cbtf-argonavis.git') depends_on("cmake@3.0.2", type='build') depends_on("boost@1.50.0:") @@ -60,17 +64,19 @@ class CbtfArgonavis(Package): depends_on("cbtf") depends_on("cbtf-krell") depends_on("cuda@6.0.37") - #depends_on("cuda") + # depends_on("cuda") parallel = False def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions): - # Sets build type parameters into cmakeOptions the options that will enable the cbtf-krell built type settings + # Sets build type parameters into cmakeOptions the options that will + # enable the cbtf-krell built type settings - compile_flags="-O2 -g" + compile_flags = "-O2 -g" BuildTypeOptions = [] - # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs + # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the + # stdcmakeargs for word in cmakeOptions[:]: if word.startswith('-DCMAKE_BUILD_TYPE'): cmakeOptions.remove(word) @@ -81,50 +87,54 @@ def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions): if word.startswith('-DCMAKE_VERBOSE_MAKEFILE'): cmakeOptions.remove(word) BuildTypeOptions.extend([ - '-DCMAKE_VERBOSE_MAKEFILE=ON', - '-DCMAKE_BUILD_TYPE=None', - '-DCMAKE_CXX_FLAGS=%s' % compile_flags, - '-DCMAKE_C_FLAGS=%s' % compile_flags + '-DCMAKE_VERBOSE_MAKEFILE=ON', + '-DCMAKE_BUILD_TYPE=None', + '-DCMAKE_CXX_FLAGS=%s' % compile_flags, + '-DCMAKE_C_FLAGS=%s' % compile_flags ]) cmakeOptions.extend(BuildTypeOptions) - def install(self, spec, prefix): - # Look for package installation information in the cbtf and cbtf-krell prefixes - 
cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix) + # Look for package installation information in the cbtf and cbtf-krell + # prefixes + cmake_prefix_path = join_path( + spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix) - with working_dir('CUDA'): - with working_dir('build', create=True): + with working_dir('CUDA'): + with working_dir('build', create=True): - cmakeOptions = [] - cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix, - '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, - '-DCUDA_DIR=%s' % spec['cuda'].prefix, - '-DCUDA_INSTALL_PATH=%s' % spec['cuda'].prefix, - '-DCUDA_TOOLKIT_ROOT_DIR=%s' % spec['cuda'].prefix, - '-DCUPTI_DIR=%s' % join_path(spec['cuda'].prefix + '/extras/CUPTI'), - '-DCUPTI_ROOT=%s' % join_path(spec['cuda'].prefix + '/extras/CUPTI'), - '-DPAPI_ROOT=%s' % spec['papi'].prefix, - '-DCBTF_DIR=%s' % spec['cbtf'].prefix, - '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix, - '-DBOOST_ROOT=%s' % spec['boost'].prefix, - '-DBoost_DIR=%s' % spec['boost'].prefix, - '-DBOOST_LIBRARYDIR=%s' % spec['boost'].prefix.lib, - '-DMRNET_DIR=%s' % spec['mrnet'].prefix, - '-DBoost_NO_SYSTEM_PATHS=ON' - ]) + cmakeOptions = [] + cmakeOptions.extend( + ['-DCMAKE_INSTALL_PREFIX=%s' % prefix, + '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, + '-DCUDA_DIR=%s' % spec['cuda'].prefix, + '-DCUDA_INSTALL_PATH=%s' % spec['cuda'].prefix, + '-DCUDA_TOOLKIT_ROOT_DIR=%s' % spec['cuda'].prefix, + '-DCUPTI_DIR=%s' % join_path( + spec['cuda'].prefix + '/extras/CUPTI'), + '-DCUPTI_ROOT=%s' % join_path( + spec['cuda'].prefix + '/extras/CUPTI'), + '-DPAPI_ROOT=%s' % spec['papi'].prefix, + '-DCBTF_DIR=%s' % spec['cbtf'].prefix, + '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix, + '-DBOOST_ROOT=%s' % spec['boost'].prefix, + '-DBoost_DIR=%s' % spec['boost'].prefix, + '-DBOOST_LIBRARYDIR=%s' % spec['boost'].prefix.lib, + '-DMRNET_DIR=%s' % spec['mrnet'].prefix, + '-DBoost_NO_SYSTEM_PATHS=ON']) - # Add in the standard cmake arguments - cmakeOptions.extend(std_cmake_args) + # Add in the standard cmake arguments + cmakeOptions.extend(std_cmake_args) - # Adjust the standard cmake arguments to what we want the build type, etc to be - self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions) - - # Invoke cmake - cmake('..', *cmakeOptions) + # Adjust the standard cmake arguments to what we want the build + # type, etc to be + self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions) - make("clean") - make() - make("install") + # Invoke cmake + cmake('..', *cmakeOptions) + + make("clean") + make() + make("install") diff --git a/var/spack/repos/builtin/packages/cbtf-krell/package.py b/var/spack/repos/builtin/packages/cbtf-krell/package.py index acb78a73951..6f15c3f8353 100644 --- a/var/spack/repos/builtin/packages/cbtf-krell/package.py +++ b/var/spack/repos/builtin/packages/cbtf-krell/package.py @@ -22,7 +22,7 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -################################################################################ +########################################################################## # Copyright (c) 2015-2016 Krell Institute. All Rights Reserved. 
# # This program is free software; you can redistribute it and/or modify it under @@ -38,30 +38,40 @@ # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 59 Temple # Place, Suite 330, Boston, MA 02111-1307 USA -################################################################################ +########################################################################## from spack import * + class CbtfKrell(Package): - """CBTF Krell project contains the Krell Institute contributions to the CBTF project. - These contributions include many performance data collectors and support - libraries as well as some example tools that drive the data collection at - HPC levels of scale.""" + """CBTF Krell project contains the Krell Institute contributions to the + CBTF project. These contributions include many performance data + collectors and support libraries as well as some example tools + that drive the data collection at HPC levels of scale. + + """ homepage = "http://sourceforge.net/p/cbtf/wiki/Home/" # optional mirror access template - #url = "file:/home/jeg/cbtf-krell-1.6.tar.gz" - #version('1.6', 'edeb61cd488f16e7b124f77db9ce762d') + # url = "file:/home/jeg/cbtf-krell-1.6.tar.gz" + # version('1.6', 'edeb61cd488f16e7b124f77db9ce762d') - version('1.6', branch='master', git='https://github.com/OpenSpeedShop/cbtf-krell.git') + version('1.6', branch='master', + git='https://github.com/OpenSpeedShop/cbtf-krell.git') # MPI variants - variant('openmpi', default=False, description="Build mpi experiment collector for openmpi MPI when this variant is enabled.") - variant('mpt', default=False, description="Build mpi experiment collector for SGI MPT MPI when this variant is enabled.") - variant('mvapich2', default=False, description="Build mpi experiment collector for mvapich2 MPI when this variant is enabled.") - variant('mvapich', default=False, description="Build mpi experiment collector for mvapich MPI when this variant is enabled.") - variant('mpich2', default=False, description="Build mpi experiment collector for mpich2 MPI when this variant is enabled.") - variant('mpich', default=False, description="Build mpi experiment collector for mpich MPI when this variant is enabled.") + variant('openmpi', default=False, + description="Build mpi experiment collector for openmpi MPI..") + variant('mpt', default=False, + description="Build mpi experiment collector for SGI MPT MPI.") + variant('mvapich2', default=False, + description="Build mpi experiment collector for mvapich2 MPI.") + variant('mvapich', default=False, + description="Build mpi experiment collector for mvapich MPI.") + variant('mpich2', default=False, + description="Build mpi experiment collector for mpich2 MPI.") + variant('mpich', default=False, + description="Build mpi experiment collector for mpich MPI.") # Dependencies for cbtf-krell depends_on("cmake@3.0.2", type='build') @@ -83,7 +93,8 @@ class CbtfKrell(Package): depends_on("papi") # MPI Installations - # These have not worked either for build or execution, commenting out for now + # These have not worked either for build or execution, commenting out for + # now depends_on("openmpi", when='+openmpi') depends_on("mpich", when='+mpich') depends_on("mpich2", when='+mpich2') @@ -94,11 +105,13 @@ class CbtfKrell(Package): parallel = False def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions): - # Sets build type parameters into cmakeOptions the options that will enable the cbtf-krell built 
type settings - - compile_flags="-O2 -g" + # Sets build type parameters into cmakeOptions the options that will + # enable the cbtf-krell built type settings + + compile_flags = "-O2 -g" BuildTypeOptions = [] - # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs + # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the + # stdcmakeargs for word in cmakeOptions[:]: if word.startswith('-DCMAKE_BUILD_TYPE'): cmakeOptions.remove(word) @@ -109,75 +122,76 @@ def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions): if word.startswith('-DCMAKE_VERBOSE_MAKEFILE'): cmakeOptions.remove(word) BuildTypeOptions.extend([ - '-DCMAKE_VERBOSE_MAKEFILE=ON', - '-DCMAKE_BUILD_TYPE=None', - '-DCMAKE_CXX_FLAGS=%s' % compile_flags, - '-DCMAKE_C_FLAGS=%s' % compile_flags + '-DCMAKE_VERBOSE_MAKEFILE=ON', + '-DCMAKE_BUILD_TYPE=None', + '-DCMAKE_CXX_FLAGS=%s' % compile_flags, + '-DCMAKE_C_FLAGS=%s' % compile_flags ]) cmakeOptions.extend(BuildTypeOptions) - - def set_mpi_cmakeOptions(self, spec, cmakeOptions): - # Appends to cmakeOptions the options that will enable the appropriate MPI implementations - + # Appends to cmakeOptions the options that will enable the appropriate + # MPI implementations + MPIOptions = [] # openmpi if '+openmpi' in spec: MPIOptions.extend([ - '-DOPENMPI_DIR=%s' % spec['openmpi'].prefix + '-DOPENMPI_DIR=%s' % spec['openmpi'].prefix ]) # mpich if '+mpich' in spec: MPIOptions.extend([ - '-DMPICH_DIR=%s' % spec['mpich'].prefix + '-DMPICH_DIR=%s' % spec['mpich'].prefix ]) # mpich2 if '+mpich2' in spec: MPIOptions.extend([ - '-DMPICH2_DIR=%s' % spec['mpich2'].prefix + '-DMPICH2_DIR=%s' % spec['mpich2'].prefix ]) # mvapich if '+mvapich' in spec: MPIOptions.extend([ - '-DMVAPICH_DIR=%s' % spec['mvapich'].prefix + '-DMVAPICH_DIR=%s' % spec['mvapich'].prefix ]) # mvapich2 if '+mvapich2' in spec: MPIOptions.extend([ - '-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix + '-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix ]) # mpt if '+mpt' in spec: MPIOptions.extend([ - '-DMPT_DIR=%s' % spec['mpt'].prefix + '-DMPT_DIR=%s' % spec['mpt'].prefix ]) cmakeOptions.extend(MPIOptions) def install(self, spec, prefix): - # Add in paths for finding package config files that tell us where to find these packages - #cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['dyninst'].prefix) - #'-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path + # Add in paths for finding package config files that tell us + # where to find these packages + # cmake_prefix_path = \ + # join_path(spec['cbtf'].prefix) + ':' + \ + # join_path(spec['dyninst'].prefix) + # '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path - # Build cbtf-krell with cmake + # Build cbtf-krell with cmake with working_dir('build_cbtf_krell', create=True): cmakeOptions = [] - cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix, - '-DCBTF_DIR=%s' % spec['cbtf'].prefix, - '-DBINUTILS_DIR=%s' % spec['binutils'].prefix, - '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix, - '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix, - '-DPAPI_DIR=%s' % spec['papi'].prefix, - '-DBOOST_DIR=%s' % spec['boost'].prefix, - '-DMRNET_DIR=%s' % spec['mrnet'].prefix, - '-DDYNINST_DIR=%s' % spec['dyninst'].prefix, - '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix - ]) - + cmakeOptions.extend( + ['-DCMAKE_INSTALL_PREFIX=%s' % prefix, + '-DCBTF_DIR=%s' % spec['cbtf'].prefix, + '-DBINUTILS_DIR=%s' % spec['binutils'].prefix, + '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix, + '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix, + '-DPAPI_DIR=%s' % 
spec['papi'].prefix, + '-DBOOST_DIR=%s' % spec['boost'].prefix, + '-DMRNET_DIR=%s' % spec['mrnet'].prefix, + '-DDYNINST_DIR=%s' % spec['dyninst'].prefix, + '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix]) # Add any MPI implementations coming from variant settings self.set_mpi_cmakeOptions(spec, cmakeOptions) @@ -185,9 +199,10 @@ def install(self, spec, prefix): # Add in the standard cmake arguments cmakeOptions.extend(std_cmake_args) - # Adjust the standard cmake arguments to what we want the build type, etc to be + # Adjust the standard cmake arguments to what we want the build + # type, etc to be self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions) - + # Invoke cmake cmake('..', *cmakeOptions) @@ -195,56 +210,54 @@ def install(self, spec, prefix): make() make("install") - - - #if '+cray' in spec: - #if 'cray' in self.spec.architecture: + # if '+cray' in spec: + # if 'cray' in self.spec.architecture: # if '+runtime' in spec: # with working_dir('build_cbtf_cray_runtime', create=True): # python_vers='%d.%d' % spec['python'].version[:2] # cmake .. \ - # -DCMAKE_BUILD_TYPE=Debug \ - # -DTARGET_OS="cray" \ - # -DRUNTIME_ONLY="true" \ - # -DCMAKE_INSTALL_PREFIX=${CBTF_KRELL_PREFIX} \ - # -DCMAKE_PREFIX_PATH=${CBTF_ROOT} \ - # -DCBTF_DIR=${CBTF_ROOT} \ - # -DBOOST_ROOT=${BOOST_INSTALL_PREFIX} \ - # -DXERCESC_DIR=${XERCESC_INSTALL_PREFIX} \ - # -DBINUTILS_DIR=${KRELL_ROOT} \ - # -DLIBMONITOR_DIR=${KRELL_ROOT_COMPUTE} \ - # -DLIBUNWIND_DIR=${KRELL_ROOT_COMPUTE} \ - # -DPAPI_DIR=${PAPI_ROOT} \ - # -DDYNINST_DIR=${DYNINST_CN_ROOT} \ - # -DMRNET_DIR=${MRNET_INSTALL_PREFIX} \ - # -DMPICH2_DIR=/opt/cray/mpt/7.0.1/gni/mpich2-gnu/48 + # -DCMAKE_BUILD_TYPE=Debug \ + # -DTARGET_OS="cray" \ + # -DRUNTIME_ONLY="true" \ + # -DCMAKE_INSTALL_PREFIX=${CBTF_KRELL_PREFIX} \ + # -DCMAKE_PREFIX_PATH=${CBTF_ROOT} \ + # -DCBTF_DIR=${CBTF_ROOT} \ + # -DBOOST_ROOT=${BOOST_INSTALL_PREFIX} \ + # -DXERCESC_DIR=${XERCESC_INSTALL_PREFIX} \ + # -DBINUTILS_DIR=${KRELL_ROOT} \ + # -DLIBMONITOR_DIR=${KRELL_ROOT_COMPUTE} \ + # -DLIBUNWIND_DIR=${KRELL_ROOT_COMPUTE} \ + # -DPAPI_DIR=${PAPI_ROOT} \ + # -DDYNINST_DIR=${DYNINST_CN_ROOT} \ + # -DMRNET_DIR=${MRNET_INSTALL_PREFIX} \ + # -DMPICH2_DIR=/opt/cray/mpt/7.0.1/gni/mpich2-gnu/48 # else: # with working_dir('build_cbtf_cray_frontend', create=True): # python_vers='%d.%d' % spec['python'].version[:2] # cmake .. 
\ - # -DCMAKE_BUILD_TYPE=Debug \ - # -DCMAKE_INSTALL_PREFIX=${CBTF_KRELL_PREFIX} \ - # -DCMAKE_PREFIX_PATH=${CBTF_ROOT} \ - # -DCBTF_DIR=${CBTF_ROOT} \ - # -DRUNTIME_TARGET_OS="cray" \ - # -DCBTF_KRELL_CN_RUNTIME_DIR=${CBTF_KRELL_CN_RUNTIME_ROOT} \ - # -DCBTF_CN_RUNTIME_DIR=${CBTF_CN_RUNTIME_ROOT} \ - # -DLIBMONITOR_CN_RUNTIME_DIR=${LIBMONITOR_CN_ROOT} \ - # -DLIBUNWIND_CN_RUNTIME_DIR=${LIBUNWIND_CN_ROOT} \ - # -DPAPI_CN_RUNTIME_DIR=${PAPI_CN_ROOT} \ - # -DXERCESC_CN_RUNTIME_DIR=/${XERCESC_CN_ROOT} \ - # -DMRNET_CN_RUNTIME_DIR=${MRNET_CN_ROOT} \ - # -DBOOST_CN_RUNTIME_DIR=${BOOST_CN_ROOT} \ - # -DDYNINST_CN_RUNTIME_DIR=${DYNINST_CN_ROOT} \ - # -DBOOST_ROOT=/${KRELL_ROOT} \ - # -DXERCESC_DIR=/${KRELL_ROOT} \ - # -DBINUTILS_DIR=/${KRELL_ROOT} \ - # -DLIBMONITOR_DIR=${KRELL_ROOT} \ - # -DLIBUNWIND_DIR=${KRELL_ROOT} \ - # -DPAPI_DIR=${PAPI_ROOT} \ - # -DDYNINST_DIR=${KRELL_ROOT} \ - # -DMRNET_DIR=${KRELL_ROOT} \ - # -DMPICH2_DIR=/opt/cray/mpt/7.0.1/gni/mpich2-gnu/48 + # -DCMAKE_BUILD_TYPE=Debug \ + # -DCMAKE_INSTALL_PREFIX=${CBTF_KRELL_PREFIX} \ + # -DCMAKE_PREFIX_PATH=${CBTF_ROOT} \ + # -DCBTF_DIR=${CBTF_ROOT} \ + # -DRUNTIME_TARGET_OS="cray" \ + # -DCBTF_KRELL_CN_RUNTIME_DIR=${CBTF_KRELL_CN_RUNTIME_ROOT} \ + # -DCBTF_CN_RUNTIME_DIR=${CBTF_CN_RUNTIME_ROOT} \ + # -DLIBMONITOR_CN_RUNTIME_DIR=${LIBMONITOR_CN_ROOT} \ + # -DLIBUNWIND_CN_RUNTIME_DIR=${LIBUNWIND_CN_ROOT} \ + # -DPAPI_CN_RUNTIME_DIR=${PAPI_CN_ROOT} \ + # -DXERCESC_CN_RUNTIME_DIR=/${XERCESC_CN_ROOT} \ + # -DMRNET_CN_RUNTIME_DIR=${MRNET_CN_ROOT} \ + # -DBOOST_CN_RUNTIME_DIR=${BOOST_CN_ROOT} \ + # -DDYNINST_CN_RUNTIME_DIR=${DYNINST_CN_ROOT} \ + # -DBOOST_ROOT=/${KRELL_ROOT} \ + # -DXERCESC_DIR=/${KRELL_ROOT} \ + # -DBINUTILS_DIR=/${KRELL_ROOT} \ + # -DLIBMONITOR_DIR=${KRELL_ROOT} \ + # -DLIBUNWIND_DIR=${KRELL_ROOT} \ + # -DPAPI_DIR=${PAPI_ROOT} \ + # -DDYNINST_DIR=${KRELL_ROOT} \ + # -DMRNET_DIR=${KRELL_ROOT} \ + # -DMPICH2_DIR=/opt/cray/mpt/7.0.1/gni/mpich2-gnu/48 # fi # # make("clean") @@ -264,22 +277,22 @@ def install(self, spec, prefix): # fi # # else: -# # Build cbtf-krell with cmake +# # Build cbtf-krell with cmake # with working_dir('build_cbtf_krell', create=True): # cmake('..', # '-DCMAKE_BUILD_TYPE=Debug', -# '-DCMAKE_INSTALL_PREFIX=%s' % prefix, -# '-DCBTF_DIR=%s' % spec['cbtf'].prefix, -# '-DBINUTILS_DIR=%s' % spec['binutils'].prefix, -# '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix, -# '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix, -# '-DPAPI_DIR=%s' % spec['papi'].prefix, -# '-DBOOST_DIR=%s' % spec['boost'].prefix, -# '-DMRNET_DIR=%s' % spec['mrnet'].prefix, -# '-DDYNINST_DIR=%s' % spec['dyninst'].prefix, -# '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix, -# '-DOPENMPI_DIR=%s' % openmpi_prefix_path, -# '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, +# '-DCMAKE_INSTALL_PREFIX=%s' % prefix, +# '-DCBTF_DIR=%s' % spec['cbtf'].prefix, +# '-DBINUTILS_DIR=%s' % spec['binutils'].prefix, +# '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix, +# '-DLIBUNWIND_DIR=%s'% spec['libunwind'].prefix, +# '-DPAPI_DIR=%s' % spec['papi'].prefix, +# '-DBOOST_DIR=%s' % spec['boost'].prefix, +# '-DMRNET_DIR=%s' % spec['mrnet'].prefix, +# '-DDYNINST_DIR=%s' % spec['dyninst'].prefix, +# '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix, +# '-DOPENMPI_DIR=%s' % openmpi_prefix_path, +# '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, # *std_cmake_args) # # make("clean") diff --git a/var/spack/repos/builtin/packages/cbtf-lanl/package.py b/var/spack/repos/builtin/packages/cbtf-lanl/package.py index 5694535fccf..b614a325f9e 100644 --- 
a/var/spack/repos/builtin/packages/cbtf-lanl/package.py +++ b/var/spack/repos/builtin/packages/cbtf-lanl/package.py @@ -22,7 +22,7 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -################################################################################ +########################################################################## # Copyright (c) 2015-2016 Krell Institute. All Rights Reserved. # # This program is free software; you can redistribute it and/or modify it under @@ -38,20 +38,22 @@ # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 59 Temple # Place, Suite 330, Boston, MA 02111-1307 USA -################################################################################ +########################################################################## from spack import * + class CbtfLanl(Package): - """CBTF LANL project contains a memory tool and data center type system command monitoring tool.""" + """CBTF LANL project contains a memory tool and data center type system + command monitoring tool.""" homepage = "http://sourceforge.net/p/cbtf/wiki/Home/" - # Mirror access template example - #url = "file:/g/g24/jeg/cbtf-lanl-1.5.tar.gz" - #version('1.5', 'c3f78f967b0a42c6734ce4be0e602426') + # url = "file:/g/g24/jeg/cbtf-lanl-1.5.tar.gz" + # version('1.5', 'c3f78f967b0a42c6734ce4be0e602426') - version('1.6', branch='master', git='http://git.code.sf.net/p/cbtf-lanl/cbtf-lanl') + version('1.6', branch='master', + git='http://git.code.sf.net/p/cbtf-lanl/cbtf-lanl') depends_on("cmake@3.0.2", type='build') # Dependencies for cbtf-krell @@ -63,11 +65,13 @@ class CbtfLanl(Package): parallel = False def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions): - # Sets build type parameters into cmakeOptions the options that will enable the cbtf-krell built type settings + # Sets build type parameters into cmakeOptions the options that will + # enable the cbtf-krell built type settings - compile_flags="-O2 -g" + compile_flags = "-O2 -g" BuildTypeOptions = [] - # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs + # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the + # stdcmakeargs for word in cmakeOptions[:]: if word.startswith('-DCMAKE_BUILD_TYPE'): cmakeOptions.remove(word) @@ -78,40 +82,43 @@ def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions): if word.startswith('-DCMAKE_VERBOSE_MAKEFILE'): cmakeOptions.remove(word) BuildTypeOptions.extend([ - '-DCMAKE_VERBOSE_MAKEFILE=ON', - '-DCMAKE_BUILD_TYPE=None', - '-DCMAKE_CXX_FLAGS=%s' % compile_flags, - '-DCMAKE_C_FLAGS=%s' % compile_flags + '-DCMAKE_VERBOSE_MAKEFILE=ON', + '-DCMAKE_BUILD_TYPE=None', + '-DCMAKE_CXX_FLAGS=%s' % compile_flags, + '-DCMAKE_C_FLAGS=%s' % compile_flags ]) cmakeOptions.extend(BuildTypeOptions) def install(self, spec, prefix): - # Add in paths for finding package config files that tell us where to find these packages - cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix) + # Add in paths for finding package config files that tell us where to + # find these packages + cmake_prefix_path = join_path( + spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix) - with working_dir('build', create=True): - cmakeOptions = [] - 
cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix, - '-DCBTF_DIR=%s' % spec['cbtf'].prefix, - '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix, - '-DMRNET_DIR=%s' % spec['mrnet'].prefix, - '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix, - '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, - '-DCMAKE_MODULE_PATH=%s' % join_path(prefix.share,'KrellInstitute','cmake') - ]) + with working_dir('build', create=True): + cmakeOptions = [] + cmakeOptions.extend( + ['-DCMAKE_INSTALL_PREFIX=%s' % prefix, + '-DCBTF_DIR=%s' % spec['cbtf'].prefix, + '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix, + '-DMRNET_DIR=%s' % spec['mrnet'].prefix, + '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix, + '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, + '-DCMAKE_MODULE_PATH=%s' % join_path( + prefix.share, 'KrellInstitute', 'cmake')]) - # Add in the standard cmake arguments - cmakeOptions.extend(std_cmake_args) + # Add in the standard cmake arguments + cmakeOptions.extend(std_cmake_args) - # Adjust the standard cmake arguments to what we want the build type, etc to be - self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions) - - # Invoke cmake - cmake('..', *cmakeOptions) + # Adjust the standard cmake arguments to what we want the build + # type, etc to be + self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions) - make("clean") - make() - make("install") + # Invoke cmake + cmake('..', *cmakeOptions) + make("clean") + make() + make("install") diff --git a/var/spack/repos/builtin/packages/cbtf/package.py b/var/spack/repos/builtin/packages/cbtf/package.py index 017b897f3c6..bc3116bf2a4 100644 --- a/var/spack/repos/builtin/packages/cbtf/package.py +++ b/var/spack/repos/builtin/packages/cbtf/package.py @@ -22,7 +22,7 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -################################################################################ +########################################################################## # Copyright (c) 2015-2016 Krell Institute. All Rights Reserved. # # This program is free software; you can redistribute it and/or modify it under @@ -38,24 +38,30 @@ # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 59 Temple # Place, Suite 330, Boston, MA 02111-1307 USA -################################################################################ +########################################################################## from spack import * + class Cbtf(Package): - """CBTF project contains the base code for CBTF that supports creating components, - component networks and the support to connect these components and component - networks into sequential and distributed network tools.""" + """CBTF project contains the base code for CBTF that supports creating + components, component networks and the support to connect these + components and component networks into sequential and distributed + network tools. 
+ + """ homepage = "http://sourceforge.net/p/cbtf/wiki/Home" # Mirror access template example - #url = "file:/home/jeg/cbtf-1.6.tar.gz" - #version('1.6', 'c1ef4e5aa4e470dffb042abdba0b9987') + # url = "file:/home/jeg/cbtf-1.6.tar.gz" + # version('1.6', 'c1ef4e5aa4e470dffb042abdba0b9987') # Use when the git repository is available - version('1.6', branch='master', git='https://github.com/OpenSpeedShop/cbtf.git') + version('1.6', branch='master', + git='https://github.com/OpenSpeedShop/cbtf.git') - variant('runtime', default=False, description="build only the runtime libraries and collectors.") + variant('runtime', default=False, + description="build only the runtime libraries and collectors.") depends_on("cmake@3.0.2", type='build') depends_on("boost@1.50.0:") @@ -67,11 +73,13 @@ class Cbtf(Package): parallel = False def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions): - # Sets build type parameters into cmakeOptions the options that will enable the cbtf-krell built type settings - - compile_flags="-O2 -g" + # Sets build type parameters into cmakeOptions the options that will + # enable the cbtf-krell built type settings + + compile_flags = "-O2 -g" BuildTypeOptions = [] - # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs + # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the + # stdcmakeargs for word in cmakeOptions[:]: if word.startswith('-DCMAKE_BUILD_TYPE'): cmakeOptions.remove(word) @@ -80,61 +88,66 @@ def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions): if word.startswith('-DCMAKE_C_FLAGS'): cmakeOptions.remove(word) BuildTypeOptions.extend([ - '-DCMAKE_BUILD_TYPE=None', - '-DCMAKE_CXX_FLAGS=%s' % compile_flags, - '-DCMAKE_C_FLAGS=%s' % compile_flags + '-DCMAKE_BUILD_TYPE=None', + '-DCMAKE_CXX_FLAGS=%s' % compile_flags, + '-DCMAKE_C_FLAGS=%s' % compile_flags ]) cmakeOptions.extend(BuildTypeOptions) def install(self, spec, prefix): - with working_dir('build', create=True): + with working_dir('build', create=True): - # Boost_NO_SYSTEM_PATHS Set to TRUE to suppress searching - # in system paths (or other locations outside of BOOST_ROOT - # or BOOST_INCLUDEDIR). Useful when specifying BOOST_ROOT. - # Defaults to OFF. + # Boost_NO_SYSTEM_PATHS Set to TRUE to suppress searching + # in system paths (or other locations outside of BOOST_ROOT + # or BOOST_INCLUDEDIR). Useful when specifying BOOST_ROOT. + # Defaults to OFF. 
- if '+runtime' in spec: - # Install message tag include file for use in Intel MIC cbtf-krell build - # FIXME - cmakeOptions = [] - cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix, - '-DBoost_NO_SYSTEM_PATHS=TRUE', - '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix, - '-DBOOST_ROOT=%s' % spec['boost'].prefix, - '-DMRNET_DIR=%s' % spec['mrnet'].prefix, - '-DCMAKE_MODULE_PATH=%s' % join_path(prefix.share,'KrellInstitute','cmake') - ]) + if '+runtime' in spec: + # Install message tag include file for use in Intel MIC + # cbtf-krell build + # FIXME + cmakeOptions = [] + cmakeOptions.extend( + ['-DCMAKE_INSTALL_PREFIX=%s' % prefix, + '-DBoost_NO_SYSTEM_PATHS=TRUE', + '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix, + '-DBOOST_ROOT=%s' % spec['boost'].prefix, + '-DMRNET_DIR=%s' % spec['mrnet'].prefix, + '-DCMAKE_MODULE_PATH=%s' % join_path( + prefix.share, 'KrellInstitute', 'cmake')]) - # Add in the standard cmake arguments - cmakeOptions.extend(std_cmake_args) + # Add in the standard cmake arguments + cmakeOptions.extend(std_cmake_args) - # Adjust the standard cmake arguments to what we want the build type, etc to be - self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions) - - # Invoke cmake - cmake('..', *cmakeOptions) + # Adjust the standard cmake arguments to what we want the build + # type, etc to be + self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions) - else: - cmakeOptions = [] - cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix, - '-DBoost_NO_SYSTEM_PATHS=TRUE', - '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix, - '-DBOOST_ROOT=%s' % spec['boost'].prefix, - '-DMRNET_DIR=%s' % spec['mrnet'].prefix, - '-DCMAKE_MODULE_PATH=%s' % join_path(prefix.share,'KrellInstitute','cmake') - ]) + # Invoke cmake + cmake('..', *cmakeOptions) - # Add in the standard cmake arguments - cmakeOptions.extend(std_cmake_args) + else: + cmakeOptions = [] + cmakeOptions.extend( + ['-DCMAKE_INSTALL_PREFIX=%s' % prefix, + '-DBoost_NO_SYSTEM_PATHS=TRUE', + '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix, + '-DBOOST_ROOT=%s' % spec['boost'].prefix, + '-DMRNET_DIR=%s' % spec['mrnet'].prefix, + '-DCMAKE_MODULE_PATH=%s' % join_path( + prefix.share, 'KrellInstitute', 'cmake')]) - # Adjust the standard cmake arguments to what we want the build type, etc to be - self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions) - - # Invoke cmake - cmake('..', *cmakeOptions) + # Add in the standard cmake arguments + cmakeOptions.extend(std_cmake_args) - make("clean") - make() - make("install") + # Adjust the standard cmake arguments to what we want the build + # type, etc to be + self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions) + + # Invoke cmake + cmake('..', *cmakeOptions) + + make("clean") + make() + make("install") diff --git a/var/spack/repos/builtin/packages/cereal/package.py b/var/spack/repos/builtin/packages/cereal/package.py index 41dc9a274b8..716e0103d15 100644 --- a/var/spack/repos/builtin/packages/cereal/package.py +++ b/var/spack/repos/builtin/packages/cereal/package.py @@ -26,8 +26,16 @@ import os import shutil + class Cereal(Package): - """cereal is a header-only C++11 serialization library. cereal takes arbitrary data types and reversibly turns them into different representations, such as compact binary encodings, XML, or JSON. cereal was designed to be fast, light-weight, and easy to extend - it has no external dependencies and can be easily bundled with other code or used standalone.""" + """cereal is a header-only C++11 serialization library. 
cereal takes + arbitrary data types and reversibly turns them into different + representations, such as compact binary encodings, XML, or + JSON. cereal was designed to be fast, light-weight, and easy to + extend - it has no external dependencies and can be easily bundled + with other code or used standalone. + + """ homepage = "http://uscilab.github.io/cereal/" url = "https://github.com/USCiLab/cereal/archive/v1.1.2.tar.gz" diff --git a/var/spack/repos/builtin/packages/cfitsio/package.py b/var/spack/repos/builtin/packages/cfitsio/package.py index ed49ae5808e..c68b3ffc0dd 100644 --- a/var/spack/repos/builtin/packages/cfitsio/package.py +++ b/var/spack/repos/builtin/packages/cfitsio/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Cfitsio(Package): """ CFITSIO is a library of C and Fortran subroutines for reading and writing diff --git a/var/spack/repos/builtin/packages/cgal/package.py b/var/spack/repos/builtin/packages/cgal/package.py index 5c87978339e..e522b4a5a03 100644 --- a/var/spack/repos/builtin/packages/cgal/package.py +++ b/var/spack/repos/builtin/packages/cgal/package.py @@ -27,10 +27,12 @@ class Cgal(Package): - """ - CGAL is a software project that provides easy access to efficient and reliable geometric algorithms in the form of - a C++ library. CGAL is used in various areas needing geometric computation, such as geographic information systems, - computer aided design, molecular biology, medical imaging, computer graphics, and robotics. + """CGAL is a software project that provides easy access to efficient and + reliable geometric algorithms in the form of a C++ library. CGAL + is used in various areas needing geometric computation, such as + geographic information systems, computer aided design, molecular + biology, medical imaging, computer graphics, and robotics. + """ homepage = 'http://www.cgal.org/' url = 'https://github.com/CGAL/cgal/archive/releases/CGAL-4.7.tar.gz' @@ -38,9 +40,12 @@ class Cgal(Package): version('4.7', '4826714810f3b4c65cac96b90fb03b67') version('4.6.3', 'e8ee2ecc8d2b09b94a121c09257b576d') - # Installation instructions : http://doc.cgal.org/latest/Manual/installation.html - variant('shared', default=True, description='Enables the build of shared libraries') - variant('debug', default=False, description='Builds a debug version of the libraries') + # Installation instructions : + # http://doc.cgal.org/latest/Manual/installation.html + variant('shared', default=True, + description='Enables the build of shared libraries') + variant('debug', default=False, + description='Builds a debug version of the libraries') depends_on('boost') depends_on('mpfr') @@ -55,7 +60,8 @@ def install(self, spec, prefix): options = [] options.extend(std_cmake_args) - # CGAL supports only Release and Debug build type. Any other build type will raise an error at configure time + # CGAL supports only Release and Debug build type. 
Any other build type + # will raise an error at configure time if '+debug' in spec: options.append('-DCMAKE_BUILD_TYPE:STRING=Debug') else: diff --git a/var/spack/repos/builtin/packages/cgm/package.py b/var/spack/repos/builtin/packages/cgm/package.py index c5da72d25a6..5a998d471c9 100644 --- a/var/spack/repos/builtin/packages/cgm/package.py +++ b/var/spack/repos/builtin/packages/cgm/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Cgm(Package): """The Common Geometry Module, Argonne (CGMA) is a code library which provides geometry functionality used for mesh generation and @@ -33,7 +34,7 @@ class Cgm(Package): version('13.1.1', '4e8dbc4ba8f65767b29f985f7a23b01f') version('13.1.0', 'a6c7b22660f164ce893fb974f9cb2028') - version('13.1' , '95f724bda04919fc76818a5b7bc0b4ed') + version('13.1', '95f724bda04919fc76818a5b7bc0b4ed') depends_on("mpi") @@ -42,7 +43,6 @@ def patch(self): '//\1', 'geom/parallel/CGMReadParallel.cpp') - def install(self, spec, prefix): configure("--with-mpi", "--prefix=%s" % prefix, diff --git a/var/spack/repos/builtin/packages/cityhash/package.py b/var/spack/repos/builtin/packages/cityhash/package.py index caa15780e31..d6d7f515585 100644 --- a/var/spack/repos/builtin/packages/cityhash/package.py +++ b/var/spack/repos/builtin/packages/cityhash/package.py @@ -25,16 +25,18 @@ from spack import * from spack.util.environment import * + class Cityhash(Package): homepage = "https://github.com/google/cityhash" url = "https://github.com/google/cityhash" - version('2013-07-31', git='https://github.com/google/cityhash.git', commit='8af9b8c2b889d80c22d6bc26ba0df1afb79a30db') - version('master', branch='master', git='https://github.com/google/cityhash.git') + version('2013-07-31', git='https://github.com/google/cityhash.git', + commit='8af9b8c2b889d80c22d6bc26ba0df1afb79a30db') + version('master', branch='master', + git='https://github.com/google/cityhash.git') def install(self, spec, prefix): configure('--enable-sse4.2', '--prefix=%s' % prefix) make() make("install") - diff --git a/var/spack/repos/builtin/packages/cleverleaf/package.py b/var/spack/repos/builtin/packages/cleverleaf/package.py index c258e89514a..61e6dca4333 100644 --- a/var/spack/repos/builtin/packages/cleverleaf/package.py +++ b/var/spack/repos/builtin/packages/cleverleaf/package.py @@ -24,18 +24,21 @@ ############################################################################## from spack import * + class Cleverleaf(Package): - """ - CleverLeaf is a hydrodynamics mini-app that extends CloverLeaf with Adaptive - Mesh Refinement using the SAMRAI toolkit from Lawrence Livermore National - Laboratory. The primary goal of CleverLeaf is to evaluate the application of - AMR to the Lagrangian-Eulerian hydrodynamics scheme used by CloverLeaf. + """CleverLeaf is a hydrodynamics mini-app that extends CloverLeaf with + Adaptive Mesh Refinement using the SAMRAI toolkit from Lawrence + Livermore National Laboratory. The primary goal of CleverLeaf is + to evaluate the application of AMR to the Lagrangian-Eulerian + hydrodynamics scheme used by CloverLeaf. 
+ """ homepage = "http://uk-mac.github.io/CleverLeaf/" url = "https://github.com/UK-MAC/CleverLeaf/tarball/master" - version('develop', git='https://github.com/UK-MAC/CleverLeaf_ref.git', branch='develop') + version('develop', git='https://github.com/UK-MAC/CleverLeaf_ref.git', + branch='develop') depends_on("SAMRAI@3.8.0:") depends_on("hdf5+mpi") diff --git a/var/spack/repos/builtin/packages/cloog/package.py b/var/spack/repos/builtin/packages/cloog/package.py index db3d2ac928b..a979ae83fc7 100644 --- a/var/spack/repos/builtin/packages/cloog/package.py +++ b/var/spack/repos/builtin/packages/cloog/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Cloog(Package): """CLooG is a free software and library to generate code for scanning Z-polyhedra. That is, it finds a code (e.g. in C, diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py index bfb8764feb1..90a7c20d19a 100644 --- a/var/spack/repos/builtin/packages/cmake/package.py +++ b/var/spack/repos/builtin/packages/cmake/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Cmake(Package): """A cross-platform, open-source build system. CMake is a family of tools designed to build, test and package software.""" @@ -40,10 +41,13 @@ class Cmake(Package): version('3.0.2', 'db4c687a31444a929d2fdc36c4dfb95f') version('2.8.10.2', '097278785da7182ec0aea8769d06860c') - variant('ncurses', default=True, description='Enables the build of the ncurses gui') - variant('openssl', default=True, description="Enables CMake's OpenSSL features") + variant('ncurses', default=True, + description='Enables the build of the ncurses gui') + variant('openssl', default=True, + description="Enables CMake's OpenSSL features") variant('qt', default=False, description='Enables the build of cmake-gui') - variant('doc', default=False, description='Enables the generation of html and man page documentation') + variant('doc', default=False, + description='Enables the generation of html and man page docs') depends_on('ncurses', when='+ncurses') depends_on('openssl', when='+openssl') @@ -53,7 +57,8 @@ class Cmake(Package): def url_for_version(self, version): """Handle CMake's version-based custom URLs.""" - return 'https://cmake.org/files/v%s/cmake-%s.tar.gz' % (version.up_to(2), version) + return 'https://cmake.org/files/v%s/cmake-%s.tar.gz' % ( + version.up_to(2), version) def validate(self, spec): """ diff --git a/var/spack/repos/builtin/packages/cmocka/package.py b/var/spack/repos/builtin/packages/cmocka/package.py index aa2b3cc3bb2..274b78379a5 100644 --- a/var/spack/repos/builtin/packages/cmocka/package.py +++ b/var/spack/repos/builtin/packages/cmocka/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Cmocka(Package): """Unit-testing framework in pure C""" homepage = "https://cmocka.org/" @@ -36,7 +37,7 @@ class Cmocka(Package): def install(self, spec, prefix): with working_dir('spack-build', create=True): - cmake('..', *std_cmake_args) + cmake('..', *std_cmake_args) - make() - make("install") + make() + make("install") diff --git a/var/spack/repos/builtin/packages/cnmem/package.py b/var/spack/repos/builtin/packages/cnmem/package.py index f4c05f5b5f8..0c620239529 100644 --- a/var/spack/repos/builtin/packages/cnmem/package.py +++ 
b/var/spack/repos/builtin/packages/cnmem/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Cnmem(Package): """CNMem mempool for CUDA devices""" homepage = "https://github.com/NVIDIA/cnmem" @@ -31,6 +32,6 @@ class Cnmem(Package): version('git', git='https://github.com/NVIDIA/cnmem.git', branch="master") def install(self, spec, prefix): - cmake('.',*std_cmake_args) - make() - make('install') + cmake('.', *std_cmake_args) + make() + make('install') diff --git a/var/spack/repos/builtin/packages/coreutils/package.py b/var/spack/repos/builtin/packages/coreutils/package.py index cb8f596b412..94cfa113410 100644 --- a/var/spack/repos/builtin/packages/coreutils/package.py +++ b/var/spack/repos/builtin/packages/coreutils/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Coreutils(Package): """The GNU Core Utilities are the basic file, shell and text manipulation utilities of the GNU operating system. These are diff --git a/var/spack/repos/builtin/packages/cppcheck/package.py b/var/spack/repos/builtin/packages/cppcheck/package.py index 16f052ef2ff..fd48fcb7e30 100644 --- a/var/spack/repos/builtin/packages/cppcheck/package.py +++ b/var/spack/repos/builtin/packages/cppcheck/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Cppcheck(Package): """A tool for static C/C++ code analysis.""" homepage = "http://cppcheck.sourceforge.net/" diff --git a/var/spack/repos/builtin/packages/cram/package.py b/var/spack/repos/builtin/packages/cram/package.py index 283bc5adea3..bef26cdcbd0 100644 --- a/var/spack/repos/builtin/packages/cram/package.py +++ b/var/spack/repos/builtin/packages/cram/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Cram(Package): """Cram runs many small MPI jobs inside one large MPI job.""" homepage = "https://github.com/llnl/cram" diff --git a/var/spack/repos/builtin/packages/cryptopp/package.py b/var/spack/repos/builtin/packages/cryptopp/package.py index 77895a7331a..e9294a14a6d 100644 --- a/var/spack/repos/builtin/packages/cryptopp/package.py +++ b/var/spack/repos/builtin/packages/cryptopp/package.py @@ -25,12 +25,15 @@ import glob from spack import * + class Cryptopp(Package): """Crypto++ is an open-source C++ library of cryptographic schemes. The - library supports a number of different cryptography algorithms, including - authenticated encryption schemes (GCM, CCM), hash functions (SHA-1, SHA2), - public-key encryption (RSA, DSA), and a few obsolete/historical encryption - algorithms (MD5, Panama).""" + library supports a number of different cryptography algorithms, + including authenticated encryption schemes (GCM, CCM), hash + functions (SHA-1, SHA2), public-key encryption (RSA, DSA), and a + few obsolete/historical encryption algorithms (MD5, Panama). 
+ + """ homepage = "http://www.cryptopp.com" base_url = "http://www.cryptopp.com" diff --git a/var/spack/repos/builtin/packages/cscope/package.py b/var/spack/repos/builtin/packages/cscope/package.py index 88d522f486a..f21226cce18 100644 --- a/var/spack/repos/builtin/packages/cscope/package.py +++ b/var/spack/repos/builtin/packages/cscope/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Cscope(Package): """Cscope is a developer's tool for browsing source code.""" homepage = "http://http://cscope.sourceforge.net/" diff --git a/var/spack/repos/builtin/packages/cuda/package.py b/var/spack/repos/builtin/packages/cuda/package.py index 96694670ad9..558535bec64 100644 --- a/var/spack/repos/builtin/packages/cuda/package.py +++ b/var/spack/repos/builtin/packages/cuda/package.py @@ -26,22 +26,27 @@ from glob import glob import os + class Cuda(Package): - """CUDA is a parallel computing platform and programming model invented by - NVIDIA. It enables dramatic increases in computing performance by harnessing - the power of the graphics processing unit (GPU). + """CUDA is a parallel computing platform and programming model invented + by NVIDIA. It enables dramatic increases in computing performance by + harnessing the power of the graphics processing unit (GPU). - Note: NVIDIA does not provide a download URL for CUDA so you will need to - download it yourself. Go to https://developer.nvidia.com/cuda-downloads - and select your Operating System, Architecture, Distribution, and Version. - For the Installer Type, select runfile and click Download. Spack will search - your current directory for this file. Alternatively, add this file to a - mirror so that Spack can find it. For instructions on how to set up a mirror, - see http://software.llnl.gov/spack/mirrors.html + Note: NVIDIA does not provide a download URL for CUDA so you will + need to download it yourself. Go to + https://developer.nvidia.com/cuda-downloads and select your Operating + System, Architecture, Distribution, and Version. For the Installer + Type, select runfile and click Download. Spack will search your + current directory for this file. Alternatively, add this file to a + mirror so that Spack can find it. For instructions on how to set up a + mirror, see http://software.llnl.gov/spack/mirrors.html - Note: This package does not currently install the drivers necessary to run - CUDA. These will need to be installed manually. See: - http://docs.nvidia.com/cuda/cuda-getting-started-guide-for-linux for details.""" + Note: This package does not currently install the drivers necessary + to run CUDA. These will need to be installed manually. See: + http://docs.nvidia.com/cuda/cuda-getting-started-guide-for-linux for + details. + + """ homepage = "http://www.nvidia.com/object/cuda_home_new.html" @@ -50,15 +55,15 @@ class Cuda(Package): version('6.5.14', '90b1b8f77313600cc294d9271741f4da', expand=False, url="file://%s/cuda_6.5.14_linux_64.run" % os.getcwd()) - def install(self, spec, prefix): runfile = glob(os.path.join(self.stage.path, 'cuda*.run'))[0] chmod = which('chmod') chmod('+x', runfile) runfile = which(runfile) - # Note: NVIDIA does not officially support many newer versions of compilers. - # For example, on CentOS 6, you must use GCC 4.4.7 or older. See: + # Note: NVIDIA does not officially support many newer versions of + # compilers. For example, on CentOS 6, you must use GCC 4.4.7 or + # older. 
See: # http://docs.nvidia.com/cuda/cuda-installation-guide-linux/#system-requirements # for details. @@ -68,4 +73,3 @@ def install(self, spec, prefix): '--toolkit', # install CUDA Toolkit '--toolkitpath=%s' % prefix ) - diff --git a/var/spack/repos/builtin/packages/czmq/package.py b/var/spack/repos/builtin/packages/czmq/package.py index a251a94470f..ef6374619bc 100644 --- a/var/spack/repos/builtin/packages/czmq/package.py +++ b/var/spack/repos/builtin/packages/czmq/package.py @@ -25,12 +25,14 @@ from spack import * import os + class Czmq(Package): """ A C interface to the ZMQ library """ homepage = "http://czmq.zeromq.org" url = "https://github.com/zeromq/czmq/archive/v3.0.2.tar.gz" - version('3.0.2', '23e9885f7ee3ce88d99d0425f52e9be1', url='https://github.com/zeromq/czmq/archive/v3.0.2.tar.gz') + version('3.0.2', '23e9885f7ee3ce88d99d0425f52e9be1', + url='https://github.com/zeromq/czmq/archive/v3.0.2.tar.gz') depends_on('libtool', type='build') depends_on('automake', type='build') @@ -39,19 +41,21 @@ class Czmq(Package): depends_on('zeromq') def install(self, spec, prefix): - bash = which("bash") # Work around autogen.sh oddities + # bash = which("bash") # bash("./autogen.sh") mkdirp("config") autoreconf = which("autoreconf") autoreconf("--install", "--verbose", "--force", - "-I", "config", - "-I", os.path.join(spec['pkg-config'].prefix, "share", "aclocal"), - "-I", os.path.join(spec['automake'].prefix, "share", "aclocal"), - "-I", os.path.join(spec['libtool'].prefix, "share", "aclocal"), - ) + "-I", "config", + "-I", os.path.join(spec['pkg-config'].prefix, + "share", "aclocal"), + "-I", os.path.join(spec['automake'].prefix, + "share", "aclocal"), + "-I", os.path.join(spec['libtool'].prefix, + "share", "aclocal"), + ) configure("--prefix=%s" % prefix) make() make("install") - diff --git a/var/spack/repos/builtin/packages/dakota/package.py b/var/spack/repos/builtin/packages/dakota/package.py index d0d22d97286..e8f7d0889b4 100644 --- a/var/spack/repos/builtin/packages/dakota/package.py +++ b/var/spack/repos/builtin/packages/dakota/package.py @@ -26,17 +26,22 @@ class Dakota(Package): - """ - The Dakota toolkit provides a flexible, extensible interface between analysis codes and iterative systems - analysis methods. Dakota contains algorithms for: + """The Dakota toolkit provides a flexible, extensible interface between + analysis codes and iterative systems analysis methods. Dakota + contains algorithms for: - optimization with gradient and non gradient-based methods; - - uncertainty quantification with sampling, reliability, stochastic expansion, and epistemic methods; + - uncertainty quantification with sampling, reliability, stochastic + - expansion, and epistemic methods; - parameter estimation with nonlinear least squares methods; - - sensitivity/variance analysis with design of experiments and parameter study methods. + - sensitivity/variance analysis with design of experiments and + - parameter study methods. + + These capabilities may be used on their own or as components within + advanced strategies such as hybrid optimization, surrogate-based + optimization, mixed integer nonlinear programming, or optimization + under uncertainty. - These capabilities may be used on their own or as components within advanced strategies such as hybrid optimization, - surrogate-based optimization, mixed integer nonlinear programming, or optimization under uncertainty. 
""" homepage = 'https://dakota.sandia.gov/' @@ -45,8 +50,10 @@ class Dakota(Package): version('6.3', '05a58d209fae604af234c894c3f73f6d') - variant('debug', default=False, description='Builds a debug version of the libraries') - variant('shared', default=True, description='Enables the build of shared libraries') + variant('debug', default=False, + description='Builds a debug version of the libraries') + variant('shared', default=True, + description='Enables the build of shared libraries') variant('mpi', default=True, description='Activates MPI support') depends_on('blas') @@ -64,12 +71,17 @@ def install(self, spec, prefix): options = [] options.extend(std_cmake_args) - options.extend(['-DCMAKE_BUILD_TYPE:STRING=%s' % ('Debug' if '+debug' in spec else 'Release'), - '-DBUILD_SHARED_LIBS:BOOL=%s' % ('ON' if '+shared' in spec else 'OFF')]) + options.extend([ + '-DCMAKE_BUILD_TYPE:STRING=%s' % ( + 'Debug' if '+debug' in spec else 'Release'), + '-DBUILD_SHARED_LIBS:BOOL=%s' % ( + 'ON' if '+shared' in spec else 'OFF')]) if '+mpi' in spec: - options.extend(['-DDAKOTA_HAVE_MPI:BOOL=ON', - '-DMPI_CXX_COMPILER:STRING=%s' % join_path(spec['mpi'].prefix.bin, 'mpicxx')]) + options.extend([ + '-DDAKOTA_HAVE_MPI:BOOL=ON', + '-DMPI_CXX_COMPILER:STRING=%s' % join_path( + spec['mpi'].prefix.bin, 'mpicxx')]) build_directory = join_path(self.stage.path, 'spack-build') source_directory = self.stage.source_path diff --git a/var/spack/repos/builtin/packages/damselfly/package.py b/var/spack/repos/builtin/packages/damselfly/package.py index 427997072c4..a37728c92b1 100644 --- a/var/spack/repos/builtin/packages/damselfly/package.py +++ b/var/spack/repos/builtin/packages/damselfly/package.py @@ -24,17 +24,19 @@ ############################################################################## from spack import * + class Damselfly(Package): """Damselfly is a model-based parallel network simulator.""" homepage = "https://github.com/llnl/damselfly" url = "https://github.com/llnl/damselfly" - version('1.0', '05cf7e2d8ece4408c0f2abb7ab63fd74c0d62895', git='https://github.com/llnl/damselfly.git', tag='v1.0') + version('1.0', '05cf7e2d8ece4408c0f2abb7ab63fd74c0d62895', + git='https://github.com/llnl/damselfly.git', tag='v1.0') depends_on('cmake', type='build') def install(self, spec, prefix): with working_dir('spack-build', create=True): - cmake('-DCMAKE_BUILD_TYPE=release', '..', *std_cmake_args) - make() - make('install') + cmake('-DCMAKE_BUILD_TYPE=release', '..', *std_cmake_args) + make() + make('install') diff --git a/var/spack/repos/builtin/packages/dbus/package.py b/var/spack/repos/builtin/packages/dbus/package.py index 130ba2ea1fa..fdca68f53f2 100644 --- a/var/spack/repos/builtin/packages/dbus/package.py +++ b/var/spack/repos/builtin/packages/dbus/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Dbus(Package): """D-Bus is a message bus system, a simple way for applications to talk to one another. 
D-Bus supplies both a system daemon (for diff --git a/var/spack/repos/builtin/packages/dealii/package.py b/var/spack/repos/builtin/packages/dealii/package.py index 18c0849f68d..1411494c6f0 100644 --- a/var/spack/repos/builtin/packages/dealii/package.py +++ b/var/spack/repos/builtin/packages/dealii/package.py @@ -40,18 +40,27 @@ class Dealii(Package): version('develop', git='https://github.com/dealii/dealii.git') variant('mpi', default=True, description='Compile with MPI') - variant('arpack', default=True, description='Compile with Arpack and PArpack (only with MPI)') - variant('doc', default=False, description='Compile with documentation') + variant('arpack', default=True, + description='Compile with Arpack and PArpack (only with MPI)') + variant('doc', default=False, + description='Compile with documentation') variant('gsl', default=True, description='Compile with GSL') - variant('hdf5', default=True, description='Compile with HDF5 (only with MPI)') + variant('hdf5', default=True, + description='Compile with HDF5 (only with MPI)') variant('metis', default=True, description='Compile with Metis') - variant('netcdf', default=True, description='Compile with Netcdf (only with MPI)') + variant('netcdf', default=True, + description='Compile with Netcdf (only with MPI)') variant('oce', default=True, description='Compile with OCE') - variant('p4est', default=True, description='Compile with P4est (only with MPI)') - variant('petsc', default=True, description='Compile with Petsc (only with MPI)') - variant('slepc', default=True, description='Compile with Slepc (only with Petsc and MPI)') - variant('trilinos', default=True, description='Compile with Trilinos (only with MPI)') - variant('python', default=True, description='Compile with Python bindings') + variant('p4est', default=True, + description='Compile with P4est (only with MPI)') + variant('petsc', default=True, + description='Compile with Petsc (only with MPI)') + variant('slepc', default=True, + description='Compile with Slepc (only with Petsc and MPI)') + variant('trilinos', default=True, + description='Compile with Trilinos (only with MPI)') + variant('python', default=True, + description='Compile with Python bindings') # required dependencies, light version depends_on("blas") @@ -59,13 +68,20 @@ class Dealii(Package): # https://github.com/dealii/dealii/issues/1591 # Require at least 1.59 # +python won't affect @:8.4.1 - depends_on("boost@1.59.0:+thread+system+serialization+iostreams", when='@:8.4.1~mpi') - depends_on("boost@1.59.0:+thread+system+serialization+iostreams+mpi", when='@:8.4.1+mpi') + depends_on("boost@1.59.0:+thread+system+serialization+iostreams", + when='@:8.4.1~mpi') + depends_on("boost@1.59.0:+thread+system+serialization+iostreams+mpi", + when='@:8.4.1+mpi') # since @8.5.0: (and @develop) python bindings are introduced: - depends_on("boost@1.59.0:+thread+system+serialization+iostreams", when='@8.5.0:~mpi~python') - depends_on("boost@1.59.0:+thread+system+serialization+iostreams+mpi", when='@8.5.0:+mpi~python') - depends_on("boost@1.59.0:+thread+system+serialization+iostreams+python", when='@8.5.0:~mpi+python') - depends_on("boost@1.59.0:+thread+system+serialization+iostreams+mpi+python", when='@8.5.0:+mpi+python') + depends_on("boost@1.59.0:+thread+system+serialization+iostreams", + when='@8.5.0:~mpi~python') + depends_on("boost@1.59.0:+thread+system+serialization+iostreams+mpi", + when='@8.5.0:+mpi~python') + depends_on("boost@1.59.0:+thread+system+serialization+iostreams+python", + when='@8.5.0:~mpi+python') + 
depends_on( + "boost@1.59.0:+thread+system+serialization+iostreams+mpi+python", + when='@8.5.0:+mpi+python') depends_on("bzip2") depends_on("cmake", type='build') depends_on("lapack") diff --git a/var/spack/repos/builtin/packages/dia/package.py b/var/spack/repos/builtin/packages/dia/package.py index 1685f0009f5..94cd75656d5 100644 --- a/var/spack/repos/builtin/packages/dia/package.py +++ b/var/spack/repos/builtin/packages/dia/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Dia(Package): """Dia is a program for drawing structured diagrams.""" homepage = 'https://wiki.gnome.org/Apps/Dia' @@ -34,7 +35,7 @@ class Dia(Package): depends_on('intltool', type='build') depends_on('gtkplus@2.6.0:') depends_on('cairo') - #depends_on('libart') # optional dependency, not yet supported by spack. + # depends_on('libart') # optional dependency, not yet supported by spack. depends_on('libpng') depends_on('libxslt') depends_on('python') diff --git a/var/spack/repos/builtin/packages/doxygen/package.py b/var/spack/repos/builtin/packages/doxygen/package.py index 267ba617565..f29110ac1dc 100644 --- a/var/spack/repos/builtin/packages/doxygen/package.py +++ b/var/spack/repos/builtin/packages/doxygen/package.py @@ -24,8 +24,6 @@ ############################################################################## from spack import * -import sys - class Doxygen(Package): """Doxygen is the de facto standard tool for generating documentation @@ -41,7 +39,8 @@ class Doxygen(Package): version('1.8.10', '79767ccd986f12a0f949015efb5f058f') # graphviz appears to be a run-time optional dependency - variant('graphviz', default=True, description='Build with dot command support from Graphviz.') # NOQA: ignore=E501 + variant('graphviz', default=True, + description='Build with dot command support from Graphviz.') depends_on("cmake@2.8.12:", type='build') depends_on("flex", type='build') diff --git a/var/spack/repos/builtin/packages/dri2proto/package.py b/var/spack/repos/builtin/packages/dri2proto/package.py index 25ea783c0c5..65b86714f12 100644 --- a/var/spack/repos/builtin/packages/dri2proto/package.py +++ b/var/spack/repos/builtin/packages/dri2proto/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Dri2proto(Package): """DRI2 Protocol Headers.""" homepage = "http://http://cgit.freedesktop.org/xorg/proto/dri2proto/" diff --git a/var/spack/repos/builtin/packages/dtcmp/package.py b/var/spack/repos/builtin/packages/dtcmp/package.py index b50b2ae3aec..e59e246d47d 100644 --- a/var/spack/repos/builtin/packages/dtcmp/package.py +++ b/var/spack/repos/builtin/packages/dtcmp/package.py @@ -22,9 +22,9 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os from spack import * + class Dtcmp(Package): """The Datatype Comparison Library provides comparison operations and parallel sort algorithms for MPI applications.""" diff --git a/var/spack/repos/builtin/packages/dyninst/package.py b/var/spack/repos/builtin/packages/dyninst/package.py index efe4de4abf1..90c83bdc3a9 100644 --- a/var/spack/repos/builtin/packages/dyninst/package.py +++ b/var/spack/repos/builtin/packages/dyninst/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import 
* + class Dyninst(Package): """API for dynamic binary instrumentation. Modify programs while they are executing without recompiling, re-linking, or re-executing.""" @@ -55,16 +56,18 @@ def install(self, spec, prefix): '-DBoost_INCLUDE_DIR=%s' % spec['boost'].prefix.include, '-DBoost_LIBRARY_DIR=%s' % spec['boost'].prefix.lib, '-DBoost_NO_SYSTEM_PATHS=TRUE', - '-DLIBELF_INCLUDE_DIR=%s' % join_path(libelf.include, 'libelf'), - '-DLIBELF_LIBRARIES=%s' % join_path(libelf.lib, 'libelf.so'), + '-DLIBELF_INCLUDE_DIR=%s' % join_path( + libelf.include, 'libelf'), + '-DLIBELF_LIBRARIES=%s' % join_path( + libelf.lib, 'libelf.so'), '-DLIBDWARF_INCLUDE_DIR=%s' % libdwarf.include, - '-DLIBDWARF_LIBRARIES=%s' % join_path(libdwarf.lib, 'libdwarf.so'), + '-DLIBDWARF_LIBRARIES=%s' % join_path( + libdwarf.lib, 'libdwarf.so'), *std_cmake_args) make() make("install") - @when('@:8.1') def install(self, spec, prefix): configure("--prefix=" + prefix) diff --git a/var/spack/repos/builtin/packages/eigen/package.py b/var/spack/repos/builtin/packages/eigen/package.py index 97343f499b1..36d6850c1ae 100644 --- a/var/spack/repos/builtin/packages/eigen/package.py +++ b/var/spack/repos/builtin/packages/eigen/package.py @@ -22,7 +22,6 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## - from spack import * @@ -36,15 +35,19 @@ class Eigen(Package): homepage = 'http://eigen.tuxfamily.org/' url = 'http://bitbucket.org/eigen/eigen/get/3.2.7.tar.bz2' - version('3.2.7', 'cc1bacbad97558b97da6b77c9644f184', url='http://bitbucket.org/eigen/eigen/get/3.2.7.tar.bz2') + version('3.2.7', 'cc1bacbad97558b97da6b77c9644f184', + url='http://bitbucket.org/eigen/eigen/get/3.2.7.tar.bz2') - variant('debug', default=False, description='Builds the library in debug mode') + variant('debug', default=False, + description='Builds the library in debug mode') variant('metis', default=True, description='Enables metis backend') variant('scotch', default=True, description='Enables scotch backend') variant('fftw', default=True, description='Enables FFTW backend') - variant('suitesparse', default=True, description='Enables SuiteSparse support') - variant('mpfr', default=True, description='Enables support for multi-precisions floating points via mpfr') + variant('suitesparse', default=True, + description='Enables SuiteSparse support') + variant('mpfr', default=True, + description='Enables support for multi-precisions FP via mpfr') # TODO : dependency on googlehash, superlu, adolc missing depends_on('cmake', type='build') diff --git a/var/spack/repos/builtin/packages/elfutils/package.py b/var/spack/repos/builtin/packages/elfutils/package.py index ecb5759ddce..ef8c2433c95 100644 --- a/var/spack/repos/builtin/packages/elfutils/package.py +++ b/var/spack/repos/builtin/packages/elfutils/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Elfutils(Package): """elfutils is a collection of various binary tools such as eu-objdump, eu-readelf, and other utilities that allow you to @@ -47,4 +48,3 @@ def install(self, spec, prefix): configure('--prefix=%s' % prefix, '--enable-maintainer-mode') make() make("install") - diff --git a/var/spack/repos/builtin/packages/elpa/package.py b/var/spack/repos/builtin/packages/elpa/package.py index ae81422495a..f28d63f6c3a 100644 --- 
a/var/spack/repos/builtin/packages/elpa/package.py +++ b/var/spack/repos/builtin/packages/elpa/package.py @@ -34,7 +34,8 @@ class Elpa(Package): homepage = 'http://elpa.mpcdf.mpg.de/' url = 'http://elpa.mpcdf.mpg.de/elpa-2015.11.001.tar.gz' - version('2015.11.001', 'de0f35b7ee7c971fd0dca35c900b87e6', url='http://elpa.mpcdf.mpg.de/elpa-2015.11.001.tar.gz') + version('2015.11.001', 'de0f35b7ee7c971fd0dca35c900b87e6', + url='http://elpa.mpcdf.mpg.de/elpa-2015.11.001.tar.gz') variant('openmp', default=False, description='Activates OpenMP support') diff --git a/var/spack/repos/builtin/packages/emacs/package.py b/var/spack/repos/builtin/packages/emacs/package.py index a9ebd6d42ff..94d42a3c9ff 100644 --- a/var/spack/repos/builtin/packages/emacs/package.py +++ b/var/spack/repos/builtin/packages/emacs/package.py @@ -35,7 +35,8 @@ class Emacs(Package): version('24.5', 'd74b597503a68105e61b5b9f6d065b44') variant('X', default=True, description="Enable a X toolkit (GTK+)") - variant('gtkplus', default=False, description="Enable a GTK+ as X toolkit (this variant is ignored if ~X)") + variant('gtkplus', default=False, + description="Enable a GTK+ as X toolkit (ignored if ~X)") depends_on('ncurses') depends_on('libtiff', when='+X') diff --git a/var/spack/repos/builtin/packages/environment-modules/package.py b/var/spack/repos/builtin/packages/environment-modules/package.py index 5d5bb3be029..85594bab476 100644 --- a/var/spack/repos/builtin/packages/environment-modules/package.py +++ b/var/spack/repos/builtin/packages/environment-modules/package.py @@ -46,17 +46,18 @@ def install(self, spec, prefix): "--without-tclx", "--with-tclx-ver=0.0", "--prefix=%s" % prefix, - "--with-tcl=%s" % join_path(tcl_spec.prefix, 'lib'), # It looks for tclConfig.sh - "--with-tcl-ver=%d.%d" % (tcl_spec.version.version[0], tcl_spec.version.version[1]), + # It looks for tclConfig.sh + "--with-tcl=%s" % join_path(tcl_spec.prefix, 'lib'), + "--with-tcl-ver=%d.%d" % (tcl_spec.version.version[ + 0], tcl_spec.version.version[1]), '--disable-debug', '--disable-dependency-tracking', '--disable-silent-rules', - '--disable-versioning', + '--disable-versioning', '--datarootdir=%s' % prefix.share, 'CPPFLAGS=%s' % ' '.join(CPPFLAGS) ] - configure(*config_args) make() make('install') diff --git a/var/spack/repos/builtin/packages/exodusii/package.py b/var/spack/repos/builtin/packages/exodusii/package.py index eb6c23afeb6..15fe1e89a66 100644 --- a/var/spack/repos/builtin/packages/exodusii/package.py +++ b/var/spack/repos/builtin/packages/exodusii/package.py @@ -32,17 +32,22 @@ # TODO: Create installation options for NetCDF that support larger page size # TODO: suggested by Exodus (see the repository "README" file). + class Exodusii(Package): - """Exodus II is a C++/Fortran library developed to store and retrieve data for - finite element analyses. It's used for preprocessing (problem definition), - postprocessing (results visualization), and data transfer between codes. - An Exodus II data file is a random access, machine independent, binary - file that is written and read via C, C++, or Fortran API routines.""" + """Exodus II is a C++/Fortran library developed to store and retrieve + data for finite element analyses. It's used for preprocessing + (problem definition), postprocessing (results visualization), and + data transfer between codes. An Exodus II data file is a random + access, machine independent, binary file that is written and read + via C, C++, or Fortran API routines. 
+ + """ homepage = "https://github.com/gsjaardema/seacas" url = "https://github.com/gsjaardema/seacas/archive/master.zip" - version('2016-02-08', git='https://github.com/gsjaardema/seacas.git', commit='dcf3529') + version('2016-02-08', + git='https://github.com/gsjaardema/seacas.git', commit='dcf3529') depends_on('cmake@2.8.7:', type='build') depends_on('hdf5~shared~mpi') @@ -54,11 +59,14 @@ def patch(self): ff = FileFilter('cmake-exodus') ff.filter('CMAKE_INSTALL_PREFIX:PATH=${ACCESS}', - 'CMAKE_INSTALL_PREFIX:PATH=%s' % self.spec.prefix, string=True) + 'CMAKE_INSTALL_PREFIX:PATH=%s' % self.spec.prefix, + string=True) ff.filter('NetCDF_DIR:PATH=${TPL}', - 'NetCDF_DIR:PATH=%s' % self.spec['netcdf'].prefix, string=True) + 'NetCDF_DIR:PATH=%s' % self.spec['netcdf'].prefix, + string=True) ff.filter('HDF5_ROOT:PATH=${TPL}', - 'HDF5_ROOT:PATH=%s' % self.spec['hdf5'].prefix, string=True) + 'HDF5_ROOT:PATH=%s' % self.spec['hdf5'].prefix, + string=True) def install(self, spec, prefix): mkdirp('build') diff --git a/var/spack/repos/builtin/packages/expat/package.py b/var/spack/repos/builtin/packages/expat/package.py index 2a9ac123f34..7d0fdae1d44 100644 --- a/var/spack/repos/builtin/packages/expat/package.py +++ b/var/spack/repos/builtin/packages/expat/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Expat(Package): """ is an XML parser library written in C""" homepage = "http://expat.sourceforge.net/" @@ -39,4 +40,3 @@ def install(self, spec, prefix): cmake('..', *std_cmake_args) make() make('install') - diff --git a/var/spack/repos/builtin/packages/extrae/package.py b/var/spack/repos/builtin/packages/extrae/package.py index 84c410e4ba2..72bfbe6326a 100644 --- a/var/spack/repos/builtin/packages/extrae/package.py +++ b/var/spack/repos/builtin/packages/extrae/package.py @@ -25,7 +25,23 @@ from spack import * # typical working line with extrae 3.0.1 -# ./configure --prefix=/usr/local --with-mpi=/usr/lib64/mpi/gcc/openmpi --with-unwind=/usr/local --with-papi=/usr --with-dwarf=/usr --with-elf=/usr --with-dyninst=/usr --with-binutils=/usr --with-xml-prefix=/usr --enable-openmp --enable-nanos --enable-pthread --disable-parallel-merge LDFLAGS=-pthread +# ./configure +# --prefix=/usr/local +# --with-mpi=/usr/lib64/mpi/gcc/openmpi +# --with-unwind=/usr/local +# --with-papi=/usr +# --with-dwarf=/usr +# --with-elf=/usr +# --with-dyninst=/usr +# --with-binutils=/usr +# --with-xml-prefix=/usr +# --enable-openmp +# --enable-nanos +# --enable-pthread +# --disable-parallel-merge +# +# LDFLAGS=-pthread + class Extrae(Package): """Extrae is the package devoted to generate tracefiles which can @@ -55,16 +71,16 @@ def install(self, spec, prefix): elif 'mvapich2' in spec: mpi = spec['mvapich2'] - configure("--prefix=%s" % prefix, - "--with-mpi=%s" % mpi.prefix, - "--with-unwind=%s" % spec['libunwind'].prefix, - "--with-dyninst=%s" % spec['dyninst'].prefix, - "--with-boost=%s" % spec['boost'].prefix, - "--with-dwarf=%s" % spec['libdwarf'].prefix, - "--with-papi=%s" % spec['papi'].prefix, - "--with-dyninst-headers=%s" % spec['dyninst'].prefix.include, - "--with-dyninst-libs=%s" % spec['dyninst'].prefix.lib) + configure("--prefix=%s" % prefix, + "--with-mpi=%s" % mpi.prefix, + "--with-unwind=%s" % spec['libunwind'].prefix, + "--with-dyninst=%s" % spec['dyninst'].prefix, + "--with-boost=%s" % spec['boost'].prefix, + "--with-dwarf=%s" % spec['libdwarf'].prefix, + "--with-papi=%s" % spec['papi'].prefix, + "--with-dyninst-headers=%s" % spec[ 
+ 'dyninst'].prefix.include, + "--with-dyninst-libs=%s" % spec['dyninst'].prefix.lib) make() make("install", parallel=False) - diff --git a/var/spack/repos/builtin/packages/exuberant-ctags/package.py b/var/spack/repos/builtin/packages/exuberant-ctags/package.py index c49d0624f67..10be30ab8bf 100644 --- a/var/spack/repos/builtin/packages/exuberant-ctags/package.py +++ b/var/spack/repos/builtin/packages/exuberant-ctags/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class ExuberantCtags(Package): """The canonical ctags generator""" homepage = "ctags.sourceforge.net" diff --git a/var/spack/repos/builtin/packages/fenics/package.py b/var/spack/repos/builtin/packages/fenics/package.py index 465ab651bec..1762b15b669 100644 --- a/var/spack/repos/builtin/packages/fenics/package.py +++ b/var/spack/repos/builtin/packages/fenics/package.py @@ -44,13 +44,18 @@ class Fenics(Package): variant('petsc', default=True, description='Compile with PETSc') variant('slepc', default=True, description='Compile with SLEPc') variant('trilinos', default=True, description='Compile with Trilinos') - variant('suite-sparse', default=True, description='Compile with SuiteSparse solvers') + variant('suite-sparse', default=True, + description='Compile with SuiteSparse solvers') variant('vtk', default=False, description='Compile with VTK') variant('qt', default=False, description='Compile with QT') - variant('mpi', default=True, description='Enables the distributed memory support') - variant('openmp', default=True, description='Enables the shared memory support') - variant('shared', default=True, description='Enables the build of shared libraries') - variant('debug', default=False, description='Builds a debug version of the libraries') + variant('mpi', default=True, + description='Enables the distributed memory support') + variant('openmp', default=True, + description='Enables the shared memory support') + variant('shared', default=True, + description='Enables the build of shared libraries') + variant('debug', default=False, + description='Builds a debug version of the libraries') # not part of spack list for now # variant('petsc4py', default=True, description='Uses PETSc4py') @@ -109,7 +114,8 @@ class Fenics(Package): ] for release in releases: - version(release['version'], release['md5'], url=base_url.format(pkg='dolfin', version=release['version'])) + version(release['version'], release['md5'], url=base_url.format( + pkg='dolfin', version=release['version'])) for name, md5 in release['resources'].items(): resource(name=name, url=base_url.format(pkg=name, **release), diff --git a/var/spack/repos/builtin/packages/fftw/package.py b/var/spack/repos/builtin/packages/fftw/package.py index 434aeea6163..570cd1bbdd1 100644 --- a/var/spack/repos/builtin/packages/fftw/package.py +++ b/var/spack/repos/builtin/packages/fftw/package.py @@ -28,26 +28,36 @@ class Fftw(Package): - """ - FFTW is a C subroutine library for computing the discrete Fourier transform (DFT) in one or more dimensions, of - arbitrary input size, and of both real and complex data (as well as of even/odd data, i.e. the discrete cosine/sine - transforms or DCT/DST). We believe that FFTW, which is free software, should become the FFT library of choice for - most applications. + """FFTW is a C subroutine library for computing the discrete Fourier + transform (DFT) in one or more dimensions, of arbitrary input + size, and of both real and complex data (as well as of even/odd + data, i.e. 
the discrete cosine/sine transforms or DCT/DST). We + believe that FFTW, which is free software, should become the FFT + library of choice for most applications. + """ homepage = "http://www.fftw.org" url = "http://www.fftw.org/fftw-3.3.4.tar.gz" version('3.3.4', '2edab8c06b24feeb3b82bbb3ebf3e7b3') - variant('float', default=True, description='Produces a single precision version of the library') - variant('long_double', default=True, description='Produces a long double precision version of the library') - variant('quad', default=False, description='Produces a quad precision version of the library (works only with GCC and libquadmath)') + variant( + 'float', default=True, + description='Produces a single precision version of the library') + variant( + 'long_double', default=True, + description='Produces a long double precision version of the library') + variant( + 'quad', default=False, + description='Produces a quad precision version of the library ' + '(works only with GCC and libquadmath)') variant('openmp', default=False, description="Enable OpenMP support.") variant('mpi', default=False, description='Activate MPI support') depends_on('mpi', when='+mpi') - # TODO : add support for architecture specific optimizations as soon as targets are supported + # TODO : add support for architecture specific optimizations as soon as + # targets are supported def install(self, spec, prefix): options = ['--prefix=%s' % prefix, @@ -57,9 +67,9 @@ def install(self, spec, prefix): if '+openmp' in spec: # Note: Apple's Clang does not support OpenMP. if spec.satisfies('%clang'): - ver = str(self.compiler.version) - if ver.endswith('-apple'): - raise InstallError("Apple's clang does not support OpenMP") + ver = str(self.compiler.version) + if ver.endswith('-apple'): + raise InstallError("Apple's clang does not support OpenMP") options.append('--enable-openmp') if not self.compiler.f77 or not self.compiler.fc: options.append("--disable-fortran") diff --git a/var/spack/repos/builtin/packages/fish/package.py b/var/spack/repos/builtin/packages/fish/package.py index 9d23a3d63d4..0e85f410c19 100644 --- a/var/spack/repos/builtin/packages/fish/package.py +++ b/var/spack/repos/builtin/packages/fish/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Fish(Package): """fish is a smart and user-friendly command line shell for OS X, Linux, and the rest of the family. diff --git a/var/spack/repos/builtin/packages/fltk/package.py b/var/spack/repos/builtin/packages/fltk/package.py index f8ac5bc2a46..f29b64b02b2 100644 --- a/var/spack/repos/builtin/packages/fltk/package.py +++ b/var/spack/repos/builtin/packages/fltk/package.py @@ -26,13 +26,16 @@ class Fltk(Package): - """ - FLTK (pronounced "fulltick") is a cross-platform C++ GUI toolkit for UNIX/Linux (X11), Microsoft Windows, and - MacOS X. FLTK provides modern GUI functionality without the bloat and supports 3D graphics via OpenGL and its - built-in GLUT emulation. + """FLTK (pronounced "fulltick") is a cross-platform C++ GUI toolkit for + UNIX/Linux (X11), Microsoft Windows, and MacOS X. FLTK provides + modern GUI functionality without the bloat and supports 3D + graphics via OpenGL and its built-in GLUT emulation. + + FLTK is designed to be small and modular enough to be statically + linked, but works fine as a shared library. FLTK also includes an + excellent UI builder called FLUID that can be used to create + applications in minutes. 
- FLTK is designed to be small and modular enough to be statically linked, but works fine as a shared library. FLTK - also includes an excellent UI builder called FLUID that can be used to create applications in minutes. """ homepage = 'http://www.fltk.org/' url = 'http://fltk.org/pub/fltk/1.3.3/fltk-1.3.3-source.tar.gz' @@ -41,7 +44,8 @@ class Fltk(Package): patch('font.patch', when='@1.3.3') - variant('shared', default=True, description='Enables the build of shared libraries') + variant('shared', default=True, + description='Enables the build of shared libraries') def install(self, spec, prefix): options = ['--prefix=%s' % prefix, diff --git a/var/spack/repos/builtin/packages/flux/package.py b/var/spack/repos/builtin/packages/flux/package.py index dec339a6aff..2fd2392b5ee 100644 --- a/var/spack/repos/builtin/packages/flux/package.py +++ b/var/spack/repos/builtin/packages/flux/package.py @@ -25,13 +25,15 @@ from spack import * import os + class Flux(Package): """ A next-generation resource manager (pre-alpha) """ homepage = "https://github.com/flux-framework/flux-core" url = "https://github.com/flux-framework/flux-core" - version('master', branch='master', git='https://github.com/flux-framework/flux-core') + version('master', branch='master', + git='https://github.com/flux-framework/flux-core') # Also needs autotools, but should use the system version if available depends_on("zeromq@4.0.4:") @@ -52,12 +54,11 @@ def install(self, spec, prefix): # Bootstrap with autotools bash = which('bash') bash('./autogen.sh') - bash('./autogen.sh') #yes, twice, intentionally + bash('./autogen.sh') # yes, twice, intentionally # Fix asciidoc dependency on xml style sheets and whatnot - os.environ['XML_CATALOG_FILES'] = os.path.join(spec['docbook-xml'].prefix, - 'catalog.xml') + os.environ['XML_CATALOG_FILES'] = os.path.join( + spec['docbook-xml'].prefix, 'catalog.xml') # Configure, compile & install configure("--prefix=" + prefix) make("install", "V=1") - diff --git a/var/spack/repos/builtin/packages/foam-extend/package.py b/var/spack/repos/builtin/packages/foam-extend/package.py index 0ff57658950..6b3b87a1105 100644 --- a/var/spack/repos/builtin/packages/foam-extend/package.py +++ b/var/spack/repos/builtin/packages/foam-extend/package.py @@ -16,12 +16,24 @@ class FoamExtend(Package): version('3.0', git='http://git.code.sf.net/p/foam-extend/foam-extend-3.0') variant('paraview', default=False, description='Enable ParaFOAM') - variant('scotch', default=True, description='Activate Scotch as a possible decomposition library') - variant('ptscotch', default=True, description='Activate PT-Scotch as a possible decomposition library') - variant('metis', default=True, description='Activate Metis as a possible decomposition library') - variant('parmetis', default=True, description='Activate Parmetis as a possible decomposition library') - variant('parmgridgen', default=True, description='Activate Parmgridgen support') - variant('source', default=True, description='Installs also the source folder') + variant( + 'scotch', default=True, + description='Activate Scotch as a possible decomposition library') + variant( + 'ptscotch', default=True, + description='Activate PT-Scotch as a possible decomposition library') + variant( + 'metis', default=True, + description='Activate Metis as a possible decomposition library') + variant( + 'parmetis', default=True, + description='Activate Parmetis as a possible decomposition library') + variant( + 'parmgridgen', default=True, + description='Activate Parmgridgen support') + 
variant( + 'source', default=True, + description='Installs also the source folder') supported_compilers = {'clang': 'Clang', 'gcc': 'Gcc', 'intel': 'Icc'} @@ -80,7 +92,7 @@ def patch(self): filter_file( r'-lMGridGen', r'-lmgrid', - 'src/fvAgglomerationMethods/MGridGenGamgAgglomeration/Make/options') # NOQA: ignore=501 + 'src/fvAgglomerationMethods/MGridGenGamgAgglomeration/Make/options') # noqa # Get the wmake arch and compiler (arch, foam_compiler) = self.set_arch() diff --git a/var/spack/repos/builtin/packages/fontconfig/package.py b/var/spack/repos/builtin/packages/fontconfig/package.py index 76e9d8cb3f0..0d7e47f228e 100644 --- a/var/spack/repos/builtin/packages/fontconfig/package.py +++ b/var/spack/repos/builtin/packages/fontconfig/package.py @@ -24,12 +24,13 @@ ############################################################################## from spack import * + class Fontconfig(Package): """Fontconfig customizing font access""" homepage = "http://www.freedesktop.org/wiki/Software/fontconfig/" url = "http://www.freedesktop.org/software/fontconfig/release/fontconfig-2.11.1.tar.gz" - version('2.11.1' , 'e75e303b4f7756c2b16203a57ac87eba') + version('2.11.1', 'e75e303b4f7756c2b16203a57ac87eba') depends_on('freetype') depends_on('libxml2') diff --git a/var/spack/repos/builtin/packages/gasnet/package.py b/var/spack/repos/builtin/packages/gasnet/package.py index b3bd6c25f29..12ecd9fd6f2 100644 --- a/var/spack/repos/builtin/packages/gasnet/package.py +++ b/var/spack/repos/builtin/packages/gasnet/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Gasnet(Package): """GASNet is a language-independent, low-level networking layer that provides network-independent, high-performance communication @@ -36,24 +37,24 @@ class Gasnet(Package): version('1.24.0', 'c8afdf48381e8b5a7340bdb32ca0f41a') - def install(self, spec, prefix): # TODO: don't use paths with @ in them. change_sed_delimiter('@', ';', 'configure') - configure("--prefix=%s" % prefix, - # TODO: factor IB suport out into architecture description. - "--enable-ibv", - "--enable-udp", - "--disable-mpi", - "--enable-par", - "--enable-mpi-compat", - "--enable-segment-fast", - "--disable-aligned-segments", - # TODO: make an option so that Legion can request builds with/without this. - # See the Legion webpage for details on when to/not to use. - "--disable-pshm", - "--with-segment-mmap-max=64MB") + configure( + "--prefix=%s" % prefix, + # TODO: factor IB suport out into architecture description. + "--enable-ibv", + "--enable-udp", + "--disable-mpi", + "--enable-par", + "--enable-mpi-compat", + "--enable-segment-fast", + "--disable-aligned-segments", + # TODO: make option so Legion can request builds with/without this. + # See the Legion webpage for details on when to/not to use. 
+ "--disable-pshm", + "--with-segment-mmap-max=64MB") make() make("install") diff --git a/var/spack/repos/builtin/packages/gdal/package.py b/var/spack/repos/builtin/packages/gdal/package.py index 9ed83bc3be9..4fdfafc9929 100644 --- a/var/spack/repos/builtin/packages/gdal/package.py +++ b/var/spack/repos/builtin/packages/gdal/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Gdal(Package): """ GDAL is a translator library for raster and vector geospatial diff --git a/var/spack/repos/builtin/packages/gdb/package.py b/var/spack/repos/builtin/packages/gdb/package.py index 78ad4b307dc..9145009fa42 100644 --- a/var/spack/repos/builtin/packages/gdb/package.py +++ b/var/spack/repos/builtin/packages/gdb/package.py @@ -27,9 +27,10 @@ class Gdb(Package): - """ - GDB, the GNU Project debugger, allows you to see what is going on `inside' another program while it executes - -- or what another program was doing at the moment it crashed. + """GDB, the GNU Project debugger, allows you to see what is going on + `inside' another program while it executes -- or what another + program was doing at the moment it crashed. + """ homepage = "https://www.gnu.org/software/gdb" url = "http://ftp.gnu.org/gnu/gdb/gdb-7.10.tar.gz" diff --git a/var/spack/repos/builtin/packages/gdk-pixbuf/package.py b/var/spack/repos/builtin/packages/gdk-pixbuf/package.py index daf43f32562..d7a02003959 100644 --- a/var/spack/repos/builtin/packages/gdk-pixbuf/package.py +++ b/var/spack/repos/builtin/packages/gdk-pixbuf/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class GdkPixbuf(Package): """The Gdk Pixbuf is a toolkit for image loading and pixel buffer manipulation. It is used by GTK+ 2 and GTK+ 3 to load and diff --git a/var/spack/repos/builtin/packages/geos/package.py b/var/spack/repos/builtin/packages/geos/package.py index 88438b0a99d..324186cfbc7 100644 --- a/var/spack/repos/builtin/packages/geos/package.py +++ b/var/spack/repos/builtin/packages/geos/package.py @@ -23,7 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import os + class Geos(Package): """GEOS (Geometry Engine - Open Source) is a C++ port of the Java diff --git a/var/spack/repos/builtin/packages/gflags/package.py b/var/spack/repos/builtin/packages/gflags/package.py index 47bbf369efd..7e04c9b682e 100644 --- a/var/spack/repos/builtin/packages/gflags/package.py +++ b/var/spack/repos/builtin/packages/gflags/package.py @@ -22,9 +22,9 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os from spack import * + class Gflags(Package): """The gflags package contains a C++ library that implements commandline flags processing. 
It includes built-in support for diff --git a/var/spack/repos/builtin/packages/gl2ps/package.py b/var/spack/repos/builtin/packages/gl2ps/package.py index 25172bd544f..d5e7b00027d 100644 --- a/var/spack/repos/builtin/packages/gl2ps/package.py +++ b/var/spack/repos/builtin/packages/gl2ps/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Gl2ps(Package): """GL2PS is a C library providing high quality vector output for any OpenGL application.""" diff --git a/var/spack/repos/builtin/packages/glm/package.py b/var/spack/repos/builtin/packages/glm/package.py index 442c1cdf40a..c565b3cae76 100644 --- a/var/spack/repos/builtin/packages/glm/package.py +++ b/var/spack/repos/builtin/packages/glm/package.py @@ -26,9 +26,10 @@ class Glm(Package): - """ - OpenGL Mathematics (GLM) is a header only C++ mathematics library for graphics software based on - the OpenGL Shading Language (GLSL) specification. + """OpenGL Mathematics (GLM) is a header only C++ mathematics library for + graphics software based on the OpenGL Shading Language (GLSL) + specification. + """ homepage = "https://github.com/g-truc/glm" diff --git a/var/spack/repos/builtin/packages/glog/package.py b/var/spack/repos/builtin/packages/glog/package.py index 03ee092429b..14f042732b8 100644 --- a/var/spack/repos/builtin/packages/glog/package.py +++ b/var/spack/repos/builtin/packages/glog/package.py @@ -22,9 +22,9 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os from spack import * + class Glog(Package): """C++ implementation of the Google logging module.""" diff --git a/var/spack/repos/builtin/packages/glpk/package.py b/var/spack/repos/builtin/packages/glpk/package.py index 2ab3c38150c..1b52643e595 100644 --- a/var/spack/repos/builtin/packages/glpk/package.py +++ b/var/spack/repos/builtin/packages/glpk/package.py @@ -22,22 +22,23 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## - from spack import * class Glpk(Package): - """ - The GLPK (GNU Linear Programming Kit) package is intended for solving large-scale linear programming (LP), mixed - integer programming (MIP), and other related problems. It is a set of routines written in ANSI C and organized in - the form of a callable library + """The GLPK (GNU Linear Programming Kit) package is intended for solving + large-scale linear programming (LP), mixed integer programming + (MIP), and other related problems. 
It is a set of routines written + in ANSI C and organized in the form of a callable library + """ homepage = "https://www.gnu.org/software/glpk" url = "http://ftp.gnu.org/gnu/glpk/glpk-4.57.tar.gz" version('4.57', '237531a54f73155842f8defe51aedb0f') - variant('gmp', default=False, description='Activates support for GMP library') + variant('gmp', default=False, + description='Activates support for GMP library') depends_on('gmp', when='+gmp') diff --git a/var/spack/repos/builtin/packages/gmsh/package.py b/var/spack/repos/builtin/packages/gmsh/package.py index fe26cb3bf2d..72f490e2f48 100644 --- a/var/spack/repos/builtin/packages/gmsh/package.py +++ b/var/spack/repos/builtin/packages/gmsh/package.py @@ -41,15 +41,21 @@ class Gmsh(Package): version('2.12.0', '7fbd2ec8071e79725266e72744d21e902d4fe6fa9e7c52340ad5f4be5c159d09') version('2.11.0', 'f15b6e7ac9ca649c9a74440e1259d0db') - variant('shared', default=True, description='Enables the build of shared libraries') - variant('debug', default=False, description='Builds the library in debug mode') - variant('mpi', default=False, description='Builds MPI support for parser and solver') - variant('fltk', default=False, description='Enables the build of the FLTK GUI') + variant('shared', default=True, + description='Enables the build of shared libraries') + variant('debug', default=False, + description='Builds the library in debug mode') + variant('mpi', default=False, + description='Builds MPI support for parser and solver') + variant('fltk', default=False, + description='Enables the build of the FLTK GUI') variant('hdf5', default=False, description='Enables HDF5 support') - variant('compression', default=True, description='Enables IO compression through zlib') + variant('compression', default=True, + description='Enables IO compression through zlib') variant('oce', default=False, description='Build with OCE') variant('petsc', default=False, description='Build with PETSc') - variant('slepc', default=False, description='Build with SLEPc (only when PETSc is enabled)') + variant('slepc', default=False, + description='Build with SLEPc (only when PETSc is enabled)') depends_on('blas') depends_on('lapack') diff --git a/var/spack/repos/builtin/packages/gnu-prolog/package.py b/var/spack/repos/builtin/packages/gnu-prolog/package.py index f497ed30019..1e0487c6541 100644 --- a/var/spack/repos/builtin/packages/gnu-prolog/package.py +++ b/var/spack/repos/builtin/packages/gnu-prolog/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class GnuProlog(Package): """A free Prolog compiler with constraint solving over finite domains.""" homepage = "http://www.gprolog.org/" diff --git a/var/spack/repos/builtin/packages/gnuplot/package.py b/var/spack/repos/builtin/packages/gnuplot/package.py index a76677e066a..600b6d285f1 100644 --- a/var/spack/repos/builtin/packages/gnuplot/package.py +++ b/var/spack/repos/builtin/packages/gnuplot/package.py @@ -27,13 +27,18 @@ import os + class Gnuplot(Package): - """ - Gnuplot is a portable command-line driven graphing utility for Linux, OS/2, MS Windows, OSX, VMS, and many other - platforms. The source code is copyrighted but freely distributed (i.e., you don't have to pay for it). It was - originally created to allow scientists and students to visualize mathematical functions and data interactively, - but has grown to support many non-interactive uses such as web scripting. It is also used as a plotting engine by - third-party applications like Octave. 
Gnuplot has been supported and under active development since 1986 + """Gnuplot is a portable command-line driven graphing utility for Linux, + OS/2, MS Windows, OSX, VMS, and many other platforms. The source + code is copyrighted but freely distributed (i.e., you don't have + to pay for it). It was originally created to allow scientists and + students to visualize mathematical functions and data + interactively, but has grown to support many non-interactive uses + such as web scripting. It is also used as a plotting engine by + third-party applications like Octave. Gnuplot has been supported + and under active development since 1986 + """ homepage = "http://www.gnuplot.info" url = "http://downloads.sourceforge.net/project/gnuplot/gnuplot/5.0.1/gnuplot-5.0.1.tar.gz" diff --git a/var/spack/repos/builtin/packages/gnutls/package.py b/var/spack/repos/builtin/packages/gnutls/package.py index 71b571bc884..5f7b0daf9bb 100644 --- a/var/spack/repos/builtin/packages/gnutls/package.py +++ b/var/spack/repos/builtin/packages/gnutls/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Gnutls(Package): """GnuTLS is a secure communications library implementing the SSL, TLS and DTLS protocols and technologies around them. It diff --git a/var/spack/repos/builtin/packages/googletest/package.py b/var/spack/repos/builtin/packages/googletest/package.py index 1dc74fbcf4b..6f3cafec06b 100644 --- a/var/spack/repos/builtin/packages/googletest/package.py +++ b/var/spack/repos/builtin/packages/googletest/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Googletest(Package): """Google test framework for C++. Also called gtest.""" homepage = "https://github.com/google/googletest" @@ -40,9 +41,8 @@ def install(self, spec, prefix): # Google Test doesn't have a make install # We have to do our own install here. - install_tree('include', prefix.include) + install_tree('include', prefix.include) mkdirp(prefix.lib) - install('./libgtest.a', '%s' % prefix.lib) - install('./libgtest_main.a', '%s' % prefix.lib) - + install('./libgtest.a', '%s' % prefix.lib) + install('./libgtest_main.a', '%s' % prefix.lib) diff --git a/var/spack/repos/builtin/packages/gperf/package.py b/var/spack/repos/builtin/packages/gperf/package.py index af176afcf61..0e54d0e0372 100644 --- a/var/spack/repos/builtin/packages/gperf/package.py +++ b/var/spack/repos/builtin/packages/gperf/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Gperf(Package): """GNU gperf is a perfect hash function generator. For a given list of strings, it produces a hash function and hash table, in diff --git a/var/spack/repos/builtin/packages/gperftools/package.py b/var/spack/repos/builtin/packages/gperftools/package.py index 1f17ab71a41..c6ca6c8057d 100644 --- a/var/spack/repos/builtin/packages/gperftools/package.py +++ b/var/spack/repos/builtin/packages/gperftools/package.py @@ -24,14 +24,20 @@ ############################################################################## from spack import * + class Gperftools(Package): - """Google's fast malloc/free implementation, especially for multi-threaded applications. - Contains tcmalloc, heap-checker, heap-profiler, and cpu-profiler.""" + """Google's fast malloc/free implementation, especially for + multi-threaded applications. 
Contains tcmalloc, heap-checker, + heap-profiler, and cpu-profiler. + + """ homepage = "https://code.google.com/p/gperftools" url = "https://googledrive.com/host/0B6NtGsLhIcf7MWxMMF9JdTN3UVk/gperftools-2.3.tar.gz" - version('2.4', '2171cea3bbe053036fb5d5d25176a160', url="https://github.com/gperftools/gperftools/releases/download/gperftools-2.4/gperftools-2.4.tar.gz") - version('2.3', 'f54dd119f0e46ac1f13264f8d97adf90', url="https://googledrive.com/host/0B6NtGsLhIcf7MWxMMF9JdTN3UVk/gperftools-2.3.tar.gz") + version('2.4', '2171cea3bbe053036fb5d5d25176a160', + url="https://github.com/gperftools/gperftools/releases/download/gperftools-2.4/gperftools-2.4.tar.gz") + version('2.3', 'f54dd119f0e46ac1f13264f8d97adf90', + url="https://googledrive.com/host/0B6NtGsLhIcf7MWxMMF9JdTN3UVk/gperftools-2.3.tar.gz") depends_on("libunwind") diff --git a/var/spack/repos/builtin/packages/graphlib/package.py b/var/spack/repos/builtin/packages/graphlib/package.py index 087a322acc9..0c3cd9b649d 100644 --- a/var/spack/repos/builtin/packages/graphlib/package.py +++ b/var/spack/repos/builtin/packages/graphlib/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Graphlib(Package): """Library to create, manipulate, and export graphs Graphlib.""" homepage = "http://https://github.com/lee218llnl/graphlib" diff --git a/var/spack/repos/builtin/packages/gsl/package.py b/var/spack/repos/builtin/packages/gsl/package.py index c1695a6f025..574d3b94029 100644 --- a/var/spack/repos/builtin/packages/gsl/package.py +++ b/var/spack/repos/builtin/packages/gsl/package.py @@ -27,17 +27,19 @@ class Gsl(Package): - """ - The GNU Scientific Library (GSL) is a numerical library for C and C++ programmers. It is free software under the - GNU General Public License. The library provides a wide range of mathematical routines such as random number - generators, special functions and least-squares fitting. There are over 1000 functions in total with an extensive - test suite. + """The GNU Scientific Library (GSL) is a numerical library for C and C++ + programmers. It is free software under the GNU General Public + License. The library provides a wide range of mathematical + routines such as random number generators, special functions and + least-squares fitting. There are over 1000 functions in total with + an extensive test suite. 
+ """ homepage = "http://www.gnu.org/software/gsl" url = "http://mirror.switch.ch/ftp/mirror/gnu/gsl/gsl-2.1.tar.gz" - version('2.1' , 'd8f70abafd3e9f0bae03c52d1f4e8de5') - version('2.0' , 'ae44cdfed78ece40e73411b63a78c375') + version('2.1', 'd8f70abafd3e9f0bae03c52d1f4e8de5') + version('2.0', 'ae44cdfed78ece40e73411b63a78c375') version('1.16', 'e49a664db13d81c968415cd53f62bc8b') def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/gtkplus/package.py b/var/spack/repos/builtin/packages/gtkplus/package.py index c135e89f78b..c85bbe4094a 100644 --- a/var/spack/repos/builtin/packages/gtkplus/package.py +++ b/var/spack/repos/builtin/packages/gtkplus/package.py @@ -24,12 +24,15 @@ ############################################################################## from spack import * + class Gtkplus(Package): - """The GTK+ 2 package contains libraries used for creating graphical user interfaces for applications.""" + """The GTK+ 2 package contains libraries used for creating graphical user + interfaces for applications.""" homepage = "http://www.gtk.org" - version('2.24.25', '612350704dd3aacb95355a4981930c6f', - url="http://ftp.gnome.org/pub/gnome/sources/gtk+/2.24/gtk+-2.24.25.tar.xz") + version( + '2.24.25', '612350704dd3aacb95355a4981930c6f', + url="http://ftp.gnome.org/pub/gnome/sources/gtk+/2.24/gtk+-2.24.25.tar.xz") depends_on("atk") depends_on("gdk-pixbuf") diff --git a/var/spack/repos/builtin/packages/hdf/package.py b/var/spack/repos/builtin/packages/hdf/package.py index 7ad4df2fdea..aafb345be6a 100644 --- a/var/spack/repos/builtin/packages/hdf/package.py +++ b/var/spack/repos/builtin/packages/hdf/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Hdf(Package): """HDF4 (also known as HDF) is a library and multi-object file format for storing and managing data between machines.""" @@ -41,10 +42,9 @@ class Hdf(Package): depends_on("szip", when='+szip') depends_on("zlib") - def url_for_version(self, version): - return "https://www.hdfgroup.org/ftp/HDF/releases/HDF" + str(version) + "/src/hdf-" + str(version) + ".tar.gz" - + return "https://www.hdfgroup.org/ftp/HDF/releases/HDF" + str( + version) + "/src/hdf-" + str(version) + ".tar.gz" def install(self, spec, prefix): config_args = [ @@ -52,9 +52,9 @@ def install(self, spec, prefix): '--prefix=%s' % prefix, '--with-jpeg=%s' % spec['jpeg'].prefix, '--with-zlib=%s' % spec['zlib'].prefix, - '--disable-netcdf', # must be disabled to build NetCDF with HDF4 support + '--disable-netcdf', # must be disabled to build NetCDF with HDF4 '--enable-fortran', - '--disable-shared', # fortran and shared libraries are not compatible + '--disable-shared', # fortran and shared libs are not compatible '--enable-static', '--enable-production' ] diff --git a/var/spack/repos/builtin/packages/hdf5-blosc/package.py b/var/spack/repos/builtin/packages/hdf5-blosc/package.py index 28b65711b0f..89c20f3998d 100644 --- a/var/spack/repos/builtin/packages/hdf5-blosc/package.py +++ b/var/spack/repos/builtin/packages/hdf5-blosc/package.py @@ -29,6 +29,7 @@ from spack import * + def _install_shlib(name, src, dst): """Install a shared library from directory src to directory dst""" if sys.platform == "darwin": @@ -44,6 +45,7 @@ def _install_shlib(name, src, dst): os.symlink(shlib000, join_path(dst, shlib0)) os.symlink(shlib0, join_path(dst, shlib)) + class Hdf5Blosc(Package): """Blosc filter for HDF5""" homepage = "https://github.com/Blosc/hdf5-blosc" @@ -60,18 +62,21 @@ class 
Hdf5Blosc(Package): def install(self, spec, prefix): # The included cmake recipe doesn"t work for Darwin # cmake(".", *std_cmake_args) - # + # # make() # make("install") # if sys.platform == "darwin": # fix_darwin_install_name(prefix.lib) libtool = Executable(join_path(spec["libtool"].prefix.bin, "libtool")) - if "+mpi" in spec["hdf5"]: - cc = "mpicc" - else: - cc = "cc" - shlibext = "so" if sys.platform!="darwin" else "dylib" + + # TODO: these vars are not used. + # if "+mpi" in spec["hdf5"]: + # cc = "mpicc" # noqa + # else: + # cc = "cc" # noqa + # shlibext = "so" if sys.platform != "darwin" else "dylib" # noqa + mkdirp(prefix.include) mkdirp(prefix.lib) @@ -118,7 +123,7 @@ def check_install(self, spec): #include #include -#define FILTER_BLOSC 32001 /* Blosc filter ID registered with the HDF group */ +#define FILTER_BLOSC 32001 /* Blosc filter ID registered with the HDF group */ int main(int argc, char **argv) { herr_t herr; @@ -184,13 +189,13 @@ def check_install(self, spec): if not success: print "Produced output does not match expected output." print "Expected output:" - print "-"*80 + print "-" * 80 print expected - print "-"*80 + print "-" * 80 print "Produced output:" - print "-"*80 + print "-" * 80 print output - print "-"*80 + print "-" * 80 print "Environment:" env = which("env") env() diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py index d169940c863..aedaf182181 100644 --- a/var/spack/repos/builtin/packages/hdf5/package.py +++ b/var/spack/repos/builtin/packages/hdf5/package.py @@ -44,15 +44,18 @@ class Hdf5(Package): version('1.8.15', '03cccb5b33dbe975fdcd8ae9dc021f24') version('1.8.13', 'c03426e9e77d7766944654280b467289') - variant('debug', default=False, description='Builds a debug version of the library') - variant('shared', default=True, description='Builds a shared version of the library') + variant('debug', default=False, + description='Builds a debug version of the library') + variant('shared', default=True, + description='Builds a shared version of the library') variant('cxx', default=True, description='Enable C++ support') variant('fortran', default=True, description='Enable Fortran support') variant('mpi', default=False, description='Enable MPI support') variant('szip', default=False, description='Enable szip support') - variant('threadsafe', default=False, description='Enable thread-safe capabilities') + variant('threadsafe', default=False, + description='Enable thread-safe capabilities') depends_on("mpi", when='+mpi') depends_on("szip", when='+szip') diff --git a/var/spack/repos/builtin/packages/hoomd-blue/package.py b/var/spack/repos/builtin/packages/hoomd-blue/package.py index 060cd0b713d..79e99e7b894 100644 --- a/var/spack/repos/builtin/packages/hoomd-blue/package.py +++ b/var/spack/repos/builtin/packages/hoomd-blue/package.py @@ -25,6 +25,7 @@ from spack import * import os + class HoomdBlue(Package): """HOOMD-blue is a general-purpose particle simulation toolkit. It scales from a single CPU core to thousands of GPUs. 
@@ -56,7 +57,7 @@ def install(self, spec, prefix): cmake_args = [ '-DPYTHON_EXECUTABLE=%s/python' % spec['python'].prefix.bin, - '-DBOOST_ROOT=%s' % spec['boost' ].prefix + '-DBOOST_ROOT=%s' % spec['boost'].prefix ] # MPI support @@ -73,9 +74,9 @@ def install(self, spec, prefix): cmake_args.append('-DENABLE_CUDA=OFF') # CUDA-aware MPI library support - #if '+cuda' in spec and '+mpi' in spec: + # if '+cuda' in spec and '+mpi' in spec: # cmake_args.append('-DENABLE_MPI_CUDA=ON') - #else: + # else: # cmake_args.append('-DENABLE_MPI_CUDA=OFF') # There may be a bug in the MPI-CUDA code. See: diff --git a/var/spack/repos/builtin/packages/hpx5/package.py b/var/spack/repos/builtin/packages/hpx5/package.py index cd0c0b7a7b9..686e9597194 100644 --- a/var/spack/repos/builtin/packages/hpx5/package.py +++ b/var/spack/repos/builtin/packages/hpx5/package.py @@ -25,6 +25,7 @@ from spack import * import os + class Hpx5(Package): """The HPX-5 Runtime System. HPX-5 (High Performance ParalleX) is an open source, portable, performance-oriented runtime developed at diff --git a/var/spack/repos/builtin/packages/hwloc/package.py b/var/spack/repos/builtin/packages/hwloc/package.py index ee0168c38e7..c1636288403 100644 --- a/var/spack/repos/builtin/packages/hwloc/package.py +++ b/var/spack/repos/builtin/packages/hwloc/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Hwloc(Package): """The Portable Hardware Locality (hwloc) software package provides a portable abstraction (across OS, versions, diff --git a/var/spack/repos/builtin/packages/hydra/package.py b/var/spack/repos/builtin/packages/hydra/package.py index 3d56056022f..eee346ba498 100644 --- a/var/spack/repos/builtin/packages/hydra/package.py +++ b/var/spack/repos/builtin/packages/hydra/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Hydra(Package): """Hydra is a process management system for starting parallel jobs. 
Hydra is designed to natively work with existing launcher daemons @@ -37,7 +38,6 @@ class Hydra(Package): version('3.2', '4d670916695bf7e3a869cc336a881b39') - def install(self, spec, prefix): configure('--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/hypre/package.py b/var/spack/repos/builtin/packages/hypre/package.py index b339e068bfe..fdc236dcf41 100644 --- a/var/spack/repos/builtin/packages/hypre/package.py +++ b/var/spack/repos/builtin/packages/hypre/package.py @@ -39,9 +39,11 @@ class Hypre(Package): version('2.10.0b', '768be38793a35bb5d055905b271f5b8e') # hypre does not know how to build shared libraries on Darwin - variant('shared', default=(sys.platform != 'darwin'), description="Build shared library version (disables static library)") + variant('shared', default=(sys.platform != 'darwin'), + description="Build shared library (disables static library)") # SuperluDist have conflicting headers with those in Hypre - variant('internal-superlu', default=True, description="Use internal Superlu routines") + variant('internal-superlu', default=True, + description="Use internal Superlu routines") depends_on("mpi") depends_on("blas") diff --git a/var/spack/repos/builtin/packages/ibmisc/package.py b/var/spack/repos/builtin/packages/ibmisc/package.py index ed1e6c6cbf3..8e83058e94f 100644 --- a/var/spack/repos/builtin/packages/ibmisc/package.py +++ b/var/spack/repos/builtin/packages/ibmisc/package.py @@ -1,5 +1,6 @@ from spack import * + class Ibmisc(CMakePackage): """Misc. reusable utilities used by IceBin.""" @@ -8,14 +9,22 @@ class Ibmisc(CMakePackage): version('0.1.0', '12f2a32432a11db48e00217df18e59fa') - variant('everytrace', default=False, description='Report errors through Everytrace') - variant('proj', default=True, description='Compile utilities for PROJ.4 library') - variant('blitz', default=True, description='Compile utilities for Blitz library') - variant('netcdf', default=True, description='Compile utilities for NetCDF library') - variant('boost', default=True, description='Compile utilities for Boost library') - variant('udunits2', default=True, description='Compile utilities for UDUNITS2 library') - variant('googletest', default=True, description='Compile utilities for Google Test library') - variant('python', default=True, description='Compile utilities for use with Python/Cython') + variant('everytrace', default=False, + description='Report errors through Everytrace') + variant('proj', default=True, + description='Compile utilities for PROJ.4 library') + variant('blitz', default=True, + description='Compile utilities for Blitz library') + variant('netcdf', default=True, + description='Compile utilities for NetCDF library') + variant('boost', default=True, + description='Compile utilities for Boost library') + variant('udunits2', default=True, + description='Compile utilities for UDUNITS2 library') + variant('googletest', default=True, + description='Compile utilities for Google Test library') + variant('python', default=True, + description='Compile utilities for use with Python/Cython') extends('python') diff --git a/var/spack/repos/builtin/packages/icu/package.py b/var/spack/repos/builtin/packages/icu/package.py index c7cabb5d953..b8d366b905b 100644 --- a/var/spack/repos/builtin/packages/icu/package.py +++ b/var/spack/repos/builtin/packages/icu/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Icu(Package): """The International Components for Unicode (ICU) package is a 
mature, widely used set of C/C++ libraries providing Unicode and @@ -36,12 +37,10 @@ class Icu(Package): version('54.1', 'e844caed8f2ca24c088505b0d6271bc0') - def url_for_version(self, version): return "http://download.icu-project.org/files/icu4c/%s/icu4c-%s-src.tgz" % ( version, str(version).replace('.', '_')) - def install(self, spec, prefix): with working_dir("source"): configure("--prefix=%s" % prefix) diff --git a/var/spack/repos/builtin/packages/icu4c/package.py b/var/spack/repos/builtin/packages/icu4c/package.py index 6cfec99c3f3..2bcf86cd13c 100644 --- a/var/spack/repos/builtin/packages/icu4c/package.py +++ b/var/spack/repos/builtin/packages/icu4c/package.py @@ -24,9 +24,12 @@ ############################################################################## from spack import * + class Icu4c(Package): - """ICU is a mature, widely used set of C/C++ and Java libraries - providing Unicode and Globalization support for software applications.""" + """ICU is a mature, widely used set of C/C++ and Java libraries providing + Unicode and Globalization support for software applications. + + """ homepage = "http://site.icu-project.org/" url = "http://downloads.sourceforge.net/project/icu/ICU4C/54.1/icu4c-54_1-src.tgz" diff --git a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py index 9ef1724ae00..65db3351a15 100644 --- a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py +++ b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py @@ -17,19 +17,19 @@ class IntelParallelStudio(IntelInstaller): # TODO: can also try the online installer (will download files on demand) version('composer.2016.2', '1133fb831312eb519f7da897fec223fa', - url="file://%s/parallel_studio_xe_2016_composer_edition_update2.tgz" # NOQA: ignore=E501 + url="file://%s/parallel_studio_xe_2016_composer_edition_update2.tgz" % os.getcwd()) version('professional.2016.2', '70be832f2d34c9bf596a5e99d5f2d832', - url="file://%s/parallel_studio_xe_2016_update2.tgz" % os.getcwd()) # NOQA: ignore=E501 + url="file://%s/parallel_studio_xe_2016_update2.tgz" % os.getcwd()) version('cluster.2016.2', '70be832f2d34c9bf596a5e99d5f2d832', - url="file://%s/parallel_studio_xe_2016_update2.tgz" % os.getcwd()) # NOQA: ignore=E501 + url="file://%s/parallel_studio_xe_2016_update2.tgz" % os.getcwd()) version('composer.2016.3', '3208eeabee951fc27579177b593cefe9', - url="file://%s/parallel_studio_xe_2016_composer_edition_update3.tgz" # NOQA: ignore=E501 + url="file://%s/parallel_studio_xe_2016_composer_edition_update3.tgz" % os.getcwd()) version('professional.2016.3', 'eda19bb0d0d19709197ede58f13443f3', - url="file://%s/parallel_studio_xe_2016_update3.tgz" % os.getcwd()) # NOQA: ignore=E501 + url="file://%s/parallel_studio_xe_2016_update3.tgz" % os.getcwd()) version('cluster.2016.3', 'eda19bb0d0d19709197ede58f13443f3', - url="file://%s/parallel_studio_xe_2016_update3.tgz" % os.getcwd()) # NOQA: ignore=E501 + url="file://%s/parallel_studio_xe_2016_update3.tgz" % os.getcwd()) variant('rpath', default=True, description="Add rpath to .cfg files") variant('newdtags', default=False, @@ -104,7 +104,7 @@ def install(self, spec, prefix): if spec.satisfies('+ipp'): components += ipp_components if spec.satisfies('+tools') and (spec.satisfies('@cluster') or - spec.satisfies('@professional')): + spec.satisfies('@professional')): components += tool_components if spec.satisfies('+all'): @@ -113,10 +113,11 @@ def install(self, spec, prefix): self.intel_components = 
';'.join(components) IntelInstaller.install(self, spec, prefix) - absbindir = os.path.dirname(os.path.realpath(os.path.join( - self.prefix.bin, "icc"))) - abslibdir = os.path.dirname(os.path.realpath(os.path.join - (self.prefix.lib, "intel64", "libimf.a"))) + absbindir = os.path.dirname( + os.path.realpath(os.path.join(self.prefix.bin, "icc"))) + abslibdir = os.path.dirname( + os.path.realpath(os.path.join( + self.prefix.lib, "intel64", "libimf.a"))) os.symlink(self.global_license_file, os.path.join(absbindir, "license.lic")) @@ -134,31 +135,31 @@ def install(self, spec, prefix): if (spec.satisfies('+all') or spec.satisfies('+mpi')) and \ spec.satisfies('@cluster'): - for ifile in os.listdir(os.path.join(self.prefix, "itac")): - if os.path.isdir(os.path.join(self.prefix, "itac", ifile)): - os.symlink(self.global_license_file, - os.path.join(self.prefix, "itac", ifile, - "license.lic")) - if os.path.isdir(os.path.join(self.prefix, "itac", - ifile, "intel64")): - os.symlink(self.global_license_file, - os.path.join(self.prefix, "itac", - ifile, "intel64", - "license.lic")) - if spec.satisfies('~newdtags'): - wrappers = ["mpif77", "mpif77", "mpif90", "mpif90", - "mpigcc", "mpigcc", "mpigxx", "mpigxx", - "mpiicc", "mpiicc", "mpiicpc", "mpiicpc", - "mpiifort", "mpiifort"] - wrapper_paths = [] - for root, dirs, files in os.walk(spec.prefix): - for name in files: - if name in wrappers: - wrapper_paths.append(os.path.join(spec.prefix, - root, name)) - for wrapper in wrapper_paths: - filter_file(r'-Xlinker --enable-new-dtags', r' ', - wrapper) + for ifile in os.listdir(os.path.join(self.prefix, "itac")): + if os.path.isdir(os.path.join(self.prefix, "itac", ifile)): + os.symlink(self.global_license_file, + os.path.join(self.prefix, "itac", ifile, + "license.lic")) + if os.path.isdir(os.path.join(self.prefix, "itac", + ifile, "intel64")): + os.symlink(self.global_license_file, + os.path.join(self.prefix, "itac", + ifile, "intel64", + "license.lic")) + if spec.satisfies('~newdtags'): + wrappers = ["mpif77", "mpif77", "mpif90", "mpif90", + "mpigcc", "mpigcc", "mpigxx", "mpigxx", + "mpiicc", "mpiicc", "mpiicpc", "mpiicpc", + "mpiifort", "mpiifort"] + wrapper_paths = [] + for root, dirs, files in os.walk(spec.prefix): + for name in files: + if name in wrappers: + wrapper_paths.append(os.path.join(spec.prefix, + root, name)) + for wrapper in wrapper_paths: + filter_file(r'-Xlinker --enable-new-dtags', r' ', + wrapper) if spec.satisfies('+rpath'): for compiler_command in ["icc", "icpc", "ifort"]: diff --git a/var/spack/repos/builtin/packages/intltool/package.py b/var/spack/repos/builtin/packages/intltool/package.py index 48830c474a4..e9347346911 100644 --- a/var/spack/repos/builtin/packages/intltool/package.py +++ b/var/spack/repos/builtin/packages/intltool/package.py @@ -24,8 +24,12 @@ ############################################################################## from spack import * + class Intltool(Package): - """intltool is a set of tools to centralize translation of many different file formats using GNU gettext-compatible PO files.""" + """intltool is a set of tools to centralize translation of many different + file formats using GNU gettext-compatible PO files. 
+ + """ homepage = 'https://freedesktop.org/wiki/Software/intltool/' version('0.51.0', '12e517cac2b57a0121cda351570f1e63') @@ -37,7 +41,7 @@ def url_for_version(self, version): def install(self, spec, prefix): # configure, build, install: - options = ['--prefix=%s' % prefix ] + options = ['--prefix=%s' % prefix] configure(*options) make() make('install') diff --git a/var/spack/repos/builtin/packages/ior/package.py b/var/spack/repos/builtin/packages/ior/package.py index 6aa72e5cc99..b8825c0fe4c 100644 --- a/var/spack/repos/builtin/packages/ior/package.py +++ b/var/spack/repos/builtin/packages/ior/package.py @@ -25,6 +25,7 @@ from spack import * import os + class Ior(Package): """The IOR software is used for benchmarking parallel file systems using POSIX, MPI-IO, or HDF5 interfaces.""" @@ -41,7 +42,6 @@ class Ior(Package): depends_on('hdf5+mpi', when='+hdf5') depends_on('netcdf+mpi', when='+ncmpi') - def install(self, spec, prefix): os.system('./bootstrap') diff --git a/var/spack/repos/builtin/packages/ipopt/package.py b/var/spack/repos/builtin/packages/ipopt/package.py index 3bd21df89ea..d5981e99759 100644 --- a/var/spack/repos/builtin/packages/ipopt/package.py +++ b/var/spack/repos/builtin/packages/ipopt/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Ipopt(Package): """Ipopt (Interior Point OPTimizer, pronounced eye-pea-Opt) is a software package for large-scale nonlinear optimization.""" @@ -39,8 +40,8 @@ class Ipopt(Package): depends_on("blas") depends_on("lapack") depends_on("pkg-config", type='build') - depends_on("mumps+double~mpi") - + depends_on("mumps+double~mpi") + def install(self, spec, prefix): # Dependency directories blas_dir = spec['blas'].prefix @@ -55,7 +56,7 @@ def install(self, spec, prefix): # By convention, spack links blas & lapack libs to libblas & liblapack blas_lib = "-L%s" % blas_dir.lib + " -lblas" lapack_lib = "-L%s" % lapack_dir.lib + " -llapack" - + configure_args = [ "--prefix=%s" % prefix, "--with-mumps-incdir=%s" % mumps_dir.include, @@ -65,8 +66,8 @@ def install(self, spec, prefix): "--with-blas-lib=%s" % blas_lib, "--with-lapack-incdir=%s" % lapack_dir.include, "--with-lapack-lib=%s" % lapack_lib - ] - + ] + configure(*configure_args) # IPOPT does not build correctly in parallel on OS X diff --git a/var/spack/repos/builtin/packages/isl/package.py b/var/spack/repos/builtin/packages/isl/package.py index f456f622256..259f4881f5a 100644 --- a/var/spack/repos/builtin/packages/isl/package.py +++ b/var/spack/repos/builtin/packages/isl/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Isl(Package): """isl is a thread-safe C library for manipulating sets and relations of integer points bounded by affine constraints.""" diff --git a/var/spack/repos/builtin/packages/jdk/package.py b/var/spack/repos/builtin/packages/jdk/package.py index 794966f1c39..39ec39b66d9 100644 --- a/var/spack/repos/builtin/packages/jdk/package.py +++ b/var/spack/repos/builtin/packages/jdk/package.py @@ -38,9 +38,9 @@ class Jdk(Package): homepage = "http://www.oracle.com/technetwork/java/javase/downloads/index.html" # noqa: E501 version('8u66-linux-x64', '88f31f3d642c3287134297b8c10e61bf', - url="http://download.oracle.com/otn-pub/java/jdk/8u66-b17/jdk-8u66-linux-x64.tar.gz") + url="http://download.oracle.com/otn-pub/java/jdk/8u66-b17/jdk-8u66-linux-x64.tar.gz") version('8u92-linux-x64', 
'65a1cc17ea362453a6e0eb4f13be76e4', - url="http://download.oracle.com/otn-pub/java/jdk/8u92-b14/jdk-8u92-linux-x64.tar.gz") + url="http://download.oracle.com/otn-pub/java/jdk/8u92-b14/jdk-8u92-linux-x64.tar.gz") # Oracle requires that you accept their License Agreement in order # to access the Java packages in download.oracle.com. In order to diff --git a/var/spack/repos/builtin/packages/jemalloc/package.py b/var/spack/repos/builtin/packages/jemalloc/package.py index a67754a513a..f5a983b7a9f 100644 --- a/var/spack/repos/builtin/packages/jemalloc/package.py +++ b/var/spack/repos/builtin/packages/jemalloc/package.py @@ -24,8 +24,10 @@ ############################################################################## from spack import * + class Jemalloc(Package): - """jemalloc is a general purpose malloc(3) implementation that emphasizes fragmentation avoidance and scalable concurrency support.""" + """jemalloc is a general purpose malloc(3) implementation that emphasizes + fragmentation avoidance and scalable concurrency support.""" homepage = "http://www.canonware.com/jemalloc/" url = "https://github.com/jemalloc/jemalloc/releases/download/4.0.4/jemalloc-4.0.4.tar.bz2" @@ -36,7 +38,7 @@ class Jemalloc(Package): variant('prof', default=False, description='Enable heap profiling') def install(self, spec, prefix): - configure_args = ['--prefix=%s' % prefix,] + configure_args = ['--prefix=%s' % prefix, ] if '+stats' in spec: configure_args.append('--enable-stats') diff --git a/var/spack/repos/builtin/packages/jpeg/package.py b/var/spack/repos/builtin/packages/jpeg/package.py index e4a9d8535b5..594240d9507 100644 --- a/var/spack/repos/builtin/packages/jpeg/package.py +++ b/var/spack/repos/builtin/packages/jpeg/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Jpeg(Package): """libjpeg is a widely used free library with functions for handling the JPEG image data format. It implements a JPEG codec (encoding and decoding) diff --git a/var/spack/repos/builtin/packages/judy/package.py b/var/spack/repos/builtin/packages/judy/package.py index 8d47767ef05..8b8b261e535 100644 --- a/var/spack/repos/builtin/packages/judy/package.py +++ b/var/spack/repos/builtin/packages/judy/package.py @@ -24,8 +24,9 @@ ############################################################################## from spack import * + class Judy(Package): - """A general-purpose dynamic array, associative array and hash-trie - Judy""" + """Judy: General-purpose dynamic array, associative array and hash-trie.""" homepage = "http://judy.sourceforge.net/" url = "http://downloads.sourceforge.net/project/judy/judy/Judy-1.0.5/Judy-1.0.5.tar.gz" diff --git a/var/spack/repos/builtin/packages/kealib/package.py b/var/spack/repos/builtin/packages/kealib/package.py index 7c73c4518b5..5346fc8cb98 100644 --- a/var/spack/repos/builtin/packages/kealib/package.py +++ b/var/spack/repos/builtin/packages/kealib/package.py @@ -24,20 +24,21 @@ ############################################################################## from spack import * + class Kealib(Package): """An HDF5 Based Raster File Format - + KEALib provides an implementation of the GDAL data model. The format supports raster attribute tables, image pyramids, meta-data and in-built statistics while also handling very large files and compression throughout. - + Based on the HDF5 standard, it also provides a base from which other formats can be derived and is a good choice for long term data archiving. 
An independent software library (libkea) provides complete access to the KEA image format and a GDAL driver allowing KEA images to be used from any GDAL supported software. - + Development work on this project has been funded by Landcare Research. """ homepage = "http://kealib.org/" @@ -51,7 +52,8 @@ def install(self, spec, prefix): with working_dir('trunk', create=False): cmake_args = [] cmake_args.append("-DCMAKE_INSTALL_PREFIX=%s" % prefix) - cmake_args.append("-DHDF5_INCLUDE_DIR=%s" % spec['hdf5'].prefix.include) + cmake_args.append("-DHDF5_INCLUDE_DIR=%s" % + spec['hdf5'].prefix.include) cmake_args.append("-DHDF5_LIB_PATH=%s" % spec['hdf5'].prefix.lib) cmake('.', *cmake_args) diff --git a/var/spack/repos/builtin/packages/kripke/package.py b/var/spack/repos/builtin/packages/kripke/package.py index d405e9a51d6..cf8d2b7e391 100644 --- a/var/spack/repos/builtin/packages/kripke/package.py +++ b/var/spack/repos/builtin/packages/kripke/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Kripke(Package): """Kripke is a simple, scalable, 3D Sn deterministic particle transport proxy/mini app. diff --git a/var/spack/repos/builtin/packages/lcms/package.py b/var/spack/repos/builtin/packages/lcms/package.py index 434d8e6c983..4d3fc59568a 100644 --- a/var/spack/repos/builtin/packages/lcms/package.py +++ b/var/spack/repos/builtin/packages/lcms/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Lcms(Package): """Little cms is a color management library. Implements fast transforms between ICC profiles. It is focused on speed, and is diff --git a/var/spack/repos/builtin/packages/leveldb/package.py b/var/spack/repos/builtin/packages/leveldb/package.py index 408f1d31c12..f571baa1ce1 100644 --- a/var/spack/repos/builtin/packages/leveldb/package.py +++ b/var/spack/repos/builtin/packages/leveldb/package.py @@ -22,10 +22,10 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os import glob from spack import * + class Leveldb(Package): """LevelDB is a fast key-value storage library written at Google that provides an ordered mapping from string keys to string values.""" diff --git a/var/spack/repos/builtin/packages/libNBC/package.py b/var/spack/repos/builtin/packages/libNBC/package.py index ed1d0ce96f0..414498a37ae 100644 --- a/var/spack/repos/builtin/packages/libNBC/package.py +++ b/var/spack/repos/builtin/packages/libNBC/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Libnbc(Package): """LibNBC is a prototypic implementation of a nonblocking interface for MPI collective operations. 
Based on ANSI C and diff --git a/var/spack/repos/builtin/packages/libarchive/package.py b/var/spack/repos/builtin/packages/libarchive/package.py index f11d732afab..0cf3932957e 100644 --- a/var/spack/repos/builtin/packages/libarchive/package.py +++ b/var/spack/repos/builtin/packages/libarchive/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Libarchive(Package): """libarchive: C library and command-line tools for reading and writing tar, cpio, zip, ISO, and other archive formats.""" diff --git a/var/spack/repos/builtin/packages/libcerf/package.py b/var/spack/repos/builtin/packages/libcerf/package.py index b30d76f4e95..7fb47f8dcd4 100644 --- a/var/spack/repos/builtin/packages/libcerf/package.py +++ b/var/spack/repos/builtin/packages/libcerf/package.py @@ -27,9 +27,10 @@ class Libcerf(Package): - """ - A self-contained C library providing complex error functions, based on Faddeeva's plasma dispersion function - w(z). Also provides Dawson's integral and Voigt's convolution of a Gaussian and a Lorentzian + """A self-contained C library providing complex error functions, based + on Faddeeva's plasma dispersion function w(z). Also provides Dawson's + integral and Voigt's convolution of a Gaussian and a Lorentzian + """ homepage = "http://sourceforge.net/projects/libcerf" url = "http://downloads.sourceforge.net/project/libcerf/libcerf-1.3.tgz" diff --git a/var/spack/repos/builtin/packages/libcircle/package.py b/var/spack/repos/builtin/packages/libcircle/package.py index 75fdb961256..971c29f5f1c 100644 --- a/var/spack/repos/builtin/packages/libcircle/package.py +++ b/var/spack/repos/builtin/packages/libcircle/package.py @@ -22,9 +22,9 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os from spack import * + class Libcircle(Package): """libcircle provides an efficient distributed queue on a cluster, using self-stabilizing work stealing.""" @@ -32,7 +32,7 @@ class Libcircle(Package): homepage = "https://github.com/hpc/libcircle" version('0.2.1-rc.1', '2b1369a5736457239f908abf88143ec2', - url='https://github.com/hpc/libcircle/releases/download/0.2.1-rc.1/libcircle-0.2.1-rc.1.tar.gz') + url='https://github.com/hpc/libcircle/releases/download/0.2.1-rc.1/libcircle-0.2.1-rc.1.tar.gz') depends_on('mpi') diff --git a/var/spack/repos/builtin/packages/libdrm/package.py b/var/spack/repos/builtin/packages/libdrm/package.py index b05412e5886..543eab11b38 100644 --- a/var/spack/repos/builtin/packages/libdrm/package.py +++ b/var/spack/repos/builtin/packages/libdrm/package.py @@ -24,12 +24,13 @@ ############################################################################## from spack import * + class Libdrm(Package): """A userspace library for accessing the DRM, direct rendering manager, on Linux, BSD and other operating systems that support the ioctl interface.""" - homepage = "http://dri.freedesktop.org/libdrm/" # no real website... + homepage = "http://dri.freedesktop.org/libdrm/" # no real website... 
url = "http://dri.freedesktop.org/libdrm/libdrm-2.4.59.tar.gz" version('2.4.59', '105ac7af1afcd742d402ca7b4eb168b6') diff --git a/var/spack/repos/builtin/packages/libedit/package.py b/var/spack/repos/builtin/packages/libedit/package.py index 4366344679c..235e7648bca 100644 --- a/var/spack/repos/builtin/packages/libedit/package.py +++ b/var/spack/repos/builtin/packages/libedit/package.py @@ -24,12 +24,14 @@ ############################################################################## from spack import * + class Libedit(Package): """An autotools compatible port of the NetBSD editline library""" homepage = "http://thrysoee.dk/editline/" url = "http://thrysoee.dk/editline/libedit-20150325-3.1.tar.gz" - version('3.1', '43cdb5df3061d78b5e9d59109871b4f6', url="http://thrysoee.dk/editline/libedit-20150325-3.1.tar.gz") + version('3.1', '43cdb5df3061d78b5e9d59109871b4f6', + url="http://thrysoee.dk/editline/libedit-20150325-3.1.tar.gz") depends_on('ncurses') diff --git a/var/spack/repos/builtin/packages/libelf/package.py b/var/spack/repos/builtin/packages/libelf/package.py index 1e1deea8184..3304d27bdb0 100644 --- a/var/spack/repos/builtin/packages/libelf/package.py +++ b/var/spack/repos/builtin/packages/libelf/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Libelf(Package): """libelf lets you read, modify or create ELF object files in an architecture-independent way. The library takes care of size diff --git a/var/spack/repos/builtin/packages/libevent/package.py b/var/spack/repos/builtin/packages/libevent/package.py index 3ae427a2eb6..65b3a716c0a 100644 --- a/var/spack/repos/builtin/packages/libevent/package.py +++ b/var/spack/repos/builtin/packages/libevent/package.py @@ -24,11 +24,13 @@ ############################################################################## from spack import * + class Libevent(Package): """The libevent API provides a mechanism to execute a callback function - when a specific event occurs on a file descriptor or after a timeout has been - reached. Furthermore, libevent also support callbacks due to signals or regular - timeouts. + when a specific event occurs on a file descriptor or after a + timeout has been reached. Furthermore, libevent also support + callbacks due to signals or regular timeouts. + """ homepage = "http://libevent.org" @@ -46,7 +48,8 @@ class Libevent(Package): version('2.0.13', 'af786b4b3f790c9d3279792edf7867fc') version('2.0.12', '42986228baf95e325778ed328a93e070') - variant('openssl', default=True, description="Build with encryption enabled at the libevent level.") + variant('openssl', default=True, + description="Build with encryption enabled at the libevent level.") depends_on('openssl', when='+openssl') def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/libffi/package.py b/var/spack/repos/builtin/packages/libffi/package.py index 1d3c85c8e06..fa113ee86c3 100644 --- a/var/spack/repos/builtin/packages/libffi/package.py +++ b/var/spack/repos/builtin/packages/libffi/package.py @@ -24,18 +24,21 @@ ############################################################################## from spack import * + class Libffi(Package): """The libffi library provides a portable, high level programming interface to various calling conventions. 
This allows a programmer to call any function specified by a call interface description at run time.""" homepage = "https://sourceware.org/libffi/" - - version('3.2.1','83b89587607e3eb65c70d361f13bab43',url = "ftp://sourceware.org/pub/libffi/libffi-3.2.1.tar.gz") - #version('3.1', 'f5898b29bbfd70502831a212d9249d10',url = "ftp://sourceware.org/pub/libffi/libffi-3.1.tar.gz") # Has a bug $(lib64) instead of ${lib64} in libffi.pc + + version('3.2.1', '83b89587607e3eb65c70d361f13bab43', + url="ftp://sourceware.org/pub/libffi/libffi-3.2.1.tar.gz") + # version('3.1', 'f5898b29bbfd70502831a212d9249d10',url = + # "ftp://sourceware.org/pub/libffi/libffi-3.1.tar.gz") # Has a bug + # $(lib64) instead of ${lib64} in libffi.pc def install(self, spec, prefix): configure("--prefix=%s" % prefix) make() make("install") - diff --git a/var/spack/repos/builtin/packages/libgcrypt/package.py b/var/spack/repos/builtin/packages/libgcrypt/package.py index b1a316cc1b7..b556def4d3b 100644 --- a/var/spack/repos/builtin/packages/libgcrypt/package.py +++ b/var/spack/repos/builtin/packages/libgcrypt/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Libgcrypt(Package): """Libgcrypt is a general purpose cryptographic library based on the code from GnuPG. It provides functions for all cryptographic diff --git a/var/spack/repos/builtin/packages/libgd/package.py b/var/spack/repos/builtin/packages/libgd/package.py index 938f4f6f3b7..acfdebb9722 100644 --- a/var/spack/repos/builtin/packages/libgd/package.py +++ b/var/spack/repos/builtin/packages/libgd/package.py @@ -27,11 +27,14 @@ class Libgd(Package): - """ - GD is an open source code library for the dynamic creation of images by programmers. GD is written in C, and - "wrappers" are available for Perl, PHP and other languages. GD creates PNG, JPEG, GIF, WebP, XPM, BMP images, - among other formats. GD is commonly used to generate charts, graphics, thumbnails, and most anything else, on the - fly. While not restricted to use on the web, the most common applications of GD involve website development. + """GD is an open source code library for the dynamic creation of images + by programmers. GD is written in C, and "wrappers" are available + for Perl, PHP and other languages. GD creates PNG, JPEG, GIF, + WebP, XPM, BMP images, among other formats. GD is commonly used to + generate charts, graphics, thumbnails, and most anything else, on + the fly. While not restricted to use on the web, the most common + applications of GD involve website development. + """ homepage = "https://github.com/libgd/libgd" diff --git a/var/spack/repos/builtin/packages/libgpg-error/package.py b/var/spack/repos/builtin/packages/libgpg-error/package.py index 3fe82a69e7d..a0e2acd5160 100644 --- a/var/spack/repos/builtin/packages/libgpg-error/package.py +++ b/var/spack/repos/builtin/packages/libgpg-error/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class LibgpgError(Package): """Libgpg-error is a small library that defines common error values for all GnuPG components. 
Among these are GPG, GPGSM,
diff --git a/var/spack/repos/builtin/packages/libjpeg-turbo/package.py b/var/spack/repos/builtin/packages/libjpeg-turbo/package.py
index d1239ba0d98..6252a885422 100644
--- a/var/spack/repos/builtin/packages/libjpeg-turbo/package.py
+++ b/var/spack/repos/builtin/packages/libjpeg-turbo/package.py
@@ -24,6 +24,7 @@
 ##############################################################################
 from spack import *
 
+
 class LibjpegTurbo(Package):
     """libjpeg-turbo is a fork of the original IJG libjpeg which uses
        SIMD to accelerate baseline JPEG compression and
diff --git a/var/spack/repos/builtin/packages/libmng/package.py b/var/spack/repos/builtin/packages/libmng/package.py
index dd0903c4bd9..a77aada79cc 100644
--- a/var/spack/repos/builtin/packages/libmng/package.py
+++ b/var/spack/repos/builtin/packages/libmng/package.py
@@ -24,6 +24,7 @@
 ##############################################################################
 from spack import *
 
+
 class Libmng(Package):
     """libmng -THE reference library for reading, displaying, writing
        and examining Multiple-Image Network Graphics. MNG is the animation
@@ -39,7 +40,8 @@ class Libmng(Package):
 
     def patch(self):
         # jpeg requires stdio to be included before its headers.
-        filter_file(r'^(\#include \<setjmp.h\>)', '#include<stdio.h>\n\\1', 'libmng_types.h')
+        filter_file(r'^(\#include \<setjmp.h\>)',
+                    '#include<stdio.h>\n\\1', 'libmng_types.h')
 
     def install(self, spec, prefix):
         configure("--prefix=%s" % prefix)
diff --git a/var/spack/repos/builtin/packages/libmonitor/package.py b/var/spack/repos/builtin/packages/libmonitor/package.py
index 611e602e2f9..f680baa2655 100644
--- a/var/spack/repos/builtin/packages/libmonitor/package.py
+++ b/var/spack/repos/builtin/packages/libmonitor/package.py
@@ -28,8 +28,10 @@ class Libmonitor(Package):
     """Libmonitor is a library for process and thread control."""
     homepage = "https://github.com/HPCToolkit/libmonitor"
 
-    version('20130218', git='https://github.com/HPCToolkit/libmonitor.git', commit='4f2311e')
-    variant('krellpatch', default=False, description="build with openspeedshop based patch.")
+    version('20130218', git='https://github.com/HPCToolkit/libmonitor.git',
+            commit='4f2311e')
+    variant('krellpatch', default=False,
+            description="build with openspeedshop based patch.")
 
     patch('libmonitorkrell-0000.patch', when='@20130218+krellpatch')
     patch('libmonitorkrell-0001.patch', when='@20130218+krellpatch')
diff --git a/var/spack/repos/builtin/packages/libpciaccess/package.py b/var/spack/repos/builtin/packages/libpciaccess/package.py
index 91cef95cec2..a65b81b69bf 100644
--- a/var/spack/repos/builtin/packages/libpciaccess/package.py
+++ b/var/spack/repos/builtin/packages/libpciaccess/package.py
@@ -23,7 +23,7 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 from spack import *
-import os.path
+
 
 class Libpciaccess(Package):
     """Generic PCI access library."""
diff --git a/var/spack/repos/builtin/packages/libpng/package.py b/var/spack/repos/builtin/packages/libpng/package.py
index 951b91eabd4..1afe4911e60 100644
--- a/var/spack/repos/builtin/packages/libpng/package.py
+++ b/var/spack/repos/builtin/packages/libpng/package.py
@@ -24,6 +24,7 @@
 ##############################################################################
 from spack import *
 
+
 class Libpng(Package):
     """libpng graphics file format"""
     homepage = "http://www.libpng.org/pub/png/libpng.html"
diff --git a/var/spack/repos/builtin/packages/libpthread-stubs/package.py
b/var/spack/repos/builtin/packages/libpthread-stubs/package.py index 4bcca43c246..ea36758a83c 100644 --- a/var/spack/repos/builtin/packages/libpthread-stubs/package.py +++ b/var/spack/repos/builtin/packages/libpthread-stubs/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class LibpthreadStubs(Package): """The libpthread-stubs package provides weak aliases for pthread functions not provided in libc or otherwise available by diff --git a/var/spack/repos/builtin/packages/libsigsegv/package.py b/var/spack/repos/builtin/packages/libsigsegv/package.py index 14acdcbcd22..715d24bcf7a 100644 --- a/var/spack/repos/builtin/packages/libsigsegv/package.py +++ b/var/spack/repos/builtin/packages/libsigsegv/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Libsigsegv(Package): """GNU libsigsegv is a library for handling page faults in user mode.""" homepage = "https://www.gnu.org/software/libsigsegv/" diff --git a/var/spack/repos/builtin/packages/libsodium/package.py b/var/spack/repos/builtin/packages/libsodium/package.py index 831a75e659d..1c930e21ddf 100644 --- a/var/spack/repos/builtin/packages/libsodium/package.py +++ b/var/spack/repos/builtin/packages/libsodium/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Libsodium(Package): """Sodium is a modern, easy-to-use software library for encryption, decryption, signatures, password hashing and more.""" diff --git a/var/spack/repos/builtin/packages/libtermkey/package.py b/var/spack/repos/builtin/packages/libtermkey/package.py index c7db959a405..64688505c45 100644 --- a/var/spack/repos/builtin/packages/libtermkey/package.py +++ b/var/spack/repos/builtin/packages/libtermkey/package.py @@ -24,17 +24,17 @@ ############################################################################## from spack import * + class Libtermkey(Package): """Easy keyboard entry processing for terminal programs""" homepage = "http://www.leonerd.org.uk/code/libtermkey/" url = "http://www.leonerd.org.uk/code/libtermkey/libtermkey-0.18.tar.gz" - version('0.18' , '3be2e3e5a851a49cc5e8567ac108b520') - version('0.17' , '20edb99e0d95ec1690fe90e6a555ae6d') - version('0.16' , '7a24b675aaeb142d30db28e7554987d4') + version('0.18', '3be2e3e5a851a49cc5e8567ac108b520') + version('0.17', '20edb99e0d95ec1690fe90e6a555ae6d') + version('0.16', '7a24b675aaeb142d30db28e7554987d4') version('0.15b', '27689756e6c86c56ae454f2ac259bc3d') - version('0.14' , 'e08ce30f440f9715c459060e0e048978') - + version('0.14', 'e08ce30f440f9715c459060e0e048978') def install(self, spec, prefix): make() diff --git a/var/spack/repos/builtin/packages/libtiff/package.py b/var/spack/repos/builtin/packages/libtiff/package.py index 4b03e7997b0..cef9fcaae53 100644 --- a/var/spack/repos/builtin/packages/libtiff/package.py +++ b/var/spack/repos/builtin/packages/libtiff/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Libtiff(Package): """libtiff graphics format library""" homepage = "http://www.remotesensing.org/libtiff/" diff --git a/var/spack/repos/builtin/packages/libunwind/package.py b/var/spack/repos/builtin/packages/libunwind/package.py index 980b765c027..63ab4aec59b 100644 --- a/var/spack/repos/builtin/packages/libunwind/package.py +++ b/var/spack/repos/builtin/packages/libunwind/package.py 
@@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Libunwind(Package): """A portable and efficient C programming interface (API) to determine the call-chain of a program.""" diff --git a/var/spack/repos/builtin/packages/libuuid/package.py b/var/spack/repos/builtin/packages/libuuid/package.py index 0dd32ec77dc..553f0dddb88 100644 --- a/var/spack/repos/builtin/packages/libuuid/package.py +++ b/var/spack/repos/builtin/packages/libuuid/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Libuuid(Package): """Portable uuid C library""" # FIXME: add a proper url for your package's homepage here. diff --git a/var/spack/repos/builtin/packages/libuv/package.py b/var/spack/repos/builtin/packages/libuv/package.py index 0d29270b381..dae10809f22 100644 --- a/var/spack/repos/builtin/packages/libuv/package.py +++ b/var/spack/repos/builtin/packages/libuv/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Libuv(Package): """Multi-platform library with a focus on asynchronous IO""" homepage = "http://libuv.org" diff --git a/var/spack/repos/builtin/packages/libvterm/package.py b/var/spack/repos/builtin/packages/libvterm/package.py index e57af273adb..2e1ef99b98e 100644 --- a/var/spack/repos/builtin/packages/libvterm/package.py +++ b/var/spack/repos/builtin/packages/libvterm/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Libvterm(Package): """An abstract library implementation of a terminal emulator""" homepage = "http://www.leonerd.org.uk/code/libvterm/" diff --git a/var/spack/repos/builtin/packages/libxc/package.py b/var/spack/repos/builtin/packages/libxc/package.py index 87437373d67..9ea4d1c3261 100644 --- a/var/spack/repos/builtin/packages/libxc/package.py +++ b/var/spack/repos/builtin/packages/libxc/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Libxc(Package): """Libxc is a library of exchange-correlation functionals for density-functional theory.""" @@ -33,7 +34,6 @@ class Libxc(Package): version('2.2.2', 'd9f90a0d6e36df6c1312b6422280f2ec') - def install(self, spec, prefix): configure('--prefix=%s' % prefix, '--enable-shared') diff --git a/var/spack/repos/builtin/packages/libxcb/package.py b/var/spack/repos/builtin/packages/libxcb/package.py index 586eb970d8f..82ddb2742e6 100644 --- a/var/spack/repos/builtin/packages/libxcb/package.py +++ b/var/spack/repos/builtin/packages/libxcb/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Libxcb(Package): """The X protocol C-language Binding (XCB) is a replacement for Xlib featuring a small footprint, latency hiding, direct @@ -43,11 +44,14 @@ class Libxcb(Package): depends_on('libxau') def patch(self): - filter_file('typedef struct xcb_auth_info_t {', 'typedef struct {', 'src/xcb.h') - + filter_file( + 'typedef struct xcb_auth_info_t {', + 'typedef struct {', + 'src/xcb.h') def install(self, spec, prefix): - env['PKG_CONFIG_PATH'] = env['PKG_CONFIG_PATH'] + ':/usr/lib64/pkgconfig' + env['PKG_CONFIG_PATH'] = env[ + 'PKG_CONFIG_PATH'] + ':/usr/lib64/pkgconfig' configure("--prefix=%s" % prefix) make() diff --git a/var/spack/repos/builtin/packages/libxml2/package.py 
b/var/spack/repos/builtin/packages/libxml2/package.py index a6dabd2c05d..0f4810fa8aa 100644 --- a/var/spack/repos/builtin/packages/libxml2/package.py +++ b/var/spack/repos/builtin/packages/libxml2/package.py @@ -36,7 +36,9 @@ class Libxml2(Package): variant('python', default=False, description='Enable Python support') - extends('python', when='+python', ignore=r'(bin.*$)|(include.*$)|(share.*$)|(lib/libxml2.*$)|(lib/xml2.*$)|(lib/cmake.*$)') + extends('python', when='+python', + ignore=r'(bin.*$)|(include.*$)|(share.*$)|(lib/libxml2.*$)|' + '(lib/xml2.*$)|(lib/cmake.*$)') depends_on('zlib') depends_on('xz') diff --git a/var/spack/repos/builtin/packages/libxshmfence/package.py b/var/spack/repos/builtin/packages/libxshmfence/package.py index 6d63ea6426e..fe5d5667e9b 100644 --- a/var/spack/repos/builtin/packages/libxshmfence/package.py +++ b/var/spack/repos/builtin/packages/libxshmfence/package.py @@ -24,11 +24,12 @@ ############################################################################## from spack import * + class Libxshmfence(Package): """This is a tiny library that exposes a event API on top of Linux futexes.""" - homepage = "http://keithp.com/blogs/dri3_extension/" # not really... + homepage = "http://keithp.com/blogs/dri3_extension/" # not really... url = "http://xorg.freedesktop.org/archive/individual/lib/libxshmfence-1.2.tar.gz" version('1.2', 'f0b30c0fc568b22ec524859ee28556f1') diff --git a/var/spack/repos/builtin/packages/libxslt/package.py b/var/spack/repos/builtin/packages/libxslt/package.py index c6c439ad096..47d1c170c65 100644 --- a/var/spack/repos/builtin/packages/libxslt/package.py +++ b/var/spack/repos/builtin/packages/libxslt/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Libxslt(Package): """Libxslt is the XSLT C library developed for the GNOME project. XSLT itself is a an XML language to define diff --git a/var/spack/repos/builtin/packages/llvm-lld/package.py b/var/spack/repos/builtin/packages/llvm-lld/package.py index 127fe1204d2..1dcf88e3c0a 100644 --- a/var/spack/repos/builtin/packages/llvm-lld/package.py +++ b/var/spack/repos/builtin/packages/llvm-lld/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class LlvmLld(Package): """lld - The LLVM Linker lld is a new set of modular code for creating linker tools.""" diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py index e79c123a7ea..61ea8daac49 100644 --- a/var/spack/repos/builtin/packages/llvm/package.py +++ b/var/spack/repos/builtin/packages/llvm/package.py @@ -23,32 +23,51 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import os, glob +import os class Llvm(Package): """The LLVM Project is a collection of modular and reusable compiler and - toolchain technologies. Despite its name, LLVM has little to do with - traditional virtual machines, though it does provide helpful libraries - that can be used to build them. The name "LLVM" itself is not an acronym; - it is the full name of the project. + toolchain technologies. Despite its name, LLVM has little to do + with traditional virtual machines, though it does provide helpful + libraries that can be used to build them. The name "LLVM" itself + is not an acronym; it is the full name of the project. 
+ """ homepage = 'http://llvm.org/' url = 'http://llvm.org/releases/3.7.1/llvm-3.7.1.src.tar.xz' - version('3.0', 'a8e5f5f1c1adebae7b4a654c376a6005', url='http://llvm.org/releases/3.0/llvm-3.0.tar.gz') # currently required by mesa package + # currently required by mesa package + version('3.0', 'a8e5f5f1c1adebae7b4a654c376a6005', + url='http://llvm.org/releases/3.0/llvm-3.0.tar.gz') - variant('debug', default=False, description="Build a debug version of LLVM, this increases binary size by an order of magnitude, make sure you have 20-30gb of space available to build this") - variant('clang', default=True, description="Build the LLVM C/C++/Objective-C compiler frontend") + variant('debug', default=False, + description="Build a debug version of LLVM, this increases " + "binary size by an order of magnitude, make sure you have " + "20-30gb of space available to build this") + variant('clang', default=True, + description="Build the LLVM C/C++/Objective-C compiler frontend") variant('lldb', default=True, description="Build the LLVM debugger") - variant('internal_unwind', default=True, description="Build the libcxxabi libunwind") - variant('polly', default=True, description="Build the LLVM polyhedral optimization plugin, only builds for 3.7.0+") - variant('libcxx', default=True, description="Build the LLVM C++ standard library") - variant('compiler-rt', default=True, description="Build the LLVM compiler runtime, including sanitizers") - variant('gold', default=True, description="Add support for LTO with the gold linker plugin") - variant('shared_libs', default=False, description="Build all components as shared libraries, faster, less memory to build, less stable") - variant('link_dylib', default=False, description="Build and link the libLLVM shared library rather than static") - variant('all_targets', default=True, description="Build all supported targets, default targets ,NVPTX,AMDGPU,CppBackend") + variant('internal_unwind', default=True, + description="Build the libcxxabi libunwind") + variant('polly', default=True, + description="Build the LLVM polyhedral optimization plugin, " + "only builds for 3.7.0+") + variant('libcxx', default=True, + description="Build the LLVM C++ standard library") + variant('compiler-rt', default=True, + description="Build LLVM compiler runtime, including sanitizers") + variant('gold', default=True, + description="Add support for LTO with the gold linker plugin") + variant('shared_libs', default=False, + description="Build all components as shared libraries, faster, " + "less memory to build, less stable") + variant('link_dylib', default=False, + description="Build and link the libLLVM shared library rather " + "than static") + variant('all_targets', default=True, + description="Build all supported targets, default targets " + ",NVPTX,AMDGPU,CppBackend") # Build dependency depends_on('cmake@2.8.12.2:', type='build') @@ -68,147 +87,147 @@ class Llvm(Package): depends_on('gmp', when='@:3.6.999 +polly') depends_on('isl', when='@:3.6.999 +polly') - base_url = 'http://llvm.org/releases/%%(version)s/%(pkg)s-%%(version)s.src.tar.xz' - llvm_url = base_url % { 'pkg' : 'llvm'} + base_url = 'http://llvm.org/releases/%%(version)s/%(pkg)s-%%(version)s.src.tar.xz' + llvm_url = base_url % {'pkg': 'llvm'} resources = { - 'compiler-rt' : { - 'url' : base_url % { 'pkg' : 'compiler-rt'}, - 'destination' : 'projects', - 'placement' : 'compiler-rt', - }, - 'openmp' : { - 'url' : base_url % { 'pkg' : 'openmp'}, - 'destination' : 'projects', - 'placement' : 'openmp', - }, - 'libcxx' : { - 
'url' : base_url % { 'pkg' : 'libcxx'}, - 'destination' : 'projects', - 'placement' : 'libcxx', - }, - 'libcxxabi' : { - 'url' : base_url % { 'pkg' : 'libcxxabi'}, - 'destination' : 'projects', - 'placement' : 'libcxxabi', - }, - 'clang' : { - 'url' : base_url % { 'pkg' : 'cfe'}, - 'destination' : 'tools', - 'placement' : 'clang', - }, - 'clang-tools-extra' : { - 'url' : base_url % { 'pkg' : 'clang-tools-extra'}, - 'destination' : 'tools/clang/tools', - 'placement' : 'extra', - }, - 'lldb' : { - 'url' : base_url % { 'pkg' : 'lldb'}, - 'destination' : 'tools', - 'placement' : 'lldb', - }, - 'polly' : { - 'url' : base_url % { 'pkg' : 'polly'}, - 'destination' : 'tools', - 'placement' : 'polly', - }, - 'llvm-libunwind' : { - 'url' : base_url % { 'pkg' : 'libunwind'}, - 'destination' : 'projects', - 'placement' : 'libunwind', - }, - } + 'compiler-rt': { + 'url': base_url % {'pkg': 'compiler-rt'}, + 'destination': 'projects', + 'placement': 'compiler-rt', + }, + 'openmp': { + 'url': base_url % {'pkg': 'openmp'}, + 'destination': 'projects', + 'placement': 'openmp', + }, + 'libcxx': { + 'url': base_url % {'pkg': 'libcxx'}, + 'destination': 'projects', + 'placement': 'libcxx', + }, + 'libcxxabi': { + 'url': base_url % {'pkg': 'libcxxabi'}, + 'destination': 'projects', + 'placement': 'libcxxabi', + }, + 'clang': { + 'url': base_url % {'pkg': 'cfe'}, + 'destination': 'tools', + 'placement': 'clang', + }, + 'clang-tools-extra': { + 'url': base_url % {'pkg': 'clang-tools-extra'}, + 'destination': 'tools/clang/tools', + 'placement': 'extra', + }, + 'lldb': { + 'url': base_url % {'pkg': 'lldb'}, + 'destination': 'tools', + 'placement': 'lldb', + }, + 'polly': { + 'url': base_url % {'pkg': 'polly'}, + 'destination': 'tools', + 'placement': 'polly', + }, + 'llvm-libunwind': { + 'url': base_url % {'pkg': 'libunwind'}, + 'destination': 'projects', + 'placement': 'libunwind', + }, + } releases = [ - { - 'version' : 'trunk', - 'repo' : 'http://llvm.org/svn/llvm-project/llvm/trunk', - 'resources' : { - 'compiler-rt' : 'http://llvm.org/svn/llvm-project/compiler-rt/trunk', - 'openmp' : 'http://llvm.org/svn/llvm-project/openmp/trunk', - 'polly' : 'http://llvm.org/svn/llvm-project/polly/trunk', - 'libcxx' : 'http://llvm.org/svn/llvm-project/libcxx/trunk', - 'libcxxabi' : 'http://llvm.org/svn/llvm-project/libcxxabi/trunk', - 'clang' : 'http://llvm.org/svn/llvm-project/cfe/trunk', - 'clang-tools-extra' : 'http://llvm.org/svn/llvm-project/clang-tools-extra/trunk', - 'lldb' : 'http://llvm.org/svn/llvm-project/lldb/trunk', - 'llvm-libunwind' : 'http://llvm.org/svn/llvm-project/libunwind/trunk', - } - }, - { - 'version' : '3.8.0', - 'md5':'07a7a74f3c6bd65de4702bf941b511a0', - 'resources' : { - 'compiler-rt' : 'd6fcbe14352ffb708e4d1ac2e48bb025', - 'openmp' : '8fd7cc35d48051613cf1e750e9f22e40', - 'polly' : '1b3b20f52d34a4024e21a4ea7112caa7', - 'libcxx' : 'd6e0bdbbee39f7907ad74fd56d03b88a', - 'libcxxabi' : 'bbe6b4d72c7c5978550d370af529bcf7', - 'clang' : 'cc99e7019bb74e6459e80863606250c5', - 'clang-tools-extra' : 'c2344f50e0eea0b402f0092a80ddc036', - 'lldb' : 'a5da35ed9cc8c8817ee854e3dbfba00e', - 'llvm-libunwind' : '162ade468607f153cca12be90b5194fa', - } - }, - { - 'version' : '3.7.1', - 'md5':'bf8b3a2c79e61212c5409041dfdbd319', - 'resources' : { - 'compiler-rt' : '1c6975daf30bb3b0473b53c3a1a6ff01', - 'openmp' : 'b4ad08cda4e5c22e42b66062b140438e', - 'polly' : '3a2a7367002740881637f4d47bca4dc3', - 'libcxx' : 'f9c43fa552a10e14ff53b94d04bea140', - 'libcxxabi' : '52d925afac9f97e9dcac90745255c169', - 'clang' : 
'0acd026b5529164197563d135a8fd83e', - 'clang-tools-extra' : '5d49ff745037f061a7c86aeb6a24c3d2', - 'lldb' : 'a106d8a0d21fc84d76953822fbaf3398', - 'llvm-libunwind' : '814bd52c9247c5d04629658fbcb3ab8c', - } - }, - { - 'version' : '3.7.0', - 'md5':'b98b9495e5655a672d6cb83e1a180f8e', - 'resources' : { - 'compiler-rt' : '383c10affd513026f08936b5525523f5', - 'openmp' : 'f482c86fdead50ba246a1a2b0bbf206f', - 'polly' : '32f93ffc9cc7e042df22089761558f8b', - 'libcxx' : '46aa5175cbe1ad42d6e9c995968e56dd', - 'libcxxabi' : '5aa769e2fca79fa5335cfae8f6258772', - 'clang' : '8f9d27335e7331cf0a4711e952f21f01', - 'clang-tools-extra' : 'd5a87dacb65d981a427a536f6964642e', - 'lldb' : 'e5931740400d1dc3e7db4c7ba2ceff68', - 'llvm-libunwind' : '9a75392eb7eb8ed5c0840007e212baf5', - } - }, - { - 'version' : '3.6.2', - 'md5':'0c1ee3597d75280dee603bae9cbf5cc2', - 'resources' : { - 'compiler-rt' : 'e3bc4eb7ba8c39a6fe90d6c988927f3c', - 'openmp' : '65dd5863b9b270960a96817e9152b123', - 'libcxx' : '22214c90697636ef960a49aef7c1823a', - 'libcxxabi' : '17518e361e4e228f193dd91e8ef54ba2', - 'clang' : 'ff862793682f714bb7862325b9c06e20', - 'clang-tools-extra' : '3ebc1dc41659fcec3db1b47d81575e06', - 'lldb' : '51e5eb552f777b950bb0ff326e60d5f0', - } - }, - { - 'version' : '3.5.1', - 'md5':'2d3d8004f38852aa679e5945b8ce0b14', - 'resources' : { - 'compiler-rt' : 'd626cfb8a9712cb92b820798ab5bc1f8', - 'openmp' : '121ddb10167d7fc38b1f7e4b029cf059', - 'libcxx' : '406f09b1dab529f3f7879f4d548329d2', - 'libcxxabi' : 'b22c707e8d474a99865ad3c521c3d464', - 'clang' : '93f9532f8f7e6f1d8e5c1116907051cb', - 'clang-tools-extra' : 'f13f31ed3038acadc6fa63fef812a246', - 'lldb' : 'cc5ea8a414c62c33e760517f8929a204', - } - }, - ] + { + 'version': 'trunk', + 'repo': 'http://llvm.org/svn/llvm-project/llvm/trunk', + 'resources': { + 'compiler-rt': 'http://llvm.org/svn/llvm-project/compiler-rt/trunk', + 'openmp': 'http://llvm.org/svn/llvm-project/openmp/trunk', + 'polly': 'http://llvm.org/svn/llvm-project/polly/trunk', + 'libcxx': 'http://llvm.org/svn/llvm-project/libcxx/trunk', + 'libcxxabi': 'http://llvm.org/svn/llvm-project/libcxxabi/trunk', + 'clang': 'http://llvm.org/svn/llvm-project/cfe/trunk', + 'clang-tools-extra': 'http://llvm.org/svn/llvm-project/clang-tools-extra/trunk', + 'lldb': 'http://llvm.org/svn/llvm-project/lldb/trunk', + 'llvm-libunwind': 'http://llvm.org/svn/llvm-project/libunwind/trunk', + } + }, + { + 'version': '3.8.0', + 'md5': '07a7a74f3c6bd65de4702bf941b511a0', + 'resources': { + 'compiler-rt': 'd6fcbe14352ffb708e4d1ac2e48bb025', + 'openmp': '8fd7cc35d48051613cf1e750e9f22e40', + 'polly': '1b3b20f52d34a4024e21a4ea7112caa7', + 'libcxx': 'd6e0bdbbee39f7907ad74fd56d03b88a', + 'libcxxabi': 'bbe6b4d72c7c5978550d370af529bcf7', + 'clang': 'cc99e7019bb74e6459e80863606250c5', + 'clang-tools-extra': 'c2344f50e0eea0b402f0092a80ddc036', + 'lldb': 'a5da35ed9cc8c8817ee854e3dbfba00e', + 'llvm-libunwind': '162ade468607f153cca12be90b5194fa', + } + }, + { + 'version': '3.7.1', + 'md5': 'bf8b3a2c79e61212c5409041dfdbd319', + 'resources': { + 'compiler-rt': '1c6975daf30bb3b0473b53c3a1a6ff01', + 'openmp': 'b4ad08cda4e5c22e42b66062b140438e', + 'polly': '3a2a7367002740881637f4d47bca4dc3', + 'libcxx': 'f9c43fa552a10e14ff53b94d04bea140', + 'libcxxabi': '52d925afac9f97e9dcac90745255c169', + 'clang': '0acd026b5529164197563d135a8fd83e', + 'clang-tools-extra': '5d49ff745037f061a7c86aeb6a24c3d2', + 'lldb': 'a106d8a0d21fc84d76953822fbaf3398', + 'llvm-libunwind': '814bd52c9247c5d04629658fbcb3ab8c', + } + }, + { + 'version': '3.7.0', + 'md5': 
'b98b9495e5655a672d6cb83e1a180f8e', + 'resources': { + 'compiler-rt': '383c10affd513026f08936b5525523f5', + 'openmp': 'f482c86fdead50ba246a1a2b0bbf206f', + 'polly': '32f93ffc9cc7e042df22089761558f8b', + 'libcxx': '46aa5175cbe1ad42d6e9c995968e56dd', + 'libcxxabi': '5aa769e2fca79fa5335cfae8f6258772', + 'clang': '8f9d27335e7331cf0a4711e952f21f01', + 'clang-tools-extra': 'd5a87dacb65d981a427a536f6964642e', + 'lldb': 'e5931740400d1dc3e7db4c7ba2ceff68', + 'llvm-libunwind': '9a75392eb7eb8ed5c0840007e212baf5', + } + }, + { + 'version': '3.6.2', + 'md5': '0c1ee3597d75280dee603bae9cbf5cc2', + 'resources': { + 'compiler-rt': 'e3bc4eb7ba8c39a6fe90d6c988927f3c', + 'openmp': '65dd5863b9b270960a96817e9152b123', + 'libcxx': '22214c90697636ef960a49aef7c1823a', + 'libcxxabi': '17518e361e4e228f193dd91e8ef54ba2', + 'clang': 'ff862793682f714bb7862325b9c06e20', + 'clang-tools-extra': '3ebc1dc41659fcec3db1b47d81575e06', + 'lldb': '51e5eb552f777b950bb0ff326e60d5f0', + } + }, + { + 'version': '3.5.1', + 'md5': '2d3d8004f38852aa679e5945b8ce0b14', + 'resources': { + 'compiler-rt': 'd626cfb8a9712cb92b820798ab5bc1f8', + 'openmp': '121ddb10167d7fc38b1f7e4b029cf059', + 'libcxx': '406f09b1dab529f3f7879f4d548329d2', + 'libcxxabi': 'b22c707e8d474a99865ad3c521c3d464', + 'clang': '93f9532f8f7e6f1d8e5c1116907051cb', + 'clang-tools-extra': 'f13f31ed3038acadc6fa63fef812a246', + 'lldb': 'cc5ea8a414c62c33e760517f8929a204', + } + }, + ] for release in releases: - if release['version'] == 'trunk' : + if release['version'] == 'trunk': version(release['version'], svn=release['repo']) for name, repo in release['resources'].items(): @@ -230,18 +249,19 @@ class Llvm(Package): def install(self, spec, prefix): env['CXXFLAGS'] = self.compiler.cxx11_flag - cmake_args = [ arg for arg in std_cmake_args if 'BUILD_TYPE' not in arg ] + cmake_args = [arg for arg in std_cmake_args if 'BUILD_TYPE' not in arg] build_type = 'RelWithDebInfo' if '+debug' in spec else 'Release' cmake_args.extend([ - '..', - '-DCMAKE_BUILD_TYPE=' + build_type, - '-DLLVM_REQUIRES_RTTI:BOOL=ON', - '-DCLANG_DEFAULT_OPENMP_RUNTIME:STRING=libomp', - '-DPYTHON_EXECUTABLE:PATH=%s/bin/python' % spec['python'].prefix ]) + '..', + '-DCMAKE_BUILD_TYPE=' + build_type, + '-DLLVM_REQUIRES_RTTI:BOOL=ON', + '-DCLANG_DEFAULT_OPENMP_RUNTIME:STRING=libomp', + '-DPYTHON_EXECUTABLE:PATH=%s/bin/python' % spec['python'].prefix]) if '+gold' in spec: - cmake_args.append('-DLLVM_BINUTILS_INCDIR=' + os.path.join( spec['binutils'].prefix, 'include')) + cmake_args.append('-DLLVM_BINUTILS_INCDIR=' + + os.path.join(spec['binutils'].prefix, 'include')) if '+polly' in spec: cmake_args.append('-DLINK_POLLY_INTO_TOOLS:Bool=ON') else: @@ -265,7 +285,7 @@ def install(self, spec, prefix): if '+link_dylib' in spec: cmake_args.append('-DLLVM_LINK_LLVM_DYLIB:Bool=ON') - if '+all_targets' not in spec: # all is default on cmake + if '+all_targets' not in spec: # all is default on cmake targets = ['CppBackend', 'NVPTX', 'AMDGPU'] if 'x86' in spec.architecture.target.lower(): targets.append('X86') @@ -279,13 +299,16 @@ def install(self, spec, prefix): 'power' in spec.architecture.target.lower()): targets.append('PowerPC') - cmake_args.append('-DLLVM_TARGETS_TO_BUILD:Bool=' + ';'.join(targets)) + cmake_args.append( + '-DLLVM_TARGETS_TO_BUILD:Bool=' + ';'.join(targets)) - if '+clang' not in spec: + if '+clang' not in spec: if '+clang_extra' in spec: - raise SpackException('The clang_extra variant requires the clang variant to be selected') + raise SpackException( + 'The clang_extra variant requires the `+clang` 
variant.') if '+lldb' in spec: - raise SpackException('The lldb variant requires the clang variant to be selected') + raise SpackException( + 'The lldb variant requires the `+clang` variant') with working_dir('spack-build', create=True): cmake(*cmake_args) diff --git a/var/spack/repos/builtin/packages/lmdb/package.py b/var/spack/repos/builtin/packages/lmdb/package.py index 79c020b2dfc..8c6c23d8dcb 100644 --- a/var/spack/repos/builtin/packages/lmdb/package.py +++ b/var/spack/repos/builtin/packages/lmdb/package.py @@ -25,12 +25,12 @@ import os from spack import * + class Lmdb(Package): """Read-only mirror of official repo on openldap.org. Issues and pull requests here are ignored. Use OpenLDAP ITS for issues. http://www.openldap.org/software/repo.html""" - homepage = "http://www.openldap.org/software/repo.html" url = "https://github.com/LMDB/lmdb/archive/LMDB_0.9.16.tar.gz" diff --git a/var/spack/repos/builtin/packages/lwgrp/package.py b/var/spack/repos/builtin/packages/lwgrp/package.py index 471098c8735..9322d69b9b0 100644 --- a/var/spack/repos/builtin/packages/lwgrp/package.py +++ b/var/spack/repos/builtin/packages/lwgrp/package.py @@ -22,9 +22,9 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os from spack import * + class Lwgrp(Package): """Thie light-weight group library provides process group representations using O(log N) space and time.""" diff --git a/var/spack/repos/builtin/packages/lwm2/package.py b/var/spack/repos/builtin/packages/lwm2/package.py index 340474b47ea..063204b84a4 100644 --- a/var/spack/repos/builtin/packages/lwm2/package.py +++ b/var/spack/repos/builtin/packages/lwm2/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Lwm2(Package): """LWM2: Light Weight Measurement Module. 
This is a PMPI module that can collect a number of time-sliced MPI and POSIX I/O diff --git a/var/spack/repos/builtin/packages/m4/package.py b/var/spack/repos/builtin/packages/m4/package.py index dcb306dcd3b..b3bb5e61ce1 100644 --- a/var/spack/repos/builtin/packages/m4/package.py +++ b/var/spack/repos/builtin/packages/m4/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class M4(Package): """GNU M4 is an implementation of the traditional Unix macro processor.""" homepage = "https://www.gnu.org/software/m4/m4.html" @@ -33,14 +34,16 @@ class M4(Package): patch('pgi.patch', when='@1.4.17') - variant('sigsegv', default=True, description="Build the libsigsegv dependency") + variant('sigsegv', default=True, + description="Build the libsigsegv dependency") depends_on('libsigsegv', when='+sigsegv') def install(self, spec, prefix): configure_args = [] if 'libsigsegv' in spec: - configure_args.append('--with-libsigsegv-prefix=%s' % spec['libsigsegv'].prefix) + configure_args.append('--with-libsigsegv-prefix=%s' % + spec['libsigsegv'].prefix) else: configure_args.append('--without-libsigsegv-prefix') diff --git a/var/spack/repos/builtin/packages/mbedtls/package.py b/var/spack/repos/builtin/packages/mbedtls/package.py index 13c0ce768fe..e1a42c0d9a4 100644 --- a/var/spack/repos/builtin/packages/mbedtls/package.py +++ b/var/spack/repos/builtin/packages/mbedtls/package.py @@ -24,17 +24,21 @@ ############################################################################## from spack import * + class Mbedtls(Package): - """ - mbed TLS (formerly known as PolarSSL) makes it trivially easy for developers to include cryptographic and SSL/TLS capabilities in their (embedded) products, facilitating this functionality with a minimal coding footprint. + """mbed TLS (formerly known as PolarSSL) makes it trivially easy for + developers to include cryptographic and SSL/TLS capabilities in + their (embedded) products, facilitating this functionality with a + minimal coding footprint. 
+ """ homepage = "https://tls.mbed.org" url = "https://github.com/ARMmbed/mbedtls/archive/mbedtls-2.2.1.tar.gz" - version('2.2.1' , '73a38f96898d6d03e32f55dd9f9a67be') - version('2.2.0' , 'eaf4586c1ef93ae872e606b6c1203942') - version('2.1.4' , '40cdf67b6c6d92c9cbcfd552d39ea3ae') - version('2.1.3' , '7eb4cf1dfa68578a2c8dbd0b6fa752dd') + version('2.2.1', '73a38f96898d6d03e32f55dd9f9a67be') + version('2.2.0', 'eaf4586c1ef93ae872e606b6c1203942') + version('2.1.4', '40cdf67b6c6d92c9cbcfd552d39ea3ae') + version('2.1.3', '7eb4cf1dfa68578a2c8dbd0b6fa752dd') version('1.3.16', '4144d7320c691f721aeb9e67a1bc38e0') depends_on('cmake', type='build') diff --git a/var/spack/repos/builtin/packages/memaxes/package.py b/var/spack/repos/builtin/packages/memaxes/package.py index 31672abaec0..ffad1677889 100644 --- a/var/spack/repos/builtin/packages/memaxes/package.py +++ b/var/spack/repos/builtin/packages/memaxes/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Memaxes(Package): """MemAxes is a visualizer for sampled memory trace data.""" diff --git a/var/spack/repos/builtin/packages/mesa/package.py b/var/spack/repos/builtin/packages/mesa/package.py index 3bad17b5cb0..299b9a9267e 100644 --- a/var/spack/repos/builtin/packages/mesa/package.py +++ b/var/spack/repos/builtin/packages/mesa/package.py @@ -24,13 +24,13 @@ ############################################################################## from spack import * + class Mesa(Package): """Mesa is an open-source implementation of the OpenGL specification - a system for rendering interactive 3D graphics.""" homepage = "http://www.mesa3d.org" url = "ftp://ftp.freedesktop.org/pub/mesa/older-versions/8.x/8.0.5/MesaLib-8.0.5.tar.gz" - # url = "ftp://ftp.freedesktop.org/pub/mesa/10.4.4/MesaLib-10.4.4.tar.gz" # version('10.4.4', '8d863a3c209bf5116b2babfccccc68ce') version('8.0.5', 'cda5d101f43b8784fa60bdeaca4056f2') @@ -50,7 +50,6 @@ class Mesa(Package): # depends_on("libxcb") # depends_on("libxshmfence") - def install(self, spec, prefix): configure("--prefix=%s" % prefix) diff --git a/var/spack/repos/builtin/packages/metis/package.py b/var/spack/repos/builtin/packages/metis/package.py index 2180f2cce2f..9f8ed5c9e82 100644 --- a/var/spack/repos/builtin/packages/metis/package.py +++ b/var/spack/repos/builtin/packages/metis/package.py @@ -43,12 +43,16 @@ class Metis(Package): version('5.0.2', 'acb521a4e8c2e6dd559a7f9abd0468c5') version('4.0.3', 'd3848b454532ef18dc83e4fb160d1e10') - variant('shared', default=True, description='Enables the build of shared libraries') - variant('debug', default=False, description='Builds the library in debug mode') + variant('shared', default=True, + description='Enables the build of shared libraries') + variant('debug', default=False, + description='Builds the library in debug mode') variant('gdb', default=False, description='Enables gdb support') - variant('idx64', default=False, description='Use int64_t as default index type') - variant('real64', default=False, description='Use double precision floating point types') + variant('idx64', default=False, + description='Use int64_t as default index type') + variant('real64', default=False, + description='Use double precision floating point types') depends_on('cmake@2.8:', when='@5:', type='build') diff --git a/var/spack/repos/builtin/packages/mfem/package.py b/var/spack/repos/builtin/packages/mfem/package.py index e7a1d963889..fd91f705ebb 100644 --- a/var/spack/repos/builtin/packages/mfem/package.py +++ 
b/var/spack/repos/builtin/packages/mfem/package.py @@ -23,7 +23,9 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import glob, string +import glob +import string + class Mfem(Package): """Free, lightweight, scalable C++ library for finite element methods.""" @@ -31,10 +33,12 @@ class Mfem(Package): homepage = 'http://www.mfem.org' url = 'https://github.com/mfem/mfem' - version('3.2', '2938c3deed4ec4f7fd5b5f5cfe656845282e86e2dcd477d292390058b7b94340', + version('3.2', + '2938c3deed4ec4f7fd5b5f5cfe656845282e86e2dcd477d292390058b7b94340', url='http://goo.gl/Y9T75B', expand=False, preferred=True) - version('3.1', '841ea5cf58de6fae4de0f553b0e01ebaab9cd9c67fa821e8a715666ecf18fc57', + version('3.1', + '841ea5cf58de6fae4de0f553b0e01ebaab9cd9c67fa821e8a715666ecf18fc57', url='http://goo.gl/xrScXn', expand=False) # version('3.1', git='https://github.com/mfem/mfem.git', # commit='dbae60fe32e071989b52efaaf59d7d0eb2a3b574') @@ -71,8 +75,9 @@ def check_variants(self, spec): if '+suite-sparse' in spec and ('+metis' not in spec or '+lapack' not in spec): raise InstallError('mfem+suite-sparse must be built with ' + - '+metis and +lapack!') - if 'metis@5:' in spec and '%clang' in spec and ('^cmake %gcc' not in spec): + '+metis and +lapack!') + if 'metis@5:' in spec and '%clang' in spec and ( + '^cmake %gcc' not in spec): raise InstallError('To work around CMake bug with clang, must ' + 'build mfem with mfem[+variants] %clang ' + '^cmake %gcc to force CMake to build with gcc') @@ -86,15 +91,17 @@ def install(self, spec, prefix): if '+lapack' in spec: lapack_lib = '-L{0} -llapack -L{1} -lblas'.format( spec['lapack'].prefix.lib, spec['blas'].prefix.lib) - options.extend(['MFEM_USE_LAPACK=YES', - 'LAPACK_OPT=-I%s' % spec['lapack'].prefix.include, - 'LAPACK_LIB=%s' % lapack_lib]) + options.extend([ + 'MFEM_USE_LAPACK=YES', + 'LAPACK_OPT=-I%s' % spec['lapack'].prefix.include, + 'LAPACK_LIB=%s' % lapack_lib]) if '+hypre' in spec: - options.extend(['HYPRE_DIR=%s' % spec['hypre'].prefix, - 'HYPRE_OPT=-I%s' % spec['hypre'].prefix.include, - 'HYPRE_LIB=-L%s' % spec['hypre'].prefix.lib + - ' -lHYPRE']) + options.extend([ + 'HYPRE_DIR=%s' % spec['hypre'].prefix, + 'HYPRE_OPT=-I%s' % spec['hypre'].prefix.include, + 'HYPRE_LIB=-L%s' % spec['hypre'].prefix.lib + + ' -lHYPRE']) if '+metis' in spec: metis_lib = '-L%s -lmetis' % spec['metis'].prefix.lib @@ -102,22 +109,26 @@ def install(self, spec, prefix): metis_str = 'MFEM_USE_METIS_5=YES' else: metis_str = 'MFEM_USE_METIS_5=NO' - options.extend([metis_str, - 'METIS_DIR=%s' % spec['metis'].prefix, - 'METIS_OPT=-I%s' % spec['metis'].prefix.include, - 'METIS_LIB=%s' % metis_lib]) + options.extend([ + metis_str, + 'METIS_DIR=%s' % spec['metis'].prefix, + 'METIS_OPT=-I%s' % spec['metis'].prefix.include, + 'METIS_LIB=%s' % metis_lib]) - if '+mpi' in spec: options.extend(['MFEM_USE_MPI=YES']) + if '+mpi' in spec: + options.extend(['MFEM_USE_MPI=YES']) if '+suite-sparse' in spec: ssp = spec['suite-sparse'].prefix ss_lib = '-L%s' % ssp.lib ss_lib += (' -lumfpack -lcholmod -lcolamd -lamd -lcamd' + - ' -lccolamd -lsuitesparseconfig') + ' -lccolamd -lsuitesparseconfig') no_librt_archs = ['darwin-i686', 'darwin-x86_64'] - no_rt = any(map(lambda a: spec.satisfies('='+a), no_librt_archs)) - if not no_rt: ss_lib += ' -lrt' + no_rt = any(map(lambda a: spec.satisfies('=' + a), + no_librt_archs)) + if not no_rt: + ss_lib += ' -lrt' ss_lib += (' ' + metis_lib + ' ' + 
lapack_lib) options.extend(['MFEM_USE_SUITESPARSE=YES', @@ -125,10 +136,11 @@ def install(self, spec, prefix): 'SUITESPARSE_OPT=-I%s' % ssp.include, 'SUITESPARSE_LIB=%s' % ss_lib]) - if '+debug' in spec: options.extend(['MFEM_DEBUG=YES']) + if '+debug' in spec: + options.extend(['MFEM_DEBUG=YES']) # Dirty hack to cope with URL redirect - tgz_file = string.split(self.url,'/')[-1] + tgz_file = string.split(self.url, '/')[-1] tar = which('tar') tar('xzvf', tgz_file) cd(glob.glob('mfem*')[0]) @@ -138,12 +150,12 @@ def install(self, spec, prefix): make('all') # Run a small test before installation - args = ['-m', join_path('data','star.mesh'), '--no-visualization'] + args = ['-m', join_path('data', 'star.mesh'), '--no-visualization'] if '+mpi' in spec: Executable(join_path(spec['mpi'].prefix.bin, 'mpirun'))('-np', '4', - join_path('examples','ex1p'), + join_path('examples', 'ex1p'), *args) else: Executable(join_path('examples', 'ex1'))(*args) diff --git a/var/spack/repos/builtin/packages/mpc/package.py b/var/spack/repos/builtin/packages/mpc/package.py index 92eb976f8bc..71cacd5dfe9 100644 --- a/var/spack/repos/builtin/packages/mpc/package.py +++ b/var/spack/repos/builtin/packages/mpc/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Mpc(Package): """Gnu Mpc is a C library for the arithmetic of complex numbers with arbitrarily high precision and correct rounding of the diff --git a/var/spack/repos/builtin/packages/mpe2/package.py b/var/spack/repos/builtin/packages/mpe2/package.py index f69ea2d65ba..a129d599497 100644 --- a/var/spack/repos/builtin/packages/mpe2/package.py +++ b/var/spack/repos/builtin/packages/mpe2/package.py @@ -24,8 +24,9 @@ ############################################################################## from spack import * + class Mpe2(Package): - """Message Passing Extensions (MPE) -- Parallel, shared X window graphics""" + """Message Passing Extensions (MPE): Parallel, shared X window graphics""" homepage = "http://www.mcs.anl.gov/research/projects/perfvis/software/MPE/" url = "ftp://ftp.mcs.anl.gov/pub/mpi/mpe/mpe2-1.3.0.tar.gz" diff --git a/var/spack/repos/builtin/packages/mpfr/package.py b/var/spack/repos/builtin/packages/mpfr/package.py index ed3926a8abf..5777cd19264 100644 --- a/var/spack/repos/builtin/packages/mpfr/package.py +++ b/var/spack/repos/builtin/packages/mpfr/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Mpfr(Package): """The MPFR library is a C library for multiple-precision floating-point computations with correct rounding.""" diff --git a/var/spack/repos/builtin/packages/mpibash/package.py b/var/spack/repos/builtin/packages/mpibash/package.py index e659663d901..f3feaaaa426 100644 --- a/var/spack/repos/builtin/packages/mpibash/package.py +++ b/var/spack/repos/builtin/packages/mpibash/package.py @@ -22,9 +22,9 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os from spack import * + class Mpibash(Package): """Parallel scripting right from the Bourne-Again Shell (Bash)""" homepage = "http://www.ccs3.lanl.gov/~pakin/software/mpibash-4.3.html" diff --git a/var/spack/repos/builtin/packages/mpileaks/package.py b/var/spack/repos/builtin/packages/mpileaks/package.py index 51bc66a0eb9..ec4e9b30ccd 100644 --- 
a/var/spack/repos/builtin/packages/mpileaks/package.py +++ b/var/spack/repos/builtin/packages/mpileaks/package.py @@ -24,8 +24,10 @@ ############################################################################## from spack import * + class Mpileaks(Package): - """Tool to detect and report leaked MPI objects like MPI_Requests and MPI_Datatypes.""" + """Tool to detect and report leaked MPI objects like MPI_Requests and + MPI_Datatypes.""" homepage = "https://github.com/hpc/mpileaks" url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz" diff --git a/var/spack/repos/builtin/packages/mrnet/package.py b/var/spack/repos/builtin/packages/mrnet/package.py index 3380c7f823d..490e99dd838 100644 --- a/var/spack/repos/builtin/packages/mrnet/package.py +++ b/var/spack/repos/builtin/packages/mrnet/package.py @@ -24,34 +24,40 @@ ############################################################################## from spack import * + class Mrnet(Package): """The MRNet Multi-Cast Reduction Network.""" homepage = "http://paradyn.org/mrnet" url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_5.0.1.tar.gz" list_url = "http://ftp.cs.wisc.edu/paradyn/mrnet" - version('5.0.1-2', git='https://github.com/dyninst/mrnet.git', commit='20b1eacfc6d680d9f6472146d2dfaa0f900cc2e9') + version('5.0.1-2', git='https://github.com/dyninst/mrnet.git', + commit='20b1eacfc6d680d9f6472146d2dfaa0f900cc2e9') version('5.0.1', '17f65738cf1b9f9b95647ff85f69ecdd') version('4.1.0', '5a248298b395b329e2371bf25366115c') version('4.0.0', 'd00301c078cba57ef68613be32ceea2f') - # Add a patch that brings mrnet-5.0.1 up to date with the current development tree - # The development tree contains fixes needed for the krell based tools - variant('krellpatch', default=False, description="Build MRNet with krell openspeedshop based patch.") + # Add a patch that brings mrnet-5.0.1 up to date with the current + # development tree The development tree contains fixes needed for the + # krell based tools + variant('krellpatch', default=False, + description="Build MRNet with krell openspeedshop based patch.") patch('krell-5.0.1.patch', when='@5.0.1+krellpatch') - variant('lwthreads', default=False, description="Also build the MRNet LW threadsafe libraries") + variant('lwthreads', default=False, + description="Also build the MRNet LW threadsafe libraries") parallel = False depends_on("boost") def install(self, spec, prefix): - # Build the MRNet LW thread safe libraries when the krelloptions variant is present + # Build the MRNet LW thread safe libraries when the krelloptions + # variant is present if '+lwthreads' in spec: - configure("--prefix=%s" %prefix, "--enable-shared", "--enable-ltwt-threadsafe") + configure("--prefix=%s" % prefix, "--enable-shared", + "--enable-ltwt-threadsafe") else: - configure("--prefix=%s" %prefix, "--enable-shared") + configure("--prefix=%s" % prefix, "--enable-shared") make() make("install") - diff --git a/var/spack/repos/builtin/packages/msgpack-c/package.py b/var/spack/repos/builtin/packages/msgpack-c/package.py index d42ac255d0e..9a726e23564 100644 --- a/var/spack/repos/builtin/packages/msgpack-c/package.py +++ b/var/spack/repos/builtin/packages/msgpack-c/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class MsgpackC(Package): """A small, fast binary interchange format convertible to/from JSON""" homepage = "http://www.msgpack.org" diff --git a/var/spack/repos/builtin/packages/mumps/package.py 
b/var/spack/repos/builtin/packages/mumps/package.py index b85a6d2b949..32bc42a9c32 100644 --- a/var/spack/repos/builtin/packages/mumps/package.py +++ b/var/spack/repos/builtin/packages/mumps/package.py @@ -36,15 +36,24 @@ class Mumps(Package): version('5.0.1', 'b477573fdcc87babe861f62316833db0') - variant('mpi', default=True, description='Activate the compilation of MUMPS with the MPI support') - variant('scotch', default=False, description='Activate Scotch as a possible ordering library') - variant('ptscotch', default=False, description='Activate PT-Scotch as a possible ordering library') - variant('metis', default=False, description='Activate Metis as a possible ordering library') - variant('parmetis', default=False, description='Activate Parmetis as a possible ordering library') - variant('double', default=True, description='Activate the compilation of dmumps') - variant('float', default=True, description='Activate the compilation of smumps') - variant('complex', default=True, description='Activate the compilation of cmumps and/or zmumps') - variant('idx64', default=False, description='Use int64_t/integer*8 as default index type') + variant('mpi', default=True, + description='Compile MUMPS with MPI support') + variant('scotch', default=False, + description='Activate Scotch as a possible ordering library') + variant('ptscotch', default=False, + description='Activate PT-Scotch as a possible ordering library') + variant('metis', default=False, + description='Activate Metis as a possible ordering library') + variant('parmetis', default=False, + description='Activate Parmetis as a possible ordering library') + variant('double', default=True, + description='Activate the compilation of dmumps') + variant('float', default=True, + description='Activate the compilation of smumps') + variant('complex', default=True, + description='Activate the compilation of cmumps and/or zmumps') + variant('idx64', default=False, + description='Use int64_t/integer*8 as default index type') variant('shared', default=True, description='Build shared libraries') depends_on('scotch + esmumps', when='~ptscotch+scotch') @@ -61,8 +70,10 @@ class Mumps(Package): # end before install # def patch(self): def write_makefile_inc(self): - if ('+parmetis' in self.spec or '+ptscotch' in self.spec) and '+mpi' not in self.spec: # NOQA: ignore=E501 - raise RuntimeError('You cannot use the variants parmetis or ptscotch without mpi') # NOQA: ignore=E501 + if ('+parmetis' in self.spec or '+ptscotch' in self.spec) and ( + '+mpi' not in self.spec): + raise RuntimeError( + 'You cannot use the variants parmetis or ptscotch without mpi') makefile_conf = ["LIBBLAS = %s" % to_link_flags( self.spec['blas'].blas_shared_lib) @@ -115,7 +126,7 @@ def write_makefile_inc(self): # the fortran compilation flags most probably are # working only for intel and gnu compilers this is # perhaps something the compiler should provide - ['OPTF = %s -O -DALLOW_NON_INIT %s' % (fpic, '-fdefault-integer-8' if self.compiler.name == "gcc" else '-i8'), # NOQA: ignore=E501 + ['OPTF = %s -O -DALLOW_NON_INIT %s' % (fpic, '-fdefault-integer-8' if self.compiler.name == "gcc" else '-i8'), # noqa 'OPTL = %s -O ' % fpic, 'OPTC = %s -O -DINTSIZE64' % fpic]) else: @@ -148,13 +159,13 @@ def write_makefile_inc(self): # 10.10. Use gfortran. 
(Homebrew) makefile_conf.extend([ 'LIBEXT=.dylib', - 'AR=%s -dynamiclib -Wl,-install_name -Wl,%s/$(notdir $@) -undefined dynamic_lookup -o ' % (os.environ['FC'], prefix.lib), # NOQA: ignore=E501 + 'AR=%s -dynamiclib -Wl,-install_name -Wl,%s/$(notdir $@) -undefined dynamic_lookup -o ' % (os.environ['FC'], prefix.lib), # noqa 'RANLIB=echo' ]) else: makefile_conf.extend([ 'LIBEXT=.so', - 'AR=$(FL) -shared -Wl,-soname -Wl,%s/$(notdir $@) -o' % prefix.lib, # NOQA: ignore=E501 + 'AR=$(FL) -shared -Wl,-soname -Wl,%s/$(notdir $@) -o' % prefix.lib, # noqa 'RANLIB=echo' ]) else: diff --git a/var/spack/repos/builtin/packages/munge/package.py b/var/spack/repos/builtin/packages/munge/package.py index ebe3e18882a..51455006e94 100644 --- a/var/spack/repos/builtin/packages/munge/package.py +++ b/var/spack/repos/builtin/packages/munge/package.py @@ -25,12 +25,14 @@ from spack import * import os + class Munge(Package): """ MUNGE Uid 'N' Gid Emporium """ homepage = "https://code.google.com/p/munge/" url = "https://github.com/dun/munge/releases/download/munge-0.5.11/munge-0.5.11.tar.bz2" - version('0.5.11', 'bd8fca8d5f4c1fcbef1816482d49ee01', url='https://github.com/dun/munge/releases/download/munge-0.5.11/munge-0.5.11.tar.bz2') + version('0.5.11', 'bd8fca8d5f4c1fcbef1816482d49ee01', + url='https://github.com/dun/munge/releases/download/munge-0.5.11/munge-0.5.11.tar.bz2') depends_on('openssl') depends_on('libgcrypt') @@ -41,4 +43,3 @@ def install(self, spec, prefix): make() make("install") - diff --git a/var/spack/repos/builtin/packages/muparser/package.py b/var/spack/repos/builtin/packages/muparser/package.py index 47d1855329e..1373c8cd7b3 100644 --- a/var/spack/repos/builtin/packages/muparser/package.py +++ b/var/spack/repos/builtin/packages/muparser/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Muparser(Package): """C++ math expression parser library.""" homepage = "http://muparser.beltoforion.de/" diff --git a/var/spack/repos/builtin/packages/muster/package.py b/var/spack/repos/builtin/packages/muster/package.py index 64b7324415f..81817e48dc0 100644 --- a/var/spack/repos/builtin/packages/muster/package.py +++ b/var/spack/repos/builtin/packages/muster/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Muster(Package): """The Muster library provides implementations of sequential and parallel K-Medoids clustering algorithms. 
It is intended as a diff --git a/var/spack/repos/builtin/packages/mvapich2/package.py b/var/spack/repos/builtin/packages/mvapich2/package.py index 0fa5821b081..17124a05720 100644 --- a/var/spack/repos/builtin/packages/mvapich2/package.py +++ b/var/spack/repos/builtin/packages/mvapich2/package.py @@ -40,7 +40,8 @@ class Mvapich2(Package): provides('mpi@:2.2', when='@1.9') # MVAPICH2-1.9 supports MPI 2.2 provides('mpi@:3.0', when='@2.0:') # MVAPICH2-2.0 supports MPI 3.0 - variant('debug', default=False, description='Enables debug information and error messages at run-time') + variant('debug', default=False, + description='Enable debug info and error messages at run-time') ########## # TODO : Process managers should be grouped into the same variant, @@ -51,10 +52,14 @@ class Mvapich2(Package): GFORKER = 'gforker' REMSHELL = 'remshell' SLURM_INCOMPATIBLE_PMS = (HYDRA, GFORKER, REMSHELL) - variant(SLURM, default=False, description='Sets slurm as the only process manager') - variant(HYDRA, default=False, description='Sets hydra as one of the process managers') - variant(GFORKER, default=False, description='Sets gforker as one of the process managers') - variant(REMSHELL, default=False, description='Sets remshell as one of the process managers') + variant(SLURM, default=False, + description='Set slurm as the only process manager') + variant(HYDRA, default=False, + description='Set hydra as one of the process managers') + variant(GFORKER, default=False, + description='Set gforker as one of the process managers') + variant(REMSHELL, default=False, + description='Set remshell as one of the process managers') ########## ########## @@ -67,12 +72,24 @@ class Mvapich2(Package): NEMESIS = 'nemesis' MRAIL = 'mrail' SUPPORTED_NETWORKS = (PSM, SOCK, NEMESIS, NEMESISIB, NEMESISIBTCP) - variant(PSM, default=False, description='Configures a build for QLogic PSM-CH3') - variant(SOCK, default=False, description='Configures a build for TCP/IP-CH3') - variant(NEMESISIBTCP, default=False, description='Configures a build for both OFA-IB-Nemesis and TCP/IP-Nemesis') - variant(NEMESISIB, default=False, description='Configures a build for OFA-IB-Nemesis') - variant(NEMESIS, default=False, description='Configures a build for TCP/IP-Nemesis') - variant(MRAIL, default=False, description='Configures a build for OFA-IB-CH3') + variant( + PSM, default=False, + description='Configure for QLogic PSM-CH3') + variant( + SOCK, default=False, + description='Configure for TCP/IP-CH3') + variant( + NEMESISIBTCP, default=False, + description='Configure for both OFA-IB-Nemesis and TCP/IP-Nemesis') + variant( + NEMESISIB, default=False, + description='Configure for OFA-IB-Nemesis') + variant( + NEMESIS, default=False, + description='Configure for TCP/IP-Nemesis') + variant( + MRAIL, default=False, + description='Configure for OFA-IB-CH3') ########## # FIXME : CUDA support is missing diff --git a/var/spack/repos/builtin/packages/mxml/package.py b/var/spack/repos/builtin/packages/mxml/package.py index 254a5f1595f..113c48c18f2 100644 --- a/var/spack/repos/builtin/packages/mxml/package.py +++ b/var/spack/repos/builtin/packages/mxml/package.py @@ -1,3 +1,27 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. 
+# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## from spack import * diff --git a/var/spack/repos/builtin/packages/nag/package.py b/var/spack/repos/builtin/packages/nag/package.py index 63269a50b1c..792e3fe3c7a 100644 --- a/var/spack/repos/builtin/packages/nag/package.py +++ b/var/spack/repos/builtin/packages/nag/package.py @@ -43,8 +43,8 @@ class Nag(Package): def url_for_version(self, version): # TODO: url and checksum are architecture dependent # TODO: We currently only support x86_64 - return 'http://www.nag.com/downloads/impl/npl6a%sna_amd64.tgz' % \ - str(version).replace('.', '') + return 'http://www.nag.com/downloads/impl/npl6a%sna_amd64.tgz' % str( + version).replace('.', '') def install(self, spec, prefix): # Set installation directories diff --git a/var/spack/repos/builtin/packages/nasm/package.py b/var/spack/repos/builtin/packages/nasm/package.py index c955e6d13ee..9faccccaaea 100644 --- a/var/spack/repos/builtin/packages/nasm/package.py +++ b/var/spack/repos/builtin/packages/nasm/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Nasm(Package): """NASM (Netwide Assembler) is an 80x86 assembler designed for portability and modularity. It includes a disassembler as well.""" diff --git a/var/spack/repos/builtin/packages/nccmp/package.py b/var/spack/repos/builtin/packages/nccmp/package.py index 68bddd69576..d59ca093813 100644 --- a/var/spack/repos/builtin/packages/nccmp/package.py +++ b/var/spack/repos/builtin/packages/nccmp/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Nccmp(Package): """Compare NetCDF Files""" homepage = "http://nccmp.sourceforge.net/" diff --git a/var/spack/repos/builtin/packages/ncdu/package.py b/var/spack/repos/builtin/packages/ncdu/package.py index 0f2f9cda45f..2147319d3fb 100644 --- a/var/spack/repos/builtin/packages/ncdu/package.py +++ b/var/spack/repos/builtin/packages/ncdu/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Ncdu(Package): """ Ncdu is a disk usage analyzer with an ncurses interface. 
It is designed @@ -38,15 +39,15 @@ class Ncdu(Package): version('1.11', '9e44240a5356b029f05f0e70a63c4d12') version('1.10', '7535decc8d54eca811493e82d4bfab2d') - version('1.9' , '93258079db897d28bb8890e2db89b1fb') - version('1.8' , '94d7a821f8a0d7ba8ef3dd926226f7d5') - version('1.7' , '172047c29d232724cc62e773e82e592a') + version('1.9', '93258079db897d28bb8890e2db89b1fb') + version('1.8', '94d7a821f8a0d7ba8ef3dd926226f7d5') + version('1.7', '172047c29d232724cc62e773e82e592a') depends_on("ncurses") def install(self, spec, prefix): configure('--prefix=%s' % prefix, - '--with-ncurses=%s' % spec['ncurses']) + '--with-ncurses=%s' % spec['ncurses']) make() make("install") diff --git a/var/spack/repos/builtin/packages/nco/package.py b/var/spack/repos/builtin/packages/nco/package.py index 4bc4da68e36..16d72b4593c 100644 --- a/var/spack/repos/builtin/packages/nco/package.py +++ b/var/spack/repos/builtin/packages/nco/package.py @@ -23,7 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import os + class Nco(Package): """The NCO toolkit manipulates and analyzes data stored in @@ -39,9 +39,9 @@ class Nco(Package): depends_on('netcdf') depends_on('antlr@2.7.7+cxx') # (required for ncap2) - depends_on('gsl') # (desirable for ncap2) + depends_on('gsl') # (desirable for ncap2) depends_on('udunits2') # (allows dimensional unit transformations) - # depends_on('opendap') # (enables network transparency), + # depends_on('opendap') # (enables network transparency), def install(self, spec, prefix): opts = [ diff --git a/var/spack/repos/builtin/packages/ncurses/package.py b/var/spack/repos/builtin/packages/ncurses/package.py index 3ab2b0477d6..aaacbac7b12 100644 --- a/var/spack/repos/builtin/packages/ncurses/package.py +++ b/var/spack/repos/builtin/packages/ncurses/package.py @@ -24,11 +24,14 @@ ############################################################################## from spack import * + class Ncurses(Package): - """The ncurses (new curses) library is a free software emulation of curses - in System V Release 4.0, and more. It uses terminfo format, supports pads and - color and multiple highlights and forms characters and function-key mapping, - and has all the other SYSV-curses enhancements over BSD curses. + """The ncurses (new curses) library is a free software emulation of + curses in System V Release 4.0, and more. It uses terminfo format, + supports pads and color and multiple highlights and forms + characters and function-key mapping, and has all the other + SYSV-curses enhancements over BSD curses. 
+
+    """
 
     homepage = "http://invisible-island.net/ncurses/ncurses.html"
 
diff --git a/var/spack/repos/builtin/packages/ncview/package.py b/var/spack/repos/builtin/packages/ncview/package.py
index b39e17ca49d..f61e6984b51 100644
--- a/var/spack/repos/builtin/packages/ncview/package.py
+++ b/var/spack/repos/builtin/packages/ncview/package.py
@@ -24,6 +24,7 @@
 ##############################################################################
 from spack import *
 
+
 class Ncview(Package):
     """Simple viewer for NetCDF files."""
     homepage = "http://meteora.ucsd.edu/~pierce/ncview_home_page.html"
diff --git a/var/spack/repos/builtin/packages/ndiff/package.py b/var/spack/repos/builtin/packages/ndiff/package.py
index 3c9dd4054af..dc41add03f1 100644
--- a/var/spack/repos/builtin/packages/ndiff/package.py
+++ b/var/spack/repos/builtin/packages/ndiff/package.py
@@ -24,11 +24,15 @@
 ##############################################################################
 from spack import *
 
+
 class Ndiff(Package):
-    """The ndiff tool is a binary utility that compares putatively similar files
-    while ignoring small numeric differernces. This utility is most often used
-    to compare files containing a lot of floating-point numeric data that
-    may be slightly different due to numeric error."""
+    """The ndiff tool is a binary utility that compares putatively similar
+    files while ignoring small numeric differences. This utility is
+    most often used to compare files containing a lot of
+    floating-point numeric data that may be slightly different due to
+    numeric error.
+
+    """
 
     homepage = "http://ftp.math.utah.edu/pub/ndiff/"
     url = "http://ftp.math.utah.edu/pub/ndiff/ndiff-2.00.tar.gz"
diff --git a/var/spack/repos/builtin/packages/netcdf-cxx/package.py b/var/spack/repos/builtin/packages/netcdf-cxx/package.py
index 994c51c0da0..2c3ab733099 100644
--- a/var/spack/repos/builtin/packages/netcdf-cxx/package.py
+++ b/var/spack/repos/builtin/packages/netcdf-cxx/package.py
@@ -24,6 +24,7 @@
 ##############################################################################
 from spack import *
 
+
 class NetcdfCxx(Package):
     """Deprecated C++ compatibility bindings for NetCDF.
     These do NOT read or write NetCDF-4 files, and are no longer
diff --git a/var/spack/repos/builtin/packages/netcdf/package.py b/var/spack/repos/builtin/packages/netcdf/package.py
index ad4ee596402..ab40c143400 100644
--- a/var/spack/repos/builtin/packages/netcdf/package.py
+++ b/var/spack/repos/builtin/packages/netcdf/package.py
@@ -26,9 +26,11 @@
 
 class Netcdf(Package):
-    """NetCDF is a set of software libraries and self-describing, machine-independent
-    data formats that support the creation, access, and sharing of array-oriented
-    scientific data."""
+    """NetCDF is a set of software libraries and self-describing,
+    machine-independent data formats that support the creation, access,
+    and sharing of array-oriented scientific data.
+
+    """
 
     homepage = "http://www.unidata.ucar.edu/software/netcdf"
     url = "ftp://ftp.unidata.ucar.edu/pub/netcdf/netcdf-4.3.3.tar.gz"
@@ -74,18 +76,19 @@ def install(self, spec, prefix):
             "--enable-dap"
         ]
 
-        # Make sure Netcdf links against Spack's curl
-        # Otherwise it may pick up system's curl, which could lead to link errors:
-        # /usr/lib/x86_64-linux-gnu/libcurl.so: undefined reference to `SSL_CTX_use_certificate_chain_file@OPENSSL_1.0.0'
+        # Make sure Netcdf links against Spack's curl, otherwise
+        # it may pick up system's curl, which can give link errors,
+        # e.g.:
+        # undefined reference to `SSL_CTX_use_certificate_chain_file`
         LIBS.append("-lcurl")
         CPPFLAGS.append("-I%s" % spec['curl'].prefix.include)
-        LDFLAGS.append( "-L%s" % spec['curl'].prefix.lib)
+        LDFLAGS.append("-L%s" % spec['curl'].prefix.lib)
 
         if '+mpi' in spec:
             config_args.append('--enable-parallel4')
 
         CPPFLAGS.append("-I%s/include" % spec['hdf5'].prefix)
-        LDFLAGS.append( "-L%s/lib" % spec['hdf5'].prefix)
+        LDFLAGS.append("-L%s/lib" % spec['hdf5'].prefix)
 
         # HDF4 support
         # As of NetCDF 4.1.3, "--with-hdf4=..." is no longer a valid option
@@ -93,13 +96,13 @@ def install(self, spec, prefix):
         if '+hdf4' in spec:
             config_args.append("--enable-hdf4")
             CPPFLAGS.append("-I%s/include" % spec['hdf'].prefix)
-            LDFLAGS.append( "-L%s/lib" % spec['hdf'].prefix)
-            LIBS.append( "-l%s" % "jpeg")
+            LDFLAGS.append("-L%s/lib" % spec['hdf'].prefix)
+            LIBS.append("-l%s" % "jpeg")
 
         if 'szip' in spec:
             CPPFLAGS.append("-I%s/include" % spec['szip'].prefix)
-            LDFLAGS.append( "-L%s/lib" % spec['szip'].prefix)
-            LIBS.append( "-l%s" % "sz")
+            LDFLAGS.append("-L%s/lib" % spec['szip'].prefix)
+            LIBS.append("-l%s" % "sz")
 
         # Fortran support
         # In version 4.2+, NetCDF-C and NetCDF-Fortran have split.
diff --git a/var/spack/repos/builtin/packages/netgauge/package.py b/var/spack/repos/builtin/packages/netgauge/package.py
index be9292fabb7..b57cdbe5f37 100644
--- a/var/spack/repos/builtin/packages/netgauge/package.py
+++ b/var/spack/repos/builtin/packages/netgauge/package.py
@@ -24,6 +24,7 @@
 ##############################################################################
 from spack import *
 
+
 class Netgauge(Package):
     """Netgauge is a high-precision network parameter measurement
     tool. It supports benchmarking of many different network protocols
diff --git a/var/spack/repos/builtin/packages/netlib-lapack/package.py b/var/spack/repos/builtin/packages/netlib-lapack/package.py
index 70015baf1c5..08c94a5c9b8 100644
--- a/var/spack/repos/builtin/packages/netlib-lapack/package.py
+++ b/var/spack/repos/builtin/packages/netlib-lapack/package.py
@@ -26,11 +26,12 @@
 
 class NetlibLapack(Package):
-    """
-    LAPACK version 3.X is a comprehensive FORTRAN library that does linear algebra operations including matrix
-    inversions, least squared solutions to linear sets of equations, eigenvector analysis, singular value
-    decomposition, etc. It is a very comprehensive and reputable package that has found extensive use in the
-    scientific community.
+    """LAPACK version 3.X is a comprehensive FORTRAN library that does
+    linear algebra operations including matrix inversions, least squared
+    solutions to linear sets of equations, eigenvector analysis, singular
+    value decomposition, etc. It is a very comprehensive and reputable
+    package that has found extensive use in the scientific community.
+ """ homepage = "http://www.netlib.org/lapack/" url = "http://www.netlib.org/lapack/lapack-3.5.0.tgz" @@ -44,9 +45,11 @@ class NetlibLapack(Package): variant('debug', default=False, description='Activates the Debug build type') variant('shared', default=True, description="Build shared library version") - variant('external-blas', default=False, description='Build lapack with an external blas') + variant('external-blas', default=False, + description='Build lapack with an external blas') - variant('lapacke', default=True, description='Activates the build of the LAPACKE C interface') + variant('lapacke', default=True, + description='Activates the build of the LAPACKE C interface') # virtual dependency provides('blas', when='~external-blas') @@ -55,26 +58,30 @@ class NetlibLapack(Package): depends_on('cmake', type='build') depends_on('blas', when='+external-blas') - def patch(self): # Fix cblas CMakeLists.txt -- has wrong case for subdirectory name. if self.spec.satisfies('@3.6.0:'): - filter_file('${CMAKE_CURRENT_SOURCE_DIR}/CMAKE/', - '${CMAKE_CURRENT_SOURCE_DIR}/cmake/', 'CBLAS/CMakeLists.txt', string=True) + filter_file( + '${CMAKE_CURRENT_SOURCE_DIR}/CMAKE/', + '${CMAKE_CURRENT_SOURCE_DIR}/cmake/', + 'CBLAS/CMakeLists.txt', string=True) def install_one(self, spec, prefix, shared): - cmake_args = ['-DBUILD_SHARED_LIBS:BOOL=%s' % ('ON' if shared else 'OFF'), - '-DCMAKE_BUILD_TYPE:STRING=%s' % ('Debug' if '+debug' in spec else 'Release'), - '-DLAPACKE:BOOL=%s' % ('ON' if '+lapacke' in spec else 'OFF')] + cmake_args = [ + '-DBUILD_SHARED_LIBS:BOOL=%s' % ('ON' if shared else 'OFF'), + '-DCMAKE_BUILD_TYPE:STRING=%s' % ( + 'Debug' if '+debug' in spec else 'Release'), + '-DLAPACKE:BOOL=%s' % ('ON' if '+lapacke' in spec else 'OFF')] if spec.satisfies('@3.6.0:'): - cmake_args.extend(['-DCBLAS=ON']) # always build CBLAS + cmake_args.extend(['-DCBLAS=ON']) # always build CBLAS if '+external-blas' in spec: - # TODO : the mechanism to specify the library should be more general, + # TODO : mechanism to specify the library should be more general, # TODO : but this allows to have an hook to an external blas cmake_args.extend([ '-DUSE_OPTIMIZED_BLAS:BOOL=ON', - '-DBLAS_LIBRARIES:PATH=%s' % join_path(spec['blas'].prefix.lib, 'libblas.a') + '-DBLAS_LIBRARIES:PATH=%s' % join_path( + spec['blas'].prefix.lib, 'libblas.a') ]) cmake_args.extend(std_cmake_args) @@ -85,7 +92,6 @@ def install_one(self, spec, prefix, shared): make() make("install") - def install(self, spec, prefix): # Always build static libraries. self.install_one(spec, prefix, False) @@ -94,15 +100,17 @@ def install(self, spec, prefix): if '+shared' in spec: self.install_one(spec, prefix, True) - def setup_dependent_package(self, module, dspec): # This is WIP for a prototype interface for virtual packages. # We can update this as more builds start depending on BLAS/LAPACK. 
- libdir = find_library_path('libblas.a', self.prefix.lib64, self.prefix.lib) + libdir = find_library_path( + 'libblas.a', self.prefix.lib64, self.prefix.lib) self.spec.blas_static_lib = join_path(libdir, 'libblas.a') self.spec.lapack_static_lib = join_path(libdir, 'liblapack.a') if '+shared' in self.spec: - self.spec.blas_shared_lib = join_path(libdir, 'libblas.%s' % dso_suffix) - self.spec.lapack_shared_lib = join_path(libdir, 'liblapack.%s' % dso_suffix) + self.spec.blas_shared_lib = join_path( + libdir, 'libblas.%s' % dso_suffix) + self.spec.lapack_shared_lib = join_path( + libdir, 'liblapack.%s' % dso_suffix) diff --git a/var/spack/repos/builtin/packages/netlib-scalapack/package.py b/var/spack/repos/builtin/packages/netlib-scalapack/package.py index f7733249cf5..49b86332098 100644 --- a/var/spack/repos/builtin/packages/netlib-scalapack/package.py +++ b/var/spack/repos/builtin/packages/netlib-scalapack/package.py @@ -39,7 +39,8 @@ class NetlibScalapack(Package): # versions before 2.0.0 are not using cmake and requires blacs as # a separated package - variant('shared', default=True, description='Build the shared library version') + variant('shared', default=True, + description='Build the shared library version') variant('fpic', default=False, description="Build with -fpic compiler option") provides('scalapack') diff --git a/var/spack/repos/builtin/packages/nettle/package.py b/var/spack/repos/builtin/packages/nettle/package.py index 56e48366113..02e9ef5f1e1 100644 --- a/var/spack/repos/builtin/packages/nettle/package.py +++ b/var/spack/repos/builtin/packages/nettle/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Nettle(Package): """The Nettle package contains the low-level cryptographic library that is designed to fit easily in many contexts.""" diff --git a/var/spack/repos/builtin/packages/ninja/package.py b/var/spack/repos/builtin/packages/ninja/package.py index e3f38192899..dcd00576dd3 100644 --- a/var/spack/repos/builtin/packages/ninja/package.py +++ b/var/spack/repos/builtin/packages/ninja/package.py @@ -25,6 +25,7 @@ from spack import * import os + class Ninja(Package): """ A small, fast Make alternative """ homepage = "https://martine.github.io/ninja/" @@ -35,7 +36,6 @@ class Ninja(Package): extends('python') def install(self, spec, prefix): - sh = which('sh') python('configure.py', '--bootstrap') cp = which('cp') diff --git a/var/spack/repos/builtin/packages/numdiff/package.py b/var/spack/repos/builtin/packages/numdiff/package.py index 97164165e01..fb897f560b5 100644 --- a/var/spack/repos/builtin/packages/numdiff/package.py +++ b/var/spack/repos/builtin/packages/numdiff/package.py @@ -25,6 +25,7 @@ from spack import * import sys + class Numdiff(Package): """Numdiff is a little program that can be used to compare putatively similar files line by line and field by field, ignoring small numeric @@ -35,7 +36,7 @@ class Numdiff(Package): version('5.8.1', 'a295eb391f6cb1578209fc6b4f9d994e') - depends_on('gettext', when=sys.platform=='darwin', type='build') + depends_on('gettext', when=sys.platform == 'darwin', type='build') def install(self, spec, prefix): options = ['--prefix=%s' % prefix] diff --git a/var/spack/repos/builtin/packages/oce/package.py b/var/spack/repos/builtin/packages/oce/package.py index 108c8f8a41c..4f9f7b2e129 100644 --- a/var/spack/repos/builtin/packages/oce/package.py +++ b/var/spack/repos/builtin/packages/oce/package.py @@ -40,7 +40,8 @@ class Oce(Package): version('0.16.1', 
'4d591b240c9293e879f50d86a0cb2bb3') version('0.16', '7a4b4df5a104d75a537e25e7dd387eca') - variant('tbb', default=True, description='Build with Intel Threading Building Blocks') + variant('tbb', default=True, + description='Build with Intel Threading Building Blocks') depends_on('cmake@2.8:', type='build') depends_on('tbb', when='+tbb') diff --git a/var/spack/repos/builtin/packages/ompss/package.py b/var/spack/repos/builtin/packages/ompss/package.py index c0848ffd706..02925974ea4 100644 --- a/var/spack/repos/builtin/packages/ompss/package.py +++ b/var/spack/repos/builtin/packages/ompss/package.py @@ -26,19 +26,18 @@ import os import glob -# working config lines for ompss 14.06 : -#./nanox-0.7/config.log: $ ./configure --prefix=/usr/gapps/exmatex/ompss --with-mcc=/usr/gapps/exmatex/ompss/ --with-hwloc=/usr -#./mcxx-1.99.2/config.log: $ ./configure --prefix=/usr/gapps/exmatex/ompss --with-nanox=/usr/gapps/exmatex/ompss --enable-ompss --with-mpi=/opt/mvapich2-intel-shmem-1.7 --enable-tl-openmp-profile --enable-tl-openmp-intel class Ompss(Package): - """OmpSs is an effort to integrate features from the StarSs - programming model developed by BSC into a single programming - model. In particular, our objective is to extend OpenMP with - new directives to support asynchronous parallelism and - heterogeneity (devices like GPUs). However, it can also be - understood as new directives extending other accelerator based - APIs like CUDA or OpenCL. Our OmpSs environment is built on top - of our Mercurium compiler and Nanos++ runtime system.""" + """OmpSs is an effort to integrate features from the StarSs programming + model developed by BSC into a single programming model. In + particular, our objective is to extend OpenMP with new directives + to support asynchronous parallelism and heterogeneity (devices + like GPUs). However, it can also be understood as new directives + extending other accelerator based APIs like CUDA or OpenCL. Our + OmpSs environment is built on top of our Mercurium compiler and + Nanos++ runtime system. 
+ + """ homepage = "http://pm.bsc.es/" url = "http://pm.bsc.es/sites/default/files/ftp/ompss/releases/ompss-14.10.tar.gz" list_url = 'http://pm.bsc.es/ompss-downloads' @@ -47,7 +46,7 @@ class Ompss(Package): # all dependencies are optional, really depends_on("mpi") - #depends_on("openmp") + # depends_on("openmp") depends_on("hwloc") depends_on("extrae") @@ -61,14 +60,22 @@ def install(self, spec, prefix): openmp_options = ["--enable-tl-openmp-profile"] if spec.satisfies('%intel'): - openmp_options.append( "--enable-tl-openmp-intel" ) + openmp_options.append("--enable-tl-openmp-intel") os.chdir(glob.glob('./nanox-*').pop()) - configure("--prefix=%s" % prefix, "--with-mcc=%s" % prefix, "--with-extrae=%s" % spec['extrae'].prefix, "--with-hwloc=%s" % spec['hwloc'].prefix) + configure("--prefix=%s" % prefix, + "--with-mcc=%s" % prefix, + "--with-extrae=%s" % + spec['extrae'].prefix, + "--with-hwloc=%s" % spec['hwloc'].prefix) make() make("install") os.chdir(glob.glob('../mcxx-*').pop()) - configure("--prefix=%s" % prefix, "--with-nanox=%s" % prefix, "--enable-ompss", "--with-mpi=%s" % mpi.prefix, *openmp_options) + configure("--prefix=%s" % prefix, + "--with-nanox=%s" % prefix, + "--enable-ompss", + "--with-mpi=%s" % mpi.prefix, + *openmp_options) make() make("install") diff --git a/var/spack/repos/builtin/packages/ompt-openmp/package.py b/var/spack/repos/builtin/packages/ompt-openmp/package.py index 800c04ae0ea..40159e4c6c9 100644 --- a/var/spack/repos/builtin/packages/ompt-openmp/package.py +++ b/var/spack/repos/builtin/packages/ompt-openmp/package.py @@ -24,8 +24,14 @@ ############################################################################## from spack import * + class OmptOpenmp(Package): - """LLVM/Clang OpenMP runtime with OMPT support. This is a fork of the OpenMPToolsInterface/LLVM-openmp fork of the official LLVM OpenMP mirror. This library provides a drop-in replacement of the OpenMP runtimes for GCC, Intel and LLVM/Clang.""" + """LLVM/Clang OpenMP runtime with OMPT support. This is a fork of the + OpenMPToolsInterface/LLVM-openmp fork of the official LLVM OpenMP + mirror. This library provides a drop-in replacement of the OpenMP + runtimes for GCC, Intel and LLVM/Clang. + + """ homepage = "https://github.com/OpenMPToolsInterface/LLVM-openmp" url = "http://github.com/khuck/LLVM-openmp/archive/v0.1.tar.gz" @@ -35,13 +41,9 @@ class OmptOpenmp(Package): def install(self, spec, prefix): with working_dir("runtime/build", create=True): - - # FIXME: Modify the configure line to suit your build system here. 
- cmake('-DCMAKE_C_COMPILER=%s' % self.compiler.cc, + cmake('-DCMAKE_C_COMPILER=%s' % self.compiler.cc, '-DCMAKE_CXX_COMPILER=%s' % self.compiler.cxx, '-DCMAKE_INSTALL_PREFIX=%s' % prefix, '..', *std_cmake_args) - - # FIXME: Add logic to build and install here make() make("install") diff --git a/var/spack/repos/builtin/packages/openspeedshop/package.py b/var/spack/repos/builtin/packages/openspeedshop/package.py index 4e2694a53c6..5e141060b25 100644 --- a/var/spack/repos/builtin/packages/openspeedshop/package.py +++ b/var/spack/repos/builtin/packages/openspeedshop/package.py @@ -22,7 +22,7 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -################################################################################ +########################################################################## # Copyright (c) 2015-2016 Krell Institute. All Rights Reserved. # # This program is free software; you can redistribute it and/or modify it under @@ -38,50 +38,72 @@ # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 59 Temple # Place, Suite 330, Boston, MA 02111-1307 USA -################################################################################ +########################################################################## from spack import * + class Openspeedshop(Package): - """OpenSpeedShop is a community effort by The Krell Institute with current direct funding from DOEs NNSA. - It builds on top of a broad list of community infrastructures, most notably Dyninst and MRNet from UW, - libmonitor from Rice, and PAPI from UTK. OpenSpeedShop is an open source multi platform Linux performance - tool which is targeted to support performance analysis of applications running on both single node and - large scale IA64, IA32, EM64T, AMD64, PPC, ARM, Blue Gene and Cray platforms. OpenSpeedShop development - is hosted by the Krell Institute. The infrastructure and base components of OpenSpeedShop are released - as open source code primarily under LGPL. + """OpenSpeedShop is a community effort by The Krell Institute with + current direct funding from DOEs NNSA. It builds on top of a broad + list of community infrastructures, most notably Dyninst and MRNet + from UW, libmonitor from Rice, and PAPI from UTK. OpenSpeedShop is an + open source multi platform Linux performance tool which is targeted + to support performance analysis of applications running on both + single node and large scale IA64, IA32, EM64T, AMD64, PPC, ARM, Blue + Gene and Cray platforms. OpenSpeedShop development is hosted by the + Krell Institute. The infrastructure and base components of + OpenSpeedShop are released as open source code primarily under LGPL. 
+ """ homepage = "http://www.openspeedshop.org" - url = "https://github.com/OpenSpeedShop" + url = "https://github.com/OpenSpeedShop" version('2.2', '16cb051179c2038de4e8a845edf1d573') # Use when the git repository is available - version('2.2', branch='master', git='https://github.com/OpenSpeedShop/openspeedshop.git') + version('2.2', branch='master', + git='https://github.com/OpenSpeedShop/openspeedshop.git') # Optional mirror template - #url = "file:/home/jeg/OpenSpeedShop_ROOT/SOURCES/openspeedshop-2.2.tar.gz" - #version('2.2', '643337740dc6c2faca60f42d3620b0e1') + # url="file:/home/jeg/OpenSpeedShop_ROOT/SOURCES/openspeedshop-2.2.tar.gz" + # version('2.2', '643337740dc6c2faca60f42d3620b0e1') parallel = False - variant('offline', default=True, description="build with offline instrumentor enabled.") - variant('cbtf', default=False, description="build with cbtf instrumentor enabled.") - variant('runtime', default=False, description="build only the runtime libraries and collectors.") - variant('frontend', default=False, description="build only the front-end tool using the runtime_dir to point to the target build.") - variant('cuda', default=False, description="build with cuda packages included.") - variant('ptgf', default=False, description="build with the PTGF based gui package enabled.") - variant('rtfe', default=False, description="build for generic cluster platforms that have different processors on the fe and be nodes.") + variant('offline', default=True, + description="build with offline instrumentor enabled.") + variant('cbtf', default=False, + description="build with cbtf instrumentor enabled.") + variant('runtime', default=False, + description="build only the runtime libraries and collectors.") + variant('frontend', default=False, + description="build only the front-end tool using the runtime_dir " + "to point to the target build.") + variant('cuda', default=False, + description="build with cuda packages included.") + variant('ptgf', default=False, + description="build with the PTGF based gui package enabled.") + variant('rtfe', default=False, + description="build for generic cluster platforms that have " + "different processors on the fe and be nodes.") # MPI variants - variant('openmpi', default=False, description="Build mpi experiment collector for openmpi MPI when this variant is enabled.") - variant('mpt', default=False, description="Build mpi experiment collector for SGI MPT MPI when this variant is enabled.") - variant('mvapich2', default=False, description="Build mpi experiment collector for mvapich2 MPI when this variant is enabled.") - variant('mvapich', default=False, description="Build mpi experiment collector for mvapich MPI when this variant is enabled.") - variant('mpich2', default=False, description="Build mpi experiment collector for mpich2 MPI when this variant is enabled.") - variant('mpich', default=False, description="Build mpi experiment collector for mpich MPI when this variant is enabled.") + variant('openmpi', default=False, + description="Build mpi experiment collector for openmpi MPI.") + variant('mpt', default=False, + description="Build mpi experiment collector for SGI MPT MPI.") + variant('mvapich2', default=False, + description="Build mpi experiment collector for mvapich2 MPI.") + variant('mvapich', default=False, + description="Build mpi experiment collector for mvapich MPI.") + variant('mpich2', default=False, + description="Build mpi experiment collector for mpich2 MPI.") + variant('mpich', default=False, + description="Build mpi experiment 
collector for mpich MPI.") depends_on("cmake@3.0.2", type='build') - # Dependencies for openspeedshop that are common to all the variants of the OpenSpeedShop build + # Dependencies for openspeedshop that are common to all the variants of + # the OpenSpeedShop build depends_on("bison") depends_on("flex") depends_on("binutils@2.24+krellpatch") @@ -111,11 +133,13 @@ class Openspeedshop(Package): depends_on("mrnet@5.0.1:+lwthreads+krellpatch", when='+cbtf') def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions): - # Sets build type parameters into cmakeOptions the options that will enable the cbtf-krell built type settings + # Sets build type parameters into cmakeOptions the options that will + # enable the cbtf-krell built type settings - compile_flags="-O2 -g" + compile_flags = "-O2 -g" BuildTypeOptions = [] - # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs + # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the + # stdcmakeargs for word in cmakeOptions[:]: if word.startswith('-DCMAKE_BUILD_TYPE'): cmakeOptions.remove(word) @@ -124,63 +148,64 @@ def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions): if word.startswith('-DCMAKE_C_FLAGS'): cmakeOptions.remove(word) BuildTypeOptions.extend([ - '-DCMAKE_BUILD_TYPE=None', - '-DCMAKE_CXX_FLAGS=%s' % compile_flags, - '-DCMAKE_C_FLAGS=%s' % compile_flags + '-DCMAKE_BUILD_TYPE=None', + '-DCMAKE_CXX_FLAGS=%s' % compile_flags, + '-DCMAKE_C_FLAGS=%s' % compile_flags ]) cmakeOptions.extend(BuildTypeOptions) def set_mpi_cmakeOptions(self, spec, cmakeOptions): - # Appends to cmakeOptions the options that will enable the appropriate MPI implementations - + # Appends to cmakeOptions the options that will enable the appropriate + # MPI implementations + MPIOptions = [] # openmpi if '+openmpi' in spec: MPIOptions.extend([ - '-DOPENMPI_DIR=%s' % spec['openmpi'].prefix + '-DOPENMPI_DIR=%s' % spec['openmpi'].prefix ]) # mpich if '+mpich' in spec: MPIOptions.extend([ - '-DMPICH_DIR=%s' % spec['mpich'].prefix + '-DMPICH_DIR=%s' % spec['mpich'].prefix ]) # mpich2 if '+mpich2' in spec: MPIOptions.extend([ - '-DMPICH2_DIR=%s' % spec['mpich2'].prefix + '-DMPICH2_DIR=%s' % spec['mpich2'].prefix ]) # mvapich if '+mvapich' in spec: MPIOptions.extend([ - '-DMVAPICH_DIR=%s' % spec['mvapich'].prefix + '-DMVAPICH_DIR=%s' % spec['mvapich'].prefix ]) # mvapich2 if '+mvapich2' in spec: MPIOptions.extend([ - '-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix + '-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix ]) # mpt if '+mpt' in spec: MPIOptions.extend([ - '-DMPT_DIR=%s' % spec['mpt'].prefix + '-DMPT_DIR=%s' % spec['mpt'].prefix ]) cmakeOptions.extend(MPIOptions) - def install(self, spec, prefix): - #openmpi_prefix_path = "/opt/openmpi-1.8.2" - #mvapich_prefix_path = "/usr/local/tools/mvapich-gnu" - #'-DOPENMPI_DIR=%s' % spec['openmpi'].prefix, - #'-DOPENMPI_DIR=%s' % openmpi_prefix_path, - #'-DMVAPICH_DIR=%s' % mvapich_prefix_path, + # openmpi_prefix_path = "/opt/openmpi-1.8.2" + # mvapich_prefix_path = "/usr/local/tools/mvapich-gnu" + # '-DOPENMPI_DIR=%s' % spec['openmpi'].prefix, + # '-DOPENMPI_DIR=%s' % openmpi_prefix_path, + # '-DMVAPICH_DIR=%s' % mvapich_prefix_path, - # FIXME: How do we make this dynamic in spack? That is, can we specify the paths to cuda dynamically? - # WAITING for external package support. - #if '+cuda' in spec: + # FIXME: How do we make this dynamic in spack? + # FIXME: That is, can we specify the paths to cuda dynamically? + # WAITING for external package support. 
+ # if '+cuda' in spec: # cuda_prefix_path = "/usr/local/cuda-6.0" # cupti_prefix_path = "/usr/local/cuda-6.0/extras/CUPTI" @@ -190,19 +215,20 @@ def install(self, spec, prefix): with working_dir('build_runtime', create=True): cmakeOptions = [] - cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix, - '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64, - '-DINSTRUMENTOR=%s' % instrumentor_setting, - '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix, - '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix, - '-DPAPI_DIR=%s' % spec['papi'].prefix - ]) - + cmakeOptions.extend([ + '-DCMAKE_INSTALL_PREFIX=%s' % prefix, + '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64, + '-DINSTRUMENTOR=%s' % instrumentor_setting, + '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix, + '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix, + '-DPAPI_DIR=%s' % spec['papi'].prefix]) + # Add any MPI implementations coming from variant settings self.set_mpi_cmakeOptions(spec, cmakeOptions) cmakeOptions.extend(std_cmake_args) - # Adjust the build options to the favored ones for this build + # Adjust the build options to the favored ones for this + # build self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions) cmake('..', *cmakeOptions) @@ -214,43 +240,49 @@ def install(self, spec, prefix): cmake_prefix_path = join_path(spec['dyninst'].prefix) with working_dir('build', create=True): - #python_vers=join_path(spec['python'].version[:2]) - #'-DOPENMPI_DIR=%s' % openmpi_prefix_path, - #'-DMVAPICH_DIR=%s' % mvapich_prefix_path, - #'-DMPICH_DIR=%s' % spec['mpich'].prefix, - #'-DMPICH2_DIR=%s' % spec['mpich2'].prefix, - #'-DBoost_NO_SYSTEM_PATHS=TRUE', - #'-DBOOST_ROOT=%s' % spec['boost'].prefix, - #'-DOPENMPI_DIR=%s' % spec['openmpi'].prefix, + # python_vers=join_path(spec['python'].version[:2]) + # '-DOPENMPI_DIR=%s' % openmpi_prefix_path, + # '-DMVAPICH_DIR=%s' % mvapich_prefix_path, + # '-DMPICH_DIR=%s' % spec['mpich'].prefix, + # '-DMPICH2_DIR=%s' % spec['mpich2'].prefix, + # '-DBoost_NO_SYSTEM_PATHS=TRUE', + # '-DBOOST_ROOT=%s' % spec['boost'].prefix, + # '-DOPENMPI_DIR=%s' % spec['openmpi'].prefix, - python_vers='%d.%d' % spec['python'].version[:2] + python_vers = '%d.%d' % spec['python'].version[:2] cmakeOptions = [] - cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix, - '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64, - '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, - '-DINSTRUMENTOR=%s' % instrumentor_setting, - '-DBINUTILS_DIR=%s' % spec['binutils'].prefix, - '-DLIBELF_DIR=%s' % spec['libelf'].prefix, - '-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix, - '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix, - '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix, - '-DPAPI_DIR=%s' % spec['papi'].prefix, - '-DSQLITE3_DIR=%s' % spec['sqlite'].prefix, - '-DQTLIB_DIR=%s' % spec['qt'].prefix, - '-DPYTHON_EXECUTABLE=%s' % join_path(spec['python'].prefix + '/bin/python'), - '-DPYTHON_INCLUDE_DIR=%s' % join_path(spec['python'].prefix.include) + '/python' + python_vers, - '-DPYTHON_LIBRARY=%s' % join_path(spec['python'].prefix.lib) + '/libpython' + python_vers + '.so', - '-DBoost_NO_SYSTEM_PATHS=TRUE', - '-DBOOST_ROOT=%s' % spec['boost'].prefix, - '-DDYNINST_DIR=%s' % spec['dyninst'].prefix - ]) + cmakeOptions.extend([ + '-DCMAKE_INSTALL_PREFIX=%s' % prefix, + '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64, + '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, + '-DINSTRUMENTOR=%s' % instrumentor_setting, + '-DBINUTILS_DIR=%s' % spec['binutils'].prefix, + '-DLIBELF_DIR=%s' % spec['libelf'].prefix, + '-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix, + 
'-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix, + '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix, + '-DPAPI_DIR=%s' % spec['papi'].prefix, + '-DSQLITE3_DIR=%s' % spec['sqlite'].prefix, + '-DQTLIB_DIR=%s' % spec['qt'].prefix, + '-DPYTHON_EXECUTABLE=%s' % join_path( + spec['python'].prefix, '/bin/python'), + '-DPYTHON_INCLUDE_DIR=%s' % join_path( + spec['python'].prefix.include, + 'python' + python_vers), + '-DPYTHON_LIBRARY=%s' % join_path( + spec['python'].prefix.lib, + 'libpython' + python_vers + '.so'), + '-DBoost_NO_SYSTEM_PATHS=TRUE', + '-DBOOST_ROOT=%s' % spec['boost'].prefix, + '-DDYNINST_DIR=%s' % spec['dyninst'].prefix]) # Add any MPI implementations coming from variant settings self.set_mpi_cmakeOptions(spec, cmakeOptions) cmakeOptions.extend(std_cmake_args) - # Adjust the build options to the favored ones for this build + # Adjust the build options to the favored ones for this + # build self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions) cmake('..', *cmakeOptions) @@ -261,160 +293,171 @@ def install(self, spec, prefix): elif '+cbtf' in spec: instrumentor_setting = "cbtf" - resolve_symbols = "symtabapi" - cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix) + ':' + join_path(spec['dyninst'].prefix) - #runtime_platform_cray = "cray" - #if '+cray' in spec: + cmake_prefix_path = ':'.join(spec['cbtf'].prefix, + spec['cbtf-krell'].prefix, + spec['dyninst'].prefix) + + # resolve_symbols = "symtabapi" + # runtime_platform_cray = "cray" + # if '+cray' in spec: # if '+runtime' in spec: - # #-DCBTF_KRELL_CN_RUNTIME_DIR=${CBTF_KRELL_CN_INSTALL_DIR} \ - # with working_dir('build_cbtf_cray_runtime', create=True): - # python_vers='%d.%d' % spec['python'].version[:2] - # cmake('..', - # '-DCMAKE_INSTALL_PREFIX=%s' % prefix, - # '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64, - # '-DRUNTIME_PLATFORM=%s' % runtime_platform_cray, - # '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, - # '-DRESOLVE_SYMBOLS=%s' % resolve_symbols, - # '-DINSTRUMENTOR=%s' % instrumentor_setting, - # '-DCBTF_DIR=%s' % spec['cbtf'].prefix, - # '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix, - # '-DCBTF_KRELL_CN_RUNTIME_DIR=%s' % spec['cbtf-krell'].prefix, - # '-DBINUTILS_DIR=%s' % spec['binutils'].prefix, - # '-DLIBELF_DIR=%s' % spec['libelf'].prefix, - # '-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix, - # '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix, - # '-DPAPI_DIR=%s' % spec['papi'].prefix, - # '-DDYNINST_DIR=%s' % spec['dyninst'].prefix, - # '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix, - # '-DMRNET_DIR=%s' % spec['mrnet'].prefix, - # '-DBoost_NO_SYSTEM_PATHS=TRUE', - # '-DBOOST_ROOT=%s' % spec['boost'].prefix, - # *std_cmake_args) + # #-DCBTF_KRELL_CN_RUNTIME_DIR=${CBTF_KRELL_CN_INSTALL_DIR} \ + # with working_dir('build_cbtf_cray_runtime', create=True): + # python_vers='%d.%d' % spec['python'].version[:2] + # cmake('..', + # '-DCMAKE_INSTALL_PREFIX=%s' % prefix, + # '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64, + # '-DRUNTIME_PLATFORM=%s' % runtime_platform_cray, + # '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, + # '-DRESOLVE_SYMBOLS=%s' % resolve_symbols, + # '-DINSTRUMENTOR=%s' % instrumentor_setting, + # '-DCBTF_DIR=%s' % spec['cbtf'].prefix, + # '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix, + # '-DCBTF_KRELL_CN_RUNTIME_DIR=%s' % spec['cbtf-krell'].prefix, + # '-DBINUTILS_DIR=%s' % spec['binutils'].prefix, + # '-DLIBELF_DIR=%s' % spec['libelf'].prefix, + # '-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix, + # '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix, + # 
'-DPAPI_DIR=%s' % spec['papi'].prefix, + # '-DDYNINST_DIR=%s' % spec['dyninst'].prefix, + # '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix, + # '-DMRNET_DIR=%s' % spec['mrnet'].prefix, + # '-DBoost_NO_SYSTEM_PATHS=TRUE', + # '-DBOOST_ROOT=%s' % spec['boost'].prefix, + # *std_cmake_args) # make("clean") # make() # make("install") - - #elif '+mic' in spec: - # comment out else and shift over the default case below until arch detection is in - #else: + # elif '+mic' in spec: + # comment out else and shift over the default case below + # until arch detection is in else: if '+runtime' in spec: with working_dir('build_cbtf_runtime', create=True): - python_vers='%d.%d' % spec['python'].version[:2] - cmake('..', - '-DCMAKE_INSTALL_PREFIX=%s' % prefix, - '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64, - '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, - '-DINSTRUMENTOR=%s' % instrumentor_setting, - '-DBINUTILS_DIR=%s' % spec['binutils'].prefix, - '-DLIBELF_DIR=%s' % spec['libelf'].prefix, - '-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix, - '-DCBTF_DIR=%s' % spec['cbtf'].prefix, - '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix, - '-DPYTHON_EXECUTABLE=%s' % join_path(spec['python'].prefix + '/bin/python'), - '-DPYTHON_INCLUDE_DIR=%s' % join_path(spec['python'].prefix.include) + '/python' + python_vers, - '-DPYTHON_LIBRARY=%s' % join_path(spec['python'].prefix.lib) + '/libpython' + python_vers + '.so', - '-DBoost_NO_SYSTEM_PATHS=TRUE', - '-DBOOST_ROOT=%s' % spec['boost'].prefix, - '-DDYNINST_DIR=%s' % spec['dyninst'].prefix, - '-DMRNET_DIR=%s' % spec['mrnet'].prefix, - *std_cmake_args) + python_vers = '%d.%d' % spec['python'].version[:2] + cmake( + '..', + '-DCMAKE_INSTALL_PREFIX=%s' % prefix, + '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64, + '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, + '-DINSTRUMENTOR=%s' % instrumentor_setting, + '-DBINUTILS_DIR=%s' % spec['binutils'].prefix, + '-DLIBELF_DIR=%s' % spec['libelf'].prefix, + '-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix, + '-DCBTF_DIR=%s' % spec['cbtf'].prefix, + '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix, + '-DPYTHON_EXECUTABLE=%s' % join_path( + spec['python'].prefix, 'bin', 'python'), + '-DPYTHON_INCLUDE_DIR=%s' % join_path( + spec['python'].prefix.include, + 'python' + python_vers), + '-DPYTHON_LIBRARY=%s' % join_path( + spec['python'].prefix.lib, + 'libpython' + python_vers + '.so'), + '-DBoost_NO_SYSTEM_PATHS=TRUE', + '-DBOOST_ROOT=%s' % spec['boost'].prefix, + '-DDYNINST_DIR=%s' % spec['dyninst'].prefix, + '-DMRNET_DIR=%s' % spec['mrnet'].prefix, + *std_cmake_args) + make("clean") make() make("install") else: with working_dir('build_cbtf', create=True): - python_vers='%d.%d' % spec['python'].version[:2] - #python_vers=join_path(spec['python'].version[:2]) - cmake('..', - '-DCMAKE_INSTALL_PREFIX=%s' % prefix, - '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64, - '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, - '-DINSTRUMENTOR=%s' % instrumentor_setting, - '-DBINUTILS_DIR=%s' % spec['binutils'].prefix, - '-DLIBELF_DIR=%s' % spec['libelf'].prefix, - '-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix, - '-DSQLITE3_DIR=%s' % spec['sqlite'].prefix, - '-DCBTF_DIR=%s' % spec['cbtf'].prefix, - '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix, - '-DQTLIB_DIR=%s' % spec['qt'].prefix, - '-DPYTHON_EXECUTABLE=%s' % join_path(spec['python'].prefix + '/bin/python'), - '-DPYTHON_INCLUDE_DIR=%s' % join_path(spec['python'].prefix.include) + '/python' + python_vers, - '-DPYTHON_LIBRARY=%s' % join_path(spec['python'].prefix.lib) + '/libpython' + python_vers + '.so', - 
'-DBoost_NO_SYSTEM_PATHS=TRUE', - '-DBOOST_ROOT=%s' % spec['boost'].prefix, - '-DDYNINST_DIR=%s' % spec['dyninst'].prefix, - '-DMRNET_DIR=%s' % spec['mrnet'].prefix, - *std_cmake_args) + python_vers = '%d.%d' % spec['python'].version[:2] + # python_vers=join_path(spec['python'].version[:2]) + cmake( + '..', + '-DCMAKE_INSTALL_PREFIX=%s' % prefix, + '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64, + '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path, + '-DINSTRUMENTOR=%s' % instrumentor_setting, + '-DBINUTILS_DIR=%s' % spec['binutils'].prefix, + '-DLIBELF_DIR=%s' % spec['libelf'].prefix, + '-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix, + '-DSQLITE3_DIR=%s' % spec['sqlite'].prefix, + '-DCBTF_DIR=%s' % spec['cbtf'].prefix, + '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix, + '-DQTLIB_DIR=%s' % spec['qt'].prefix, + '-DPYTHON_EXECUTABLE=%s' % join_path( + spec['python'].prefix, 'bin', 'python'), + '-DPYTHON_INCLUDE_DIR=%s' % join_path( + spec['python'].prefix.include, + 'python' + python_vers), + '-DPYTHON_LIBRARY=%s' % join_path( + spec['python'].prefix.lib, + 'libpython' + python_vers + '.so'), + '-DBoost_NO_SYSTEM_PATHS=TRUE', + '-DBOOST_ROOT=%s' % spec['boost'].prefix, + '-DDYNINST_DIR=%s' % spec['dyninst'].prefix, + '-DMRNET_DIR=%s' % spec['mrnet'].prefix, + *std_cmake_args) + make("clean") make() make("install") - #if '+frontend' in spec: - # with working_dir('build_frontend', create=True): - # tbd - - - - #if '+cbtf' in spec: - # if cray build type detected: - # if '+runtime' in spec: - # with working_dir('build_cray_cbtf_compute', create=True): - # tbd - # else: - # with working_dir('build_cray_cbtf_frontend', create=True): - # tbd - # with working_dir('build_cray_osscbtf_frontend', create=True): - # tbd - # fi - # elif '+intelmic' in spec: - # if '+runtime' in spec: - # with working_dir('build_intelmic_cbtf_compute', create=True): - # tbd - # else: - # with working_dir('build_intelmic_cbtf_frontend', create=True): - # tbd - # with working_dir('build_intelmic_osscbtf_frontend', create=True): - # fi - # else - # with working_dir('build_cluster_cbtf', create=True): - # tbd - # with working_dir('build_cluster osscbtf', create=True): - # tbd - # fi - #elif '+offline' in spec: - # if cray build type detected: - # if '+runtime' in spec: - # with working_dir('build_cray_ossoff_compute', create=True): - # tbd - # else: - # with working_dir('build_cray_ossoff_frontend', create=True): - # tbd - # fi - # elif '+intelmic' in spec: - # if '+runtime' in spec: - # with working_dir('build_intelmic_ossoff_compute', create=True): - # tbd - # else: - # with working_dir('build_intelmic_ossoff_frontend', create=True): - # tbd - # fi - # elif bgq build type detected: - # if '+runtime' in spec: - # with working_dir('build_bgq_ossoff_compute', create=True): - # tbd - # else: - # with working_dir('build_bgq_ossoff_frontend', create=True): - # tbd - # fi - # else - # with working_dir('build_cluster ossoff', create=True): - # tbd - # fi - #fi - - +# if '+frontend' in spec: +# with working_dir('build_frontend', create=True): +# tbd +# if '+cbtf' in spec: +# if cray build type detected: +# if '+runtime' in spec: +# with working_dir('build_cray_cbtf_compute', create=True): +# tbd +# else: +# with working_dir('build_cray_cbtf_frontend', create=True): +# tbd +# with working_dir('build_cray_osscbtf_frontend', create=True): +# tbd +# fi +# elif '+intelmic' in spec: +# if '+runtime' in spec: +# with working_dir('build_intelmic_cbtf_compute', create=True): +# tbd +# else: +# with working_dir('build_intelmic_cbtf_frontend', 
create=True): +# tbd +# with working_dir('build_intelmic_osscbtf_frontend', create=True): +# fi +# else +# with working_dir('build_cluster_cbtf', create=True): +# tbd +# with working_dir('build_cluster osscbtf', create=True): +# tbd +# fi +# elif '+offline' in spec: +# if cray build type detected: +# if '+runtime' in spec: +# with working_dir('build_cray_ossoff_compute', create=True): +# tbd +# else: +# with working_dir('build_cray_ossoff_frontend', create=True): +# tbd +# fi +# elif '+intelmic' in spec: +# if '+runtime' in spec: +# with working_dir('build_intelmic_ossoff_compute', create=True): +# tbd +# else: +# with working_dir('build_intelmic_ossoff_frontend', create=True): +# tbd +# fi +# elif bgq build type detected: +# if '+runtime' in spec: +# with working_dir('build_bgq_ossoff_compute', create=True): +# tbd +# else: +# with working_dir('build_bgq_ossoff_frontend', create=True): +# tbd +# fi +# else +# with working_dir('build_cluster ossoff', create=True): +# tbd +# fi +# fi diff --git a/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py b/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py index 2104bf842b3..161ba6254ae 100644 --- a/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py +++ b/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class OsuMicroBenchmarks(Package): """The Ohio MicroBenchmark suite is a collection of independent MPI message passing performance microbenchmarks developed and written at @@ -41,7 +42,6 @@ class OsuMicroBenchmarks(Package): depends_on('mpi') depends_on('cuda', when='+cuda') - def install(self, spec, prefix): config_args = [ 'CC=%s' % spec['mpi'].prefix.bin + '/mpicc', diff --git a/var/spack/repos/builtin/packages/otf/package.py b/var/spack/repos/builtin/packages/otf/package.py index 4a7a00b212e..39eb5a85aae 100644 --- a/var/spack/repos/builtin/packages/otf/package.py +++ b/var/spack/repos/builtin/packages/otf/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Otf(Package): """To improve scalability for very large and massively parallel traces the Open Trace Format (OTF) is developed at ZIH as a diff --git a/var/spack/repos/builtin/packages/otf2/package.py b/var/spack/repos/builtin/packages/otf2/package.py index 131836f8ac6..ee39f448eb7 100644 --- a/var/spack/repos/builtin/packages/otf2/package.py +++ b/var/spack/repos/builtin/packages/otf2/package.py @@ -27,8 +27,8 @@ class Otf2(Package): - """ - The Open Trace Format 2 is a highly scalable, memory efficient event trace data format plus support library. + """The Open Trace Format 2 is a highly scalable, memory efficient event + trace data format plus support library. 
""" homepage = "http://www.vi-hps.org/score-p" @@ -46,10 +46,10 @@ class Otf2(Package): url="http://www.vi-hps.org/upload/packages/otf2/otf2-1.2.1.tar.gz") def install(self, spec, prefix): - configure_args=["--prefix=%s" % prefix, - "--enable-shared", - "CFLAGS=-fPIC", - "CXXFLAGS=-fPIC"] + configure_args = ["--prefix=%s" % prefix, + "--enable-shared", + "CFLAGS=-fPIC", + "CXXFLAGS=-fPIC"] configure(*configure_args) make() make("install") diff --git a/var/spack/repos/builtin/packages/papi/package.py b/var/spack/repos/builtin/packages/papi/package.py index ecd958407f7..5c4abe1730a 100644 --- a/var/spack/repos/builtin/packages/papi/package.py +++ b/var/spack/repos/builtin/packages/papi/package.py @@ -28,6 +28,7 @@ import sys from llnl.util.filesystem import fix_darwin_install_name + class Papi(Package): """PAPI provides the tool designer and application engineer with a consistent interface and methodology for use of the performance @@ -47,7 +48,7 @@ class Papi(Package): def install(self, spec, prefix): with working_dir("src"): - configure_args=["--prefix=%s" % prefix] + configure_args = ["--prefix=%s" % prefix] # PAPI uses MPI if MPI is present; since we don't require # an MPI package, we ensure that all attempts to use MPI diff --git a/var/spack/repos/builtin/packages/paradiseo/package.py b/var/spack/repos/builtin/packages/paradiseo/package.py index 97d36d94a49..c91b01c9643 100644 --- a/var/spack/repos/builtin/packages/paradiseo/package.py +++ b/var/spack/repos/builtin/packages/paradiseo/package.py @@ -23,40 +23,50 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import sys + class Paradiseo(Package): - """A C++ white-box object-oriented framework dedicated to the reusable design of metaheuristics.""" + """A C++ white-box object-oriented framework dedicated to the reusable + design of metaheuristics.""" homepage = "http://paradiseo.gforge.inria.fr/" - # Installing from the development version is a better option at this + # Installing from the development version is a better option at this # point than using the very old supplied packages version('head', git='https://gforge.inria.fr/git/paradiseo/paradiseo.git') - # This is a version that the package formula author has tested successfully. - # However, the clone is very large (~1Gb git history). The history in the - # head version has been trimmed significantly. - version('dev-safe', git='https://gforge.inria.fr/git/paradiseo/paradiseo.git', - commit='dbb8fbe9a786efd4d1c26408ac1883442e7643a6') - variant('mpi', default=True, description='Compile with parallel and distributed metaheuristics module') - variant('smp', default=True, description='Compile with symmetric multi-processing module ') - variant('edo', default=True, description='Compile with (Experimental) EDO module') - #variant('tests', default=False, description='Compile with build tests') - #variant('doc', default=False, description='Compile with documentation') - variant('debug', default=False, description='Builds a debug version of the libraries') + # This is a version that the package formula author has tested + # successfully. However, the clone is very large (~1Gb git + # history). The history in the head version has been trimmed + # significantly. 
+ version( + 'dev-safe', git='https://gforge.inria.fr/git/paradiseo/paradiseo.git', + commit='dbb8fbe9a786efd4d1c26408ac1883442e7643a6') + + variant('mpi', default=True, + description='Compile with parallel and distributed ' + 'metaheuristics module') + variant('smp', default=True, + description='Compile with symmetric multi-processing module ') + variant('edo', default=True, + description='Compile with (Experimental) EDO module') + + # variant('tests', default=False, description='Compile with build tests') + # variant('doc', default=False, description='Compile with documentation') + variant('debug', default=False, + description='Builds a debug version of the libraries') variant('openmp', default=False, description='Enable OpenMP support') variant('gnuplot', default=False, description='Enable GnuPlot support') - + # Required dependencies - depends_on ("cmake", type='build') + depends_on("cmake", type='build') # Optional dependencies - depends_on ("mpi", when="+mpi") - depends_on ("doxygen", when='+doc', type='build') - depends_on ("gnuplot", when='+gnuplot') - depends_on ("eigen", when='+edo', type='build') - depends_on ("boost~mpi", when='+edo~mpi') - depends_on ("boost+mpi", when='+edo+mpi') + depends_on("mpi", when="+mpi") + depends_on("doxygen", when='+doc', type='build') + depends_on("gnuplot", when='+gnuplot') + depends_on("eigen", when='+edo', type='build') + depends_on("boost~mpi", when='+edo~mpi') + depends_on("boost+mpi", when='+edo+mpi') # Patches patch('enable_eoserial.patch') @@ -69,16 +79,21 @@ def install(self, spec, prefix): options.extend(std_cmake_args) options.extend([ - '-DCMAKE_BUILD_TYPE:STRING=%s' % ('Debug' if '+debug' in spec else 'Release'), + '-DCMAKE_BUILD_TYPE:STRING=%s' % ( + 'Debug' if '+debug' in spec else 'Release'), '-DINSTALL_TYPE:STRING=MIN', '-DMPI:BOOL=%s' % ('TRUE' if '+mpi' in spec else 'FALSE'), - '-DSMP:BOOL=%s' % ('TRUE' if '+smp' in spec else 'FALSE'), # Note: This requires a C++11 compatible compiler + # Note: This requires a C++11 compatible compiler + '-DSMP:BOOL=%s' % ('TRUE' if '+smp' in spec else 'FALSE'), '-DEDO:BOOL=%s' % ('TRUE' if '+edo' in spec else 'FALSE'), - '-DENABLE_CMAKE_TESTING:BOOL=%s' % ('TRUE' if '+tests' in spec else 'FALSE'), - '-DENABLE_OPENMP:BOOL=%s' % ('TRUE' if '+openmp' in spec else 'FALSE'), - '-DENABLE_GNUPLOT:BOOL=%s' % ('TRUE' if '+gnuplot' in spec else 'FALSE') + '-DENABLE_CMAKE_TESTING:BOOL=%s' % ( + 'TRUE' if '+tests' in spec else 'FALSE'), + '-DENABLE_OPENMP:BOOL=%s' % ( + 'TRUE' if '+openmp' in spec else 'FALSE'), + '-DENABLE_GNUPLOT:BOOL=%s' % ( + 'TRUE' if '+gnuplot' in spec else 'FALSE') ]) - + with working_dir('spack-build', create=True): # Configure cmake('..', *options) diff --git a/var/spack/repos/builtin/packages/parallel-netcdf/package.py b/var/spack/repos/builtin/packages/parallel-netcdf/package.py index deee46df8ff..be4a8bc38bc 100644 --- a/var/spack/repos/builtin/packages/parallel-netcdf/package.py +++ b/var/spack/repos/builtin/packages/parallel-netcdf/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class ParallelNetcdf(Package): """Parallel netCDF (PnetCDF) is a library providing high-performance parallel I/O while still maintaining file-format compatibility with @@ -37,12 +38,14 @@ class ParallelNetcdf(Package): variant('cxx', default=True, description='Build the C++ Interface') variant('fortran', default=True, description='Build the Fortran Interface') - variant('fpic', default=True, description='Produce 
position-independent code (for use with shared libraries)') + variant('fpic', default=True, + description='Produce position-independent code (for shared libs)') depends_on("m4", type='build') depends_on("mpi") - # See: https://trac.mcs.anl.gov/projects/parallel-netcdf/browser/trunk/INSTALL + # See: + # https://trac.mcs.anl.gov/projects/parallel-netcdf/browser/trunk/INSTALL def install(self, spec, prefix): args = list() if '+fpic' in spec: @@ -53,7 +56,7 @@ def install(self, spec, prefix): args.append('--disable-fortran') args.extend(["--prefix=%s" % prefix, - "--with-mpi=%s" % spec['mpi'].prefix]) + "--with-mpi=%s" % spec['mpi'].prefix]) configure(*args) make() make("install") diff --git a/var/spack/repos/builtin/packages/paraver/package.py b/var/spack/repos/builtin/packages/paraver/package.py index 50ce6b79fba..1b6e3ce8e66 100644 --- a/var/spack/repos/builtin/packages/paraver/package.py +++ b/var/spack/repos/builtin/packages/paraver/package.py @@ -25,6 +25,7 @@ from spack import * import os + class Paraver(Package): """"A very powerful performance visualization and analysis tool based on traces that can be used to analyse any information that @@ -36,7 +37,7 @@ class Paraver(Package): version('4.5.3', '625de9ec0d639acd18d1aaa644b38f72') depends_on("boost") - #depends_on("extrae") + # depends_on("extrae") depends_on("wx") depends_on("wxpropgrid") @@ -47,8 +48,11 @@ def install(self, spec, prefix): make("install") os.chdir("../paraver-kernel") - #"--with-extrae=%s" % spec['extrae'].prefix, - configure("--prefix=%s" % prefix, "--with-ptools-common-files=%s" % prefix, "--with-boost=%s" % spec['boost'].prefix, "--with-boost-serialization=boost_serialization") + # "--with-extrae=%s" % spec['extrae'].prefix, + configure("--prefix=%s" % prefix, + "--with-ptools-common-files=%s" % prefix, + "--with-boost=%s" % spec['boost'].prefix, + "--with-boost-serialization=boost_serialization") make() make("install") @@ -58,8 +62,11 @@ def install(self, spec, prefix): make("install") os.chdir("../wxparaver") - #"--with-extrae=%s" % spec['extrae'].prefix, - configure("--prefix=%s" % prefix, "--with-paraver=%s" % prefix, "--with-boost=%s" % spec['boost'].prefix, "--with-boost-serialization=boost_serialization", "--with-wxdir=%s" % spec['wx'].prefix.bin) + # "--with-extrae=%s" % spec['extrae'].prefix, + configure("--prefix=%s" % prefix, + "--with-paraver=%s" % prefix, + "--with-boost=%s" % spec['boost'].prefix, + "--with-boost-serialization=boost_serialization", + "--with-wxdir=%s" % spec['wx'].prefix.bin) make() make("install") - diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py index 75f5272006e..68c781734ef 100644 --- a/var/spack/repos/builtin/packages/paraview/package.py +++ b/var/spack/repos/builtin/packages/paraview/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Paraview(Package): homepage = 'http://www.paraview.org' url = 'http://www.paraview.org/files/v5.0/ParaView-v' @@ -52,23 +53,22 @@ class Paraview(Package): depends_on('cmake', type='build') depends_on('bzip2') depends_on('freetype') - #depends_on('hdf5+mpi', when='+mpi') - #depends_on('hdf5~mpi', when='~mpi') + # depends_on('hdf5+mpi', when='+mpi') + # depends_on('hdf5~mpi', when='~mpi') depends_on('jpeg') depends_on('libpng') depends_on('libtiff') depends_on('libxml2') - #depends_on('netcdf') - #depends_on('netcdf-cxx') - #depends_on('protobuf') # version mismatches? 
- #depends_on('sqlite') # external version not supported + # depends_on('netcdf') + # depends_on('netcdf-cxx') + # depends_on('protobuf') # version mismatches? + # depends_on('sqlite') # external version not supported depends_on('zlib') def url_for_version(self, version): """Handle ParaView version-based custom URLs.""" return self._url_str % (version.up_to(2), version) - def install(self, spec, prefix): with working_dir('spack-build', create=True): def feature_to_bool(feature, on='ON', off='OFF'): @@ -80,34 +80,46 @@ def nfeature_to_bool(feature): return feature_to_bool(feature, on='OFF', off='ON') feature_args = std_cmake_args[:] - feature_args.append('-DPARAVIEW_BUILD_QT_GUI:BOOL=%s' % feature_to_bool('+qt')) - feature_args.append('-DPARAVIEW_ENABLE_PYTHON:BOOL=%s' % feature_to_bool('+python')) + feature_args.append( + '-DPARAVIEW_BUILD_QT_GUI:BOOL=%s' % feature_to_bool('+qt')) + feature_args.append('-DPARAVIEW_ENABLE_PYTHON:BOOL=%s' % + feature_to_bool('+python')) if '+python' in spec: - feature_args.append('-DPYTHON_EXECUTABLE:FILEPATH=%s/bin/python' % spec['python'].prefix) - feature_args.append('-DPARAVIEW_USE_MPI:BOOL=%s' % feature_to_bool('+mpi')) + feature_args.append( + '-DPYTHON_EXECUTABLE:FILEPATH=%s/bin/python' + % spec['python'].prefix) + feature_args.append('-DPARAVIEW_USE_MPI:BOOL=%s' % + feature_to_bool('+mpi')) if '+mpi' in spec: - feature_args.append('-DMPIEXEC:FILEPATH=%s/bin/mpiexec' % spec['mpi'].prefix) - feature_args.append('-DVTK_ENABLE_TCL_WRAPPING:BOOL=%s' % feature_to_bool('+tcl')) - feature_args.append('-DVTK_OPENGL_HAS_OSMESA:BOOL=%s' % feature_to_bool('+osmesa')) - feature_args.append('-DVTK_USE_X:BOOL=%s' % nfeature_to_bool('+osmesa')) - feature_args.append('-DVTK_RENDERING_BACKEND:STRING=%s' % feature_to_bool('+opengl2', 'OpenGL2', 'OpenGL')) + feature_args.append( + '-DMPIEXEC:FILEPATH=%s/bin/mpiexec' % spec['mpi'].prefix) + feature_args.append( + '-DVTK_ENABLE_TCL_WRAPPING:BOOL=%s' % feature_to_bool('+tcl')) + feature_args.append('-DVTK_OPENGL_HAS_OSMESA:BOOL=%s' % + feature_to_bool('+osmesa')) + feature_args.append('-DVTK_USE_X:BOOL=%s' % + nfeature_to_bool('+osmesa')) + feature_args.append( + '-DVTK_RENDERING_BACKEND:STRING=%s' % + feature_to_bool('+opengl2', 'OpenGL2', 'OpenGL')) feature_args.extend(std_cmake_args) if 'darwin' in self.spec.architecture: feature_args.append('-DVTK_USE_X:BOOL=OFF') - feature_args.append('-DPARAVIEW_DO_UNIX_STYLE_INSTALLS:BOOL=ON') + feature_args.append( + '-DPARAVIEW_DO_UNIX_STYLE_INSTALLS:BOOL=ON') cmake('..', - '-DCMAKE_INSTALL_PREFIX:PATH=%s' % prefix, - '-DBUILD_TESTING:BOOL=OFF', - '-DVTK_USE_SYSTEM_FREETYPE:BOOL=ON', - '-DVTK_USE_SYSTEM_HDF5:BOOL=OFF', - '-DVTK_USE_SYSTEM_JPEG:BOOL=ON', - '-DVTK_USE_SYSTEM_LIBXML2:BOOL=ON', - '-DVTK_USE_SYSTEM_NETCDF:BOOL=OFF', - '-DVTK_USE_SYSTEM_TIFF:BOOL=ON', - '-DVTK_USE_SYSTEM_ZLIB:BOOL=ON', - *feature_args) + '-DCMAKE_INSTALL_PREFIX:PATH=%s' % prefix, + '-DBUILD_TESTING:BOOL=OFF', + '-DVTK_USE_SYSTEM_FREETYPE:BOOL=ON', + '-DVTK_USE_SYSTEM_HDF5:BOOL=OFF', + '-DVTK_USE_SYSTEM_JPEG:BOOL=ON', + '-DVTK_USE_SYSTEM_LIBXML2:BOOL=ON', + '-DVTK_USE_SYSTEM_NETCDF:BOOL=OFF', + '-DVTK_USE_SYSTEM_TIFF:BOOL=ON', + '-DVTK_USE_SYSTEM_ZLIB:BOOL=ON', + *feature_args) make() make('install') diff --git a/var/spack/repos/builtin/packages/parmetis/package.py b/var/spack/repos/builtin/packages/parmetis/package.py index b2ceadc1283..d1cfa27f3a1 100644 --- a/var/spack/repos/builtin/packages/parmetis/package.py +++ b/var/spack/repos/builtin/packages/parmetis/package.py @@ -38,8 +38,10 @@ class 
Parmetis(Package): version('4.0.3', 'f69c479586bf6bb7aff6a9bc0c739628') version('4.0.2', '0912a953da5bb9b5e5e10542298ffdce') - variant('shared', default=True, description='Enables the build of shared libraries') - variant('debug', default=False, description='Builds the library in debug mode') + variant('shared', default=True, + description='Enables the build of shared libraries') + variant('debug', default=False, + description='Builds the library in debug mode') variant('gdb', default=False, description='Enables gdb support') depends_on('cmake@2.8:', type='build') # build dependency diff --git a/var/spack/repos/builtin/packages/parpack/package.py b/var/spack/repos/builtin/packages/parpack/package.py index 5930dada858..84bc88b3b03 100644 --- a/var/spack/repos/builtin/packages/parpack/package.py +++ b/var/spack/repos/builtin/packages/parpack/package.py @@ -26,6 +26,7 @@ import os import shutil + class Parpack(Package): """ARPACK is a collection of Fortran77 subroutines designed to solve large scale eigenvalue problems.""" @@ -52,13 +53,13 @@ def patch(self): mf.filter('^PLAT.*', 'PLAT = ') mf.filter('^home.*', 'home = %s' % os.getcwd()) mf.filter('^BLASdir.*', 'BLASdir = %s' % self.spec['blas'].prefix) - mf.filter('^LAPACKdir.*', 'LAPACKdir = %s' % self.spec['lapack'].prefix) + mf.filter('^LAPACKdir.*', 'LAPACKdir = %s' % + self.spec['lapack'].prefix) mf.filter('^MAKE.*', 'MAKE = make') # build the library in our own prefix. mf.filter('^ARPACKLIB.*', 'PARPACKLIB = %s/libparpack.a' % os.getcwd()) - def install(self, spec, prefix): with working_dir('PARPACK/SRC/MPI'): make('all') diff --git a/var/spack/repos/builtin/packages/patchelf/package.py b/var/spack/repos/builtin/packages/patchelf/package.py index 3860875bcc1..cd1f6790629 100644 --- a/var/spack/repos/builtin/packages/patchelf/package.py +++ b/var/spack/repos/builtin/packages/patchelf/package.py @@ -24,8 +24,10 @@ ############################################################################## from spack import * + class Patchelf(Package): - """PatchELF is a small utility to modify the dynamic linker and RPATH of ELF executables.""" + """PatchELF is a small utility to modify the dynamic linker and RPATH of + ELF executables.""" homepage = "https://nixos.org/patchelf.html" url = "http://nixos.org/releases/patchelf/patchelf-0.8/patchelf-0.8.tar.gz" diff --git a/var/spack/repos/builtin/packages/pcre2/package.py b/var/spack/repos/builtin/packages/pcre2/package.py index b013685f05e..a2739e05844 100644 --- a/var/spack/repos/builtin/packages/pcre2/package.py +++ b/var/spack/repos/builtin/packages/pcre2/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Pcre2(Package): """The PCRE2 package contains Perl Compatible Regular Expression libraries. These are useful for implementing regular expression diff --git a/var/spack/repos/builtin/packages/pdt/package.py b/var/spack/repos/builtin/packages/pdt/package.py index 32611a7b147..074d28540bb 100644 --- a/var/spack/repos/builtin/packages/pdt/package.py +++ b/var/spack/repos/builtin/packages/pdt/package.py @@ -27,11 +27,13 @@ class Pdt(Package): - """ - Program Database Toolkit (PDT) is a framework for analyzing source code written in several programming languages - and for making rich program knowledge accessible to developers of static and dynamic analysis tools. 
PDT implements - a standard program representation, the program database (PDB), that can be accessed in a uniform way through a - class library supporting common PDB operations. + """Program Database Toolkit (PDT) is a framework for analyzing source + code written in several programming languages and for making rich + program knowledge accessible to developers of static and dynamic + analysis tools. PDT implements a standard program representation, + the program database (PDB), that can be accessed in a uniform way + through a class library supporting common PDB operations. + """ homepage = "https://www.cs.uoregon.edu/research/pdt/home.php" diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py index 11f5c36c22f..079e0b7af4e 100644 --- a/var/spack/repos/builtin/packages/petsc/package.py +++ b/var/spack/repos/builtin/packages/petsc/package.py @@ -44,18 +44,25 @@ class Petsc(Package): version('3.5.1', 'a557e029711ebf425544e117ffa44d8f') version('3.4.4', '7edbc68aa6d8d6a3295dd5f6c2f6979d') - variant('shared', default=True, description='Enables the build of shared libraries') + variant('shared', default=True, + description='Enables the build of shared libraries') variant('mpi', default=True, description='Activates MPI support') - variant('double', default=True, description='Switches between single and double precision') + variant('double', default=True, + description='Switches between single and double precision') variant('complex', default=False, description='Build with complex numbers') variant('debug', default=False, description='Compile in debug mode') - variant('metis', default=True, description='Activates support for metis and parmetis') - variant('hdf5', default=True, description='Activates support for HDF5 (only parallel)') + variant('metis', default=True, + description='Activates support for metis and parmetis') + variant('hdf5', default=True, + description='Activates support for HDF5 (only parallel)') variant('boost', default=True, description='Activates support for Boost') - variant('hypre', default=True, description='Activates support for Hypre (only parallel)') - variant('mumps', default=True, description='Activates support for MUMPS (only parallel)') - variant('superlu-dist', default=True, description='Activates support for SuperluDist (only parallel)') + variant('hypre', default=True, + description='Activates support for Hypre (only parallel)') + variant('mumps', default=True, + description='Activates support for MUMPS (only parallel)') + variant('superlu-dist', default=True, + description='Activates support for SuperluDist (only parallel)') # Virtual dependencies depends_on('blas') diff --git a/var/spack/repos/builtin/packages/pidx/package.py b/var/spack/repos/builtin/packages/pidx/package.py index e7b18ce7a84..e19bb9e470b 100644 --- a/var/spack/repos/builtin/packages/pidx/package.py +++ b/var/spack/repos/builtin/packages/pidx/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Pidx(Package): """PIDX Parallel I/O Library. 
diff --git a/var/spack/repos/builtin/packages/pixman/package.py b/var/spack/repos/builtin/packages/pixman/package.py index 34d8dfea0dd..3d7e332a3fb 100644 --- a/var/spack/repos/builtin/packages/pixman/package.py +++ b/var/spack/repos/builtin/packages/pixman/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Pixman(Package): """The Pixman package contains a library that provides low-level pixel manipulation features such as image compositing and diff --git a/var/spack/repos/builtin/packages/pmgr_collective/package.py b/var/spack/repos/builtin/packages/pmgr_collective/package.py index a6e3b8e2a2f..f6466a7954a 100644 --- a/var/spack/repos/builtin/packages/pmgr_collective/package.py +++ b/var/spack/repos/builtin/packages/pmgr_collective/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PmgrCollective(Package): """PMGR_COLLECTIVE provides a scalable network for bootstrapping MPI jobs.""" diff --git a/var/spack/repos/builtin/packages/ppl/package.py b/var/spack/repos/builtin/packages/ppl/package.py index a54c6ce221b..73404103f08 100644 --- a/var/spack/repos/builtin/packages/ppl/package.py +++ b/var/spack/repos/builtin/packages/ppl/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Ppl(Package): """The Parma Polyhedra Library (PPL) provides numerical abstractions especially targeted at applications in the field of diff --git a/var/spack/repos/builtin/packages/proj/package.py b/var/spack/repos/builtin/packages/proj/package.py index 20dd4f5f69e..06ab6108b63 100644 --- a/var/spack/repos/builtin/packages/proj/package.py +++ b/var/spack/repos/builtin/packages/proj/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Proj(Package): """Cartographic Projections""" homepage = "https://github.com/OSGeo/proj.4/wiki" diff --git a/var/spack/repos/builtin/packages/protobuf/package.py b/var/spack/repos/builtin/packages/protobuf/package.py index d4befc34ab3..bf0073b16a8 100644 --- a/var/spack/repos/builtin/packages/protobuf/package.py +++ b/var/spack/repos/builtin/packages/protobuf/package.py @@ -22,9 +22,9 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os from spack import * + class Protobuf(Package): """Google's data interchange format.""" diff --git a/var/spack/repos/builtin/packages/psi4/package.py b/var/spack/repos/builtin/packages/psi4/package.py index 192dd86e0ef..6296d0cee68 100644 --- a/var/spack/repos/builtin/packages/psi4/package.py +++ b/var/spack/repos/builtin/packages/psi4/package.py @@ -39,7 +39,15 @@ class Psi4(Package): # Required dependencies depends_on('blas') depends_on('lapack') - depends_on('boost+chrono+filesystem+python+regex+serialization+system+timer+thread') + depends_on('boost' + '+chrono' + '+filesystem' + '+python' + '+regex' + '+serialization' + '+system' + '+timer' + '+thread') depends_on('python') depends_on('cmake', type='build') depends_on('py-numpy', type=nolink) diff --git a/var/spack/repos/builtin/packages/py-SQLAlchemy/package.py b/var/spack/repos/builtin/packages/py-SQLAlchemy/package.py index 05f4616ff5e..ca59105b4c8 100644 --- 
a/var/spack/repos/builtin/packages/py-SQLAlchemy/package.py +++ b/var/spack/repos/builtin/packages/py-SQLAlchemy/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PySqlalchemy(Package): """The Python SQL Toolkit and Object Relational Mapper""" diff --git a/var/spack/repos/builtin/packages/py-argcomplete/package.py b/var/spack/repos/builtin/packages/py-argcomplete/package.py index ace93204248..2549972a6d0 100644 --- a/var/spack/repos/builtin/packages/py-argcomplete/package.py +++ b/var/spack/repos/builtin/packages/py-argcomplete/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyArgcomplete(Package): """Bash tab completion for argparse.""" diff --git a/var/spack/repos/builtin/packages/py-astroid/package.py b/var/spack/repos/builtin/packages/py-astroid/package.py index e1ea71fb671..a47b7ffa337 100644 --- a/var/spack/repos/builtin/packages/py-astroid/package.py +++ b/var/spack/repos/builtin/packages/py-astroid/package.py @@ -43,4 +43,3 @@ class PyAstroid(Package): def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) - diff --git a/var/spack/repos/builtin/packages/py-autopep8/package.py b/var/spack/repos/builtin/packages/py-autopep8/package.py index 507664949b8..f6c08e2728a 100644 --- a/var/spack/repos/builtin/packages/py-autopep8/package.py +++ b/var/spack/repos/builtin/packages/py-autopep8/package.py @@ -1,5 +1,6 @@ from spack import * + class PyAutopep8(Package): """Automatic pep8 formatter""" homepage = "https://github.com/hhatto/autopep8" @@ -13,4 +14,3 @@ class PyAutopep8(Package): def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) - diff --git a/var/spack/repos/builtin/packages/py-biopython/package.py b/var/spack/repos/builtin/packages/py-biopython/package.py index bcc889756ef..c3edf9735b5 100644 --- a/var/spack/repos/builtin/packages/py-biopython/package.py +++ b/var/spack/repos/builtin/packages/py-biopython/package.py @@ -24,8 +24,13 @@ ############################################################################## from spack import * + class PyBiopython(Package): - """It is a distributed collaborative effort to develop Python libraries and applications which address the needs of current and future work in bioinformatics.""" + """A distributed collaborative effort to develop Python libraries and + applications which address the needs of current and future work in + bioinformatics. 
+ + """ homepage = "http://biopython.org/wiki/Main_Page" url = "http://biopython.org/DIST/biopython-1.65.tar.gz" diff --git a/var/spack/repos/builtin/packages/py-blessings/package.py b/var/spack/repos/builtin/packages/py-blessings/package.py index 8d7a2343d11..e6fc6aa983b 100644 --- a/var/spack/repos/builtin/packages/py-blessings/package.py +++ b/var/spack/repos/builtin/packages/py-blessings/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyBlessings(Package): """A nicer, kinder way to write to the terminal """ homepage = "https://github.com/erikrose/blessings" diff --git a/var/spack/repos/builtin/packages/py-bottleneck/package.py b/var/spack/repos/builtin/packages/py-bottleneck/package.py index 2483fab85f1..ad2ee749d33 100644 --- a/var/spack/repos/builtin/packages/py-bottleneck/package.py +++ b/var/spack/repos/builtin/packages/py-bottleneck/package.py @@ -24,8 +24,9 @@ ############################################################################## from spack import * + class PyBottleneck(Package): - """Bottleneck is a collection of fast NumPy array functions written in Cython.""" + """A collection of fast NumPy array functions written in Cython.""" homepage = "https://pypi.python.org/pypi/Bottleneck/1.0.0" url = "https://pypi.python.org/packages/source/B/Bottleneck/Bottleneck-1.0.0.tar.gz" diff --git a/var/spack/repos/builtin/packages/py-cffi/package.py b/var/spack/repos/builtin/packages/py-cffi/package.py index e54d50fa506..3c1044783fc 100644 --- a/var/spack/repos/builtin/packages/py-cffi/package.py +++ b/var/spack/repos/builtin/packages/py-cffi/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyCffi(Package): """Foreign Function Interface for Python calling C code""" homepage = "http://cffi.readthedocs.org/en/latest/" diff --git a/var/spack/repos/builtin/packages/py-coverage/package.py b/var/spack/repos/builtin/packages/py-coverage/package.py index b7eaf58cd67..c2a698b0bdd 100644 --- a/var/spack/repos/builtin/packages/py-coverage/package.py +++ b/var/spack/repos/builtin/packages/py-coverage/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyCoverage(Package): """ Testing coverage checker for python """ # FIXME: add a proper url for your package's homepage here. 
diff --git a/var/spack/repos/builtin/packages/py-csvkit/package.py b/var/spack/repos/builtin/packages/py-csvkit/package.py index 2deaa2f74e8..5f50e3b6c21 100644 --- a/var/spack/repos/builtin/packages/py-csvkit/package.py +++ b/var/spack/repos/builtin/packages/py-csvkit/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyCsvkit(Package): """A library of utilities for working with CSV, the king of tabular file formats""" diff --git a/var/spack/repos/builtin/packages/py-cython/package.py b/var/spack/repos/builtin/packages/py-cython/package.py index 5ccc508697f..4b3e1cabe1c 100644 --- a/var/spack/repos/builtin/packages/py-cython/package.py +++ b/var/spack/repos/builtin/packages/py-cython/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyCython(Package): """The Cython compiler for writing C extensions for the Python language.""" homepage = "https://pypi.python.org/pypi/cython" diff --git a/var/spack/repos/builtin/packages/py-dask/package.py b/var/spack/repos/builtin/packages/py-dask/package.py index b7dbab3d979..4bc2c6fc99f 100644 --- a/var/spack/repos/builtin/packages/py-dask/package.py +++ b/var/spack/repos/builtin/packages/py-dask/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyDask(Package): """Minimal task scheduling abstraction""" homepage = "https://github.com/dask/dask/" diff --git a/var/spack/repos/builtin/packages/py-dateutil/package.py b/var/spack/repos/builtin/packages/py-dateutil/package.py index dfd1e143d90..40945232c12 100644 --- a/var/spack/repos/builtin/packages/py-dateutil/package.py +++ b/var/spack/repos/builtin/packages/py-dateutil/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyDateutil(Package): """Extensions to the standard Python datetime module.""" homepage = "https://pypi.python.org/pypi/dateutil" diff --git a/var/spack/repos/builtin/packages/py-dbf/package.py b/var/spack/repos/builtin/packages/py-dbf/package.py index 09c93de4288..7f83bce75ed 100644 --- a/var/spack/repos/builtin/packages/py-dbf/package.py +++ b/var/spack/repos/builtin/packages/py-dbf/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyDbf(Package): """Pure python package for reading/writing dBase, FoxPro, and Visual FoxPro .dbf files (including memos)""" diff --git a/var/spack/repos/builtin/packages/py-decorator/package.py b/var/spack/repos/builtin/packages/py-decorator/package.py index bd78f262f43..9101b07a0d9 100644 --- a/var/spack/repos/builtin/packages/py-decorator/package.py +++ b/var/spack/repos/builtin/packages/py-decorator/package.py @@ -24,8 +24,11 @@ ############################################################################## from spack import * + class PyDecorator(Package): - """The aim of the decorator module it to simplify the usage of decorators for the average programmer, and to popularize decorators by showing various non-trivial examples.""" + """The aim of the decorator module it to simplify the usage of decorators + for the average programmer, and to popularize decorators by showing + various non-trivial examples.""" homepage = "https://github.com/micheles/decorator" url = "https://pypi.python.org/packages/source/d/decorator/decorator-4.0.9.tar.gz" 
diff --git a/var/spack/repos/builtin/packages/py-epydoc/package.py b/var/spack/repos/builtin/packages/py-epydoc/package.py index b370075e027..9d4b93dad48 100644 --- a/var/spack/repos/builtin/packages/py-epydoc/package.py +++ b/var/spack/repos/builtin/packages/py-epydoc/package.py @@ -24,8 +24,10 @@ ############################################################################## from spack import * + class PyEpydoc(Package): - """Epydoc is a tool for generating API documentation documentation for Python modules, based on their docstrings.""" + """Epydoc is a tool for generating API documentation documentation for + Python modules, based on their docstrings.""" homepage = "https://pypi.python.org/pypi/epydoc" url = "https://pypi.python.org/packages/source/e/epydoc/epydoc-3.0.1.tar.gz" diff --git a/var/spack/repos/builtin/packages/py-funcsigs/package.py b/var/spack/repos/builtin/packages/py-funcsigs/package.py index 11ba639b85d..c3d37f6b985 100644 --- a/var/spack/repos/builtin/packages/py-funcsigs/package.py +++ b/var/spack/repos/builtin/packages/py-funcsigs/package.py @@ -23,7 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import os + class PyFuncsigs(Package): """Python function signatures from PEP362 for Python 2.6, 2.7 and 3.2.""" @@ -38,6 +38,3 @@ class PyFuncsigs(Package): def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) - - - diff --git a/var/spack/repos/builtin/packages/py-genders/package.py b/var/spack/repos/builtin/packages/py-genders/package.py index 44f6cb1ef4e..f919a7e6c28 100644 --- a/var/spack/repos/builtin/packages/py-genders/package.py +++ b/var/spack/repos/builtin/packages/py-genders/package.py @@ -24,16 +24,19 @@ ############################################################################## from spack import * + class PyGenders(Package): - """Genders is a static cluster configuration database used for cluster configuration management. It is used by a variety of tools and scripts for management of large clusters.""" + """Genders is a static cluster configuration database used for cluster + configuration management. 
It is used by a variety of tools and scripts + for management of large clusters.""" homepage = "https://github.com/chaos/genders" url = "https://github.com/chaos/genders/releases/download/genders-1-22-1/genders-1.22.tar.gz" - version('1.22', '9ea59a024dcbddb85b0ed25ddca9bc8e', url='https://github.com/chaos/genders/releases/download/genders-1-22-1/genders-1.22.tar.gz') + version('1.22', '9ea59a024dcbddb85b0ed25ddca9bc8e', + url='https://github.com/chaos/genders/releases/download/genders-1-22-1/genders-1.22.tar.gz') extends('python') def install(self, spec, prefix): - configure("--prefix=%s" %prefix) + configure("--prefix=%s" % prefix) make(parallel=False) make("install") - diff --git a/var/spack/repos/builtin/packages/py-gnuplot/package.py b/var/spack/repos/builtin/packages/py-gnuplot/package.py index b08b03d1f1c..a448a66e512 100644 --- a/var/spack/repos/builtin/packages/py-gnuplot/package.py +++ b/var/spack/repos/builtin/packages/py-gnuplot/package.py @@ -24,8 +24,10 @@ ############################################################################## from spack import * + class PyGnuplot(Package): - """Gnuplot.py is a Python package that allows you to create graphs from within Python using the gnuplot plotting program.""" + """Gnuplot.py is a Python package that allows you to create graphs from + within Python using the gnuplot plotting program.""" homepage = "http://gnuplot-py.sourceforge.net/" url = "http://downloads.sourceforge.net/project/gnuplot-py/Gnuplot-py/1.8/gnuplot-py-1.8.tar.gz" diff --git a/var/spack/repos/builtin/packages/py-ipython/package.py b/var/spack/repos/builtin/packages/py-ipython/package.py index b583609953c..ac3291e21e0 100644 --- a/var/spack/repos/builtin/packages/py-ipython/package.py +++ b/var/spack/repos/builtin/packages/py-ipython/package.py @@ -24,8 +24,10 @@ ############################################################################## from spack import * + class PyIpython(Package): - """IPython provides a rich toolkit to help you make the most out of using Python interactively.""" + """IPython provides a rich toolkit to help you make the most out of using + Python interactively.""" homepage = "https://pypi.python.org/pypi/ipython" url = "https://pypi.python.org/packages/source/i/ipython/ipython-2.3.1.tar.gz" diff --git a/var/spack/repos/builtin/packages/py-jdcal/package.py b/var/spack/repos/builtin/packages/py-jdcal/package.py index fd1d6b44192..32acf751315 100644 --- a/var/spack/repos/builtin/packages/py-jdcal/package.py +++ b/var/spack/repos/builtin/packages/py-jdcal/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyJdcal(Package): """Julian dates from proleptic Gregorian and Julian calendars""" diff --git a/var/spack/repos/builtin/packages/py-jinja2/package.py b/var/spack/repos/builtin/packages/py-jinja2/package.py index 57f26e3b904..943edf521ab 100644 --- a/var/spack/repos/builtin/packages/py-jinja2/package.py +++ b/var/spack/repos/builtin/packages/py-jinja2/package.py @@ -48,4 +48,3 @@ class PyJinja2(Package): def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) - diff --git a/var/spack/repos/builtin/packages/py-lockfile/package.py b/var/spack/repos/builtin/packages/py-lockfile/package.py index 38cc81b8953..b873625bdbd 100644 --- a/var/spack/repos/builtin/packages/py-lockfile/package.py +++ b/var/spack/repos/builtin/packages/py-lockfile/package.py @@ -24,6 +24,7 @@ 
############################################################################## from spack import * + class PyLockfile(Package): """The lockfile package exports a LockFile class which provides a simple API for locking files. Unlike the Windows msvcrt.locking diff --git a/var/spack/repos/builtin/packages/py-logilab-common/package.py b/var/spack/repos/builtin/packages/py-logilab-common/package.py index 6dab4a40e4e..ac1b933e435 100644 --- a/var/spack/repos/builtin/packages/py-logilab-common/package.py +++ b/var/spack/repos/builtin/packages/py-logilab-common/package.py @@ -39,4 +39,3 @@ class PyLogilabCommon(Package): def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) - diff --git a/var/spack/repos/builtin/packages/py-mako/package.py b/var/spack/repos/builtin/packages/py-mako/package.py index 56741ec11f2..18a8dc0e68d 100644 --- a/var/spack/repos/builtin/packages/py-mako/package.py +++ b/var/spack/repos/builtin/packages/py-mako/package.py @@ -24,12 +24,13 @@ ############################################################################## from spack import * + class PyMako(Package): - """A super-fast templating language that borrows the best - ideas from the existing templating languages.""" + """A super-fast templating language that borrows the best + ideas from the existing templating languages.""" homepage = "https://pypi.python.org/pypi/mako" - url = "https://pypi.python.org/packages/source/M/Mako/Mako-1.0.1.tar.gz" + url = "https://pypi.python.org/packages/source/M/Mako/Mako-1.0.1.tar.gz" version('1.0.1', '9f0aafd177b039ef67b90ea350497a54') diff --git a/var/spack/repos/builtin/packages/py-markupsafe/package.py b/var/spack/repos/builtin/packages/py-markupsafe/package.py index ee396de7be6..0a039d1d478 100644 --- a/var/spack/repos/builtin/packages/py-markupsafe/package.py +++ b/var/spack/repos/builtin/packages/py-markupsafe/package.py @@ -48,4 +48,3 @@ class PyMarkupsafe(Package): def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) - diff --git a/var/spack/repos/builtin/packages/py-matplotlib/package.py b/var/spack/repos/builtin/packages/py-matplotlib/package.py index b5eb8a53c48..c454a47ec31 100644 --- a/var/spack/repos/builtin/packages/py-matplotlib/package.py +++ b/var/spack/repos/builtin/packages/py-matplotlib/package.py @@ -25,6 +25,7 @@ from spack import * import os + class PyMatplotlib(Package): """Python plotting package.""" homepage = "https://pypi.python.org/pypi/matplotlib" @@ -65,12 +66,12 @@ def install(self, spec, prefix): if str(self.version) in ['1.4.2', '1.4.3']: # hack to fix configuration file config_file = None - for p,d,f in os.walk(prefix.lib): + for p, d, f in os.walk(prefix.lib): for file in f: if file.find('matplotlibrc') != -1: config_file = join_path(p, 'matplotlibrc') print config_file - if config_file == None: + if config_file is None: raise InstallError('could not find config file') filter_file(r'backend : pyside', 'backend : Qt4Agg', diff --git a/var/spack/repos/builtin/packages/py-mock/package.py b/var/spack/repos/builtin/packages/py-mock/package.py index f4c178de670..2c70535f191 100644 --- a/var/spack/repos/builtin/packages/py-mock/package.py +++ b/var/spack/repos/builtin/packages/py-mock/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyMock(Package): """mock is a library for testing in Python. 
It allows you to replace parts of your system under test with mock objects and make assertions about how diff --git a/var/spack/repos/builtin/packages/py-mpi4py/package.py b/var/spack/repos/builtin/packages/py-mpi4py/package.py index a749e16779e..1f0e52804e9 100644 --- a/var/spack/repos/builtin/packages/py-mpi4py/package.py +++ b/var/spack/repos/builtin/packages/py-mpi4py/package.py @@ -24,8 +24,14 @@ ############################################################################## from spack import * + class PyMpi4py(Package): - """This package provides Python bindings for the Message Passing Interface (MPI) standard. It is implemented on top of the MPI-1/MPI-2 specification and exposes an API which grounds on the standard MPI-2 C++ bindings.""" + """This package provides Python bindings for the Message Passing + Interface (MPI) standard. It is implemented on top of the + MPI-1/MPI-2 specification and exposes an API which grounds on the + standard MPI-2 C++ bindings. + + """ homepage = "https://pypi.python.org/pypi/mpi4py" url = "https://pypi.python.org/packages/source/m/mpi4py/mpi4py-1.3.1.tar.gz" diff --git a/var/spack/repos/builtin/packages/py-mpmath/package.py b/var/spack/repos/builtin/packages/py-mpmath/package.py index 899ff053a9f..e5bae346940 100644 --- a/var/spack/repos/builtin/packages/py-mpmath/package.py +++ b/var/spack/repos/builtin/packages/py-mpmath/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyMpmath(Package): """A Python library for arbitrary-precision floating-point arithmetic.""" homepage = "http://mpmath.org" diff --git a/var/spack/repos/builtin/packages/py-mx/package.py b/var/spack/repos/builtin/packages/py-mx/package.py index d0f9f7cadfb..f5631916f69 100644 --- a/var/spack/repos/builtin/packages/py-mx/package.py +++ b/var/spack/repos/builtin/packages/py-mx/package.py @@ -24,8 +24,14 @@ ############################################################################## from spack import * + class PyMx(Package): - """The eGenix.com mx Base Distribution for Python is a collection of professional quality software tools which enhance Python's usability in many important areas such as fast text searching, date/time processing and high speed data types.""" + """The eGenix.com mx Base Distribution for Python is a collection of + professional quality software tools which enhance Python's + usability in many important areas such as fast text searching, + date/time processing and high speed data types. 
+ + """ homepage = "http://www.egenix.com/products/python/mxBase/" url = "https://downloads.egenix.com/python/egenix-mx-base-3.2.8.tar.gz" diff --git a/var/spack/repos/builtin/packages/py-mysqldb1/package.py b/var/spack/repos/builtin/packages/py-mysqldb1/package.py index 46b44f34dd5..693fda6dbb8 100644 --- a/var/spack/repos/builtin/packages/py-mysqldb1/package.py +++ b/var/spack/repos/builtin/packages/py-mysqldb1/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyMysqldb1(Package): """Legacy mysql bindings for python""" homepage = "https://github.com/farcepest/MySQLdb1" @@ -36,4 +37,3 @@ class PyMysqldb1(Package): def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) - diff --git a/var/spack/repos/builtin/packages/py-netcdf/package.py b/var/spack/repos/builtin/packages/py-netcdf/package.py index 9a354cb1c16..e4f67d75a65 100644 --- a/var/spack/repos/builtin/packages/py-netcdf/package.py +++ b/var/spack/repos/builtin/packages/py-netcdf/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyNetcdf(Package): """Python interface to the netCDF Library.""" homepage = "http://unidata.github.io/netcdf4-python" diff --git a/var/spack/repos/builtin/packages/py-numexpr/package.py b/var/spack/repos/builtin/packages/py-numexpr/package.py index fd8d7dc0c6d..b3b2e1d47d8 100644 --- a/var/spack/repos/builtin/packages/py-numexpr/package.py +++ b/var/spack/repos/builtin/packages/py-numexpr/package.py @@ -23,7 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import re + class PyNumexpr(Package): """Fast numerical expression evaluator for NumPy""" diff --git a/var/spack/repos/builtin/packages/py-openpyxl/package.py b/var/spack/repos/builtin/packages/py-openpyxl/package.py index 624da58ff30..fa32cb879f6 100644 --- a/var/spack/repos/builtin/packages/py-openpyxl/package.py +++ b/var/spack/repos/builtin/packages/py-openpyxl/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyOpenpyxl(Package): """A Python library to read/write Excel 2007 xlsx/xlsm files""" diff --git a/var/spack/repos/builtin/packages/py-pandas/package.py b/var/spack/repos/builtin/packages/py-pandas/package.py index 8bd4227faf6..37234ae652a 100644 --- a/var/spack/repos/builtin/packages/py-pandas/package.py +++ b/var/spack/repos/builtin/packages/py-pandas/package.py @@ -23,10 +23,18 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import os + class PyPandas(Package): - """pandas is a Python package providing fast, flexible, and expressive data structures designed to make working with relational or labeled data both easy and intuitive. It aims to be the fundamental high-level building block for doing practical, real world data analysis in Python. Additionally, it has the broader goal of becoming the most powerful and flexible open source data analysis / manipulation tool available in any language.""" + """pandas is a Python package providing fast, flexible, and expressive + data structures designed to make working with relational or + labeled data both easy and intuitive. 
It aims to be the + fundamental high-level building block for doing practical, real + world data analysis in Python. Additionally, it has the broader + goal of becoming the most powerful and flexible open source data + analysis / manipulation tool available in any language. + + """ homepage = "http://pandas.pydata.org/" url = "https://pypi.python.org/packages/source/p/pandas/pandas-0.16.0.tar.gz#md5=bfe311f05dc0c351f8955fbd1e296e73" diff --git a/var/spack/repos/builtin/packages/py-pbr/package.py b/var/spack/repos/builtin/packages/py-pbr/package.py index 8502b0b3642..a0cfe0e5a52 100644 --- a/var/spack/repos/builtin/packages/py-pbr/package.py +++ b/var/spack/repos/builtin/packages/py-pbr/package.py @@ -23,10 +23,11 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import os + class PyPbr(Package): - """PBR is a library that injects some useful and sensible default behaviors into your setuptools run.""" + """PBR is a library that injects some useful and sensible default + behaviors into your setuptools run.""" homepage = "https://pypi.python.org/pypi/pbr" url = "https://pypi.python.org/packages/source/p/pbr/pbr-1.8.1.tar.gz" @@ -38,5 +39,3 @@ class PyPbr(Package): def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) - - diff --git a/var/spack/repos/builtin/packages/py-pep8/package.py b/var/spack/repos/builtin/packages/py-pep8/package.py index ffcc2b318c7..87d1da9ab0c 100644 --- a/var/spack/repos/builtin/packages/py-pep8/package.py +++ b/var/spack/repos/builtin/packages/py-pep8/package.py @@ -1,5 +1,6 @@ from spack import * + class PyPep8(Package): """python pep8 format checker""" homepage = "https://github.com/PyCQA/pycodestyle" @@ -12,4 +13,3 @@ class PyPep8(Package): def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) - diff --git a/var/spack/repos/builtin/packages/py-periodictable/package.py b/var/spack/repos/builtin/packages/py-periodictable/package.py index 608f4e16c16..51d9cc2046c 100644 --- a/var/spack/repos/builtin/packages/py-periodictable/package.py +++ b/var/spack/repos/builtin/packages/py-periodictable/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyPeriodictable(Package): """nose extends the test loading and running features of unittest, making it easier to write, find and run tests.""" diff --git a/var/spack/repos/builtin/packages/py-pexpect/package.py b/var/spack/repos/builtin/packages/py-pexpect/package.py index dd95af96431..8a99c0473bd 100644 --- a/var/spack/repos/builtin/packages/py-pexpect/package.py +++ b/var/spack/repos/builtin/packages/py-pexpect/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyPexpect(Package): """Pexpect allows easy control of interactive console applications.""" homepage = "https://pypi.python.org/pypi/pexpect" diff --git a/var/spack/repos/builtin/packages/py-phonopy/package.py b/var/spack/repos/builtin/packages/py-phonopy/package.py index a3a4b7a9f7f..d5b3313a98f 100644 --- a/var/spack/repos/builtin/packages/py-phonopy/package.py +++ b/var/spack/repos/builtin/packages/py-phonopy/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyPhonopy(Package): """Phonopy is an open source package for phonon 
calculations at harmonic and quasi-harmonic levels.""" diff --git a/var/spack/repos/builtin/packages/py-pillow/package.py b/var/spack/repos/builtin/packages/py-pillow/package.py index 13a09f63cfa..aae5727d4e9 100644 --- a/var/spack/repos/builtin/packages/py-pillow/package.py +++ b/var/spack/repos/builtin/packages/py-pillow/package.py @@ -53,7 +53,8 @@ class PyPillow(Package): variant('tiff', default=False, description='Access to TIFF files') variant('freetype', default=False, description='Font related services') variant('lcms', default=False, description='Color management') - variant('jpeg2000', default=False, description='Provide JPEG 2000 functionality') + variant('jpeg2000', default=False, + description='Provide JPEG 2000 functionality') # Spack does not (yet) support these modes of building # variant('webp', default=False, description='Provide the WebP format') diff --git a/var/spack/repos/builtin/packages/py-pmw/package.py b/var/spack/repos/builtin/packages/py-pmw/package.py index 062bfe9c03d..e0a332a6bfa 100644 --- a/var/spack/repos/builtin/packages/py-pmw/package.py +++ b/var/spack/repos/builtin/packages/py-pmw/package.py @@ -24,8 +24,10 @@ ############################################################################## from spack import * + class PyPmw(Package): - """Pmw is a toolkit for building high-level compound widgets, or megawidgets, constructed using other widgets as component parts.""" + """Pmw is a toolkit for building high-level compound widgets, or + megawidgets, constructed using other widgets as component parts.""" homepage = "https://pypi.python.org/pypi/Pmw" url = "https://pypi.python.org/packages/source/P/Pmw/Pmw-2.0.0.tar.gz" diff --git a/var/spack/repos/builtin/packages/py-py2neo/package.py b/var/spack/repos/builtin/packages/py-py2neo/package.py index 02f37d8b784..a79c2e477b2 100644 --- a/var/spack/repos/builtin/packages/py-py2neo/package.py +++ b/var/spack/repos/builtin/packages/py-py2neo/package.py @@ -43,4 +43,3 @@ class PyPy2neo(Package): def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) - diff --git a/var/spack/repos/builtin/packages/py-pychecker/package.py b/var/spack/repos/builtin/packages/py-pychecker/package.py index 137c5f491ce..e81c3dbc9bc 100644 --- a/var/spack/repos/builtin/packages/py-pychecker/package.py +++ b/var/spack/repos/builtin/packages/py-pychecker/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyPychecker(Package): """""" homepage = "http://pychecker.sourceforge.net/" diff --git a/var/spack/repos/builtin/packages/py-pycparser/package.py b/var/spack/repos/builtin/packages/py-pycparser/package.py index 9afba500779..ef1b772ffc5 100644 --- a/var/spack/repos/builtin/packages/py-pycparser/package.py +++ b/var/spack/repos/builtin/packages/py-pycparser/package.py @@ -24,14 +24,14 @@ ############################################################################## from spack import * + class PyPycparser(Package): - """pycparser is a complete parser of the C language, written in pure python""" + """A complete parser of the C language, written in pure python.""" homepage = "https://github.com/eliben/pycparser" url = "https://pypi.python.org/packages/source/p/pycparser/pycparser-2.13.tar.gz" version('2.13', 'e4fe1a2d341b22e25da0d22f034ef32f') - extends('python') depends_on('py-setuptools', type='build') diff --git a/var/spack/repos/builtin/packages/py-pyelftools/package.py b/var/spack/repos/builtin/packages/py-pyelftools/package.py 
index 0c4a7a36ccc..bf781daf83c 100644 --- a/var/spack/repos/builtin/packages/py-pyelftools/package.py +++ b/var/spack/repos/builtin/packages/py-pyelftools/package.py @@ -24,8 +24,10 @@ ############################################################################## from spack import * + class PyPyelftools(Package): - """A pure-Python library for parsing and analyzing ELF files and DWARF debugging information""" + """A pure-Python library for parsing and analyzing ELF files and DWARF + debugging information""" homepage = "https://pypi.python.org/pypi/pyelftools" url = "https://pypi.python.org/packages/source/p/pyelftools/pyelftools-0.23.tar.gz" diff --git a/var/spack/repos/builtin/packages/py-pygments/package.py b/var/spack/repos/builtin/packages/py-pygments/package.py index 0ca15dd10dc..2d22bd9f03f 100644 --- a/var/spack/repos/builtin/packages/py-pygments/package.py +++ b/var/spack/repos/builtin/packages/py-pygments/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyPygments(Package): """Pygments is a syntax highlighting package written in Python.""" homepage = "https://pypi.python.org/pypi/pygments" diff --git a/var/spack/repos/builtin/packages/py-pylint/package.py b/var/spack/repos/builtin/packages/py-pylint/package.py index 7107b2987fd..c505d445300 100644 --- a/var/spack/repos/builtin/packages/py-pylint/package.py +++ b/var/spack/repos/builtin/packages/py-pylint/package.py @@ -23,7 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import re + class PyPylint(Package): """array processing for numbers, strings, records, and objects.""" diff --git a/var/spack/repos/builtin/packages/py-pypar/package.py b/var/spack/repos/builtin/packages/py-pypar/package.py index f762789dea9..6fef71304c5 100644 --- a/var/spack/repos/builtin/packages/py-pypar/package.py +++ b/var/spack/repos/builtin/packages/py-pypar/package.py @@ -24,12 +24,16 @@ ############################################################################## from spack import * + class PyPypar(Package): - """Pypar is an efficient but easy-to-use module that allows programs written in Python to run in parallel on multiple processors and communicate using MPI.""" + """Pypar is an efficient but easy-to-use module that allows programs + written in Python to run in parallel on multiple processors and + communicate using MPI.""" homepage = "http://code.google.com/p/pypar/" url = "https://pypar.googlecode.com/files/pypar-2.1.5_108.tgz" - version('2.1.5_108', '7a1f28327d2a3b679f9455c843d850b8', url='https://pypar.googlecode.com/files/pypar-2.1.5_108.tgz') + version('2.1.5_108', '7a1f28327d2a3b679f9455c843d850b8', + url='https://pypar.googlecode.com/files/pypar-2.1.5_108.tgz') extends('python') depends_on('mpi') diff --git a/var/spack/repos/builtin/packages/py-pyparsing/package.py b/var/spack/repos/builtin/packages/py-pyparsing/package.py index 0423aa3bdb2..67d255b02d5 100644 --- a/var/spack/repos/builtin/packages/py-pyparsing/package.py +++ b/var/spack/repos/builtin/packages/py-pyparsing/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyPyparsing(Package): """A Python Parsing Module.""" homepage = "https://pypi.python.org/pypi/pyparsing" diff --git a/var/spack/repos/builtin/packages/py-pyqt/package.py 
b/var/spack/repos/builtin/packages/py-pyqt/package.py index 05fb7aa22cc..de68bfaa90d 100644 --- a/var/spack/repos/builtin/packages/py-pyqt/package.py +++ b/var/spack/repos/builtin/packages/py-pyqt/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyPyqt(Package): """PyQt is a set of Python v2 and v3 bindings for Digia's Qt application framework and runs on all platforms supported by Qt diff --git a/var/spack/repos/builtin/packages/py-pyside/package.py b/var/spack/repos/builtin/packages/py-pyside/package.py index cd174ce6583..1cb3e4745f1 100644 --- a/var/spack/repos/builtin/packages/py-pyside/package.py +++ b/var/spack/repos/builtin/packages/py-pyside/package.py @@ -25,6 +25,7 @@ from spack import * import os + class PyPyside(Package): """Python bindings for Qt.""" homepage = "https://pypi.python.org/pypi/pyside" @@ -43,7 +44,8 @@ def patch(self): # Figure out the special RPATH pypkg = self.spec['python'].package rpath = self.rpath - rpath.append(os.path.join(self.prefix, pypkg.site_packages_dir, 'PySide')) + rpath.append(os.path.join( + self.prefix, pypkg.site_packages_dir, 'PySide')) # Add Spack's standard CMake args to the sub-builds. # They're called BY setup.py so we have to patch it. @@ -61,7 +63,6 @@ def patch(self): r'#rpath_cmd(pyside_path, srcpath)', 'pyside_postinstall.py') - def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix, diff --git a/var/spack/repos/builtin/packages/py-python-daemon/package.py b/var/spack/repos/builtin/packages/py-python-daemon/package.py index c2c7c4de4f8..a30dc00ba41 100644 --- a/var/spack/repos/builtin/packages/py-python-daemon/package.py +++ b/var/spack/repos/builtin/packages/py-python-daemon/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyPythonDaemon(Package): """Library to implement a well-behaved Unix daemon process. 
@@ -47,4 +48,3 @@ class PyPythonDaemon(Package): def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) - diff --git a/var/spack/repos/builtin/packages/py-pytz/package.py b/var/spack/repos/builtin/packages/py-pytz/package.py index caf2c90e739..96f686d5910 100644 --- a/var/spack/repos/builtin/packages/py-pytz/package.py +++ b/var/spack/repos/builtin/packages/py-pytz/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyPytz(Package): """World timezone definitions, modern and historical.""" homepage = "https://pypi.python.org/pypi/pytz" diff --git a/var/spack/repos/builtin/packages/py-pyyaml/package.py b/var/spack/repos/builtin/packages/py-pyyaml/package.py index c0e22ba6810..8da391fac13 100644 --- a/var/spack/repos/builtin/packages/py-pyyaml/package.py +++ b/var/spack/repos/builtin/packages/py-pyyaml/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyPyyaml(Package): """PyYAML is a YAML parser and emitter for Python.""" homepage = "http://pyyaml.org/wiki/PyYAML" diff --git a/var/spack/repos/builtin/packages/py-rpy2/package.py b/var/spack/repos/builtin/packages/py-rpy2/package.py index 8f4771f0eb5..f86d8137663 100644 --- a/var/spack/repos/builtin/packages/py-rpy2/package.py +++ b/var/spack/repos/builtin/packages/py-rpy2/package.py @@ -24,10 +24,16 @@ ############################################################################## from spack import * + class PyRpy2(Package): - """rpy2 is a redesign and rewrite of rpy. It is providing a low-level interface to R from Python, a proposed high-level interface, including wrappers to graphical libraries, as well as R-like structures and functions.""" + """rpy2 is a redesign and rewrite of rpy. It is providing a low-level + interface to R from Python, a proposed high-level interface, + including wrappers to graphical libraries, as well as R-like + structures and functions. + + """ homepage = "https://pypi.python.org/pypi/rpy2" - url = "https://pypi.python.org/packages/source/r/rpy2/rpy2-2.5.4.tar.gz" + url = "https://pypi.python.org/packages/source/r/rpy2/rpy2-2.5.4.tar.gz" version('2.5.4', '115a20ac30883f096da2bdfcab55196d') version('2.5.6', 'a36e758b633ce6aec6a5f450bfee980f') diff --git a/var/spack/repos/builtin/packages/py-scientificpython/package.py b/var/spack/repos/builtin/packages/py-scientificpython/package.py index 7f7eb76b73f..e2273dc1643 100644 --- a/var/spack/repos/builtin/packages/py-scientificpython/package.py +++ b/var/spack/repos/builtin/packages/py-scientificpython/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyScientificpython(Package): """ScientificPython is a collection of Python modules for scientific computing. 
It contains support for geometry, diff --git a/var/spack/repos/builtin/packages/py-scikit-learn/package.py b/var/spack/repos/builtin/packages/py-scikit-learn/package.py index 1a6112bcf3e..3cd7ea74f3f 100644 --- a/var/spack/repos/builtin/packages/py-scikit-learn/package.py +++ b/var/spack/repos/builtin/packages/py-scikit-learn/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyScikitLearn(Package): """""" homepage = "https://pypi.python.org/pypi/scikit-learn" diff --git a/var/spack/repos/builtin/packages/py-scipy/package.py b/var/spack/repos/builtin/packages/py-scipy/package.py index ea3731cf1bb..cab516e1df0 100644 --- a/var/spack/repos/builtin/packages/py-scipy/package.py +++ b/var/spack/repos/builtin/packages/py-scipy/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyScipy(Package): """Scientific Library for Python.""" homepage = "http://www.scipy.org/" @@ -41,7 +42,8 @@ def install(self, spec, prefix): if 'atlas' in spec: # libatlas.so actually isn't always installed, but this # seems to make the build autodetect things correctly. - env['ATLAS'] = join_path(spec['atlas'].prefix.lib, 'libatlas.' + dso_suffix) + env['ATLAS'] = join_path( + spec['atlas'].prefix.lib, 'libatlas.' + dso_suffix) else: env['BLAS'] = spec['blas'].blas_shared_lib env['LAPACK'] = spec['lapack'].lapack_shared_lib diff --git a/var/spack/repos/builtin/packages/py-shiboken/package.py b/var/spack/repos/builtin/packages/py-shiboken/package.py index 27188e14172..0713f26ebcd 100644 --- a/var/spack/repos/builtin/packages/py-shiboken/package.py +++ b/var/spack/repos/builtin/packages/py-shiboken/package.py @@ -25,8 +25,9 @@ from spack import * import os + class PyShiboken(Package): - """Shiboken generates bindings for C++ libraries using CPython source code.""" + """Shiboken generates bindings for C++ libraries using CPython.""" homepage = "https://shiboken.readthedocs.org/" url = "https://pypi.python.org/packages/source/S/Shiboken/Shiboken-1.2.2.tar.gz" @@ -45,7 +46,8 @@ def patch(self): # They're called BY setup.py so we have to patch it. 
pypkg = self.spec['python'].package rpath = self.rpath - rpath.append(os.path.join(self.prefix, pypkg.site_packages_dir, 'Shiboken')) + rpath.append(os.path.join( + self.prefix, pypkg.site_packages_dir, 'Shiboken')) filter_file( r'OPTION_CMAKE,', @@ -61,7 +63,6 @@ def patch(self): r'#rpath_cmd(shiboken_path, srcpath)', 'shiboken_postinstall.py') - def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix, diff --git a/var/spack/repos/builtin/packages/py-sip/package.py b/var/spack/repos/builtin/packages/py-sip/package.py index fc8e7f52967..9d97f084331 100644 --- a/var/spack/repos/builtin/packages/py-sip/package.py +++ b/var/spack/repos/builtin/packages/py-sip/package.py @@ -25,8 +25,10 @@ from spack import * import os + class PySip(Package): - """SIP is a tool that makes it very easy to create Python bindings for C and C++ libraries.""" + """SIP is a tool that makes it very easy to create Python bindings for C + and C++ libraries.""" homepage = "http://www.riverbankcomputing.com/software/sip/intro" url = "http://sourceforge.net/projects/pyqt/files/sip/sip-4.16.5/sip-4.16.5.tar.gz" diff --git a/var/spack/repos/builtin/packages/py-six/package.py b/var/spack/repos/builtin/packages/py-six/package.py index eb4846d5af8..da0c7aa003c 100644 --- a/var/spack/repos/builtin/packages/py-six/package.py +++ b/var/spack/repos/builtin/packages/py-six/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PySix(Package): """Python 2 and 3 compatibility utilities.""" homepage = "https://pypi.python.org/pypi/six" diff --git a/var/spack/repos/builtin/packages/py-sphinx/package.py b/var/spack/repos/builtin/packages/py-sphinx/package.py index d00f1d128b7..2295a6a0c33 100644 --- a/var/spack/repos/builtin/packages/py-sphinx/package.py +++ b/var/spack/repos/builtin/packages/py-sphinx/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PySphinx(Package): """Sphinx Documentation Generator.""" homepage = "http://sphinx-doc.org" diff --git a/var/spack/repos/builtin/packages/py-sympy/package.py b/var/spack/repos/builtin/packages/py-sympy/package.py index 5e38fc5be12..3d8b86ac4d0 100644 --- a/var/spack/repos/builtin/packages/py-sympy/package.py +++ b/var/spack/repos/builtin/packages/py-sympy/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PySympy(Package): """SymPy is a Python library for symbolic mathematics.""" homepage = "https://pypi.python.org/pypi/sympy" diff --git a/var/spack/repos/builtin/packages/py-tappy/package.py b/var/spack/repos/builtin/packages/py-tappy/package.py index b10244acddf..03e9528ad78 100644 --- a/var/spack/repos/builtin/packages/py-tappy/package.py +++ b/var/spack/repos/builtin/packages/py-tappy/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyTappy(Package): """Python TAP interface module for unit tests""" homepage = "https://github.com/mblayman/tappy" diff --git a/var/spack/repos/builtin/packages/py-tuiview/package.py b/var/spack/repos/builtin/packages/py-tuiview/package.py index 56056ebad16..672a3ee5873 100644 --- a/var/spack/repos/builtin/packages/py-tuiview/package.py +++ b/var/spack/repos/builtin/packages/py-tuiview/package.py @@ -24,6 +24,7 @@ ############################################################################## from 
spack import * + class PyTuiview(Package): """ TuiView is a lightweight raster GIS with powerful raster attribute diff --git a/var/spack/repos/builtin/packages/py-twisted/package.py b/var/spack/repos/builtin/packages/py-twisted/package.py index 2ce83cd24c0..edf1e7b0d75 100644 --- a/var/spack/repos/builtin/packages/py-twisted/package.py +++ b/var/spack/repos/builtin/packages/py-twisted/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyTwisted(Package): """An asynchronous networking framework written in Python""" homepage = "https://twistedmatrix.com/" diff --git a/var/spack/repos/builtin/packages/py-urwid/package.py b/var/spack/repos/builtin/packages/py-urwid/package.py index 48ba84e7bc8..943fb250f69 100644 --- a/var/spack/repos/builtin/packages/py-urwid/package.py +++ b/var/spack/repos/builtin/packages/py-urwid/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyUrwid(Package): """A full-featured console UI library""" homepage = "http://urwid.org/" @@ -37,4 +38,3 @@ class PyUrwid(Package): def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) - diff --git a/var/spack/repos/builtin/packages/py-virtualenv/package.py b/var/spack/repos/builtin/packages/py-virtualenv/package.py index d6b33ae175a..0ed567df95f 100644 --- a/var/spack/repos/builtin/packages/py-virtualenv/package.py +++ b/var/spack/repos/builtin/packages/py-virtualenv/package.py @@ -23,7 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import shutil + class PyVirtualenv(Package): """virtualenv is a tool to create isolated Python environments.""" diff --git a/var/spack/repos/builtin/packages/py-wheel/package.py b/var/spack/repos/builtin/packages/py-wheel/package.py index 9e383a9a19f..f0ad3408351 100644 --- a/var/spack/repos/builtin/packages/py-wheel/package.py +++ b/var/spack/repos/builtin/packages/py-wheel/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyWheel(Package): """A built-package format for Python.""" diff --git a/var/spack/repos/builtin/packages/py-xlrd/package.py b/var/spack/repos/builtin/packages/py-xlrd/package.py index 9220f903405..81c3c928c0e 100644 --- a/var/spack/repos/builtin/packages/py-xlrd/package.py +++ b/var/spack/repos/builtin/packages/py-xlrd/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyXlrd(Package): """Library for developers to extract data from Microsoft Excel (tm) spreadsheet files""" diff --git a/var/spack/repos/builtin/packages/py-yapf/package.py b/var/spack/repos/builtin/packages/py-yapf/package.py index f7fe4037a13..bc26b82b078 100644 --- a/var/spack/repos/builtin/packages/py-yapf/package.py +++ b/var/spack/repos/builtin/packages/py-yapf/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class PyYapf(Package): """ Yet Another Python Formatter """ homepage = "https://github.com/google/yapf" diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index f755527607c..43f600e2f62 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ 
b/var/spack/repos/builtin/packages/python/package.py @@ -55,7 +55,8 @@ class Python(Package): extendable = True variant('tk', default=False, description='Provide support for Tkinter') - variant('ucs4', default=False, description='Enable UCS4 (wide) unicode strings') + variant('ucs4', default=False, + description='Enable UCS4 (wide) unicode strings') # From https://docs.python.org/2/c-api/unicode.html: Python's default # builds use a 16-bit type for Py_UNICODE and store Unicode values # internally as UCS2. It is also possible to build a UCS4 version of Python diff --git a/var/spack/repos/builtin/packages/qhull/package.py b/var/spack/repos/builtin/packages/qhull/package.py index f7cbcc27365..2733d8b6525 100644 --- a/var/spack/repos/builtin/packages/qhull/package.py +++ b/var/spack/repos/builtin/packages/qhull/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Qhull(Package): """Qhull computes the convex hull, Delaunay triangulation, Voronoi diagram, halfspace intersection about a point, furt hest-site @@ -44,7 +45,7 @@ class Qhull(Package): # https://github.com/qhull/qhull/pull/5 patch('qhull-iterator.patch', when='@1.0') - + depends_on('cmake', type='build') def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/qrupdate/package.py b/var/spack/repos/builtin/packages/qrupdate/package.py index 314950dc362..f6b4c80cf43 100644 --- a/var/spack/repos/builtin/packages/qrupdate/package.py +++ b/var/spack/repos/builtin/packages/qrupdate/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Qrupdate(Package): """qrupdate is a Fortran library for fast updates of QR and Cholesky decompositions.""" diff --git a/var/spack/repos/builtin/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py index f33e5c2d0e5..e496a3e4d5f 100644 --- a/var/spack/repos/builtin/packages/qt/package.py +++ b/var/spack/repos/builtin/packages/qt/package.py @@ -25,6 +25,7 @@ from spack import * import os + class Qt(Package): """Qt is a comprehensive cross-platform C++ application framework.""" homepage = 'http://qt.io' @@ -37,15 +38,17 @@ class Qt(Package): version('4.8.6', '2edbe4d6c2eff33ef91732602f3518eb') version('3.3.8b', '9f05b4125cfe477cc52c9742c3c09009') - # Add patch for compile issues with qt3 found with use in the OpenSpeedShop project - variant('krellpatch', default=False, description="Build with openspeedshop based patch.") + # Add patch for compile issues with qt3 found with use in the + # OpenSpeedShop project + variant('krellpatch', default=False, + description="Build with openspeedshop based patch.") variant('mesa', default=False, description="Depend on mesa.") variant('gtk', default=False, description="Build with gtkplus.") patch('qt3krell.patch', when='@3.3.8b+krellpatch') # Use system openssl for security. 
- #depends_on("openssl") + # depends_on("openssl") depends_on("glib") depends_on("gtkplus", when='+gtk') @@ -69,43 +72,39 @@ class Qt(Package): depends_on("mesa", when='@4:+mesa') depends_on("libxcb") - def url_for_version(self, version): url = "http://download.qt.io/archive/qt/" if version >= Version('5'): url += "%s/%s/single/qt-everywhere-opensource-src-%s.tar.gz" % \ - (version.up_to(2), version, version) + (version.up_to(2), version, version) elif version >= Version('4.8'): url += "%s/%s/qt-everywhere-opensource-src-%s.tar.gz" % \ - (version.up_to(2), version, version) + (version.up_to(2), version, version) elif version >= Version('4.6'): url += "%s/qt-everywhere-opensource-src-%s.tar.gz" % \ - (version.up_to(2), version) + (version.up_to(2), version) elif version >= Version('4.0'): url += "%s/qt-x11-opensource-src-%s.tar.gz" % \ - (version.up_to(2), version) + (version.up_to(2), version) elif version >= Version('3'): url += "%s/qt-x11-free-%s.tar.gz" % \ - (version.up_to(1), version) + (version.up_to(1), version) elif version >= Version('2.1'): url += "%s/qt-x11-%s.tar.gz" % \ - (version.up_to(1), version) + (version.up_to(1), version) else: url += "%s/qt-%s.tar.gz" % \ - (version.up_to(1), version) + (version.up_to(1), version) return url - def setup_environment(self, spack_env, env): env.set('QTDIR', self.prefix) - def setup_dependent_environment(self, spack_env, run_env, dspec): spack_env.set('QTDIR', self.prefix) - def patch(self): if self.spec.satisfies('@4'): qmake_conf = 'mkspecs/common/g++-base.conf' @@ -117,11 +116,14 @@ def patch(self): return # Fix qmake compilers in the default mkspec - filter_file(r'^QMAKE_COMPILER *=.*$', 'QMAKE_COMPILER = cc', qmake_conf) - filter_file(r'^QMAKE_CC *=.*$', 'QMAKE_CC = cc', qmake_conf) - filter_file(r'^QMAKE_CXX *=.*$', 'QMAKE_CXX = c++', qmake_conf) - filter_file(r'^QMAKE_LFLAGS_NOUNDEF *\+?=.*$', 'QMAKE_LFLAGS_NOUNDEF =', qmake_unix_conf) - + filter_file(r'^QMAKE_COMPILER *=.*$', + 'QMAKE_COMPILER = cc', qmake_conf) + filter_file(r'^QMAKE_CC *=.*$', + 'QMAKE_CC = cc', qmake_conf) + filter_file(r'^QMAKE_CXX *=.*$', + 'QMAKE_CXX = c++', qmake_conf) + filter_file(r'^QMAKE_LFLAGS_NOUNDEF *\+?=.*$', + 'QMAKE_LFLAGS_NOUNDEF =', qmake_unix_conf) @property def common_config_args(self): @@ -155,7 +157,7 @@ def common_config_args(self): @when('@3') def configure(self): # An user report that this was necessary to link Qt3 on ubuntu - os.environ['LD_LIBRARY_PATH'] = os.getcwd()+'/lib' + os.environ['LD_LIBRARY_PATH'] = os.getcwd() + '/lib' configure('-prefix', self.prefix, '-v', '-thread', @@ -169,17 +171,16 @@ def configure(self): '-no-webkit', *self.common_config_args) - @when('@5') def configure(self): configure('-no-eglfs', '-no-directfb', '-qt-xcb', - # If someone wants to get a webkit build working, be my guest! + # If someone wants to get a webkit build working, be my + # guest! 
'-skip', 'qtwebkit', *self.common_config_args) - def install(self, spec, prefix): self.configure() make() diff --git a/var/spack/repos/builtin/packages/qthreads/package.py b/var/spack/repos/builtin/packages/qthreads/package.py index 47b57060633..2eaff0a240c 100644 --- a/var/spack/repos/builtin/packages/qthreads/package.py +++ b/var/spack/repos/builtin/packages/qthreads/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Qthreads(Package): """The qthreads API is designed to make using large numbers of threads convenient and easy, and to allow portable access to diff --git a/var/spack/repos/builtin/packages/raja/package.py b/var/spack/repos/builtin/packages/raja/package.py index b6300a1dfa2..e9db4b4fc82 100644 --- a/var/spack/repos/builtin/packages/raja/package.py +++ b/var/spack/repos/builtin/packages/raja/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Raja(Package): """RAJA Parallel Framework.""" homepage = "http://software.llnl.gov/RAJA/" @@ -31,6 +32,6 @@ class Raja(Package): version('git', git='https://github.com/LLNL/RAJA.git', branch="master") def install(self, spec, prefix): - cmake('.',*std_cmake_args) - make() - make('install') + cmake('.', *std_cmake_args) + make() + make('install') diff --git a/var/spack/repos/builtin/packages/ravel/package.py b/var/spack/repos/builtin/packages/ravel/package.py index 96692ac7c14..4f4f2b2e10a 100644 --- a/var/spack/repos/builtin/packages/ravel/package.py +++ b/var/spack/repos/builtin/packages/ravel/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Ravel(Package): """Ravel is a parallel communication trace visualization tool that orders events according to logical time.""" diff --git a/var/spack/repos/builtin/packages/readline/package.py b/var/spack/repos/builtin/packages/readline/package.py index 039bf725eb1..abb6ba04ce4 100644 --- a/var/spack/repos/builtin/packages/readline/package.py +++ b/var/spack/repos/builtin/packages/readline/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Readline(Package): """The GNU Readline library provides a set of functions for use by applications that allow users to edit command lines as they diff --git a/var/spack/repos/builtin/packages/rose/package.py b/var/spack/repos/builtin/packages/rose/package.py index bcd317eb8ff..02b09f01265 100644 --- a/var/spack/repos/builtin/packages/rose/package.py +++ b/var/spack/repos/builtin/packages/rose/package.py @@ -22,12 +22,13 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -#------------------------------------------------------------------------------ +# ----------------------------------------------------------------------------- # Author: Justin Too -#------------------------------------------------------------------------------ +# ----------------------------------------------------------------------------- from spack import * + class Rose(Package): """A compiler infrastructure to build source-to-source program transformation and analysis tools. 
@@ -36,7 +37,8 @@ class Rose(Package): homepage = "http://rosecompiler.org/" url = "https://github.com/rose-compiler/edg4x-rose" - version('master', branch='master', git='https://github.com/rose-compiler/edg4x-rose.git') + version('master', branch='master', + git='https://github.com/rose-compiler/edg4x-rose.git') patch('add_spack_compiler_recognition.patch') @@ -60,4 +62,3 @@ def install(self, spec, prefix): "--with-boost=" + boost.prefix, "--disable-boost-version-check") make("install-core") - diff --git a/var/spack/repos/builtin/packages/rsync/package.py b/var/spack/repos/builtin/packages/rsync/package.py index a9f8d4cfdaa..4e741b255f2 100644 --- a/var/spack/repos/builtin/packages/rsync/package.py +++ b/var/spack/repos/builtin/packages/rsync/package.py @@ -24,8 +24,9 @@ ############################################################################## from spack import * + class Rsync(Package): - """rsync is an open source utility that provides fast incremental file transfer.""" + """An open source utility that provides fast incremental file transfer.""" homepage = "https://rsync.samba.org" url = "https://download.samba.org/pub/rsync/rsync-3.1.1.tar.gz" diff --git a/var/spack/repos/builtin/packages/rust/package.py b/var/spack/repos/builtin/packages/rust/package.py index 65f81ce5342..629dfb4649f 100644 --- a/var/spack/repos/builtin/packages/rust/package.py +++ b/var/spack/repos/builtin/packages/rust/package.py @@ -6,6 +6,7 @@ def get_submodules(): git = which('git') git('submodule', 'update', '--init', '--recursive') + class Rust(Package): """The rust programming language toolchain""" homepage = "http://www.rust-lang.org" diff --git a/var/spack/repos/builtin/packages/scalasca/package.py b/var/spack/repos/builtin/packages/scalasca/package.py index 98e43ee75af..228d814aed2 100644 --- a/var/spack/repos/builtin/packages/scalasca/package.py +++ b/var/spack/repos/builtin/packages/scalasca/package.py @@ -27,10 +27,12 @@ class Scalasca(Package): - """ - Scalasca is a software tool that supports the performance optimization of parallel programs by measuring and - analyzing their runtime behavior. The analysis identifies potential performance bottlenecks - in particular those - concerning communication and synchronization - and offers guidance in exploring their causes. + """Scalasca is a software tool that supports the performance optimization + of parallel programs by measuring and analyzing their runtime + behavior. The analysis identifies potential performance + bottlenecks - in particular those concerning communication and + synchronization - and offers guidance in exploring their causes. 
+ """ homepage = "http://www.scalasca.org" @@ -44,7 +46,8 @@ class Scalasca(Package): depends_on("mpi") ########## - # Hard-code dependencies for Scalasca according to what stated in the release page + # Hard-code dependencies for Scalasca according to what stated in the + # release page # The OTF2 library path should be detected automatically from SCOREP # SCALASCA 2.2.2 depends_on("scorep@1.4:", when='@2.2.2') @@ -60,4 +63,4 @@ def install(self, spec, prefix): "--enable-shared"] configure(*configure_args) make() - make("install") \ No newline at end of file + make("install") diff --git a/var/spack/repos/builtin/packages/scons/package.py b/var/spack/repos/builtin/packages/scons/package.py index 40ae4176dd6..2c32bde4a1c 100644 --- a/var/spack/repos/builtin/packages/scons/package.py +++ b/var/spack/repos/builtin/packages/scons/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Scons(Package): """SCons is a software construction tool""" homepage = "http://scons.org" diff --git a/var/spack/repos/builtin/packages/scotch/package.py b/var/spack/repos/builtin/packages/scotch/package.py index c2bad20c2fe..94a323ce907 100644 --- a/var/spack/repos/builtin/packages/scotch/package.py +++ b/var/spack/repos/builtin/packages/scotch/package.py @@ -39,11 +39,16 @@ class Scotch(Package): version('6.0.0', 'c50d6187462ba801f9a82133ee666e8e') version('5.1.10b', 'f587201d6cf5cf63527182fbfba70753') - variant('mpi', default=False, description='Activate the compilation of parallel libraries') - variant('compression', default=True, description='Activate the posibility to use compressed files') - variant('esmumps', default=False, description='Activate the compilation of esmumps needed by mumps') - variant('shared', default=True, description='Build a shared version of the library') - variant('metis', default=True, description='Build metis and parmetis wrapper libraries') + variant('mpi', default=False, + description='Activate the compilation of parallel libraries') + variant('compression', default=True, + description='Activate the posibility to use compressed files') + variant('esmumps', default=False, + description='Activate the compilation of esmumps needed by mumps') + variant('shared', default=True, + description='Build a shared version of the library') + variant('metis', default=True, + description='Build metis and parmetis wrapper libraries') depends_on('flex', type='build') depends_on('bison', type='build') diff --git a/var/spack/repos/builtin/packages/scr/package.py b/var/spack/repos/builtin/packages/scr/package.py index b638688e7b0..2b01c60b3e5 100644 --- a/var/spack/repos/builtin/packages/scr/package.py +++ b/var/spack/repos/builtin/packages/scr/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Scr(Package): """SCR caches checkpoint data in storage on the compute nodes of a Linux cluster to provide a fast, scalable checkpoint/restart @@ -34,8 +35,10 @@ class Scr(Package): depends_on("mpi") # depends_on("dtcmp") - version('1.1-7', 'a5930e9ab27d1b7049447c2fd7734ebd', url='http://downloads.sourceforge.net/project/scalablecr/releases/scr-1.1-7.tar.gz') - version('1.1.8', '6a0f11ad18e27fcfc00a271ff587b06e', url='https://github.com/hpc/scr/releases/download/v1.1.8/scr-1.1.8.tar.gz') + version('1.1-7', 'a5930e9ab27d1b7049447c2fd7734ebd', + url='http://downloads.sourceforge.net/project/scalablecr/releases/scr-1.1-7.tar.gz') + version('1.1.8', 
'6a0f11ad18e27fcfc00a271ff587b06e', + url='https://github.com/hpc/scr/releases/download/v1.1.8/scr-1.1.8.tar.gz') def install(self, spec, prefix): configure("--prefix=" + prefix, diff --git a/var/spack/repos/builtin/packages/silo/package.py b/var/spack/repos/builtin/packages/silo/package.py index 017a09977a0..5113c88bdf9 100644 --- a/var/spack/repos/builtin/packages/silo/package.py +++ b/var/spack/repos/builtin/packages/silo/package.py @@ -38,7 +38,8 @@ class Silo(Package): variant('fortran', default=True, description='Enable Fortran support') variant('shared', default=True, description='Build shared libraries') - variant('silex', default=False, description='Builds Silex, a GUI for viewing Silo files') + variant('silex', default=False, + description='Builds Silex, a GUI for viewing Silo files') depends_on('hdf5') depends_on('qt', when='+silex') @@ -55,8 +56,10 @@ def install(self, spec, prefix): configure( '--prefix=%s' % prefix, - '--with-hdf5=%s,%s' % (spec['hdf5'].prefix.include, spec['hdf5'].prefix.lib), - '--with-zlib=%s,%s' % (spec['zlib'].prefix.include, spec['zlib'].prefix.lib), + '--with-hdf5=%s,%s' % (spec['hdf5'].prefix.include, + spec['hdf5'].prefix.lib), + '--with-zlib=%s,%s' % (spec['zlib'].prefix.include, + spec['zlib'].prefix.lib), '--enable-install-lite-headers', *config_args) diff --git a/var/spack/repos/builtin/packages/snappy/package.py b/var/spack/repos/builtin/packages/snappy/package.py index 836063f9334..1e94980c925 100644 --- a/var/spack/repos/builtin/packages/snappy/package.py +++ b/var/spack/repos/builtin/packages/snappy/package.py @@ -22,9 +22,9 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os from spack import * + class Snappy(Package): """A fast compressor/decompressor: https://code.google.com/p/snappy""" diff --git a/var/spack/repos/builtin/packages/sparsehash/package.py b/var/spack/repos/builtin/packages/sparsehash/package.py index a72a5ce1056..e5abd42ae6e 100644 --- a/var/spack/repos/builtin/packages/sparsehash/package.py +++ b/var/spack/repos/builtin/packages/sparsehash/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Sparsehash(Package): """Sparse and dense hash-tables for C++ by Google""" homepage = "https://github.com/sparsehash/sparsehash" diff --git a/var/spack/repos/builtin/packages/spindle/package.py b/var/spack/repos/builtin/packages/spindle/package.py index bcdc7543a3c..213d41e9705 100644 --- a/var/spack/repos/builtin/packages/spindle/package.py +++ b/var/spack/repos/builtin/packages/spindle/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Spindle(Package): """Spindle improves the library-loading performance of dynamically linked HPC applications. 
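The hunks in this series keep converging on the same handful of flake8 fixes: two blank lines before each class, package docstrings and variant()/version() declarations wrapped to stay under 79 columns, and commented-out code given a '# ' prefix. A minimal sketch of a package file written to those conventions follows; the package name, homepage, URL, and checksum are placeholders invented for illustration and are not taken from any patch in this series.

from spack import *


class Example(Package):
    """A hypothetical package used only to illustrate the layout
    conventions applied throughout these cleanups.

    """
    homepage = "http://example.org"
    url = "http://example.org/example-1.0.tar.gz"

    version('1.0', '00000000000000000000000000000000')

    variant('shared', default=True,
            description='Build a shared version of the library')

    # depends_on('mpi')  # commented-out lines keep the '# ' prefix

    def install(self, spec, prefix):
        configure('--prefix=%s' % prefix)
        make()
        make('install')
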
Without Spindle large MPI jobs can diff --git a/var/spack/repos/builtin/packages/spot/package.py b/var/spack/repos/builtin/packages/spot/package.py index 21bb6f03a70..096aa24c027 100644 --- a/var/spack/repos/builtin/packages/spot/package.py +++ b/var/spack/repos/builtin/packages/spot/package.py @@ -23,16 +23,17 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import os + class Spot(Package): - """Spot is a C++11 library for omega-automata manipulation and model checking.""" + """Spot is a C++11 library for omega-automata manipulation and model + checking.""" homepage = "https://spot.lrde.epita.fr/index.html" url = "http://www.lrde.epita.fr/dload/spot/spot-1.99.3.tar.gz" version('1.99.3', 'd53adcb2d0fe7c69f45d4e595a58254e') - #depends_on("gcc@4.8:", type='build') + # depends_on("gcc@4.8:", type='build') depends_on("python@3.2:") def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/sqlite/package.py b/var/spack/repos/builtin/packages/sqlite/package.py index 5e7ae4fb8b2..513f8ec6d4d 100644 --- a/var/spack/repos/builtin/packages/sqlite/package.py +++ b/var/spack/repos/builtin/packages/sqlite/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Sqlite(Package): """SQLite3 is an SQL database engine in a C library. Programs that link the SQLite3 library can have SQL database access without diff --git a/var/spack/repos/builtin/packages/stat/package.py b/var/spack/repos/builtin/packages/stat/package.py index a03713397b6..ec2fae5e9bd 100644 --- a/var/spack/repos/builtin/packages/stat/package.py +++ b/var/spack/repos/builtin/packages/stat/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Stat(Package): """Library to create, manipulate, and export graphs Graphlib.""" homepage = "http://paradyn.org/STAT/STAT.html" @@ -49,13 +50,14 @@ def install(self, spec, prefix): configure_args = [ "--enable-gui", "--prefix=%s" % prefix, - "--disable-examples", # Examples require MPI: avoid this dependency. + # Examples require MPI: avoid this dependency. + "--disable-examples", "--with-launchmon=%s" % spec['launchmon'].prefix, "--with-mrnet=%s" % spec['mrnet'].prefix, "--with-graphlib=%s" % spec['graphlib'].prefix, "--with-stackwalker=%s" % spec['dyninst'].prefix, "--with-libdwarf=%s" % spec['libdwarf'].prefix - ] + ] if '+dysect' in spec: configure_args.append('--enable-dysectapi') configure(*configure_args) diff --git a/var/spack/repos/builtin/packages/subversion/package.py b/var/spack/repos/builtin/packages/subversion/package.py index 68ee3978572..26d143e4aaf 100644 --- a/var/spack/repos/builtin/packages/subversion/package.py +++ b/var/spack/repos/builtin/packages/subversion/package.py @@ -23,7 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -#import os + class Subversion(Package): """Apache Subversion - an open source version control system.""" @@ -41,39 +41,40 @@ class Subversion(Package): # Optional: We need swig if we want the Perl, Python or Ruby # bindings. 
- #depends_on('swig') - #depends_on('python') - #depends_on('perl') - #depends_on('ruby') + # depends_on('swig') + # depends_on('python') + # depends_on('perl') + # depends_on('ruby') def install(self, spec, prefix): # configure, build, install: - # Ref: http://www.linuxfromscratch.org/blfs/view/svn/general/subversion.html + # Ref: + # http://www.linuxfromscratch.org/blfs/view/svn/general/subversion.html options = ['--prefix=%s' % prefix] options.append('--with-apr=%s' % spec['apr'].prefix) options.append('--with-apr-util=%s' % spec['apr-util'].prefix) options.append('--with-zlib=%s' % spec['zlib'].prefix) options.append('--with-sqlite=%s' % spec['sqlite'].prefix) options.append('--with-serf=%s' % spec['serf'].prefix) - #options.append('--with-swig=%s' % spec['swig'].prefix) + # options.append('--with-swig=%s' % spec['swig'].prefix) configure(*options) make() make('install') # python bindings - #make('swig-py', + # make('swig-py', # 'swig-pydir=/usr/lib/python2.7/site-packages/libsvn', # 'swig_pydir_extra=/usr/lib/python2.7/site-packages/svn') - #make('install-swig-py', + # make('install-swig-py', # 'swig-pydir=/usr/lib/python2.7/site-packages/libsvn', # 'swig_pydir_extra=/usr/lib/python2.7/site-packages/svn') # perl bindings - #make('swig-pl') - #make('install-swig-pl') + # make('swig-pl') + # make('install-swig-pl') # ruby bindings - #make('swig-rb') - #make('isntall-swig-rb') + # make('swig-rb') + # make('isntall-swig-rb') diff --git a/var/spack/repos/builtin/packages/sundials/package.py b/var/spack/repos/builtin/packages/sundials/package.py index e52b90eb8ad..7582954ab12 100644 --- a/var/spack/repos/builtin/packages/sundials/package.py +++ b/var/spack/repos/builtin/packages/sundials/package.py @@ -36,11 +36,15 @@ class Sundials(Package): version('2.6.2', '3deeb0ede9f514184c6bd83ecab77d95') variant('mpi', default=True, description='Enable MPI support') - variant('lapack', default=True, description='Build with external BLAS/LAPACK libraries') - variant('klu', default=False, description='Build with SuiteSparse KLU libraries') - variant('superlu', default=False, description='Build with SuperLU_MT libraries') + variant('lapack', default=True, + description='Build with external BLAS/LAPACK libraries') + variant('klu', default=False, + description='Build with SuiteSparse KLU libraries') + variant('superlu', default=False, + description='Build with SuperLU_MT libraries') variant('openmp', default=False, description='Enable OpenMP support') - variant('pthread', default=True, description='Enable POSIX threads support') + variant('pthread', default=True, + description='Enable POSIX threads support') depends_on('cmake', type='build') depends_on('mpi', when='+mpi') diff --git a/var/spack/repos/builtin/packages/superlu-mt/package.py b/var/spack/repos/builtin/packages/superlu-mt/package.py index 5a9429d6e5e..e849273e085 100644 --- a/var/spack/repos/builtin/packages/superlu-mt/package.py +++ b/var/spack/repos/builtin/packages/superlu-mt/package.py @@ -37,11 +37,13 @@ class SuperluMt(Package): version('3.1', '06ac62f1b4b7d17123fffa0d0c315e91') - variant('blas', default=True, description='Build with external BLAS library') + variant('blas', default=True, + description='Build with external BLAS library') # Must choose one or the other variant('openmp', default=False, description='Build with OpenMP support') - variant('pthread', default=True, description='Build with POSIX threads support') + variant('pthread', default=True, + description='Build with POSIX threads support') # NOTE: must link with a 
single-threaded BLAS library depends_on('blas', when='+blas') diff --git a/var/spack/repos/builtin/packages/swiftsim/package.py b/var/spack/repos/builtin/packages/swiftsim/package.py index 620d658a101..636aa26bd2d 100644 --- a/var/spack/repos/builtin/packages/swiftsim/package.py +++ b/var/spack/repos/builtin/packages/swiftsim/package.py @@ -38,9 +38,11 @@ class Swiftsim(Package): homepage = 'http://icc.dur.ac.uk/swift/' url = 'http://gitlab.cosma.dur.ac.uk/swift/swiftsim/repository/archive.tar.gz?ref=v0.3.0' - version('0.3.0', git='https://gitlab.cosma.dur.ac.uk/swift/swiftsim.git', commit='254cc1b563b2f88ddcf437b1f71da123bb9db733') + version('0.3.0', git='https://gitlab.cosma.dur.ac.uk/swift/swiftsim.git', + commit='254cc1b563b2f88ddcf437b1f71da123bb9db733') - variant('mpi', default=True, description='Enable distributed memory parallelism') + variant('mpi', default=True, + description='Enable distributed memory parallelism') # Build dependencies depends_on('autoconf', type='build') diff --git a/var/spack/repos/builtin/packages/szip/package.py b/var/spack/repos/builtin/packages/szip/package.py index fd3a2a209da..b2ca6f3995d 100644 --- a/var/spack/repos/builtin/packages/szip/package.py +++ b/var/spack/repos/builtin/packages/szip/package.py @@ -24,10 +24,13 @@ ############################################################################## from spack import * + class Szip(Package): - """Szip is an implementation of the extended-Rice lossless compression algorithm. - It provides lossless compression of scientific data, and is provided with HDF - software products.""" + """An implementation of the extended-Rice lossless compression algorithm. + It provides lossless compression of scientific data, and is provided + with HDF software products. + + """ homepage = "https://www.hdfgroup.org/doc_resource/SZIP/" url = "http://www.hdfgroup.org/ftp/lib-external/szip/2.1/src/szip-2.1.tar.gz" diff --git a/var/spack/repos/builtin/packages/tar/package.py b/var/spack/repos/builtin/packages/tar/package.py index 4dce0e5be18..7a72278b9ad 100644 --- a/var/spack/repos/builtin/packages/tar/package.py +++ b/var/spack/repos/builtin/packages/tar/package.py @@ -23,8 +23,6 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import sys -import os class Tar(Package): diff --git a/var/spack/repos/builtin/packages/task/package.py b/var/spack/repos/builtin/packages/task/package.py index 8c3b412f48d..785023fd03c 100644 --- a/var/spack/repos/builtin/packages/task/package.py +++ b/var/spack/repos/builtin/packages/task/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Task(Package): """Feature-rich console based todo list manager""" homepage = "http://www.taskwarrior.org" diff --git a/var/spack/repos/builtin/packages/taskd/package.py b/var/spack/repos/builtin/packages/taskd/package.py index 1d7f9ed49ea..d13f4803744 100644 --- a/var/spack/repos/builtin/packages/taskd/package.py +++ b/var/spack/repos/builtin/packages/taskd/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Taskd(Package): """TaskWarrior task synchronization daemon""" # FIXME: add a proper url for your package's homepage here. 
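Several of the build recipes touched above and below (Subversion, STAT, Scalasca, Thrift) share the same install() shape: collect the configure arguments in a Python list, one option per line so each call site stays under 79 columns, then expand the list with configure(*options). A short sketch of that pattern follows; the '--with-shared' flag and its mapping from a 'shared' variant are invented for illustration, while the '--prefix' and '--with-zlib' options mirror the Subversion hunk above.

def install(self, spec, prefix):
    # One option per line keeps the call site within flake8's limit.
    options = ['--prefix=%s' % prefix]
    options.append('--with-zlib=%s' % spec['zlib'].prefix)
    # Variants are commonly translated into --with-<feature>=yes/no.
    options.append('--with-shared=%s' %
                   ('yes' if '+shared' in spec else 'no'))
    configure(*options)
    make()
    make('install')
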
diff --git a/var/spack/repos/builtin/packages/tau/package.py b/var/spack/repos/builtin/packages/tau/package.py index 3b181f3fa4b..a77df8d37c4 100644 --- a/var/spack/repos/builtin/packages/tau/package.py +++ b/var/spack/repos/builtin/packages/tau/package.py @@ -30,6 +30,7 @@ from llnl.util.filesystem import join_path + class Tau(Package): """ A portable profiling and tracing toolkit for performance @@ -45,15 +46,20 @@ class Tau(Package): version('2.23.1', '6593b47ae1e7a838e632652f0426fe72') # TODO : shmem variant missing - variant('download', default=False, description='Downloads and builds various dependencies') + variant('download', default=False, + description='Downloads and builds various dependencies') variant('scorep', default=False, description='Activates SCOREP support') variant('openmp', default=True, description='Use OpenMP threads') - variant('mpi', default=True, description='Specify use of TAU MPI wrapper library') + variant('mpi', default=True, + description='Specify use of TAU MPI wrapper library') variant('phase', default=True, description='Generate phase based profiles') - variant('comm', default=True, description=' Generate profiles with MPI communicator info') + variant('comm', default=True, + description=' Generate profiles with MPI communicator info') - # TODO : Try to build direct OTF2 support? Some parts of the OTF support library in TAU are non-conformant, - # TODO : and fail at compile-time. Further, SCOREP is compiled with OTF2 support. + # TODO : Try to build direct OTF2 support? Some parts of the OTF support + # TODO : library in TAU are non-conformant, + # TODO : and fail at compile-time. Further, SCOREP is compiled with OTF2 + # support. depends_on('pdt') # Required for TAU instrumentation depends_on('scorep', when='+scorep') depends_on('binutils', when='~download') @@ -65,13 +71,17 @@ def set_compiler_options(self): ########## # Selecting a compiler with TAU configure is quite tricky: - # 1 - compilers are mapped to a given set of strings (and spack cc, cxx, etc. wrappers are not among them) + # 1 - compilers are mapped to a given set of strings + # (and spack cc, cxx, etc. 
wrappers are not among them) # 2 - absolute paths are not allowed - # 3 - the usual environment variables seems not to be checked ('CC', 'CXX' and 'FC') - # 4 - if no -cc= -cxx= is passed tau is built with system compiler silently + # 3 - the usual environment variables seems not to be checked + # ('CC', 'CXX' and 'FC') + # 4 - if no -cc= -cxx= is passed tau is built with + # system compiler silently # (regardless of what % is used in the spec) # - # In the following we give TAU what he expects and put compilers into PATH + # In the following we give TAU what he expects and put compilers into + # PATH compiler_path = os.path.dirname(self.compiler.cc) os.environ['PATH'] = ':'.join([compiler_path, os.environ['PATH']]) compiler_options = ['-c++=%s' % self.compiler.cxx_names[0], @@ -80,7 +90,8 @@ def set_compiler_options(self): compiler_options.append('-fortran=%s' % self.compiler.fc_names[0]) ########## - # Construct the string of custom compiler flags and append it to compiler related options + # Construct the string of custom compiler flags and append it to + # compiler related options useropt = ' '.join(useropt) useropt = "-useropt=%s" % useropt compiler_options.append(useropt) @@ -92,8 +103,9 @@ def install(self, spec, prefix): change_sed_delimiter('@', ';', 'utils/FixMakefile') change_sed_delimiter('@', ';', 'utils/FixMakefile.sed.default') - # TAU configure, despite the name , seems to be a manually written script (nothing related to autotools). - # As such it has a few #peculiarities# that make this build quite hackish. + # TAU configure, despite the name , seems to be a manually + # written script (nothing related to autotools). As such it has + # a few #peculiarities# that make this build quite hackish. options = ["-prefix=%s" % prefix, "-iowrapper", "-pdt=%s" % spec['pdt'].prefix] diff --git a/var/spack/repos/builtin/packages/tetgen/package.py b/var/spack/repos/builtin/packages/tetgen/package.py index 97aa68be65a..7ac55a67857 100644 --- a/var/spack/repos/builtin/packages/tetgen/package.py +++ b/var/spack/repos/builtin/packages/tetgen/package.py @@ -36,7 +36,8 @@ class Tetgen(Package): url = "http://www.tetgen.org/files/tetgen1.4.3.tar.gz" version('1.4.3', 'd6a4bcdde2ac804f7ec66c29dcb63c18') - version('1.5.0', '3b9fd9cdec121e52527b0308f7aad5c1', url='http://www.tetgen.org/1.5/src/tetgen1.5.0.tar.gz') + version('1.5.0', '3b9fd9cdec121e52527b0308f7aad5c1', + url='http://www.tetgen.org/1.5/src/tetgen1.5.0.tar.gz') depends_on('cmake@2.8.7:', when='@1.5.0:', type='build') diff --git a/var/spack/repos/builtin/packages/texinfo/package.py b/var/spack/repos/builtin/packages/texinfo/package.py index 5c6fef0db6f..ddb23e5d6f0 100644 --- a/var/spack/repos/builtin/packages/texinfo/package.py +++ b/var/spack/repos/builtin/packages/texinfo/package.py @@ -27,10 +27,13 @@ class Texinfo(Package): - """ - Texinfo is the official documentation format of the GNU project. It was invented by Richard Stallman and Bob - Chassell many years ago, loosely based on Brian Reid's Scribe and other formatting languages of the time. It is - used by many non-GNU projects as well.FIXME: put a proper description of your package here. + """Texinfo is the official documentation format of the GNU project. + + It was invented by Richard Stallman and Bob Chassell many years ago, + loosely based on Brian Reid's Scribe and other formatting languages + of the time. It is used by many non-GNU projects as well.FIXME: put a + proper description of your package here. 
+ """ homepage = "https://www.gnu.org/software/texinfo/" url = "http://ftp.gnu.org/gnu/texinfo/texinfo-6.0.tar.gz" diff --git a/var/spack/repos/builtin/packages/texlive/package.py b/var/spack/repos/builtin/packages/texlive/package.py index d44a6e311e8..64158e74cb9 100644 --- a/var/spack/repos/builtin/packages/texlive/package.py +++ b/var/spack/repos/builtin/packages/texlive/package.py @@ -32,7 +32,8 @@ class Texlive(Package): homepage = "http://www.tug.org/texlive" - version('live', 'e671eea7f142c438959493cc42a2a59b', url="http://mirror.ctan.org/systems/texlive/tlnet/install-tl-unx.tar.gz") + version('live', 'e671eea7f142c438959493cc42a2a59b', + url="http://mirror.ctan.org/systems/texlive/tlnet/install-tl-unx.tar.gz") # There does not seem to be a complete list of schemes. # Examples include: diff --git a/var/spack/repos/builtin/packages/thrift/package.py b/var/spack/repos/builtin/packages/thrift/package.py index 025ee2b8858..755f7a80b9b 100644 --- a/var/spack/repos/builtin/packages/thrift/package.py +++ b/var/spack/repos/builtin/packages/thrift/package.py @@ -24,12 +24,16 @@ ############################################################################## from spack import * + class Thrift(Package): - """The Apache Thrift software framework, for scalable cross-language services - development, combines a software stack with a code generation engine to build - services that work efficiently and seamlessly between C++, Java, Python, PHP, - Ruby, Erlang, Perl, Haskell, C#, Cocoa, JavaScript, Node.js, Smalltalk, OCaml - and Delphi and other languages.""" + """Software framework for scalable cross-language services development. + + Thrift combines a software stack with a code generation engine to + build services that work efficiently and seamlessly between C++, + Java, Python, PHP, Ruby, Erlang, Perl, Haskell, C#, Cocoa, + JavaScript, Node.js, Smalltalk, OCaml and Delphi and other languages. 
+ + """ homepage = "http://thrift.apache.org" url = "http://apache.mirrors.ionfish.org/thrift/0.9.2/thrift-0.9.2.tar.gz" @@ -37,8 +41,10 @@ class Thrift(Package): version('0.9.2', '89f63cc4d0100912f4a1f8a9dee63678') # Currently only support for c-family and python - variant('c', default=True, description="Build support for C-family languages") - variant('python', default=True, description="Build support for python") + variant('c', default=True, + description="Build support for C-family languages") + variant('python', default=True, + description="Build support for python") depends_on('jdk') depends_on('autoconf', type='build') @@ -66,7 +72,8 @@ def install(self, spec, prefix): options.append('--enable-tests=no') options.append('--with-c=%s' % ('yes' if '+c' in spec else 'no')) - options.append('--with-python=%s' % ('yes' if '+python' in spec else 'no')) + options.append('--with-python=%s' % + ('yes' if '+python' in spec else 'no')) options.append('--with-java=%s' % ('yes' if '+java' in spec else 'no')) options.append('--with-go=%s' % ('yes' if '+go' in spec else 'no')) options.append('--with-lua=%s' % ('yes' if '+lua' in spec else 'no')) diff --git a/var/spack/repos/builtin/packages/tmuxinator/package.py b/var/spack/repos/builtin/packages/tmuxinator/package.py index b9c92ea4db2..66da4006f24 100644 --- a/var/spack/repos/builtin/packages/tmuxinator/package.py +++ b/var/spack/repos/builtin/packages/tmuxinator/package.py @@ -24,17 +24,18 @@ ############################################################################## from spack import * + class Tmuxinator(Package): """A session configuration creator and manager for tmux""" homepage = "https://github.com/tmuxinator/tmuxinator" url = "https://github.com/tmuxinator/tmuxinator" version('0.6.11', - git='https://github.com/tmuxinator/tmuxinator', - tag='v0.6.11') + git='https://github.com/tmuxinator/tmuxinator', + tag='v0.6.11') extends('ruby') def install(self, spec, prefix): - gem('build', 'tmuxinator.gemspec') - gem('install', 'tmuxinator-{0}.gem'.format(self.version)) + gem('build', 'tmuxinator.gemspec') + gem('install', 'tmuxinator-{0}.gem'.format(self.version)) diff --git a/var/spack/repos/builtin/packages/triangle/package.py b/var/spack/repos/builtin/packages/triangle/package.py index bc8b0ec6397..f4ee9ca1c9a 100644 --- a/var/spack/repos/builtin/packages/triangle/package.py +++ b/var/spack/repos/builtin/packages/triangle/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Triangle(Package): """Triangle is a two-dimensional mesh generator and Delaunay triangulator. 
Triangle generates exact Delaunay triangulations, diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py index 1d83e055c9c..d39e45f0543 100644 --- a/var/spack/repos/builtin/packages/trilinos/package.py +++ b/var/spack/repos/builtin/packages/trilinos/package.py @@ -56,17 +56,24 @@ class Trilinos(Package): version('11.14.2', 'a43590cf896c677890d75bfe75bc6254') version('11.14.1', '40febc57f76668be8b6a77b7607bb67f') - variant('metis', default=True, description='Compile with METIS and ParMETIS') - variant('mumps', default=True, description='Compile with support for MUMPS solvers') - variant('superlu-dist', default=True, description='Compile with SuperluDist solvers') - variant('hypre', default=True, description='Compile with Hypre preconditioner') + variant('metis', default=True, + description='Compile with METIS and ParMETIS') + variant('mumps', default=True, + description='Compile with support for MUMPS solvers') + variant('superlu-dist', default=True, + description='Compile with SuperluDist solvers') + variant('hypre', default=True, + description='Compile with Hypre preconditioner') variant('hdf5', default=True, description='Compile with HDF5') - variant('suite-sparse', default=True, description='Compile with SuiteSparse solvers') + variant('suite-sparse', default=True, + description='Compile with SuiteSparse solvers') # not everyone has py-numpy activated, keep it disabled by default to avoid # configure errors variant('python', default=False, description='Build python wrappers') - variant('shared', default=True, description='Enables the build of shared libraries') - variant('debug', default=False, description='Builds a debug version of the libraries') + variant('shared', default=True, + description='Enables the build of shared libraries') + variant('debug', default=False, + description='Builds a debug version of the libraries') variant('boost', default=True, description='Compile with Boost') depends_on('cmake', type='build') diff --git a/var/spack/repos/builtin/packages/udunits2/package.py b/var/spack/repos/builtin/packages/udunits2/package.py index aed39668fdd..bae6414c5b6 100644 --- a/var/spack/repos/builtin/packages/udunits2/package.py +++ b/var/spack/repos/builtin/packages/udunits2/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Udunits2(Package): """Automated units conversion""" diff --git a/var/spack/repos/builtin/packages/uncrustify/package.py b/var/spack/repos/builtin/packages/uncrustify/package.py index db96bc301e0..c3182d0dc89 100644 --- a/var/spack/repos/builtin/packages/uncrustify/package.py +++ b/var/spack/repos/builtin/packages/uncrustify/package.py @@ -24,8 +24,9 @@ ############################################################################## from spack import * + class Uncrustify(Package): - """Source Code Beautifier for C, C++, C#, ObjectiveC, D, Java, Pawn and VALA""" + """Source Code Beautifier for C, C++, C#, ObjectiveC, Java, and others.""" homepage = "http://uncrustify.sourceforge.net/" url = "http://downloads.sourceforge.net/project/uncrustify/uncrustify/uncrustify-0.61/uncrustify-0.61.tar.gz" diff --git a/var/spack/repos/builtin/packages/unibilium/package.py b/var/spack/repos/builtin/packages/unibilium/package.py index d9e0ad6bcbe..943e4737e17 100644 --- a/var/spack/repos/builtin/packages/unibilium/package.py +++ b/var/spack/repos/builtin/packages/unibilium/package.py @@ -24,6 +24,7 @@ 
############################################################################## from spack import * + class Unibilium(Package): """A terminfo parsing library""" homepage = "https://github.com/mauke/unibilium" @@ -32,5 +33,5 @@ class Unibilium(Package): version('1.2.0', '9b1c97839a880a373da6c097443b43c4') def install(self, spec, prefix): - make("PREFIX="+prefix) - make("install", "PREFIX="+prefix) + make("PREFIX=" + prefix) + make("install", "PREFIX=" + prefix) diff --git a/var/spack/repos/builtin/packages/util-linux/package.py b/var/spack/repos/builtin/packages/util-linux/package.py index bf6972683dd..99af170ca12 100644 --- a/var/spack/repos/builtin/packages/util-linux/package.py +++ b/var/spack/repos/builtin/packages/util-linux/package.py @@ -23,7 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import os + class UtilLinux(Package): """Util-linux is a suite of essential utilities for any Linux system.""" @@ -36,9 +36,9 @@ class UtilLinux(Package): depends_on("python@2.7:") def install(self, spec, prefix): - configure("--prefix=%s" % prefix, - "PKG_CONFIG_PATH=%s/pkgconfig" % spec['python'].prefix.lib, - "--disable-use-tty-group") + configure("--prefix=%s" % prefix, + "PKG_CONFIG_PATH=%s/pkgconfig" % spec['python'].prefix.lib, + "--disable-use-tty-group") make() make("install") diff --git a/var/spack/repos/builtin/packages/valgrind/package.py b/var/spack/repos/builtin/packages/valgrind/package.py index afd4cc6ad0d..e7ae227c27c 100644 --- a/var/spack/repos/builtin/packages/valgrind/package.py +++ b/var/spack/repos/builtin/packages/valgrind/package.py @@ -27,12 +27,15 @@ class Valgrind(Package): - """ - Valgrind is an instrumentation framework for building dynamic analysis tools. There are Valgrind tools that can - automatically detect many memory management and threading bugs, and profile your programs in detail. You can also - use Valgrind to build new tools. + """An instrumentation framework for building dynamic analysis. + + There are Valgrind tools that can automatically detect many memory + management and threading bugs, and profile your programs in + detail. You can also use Valgrind to build new tools. + + Valgrind is Open Source / Free Software, and is freely available + under the GNU General Public License, version 2. - Valgrind is Open Source / Free Software, and is freely available under the GNU General Public License, version 2. """ homepage = "http://valgrind.org/" url = "http://valgrind.org/downloads/valgrind-3.11.0.tar.bz2" @@ -42,7 +45,8 @@ class Valgrind(Package): version('3.10.0', '7c311a72a20388aceced1aa5573ce970') variant('mpi', default=True, description='Activates MPI support for valgrind') - variant('boost', default=True, description='Activates boost support for valgrind') + variant('boost', default=True, + description='Activates boost support for valgrind') depends_on('mpi', when='+mpi') depends_on('boost', when='+boost') diff --git a/var/spack/repos/builtin/packages/vim/package.py b/var/spack/repos/builtin/packages/vim/package.py index e0dfb648791..01eccfab57b 100644 --- a/var/spack/repos/builtin/packages/vim/package.py +++ b/var/spack/repos/builtin/packages/vim/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Vim(Package): """Vim is a highly configurable text editor built to enable efficient text editing. 
It is an improved version of the vi editor distributed with most @@ -51,7 +52,7 @@ class Vim(Package): feature_sets = ('huge', 'big', 'normal', 'small', 'tiny') for fs in feature_sets: - variant(fs, default=False, description="Use '%s' feature set" % fs) + variant(fs, default=False, description="Use '%s' feature set" % fs) variant('python', default=False, description="build with Python") depends_on('python', when='+python') @@ -66,42 +67,44 @@ class Vim(Package): # virtual dependency? def install(self, spec, prefix): - feature_set = None - for fs in self.feature_sets: - if "+" + fs in spec: - if feature_set is not None: - tty.error("Only one feature set allowed, both %s and %s specified" - % (feature_set, fs)) - feature_set = fs - if '+gui' in spec: - if feature_set is not None: - if feature_set is not 'huge': - tty.error("+gui variant requires 'huge' feature set, %s was specified" - % feature_set) - feature_set = 'huge' - if feature_set is None: - feature_set = 'normal' + feature_set = None + for fs in self.feature_sets: + if "+" + fs in spec: + if feature_set is not None: + tty.error( + "Only one feature set allowed, specified %s and %s" + % (feature_set, fs)) + feature_set = fs + if '+gui' in spec: + if feature_set is not None: + if feature_set is not 'huge': + tty.error( + "+gui variant requires 'huge' feature set, " + "%s was specified" % feature_set) + feature_set = 'huge' + if feature_set is None: + feature_set = 'normal' - configure_args = [] - configure_args.append("--with-features=" + feature_set) + configure_args = [] + configure_args.append("--with-features=" + feature_set) - if '+python' in spec: - configure_args.append("--enable-pythoninterp=yes") - else: - configure_args.append("--enable-pythoninterp=dynamic") + if '+python' in spec: + configure_args.append("--enable-pythoninterp=yes") + else: + configure_args.append("--enable-pythoninterp=dynamic") - if '+ruby' in spec: - configure_args.append("--enable-rubyinterp=yes") - else: - configure_args.append("--enable-rubyinterp=dynamic") + if '+ruby' in spec: + configure_args.append("--enable-rubyinterp=yes") + else: + configure_args.append("--enable-rubyinterp=dynamic") - if '+gui' in spec: - configure_args.append("--enable-gui=auto") + if '+gui' in spec: + configure_args.append("--enable-gui=auto") - if '+cscope' in spec: - configure_args.append("--enable-cscope") + if '+cscope' in spec: + configure_args.append("--enable-cscope") - configure("--prefix=%s" % prefix, *configure_args) + configure("--prefix=%s" % prefix, *configure_args) - make() - make("install") + make() + make("install") diff --git a/var/spack/repos/builtin/packages/visit/package.py b/var/spack/repos/builtin/packages/visit/package.py index ae19fd04501..d88caeb00a4 100644 --- a/var/spack/repos/builtin/packages/visit/package.py +++ b/var/spack/repos/builtin/packages/visit/package.py @@ -26,7 +26,8 @@ class Visit(Package): - """VisIt is an Open Source, interactive, scalable, visualization, animation and analysis tool.""" + """VisIt is an Open Source, interactive, scalable, visualization, + animation and analysis tool.""" homepage = "https://wci.llnl.gov/simulation/computer-codes/visit/" url = "http://portal.nersc.gov/project/visit/releases/2.10.1/visit2.10.1.tar.gz" @@ -42,12 +43,15 @@ def install(self, spec, prefix): with working_dir('spack-build', create=True): feature_args = std_cmake_args[:] - feature_args.extend(["-DVTK_MAJOR_VERSION=6", - "-DVTK_MINOR_VERSION=1", - "-DVISIT_LOC_QMAKE_EXE:FILEPATH=%s/qmake-qt4" % spec['qt'].prefix.bin, - 
"-DPYTHON_EXECUTABLE:FILEPATH=%s/python" % spec['python'].prefix.bin, - "-DVISIT_SILO_DIR:PATH=%s" % spec['silo'].prefix, - "-DVISIT_HDF5_DIR:PATH=%s" % spec['hdf5'].prefix]) + feature_args.extend([ + "-DVTK_MAJOR_VERSION=6", + "-DVTK_MINOR_VERSION=1", + "-DVISIT_LOC_QMAKE_EXE:FILEPATH=%s/qmake-qt4" % spec[ + 'qt'].prefix.bin, + "-DPYTHON_EXECUTABLE:FILEPATH=%s/python" % spec[ + 'python'].prefix.bin, + "-DVISIT_SILO_DIR:PATH=%s" % spec['silo'].prefix, + "-DVISIT_HDF5_DIR:PATH=%s" % spec['hdf5'].prefix]) cmake('../src', *feature_args) diff --git a/var/spack/repos/builtin/packages/vtk/package.py b/var/spack/repos/builtin/packages/vtk/package.py index 5c196b5ea81..087c0e93eb5 100644 --- a/var/spack/repos/builtin/packages/vtk/package.py +++ b/var/spack/repos/builtin/packages/vtk/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Vtk(Package): """The Visualization Toolkit (VTK) is an open-source, freely available software system for 3D computer graphics, image @@ -31,9 +32,11 @@ class Vtk(Package): homepage = "http://www.vtk.org" url = "http://www.vtk.org/files/release/6.1/VTK-6.1.0.tar.gz" - version("7.0.0", "5fe35312db5fb2341139b8e4955c367d", url="http://www.vtk.org/files/release/7.0/VTK-7.0.0.tar.gz") + version("7.0.0", "5fe35312db5fb2341139b8e4955c367d", + url="http://www.vtk.org/files/release/7.0/VTK-7.0.0.tar.gz") - version("6.3.0", '0231ca4840408e9dd60af48b314c5b6d', url="http://www.vtk.org/files/release/6.3/VTK-6.3.0.tar.gz") + version("6.3.0", '0231ca4840408e9dd60af48b314c5b6d', + url="http://www.vtk.org/files/release/6.3/VTK-6.3.0.tar.gz") version('6.1.0', '25e4dfb3bad778722dcaec80cd5dab7d') @@ -41,7 +44,8 @@ class Vtk(Package): depends_on("qt") # VTK7 defaults to OpenGL2 rendering backend - variant('opengl2', default=True, description='Build with OpenGL instead of OpenGL2 as rendering backend') + variant('opengl2', default=True, + description='Build with OpenGL instead of OpenGL2 backend') def install(self, spec, prefix): def feature_to_bool(feature, on='ON', off='OFF'): @@ -67,7 +71,7 @@ def feature_to_bool(feature, on='ON', off='OFF'): "-DVTK_Group_Qt=OFF", "-DModule_vtkGUISupportQt:BOOL=ON", "-DModule_vtkGUISupportQtOpenGL:BOOL=ON" - ]) + ]) if spec['qt'].satisfies('@5'): cmake_args.append("-DVTK_QT_VERSION:STRING=5") @@ -76,7 +80,8 @@ def feature_to_bool(feature, on='ON', off='OFF'): cmake_args.append("-DCMAKE_C_FLAGS=-DGLX_GLXEXT_LEGACY") cmake_args.append("-DCMAKE_CXX_FLAGS=-DGLX_GLXEXT_LEGACY") - cmake_args.append('-DVTK_RENDERING_BACKEND:STRING=%s' % feature_to_bool('+opengl2', 'OpenGL2', 'OpenGL')) + cmake_args.append('-DVTK_RENDERING_BACKEND:STRING=%s' % + feature_to_bool('+opengl2', 'OpenGL2', 'OpenGL')) cmake(*cmake_args) make() diff --git a/var/spack/repos/builtin/packages/wget/package.py b/var/spack/repos/builtin/packages/wget/package.py index 532cf332e9c..aff771b723c 100644 --- a/var/spack/repos/builtin/packages/wget/package.py +++ b/var/spack/repos/builtin/packages/wget/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Wget(Package): """GNU Wget is a free software package for retrieving files using HTTP, HTTPS and FTP, the most widely-used Internet protocols. 
It @@ -40,9 +41,11 @@ class Wget(Package): depends_on("openssl") def install(self, spec, prefix): - configure("--prefix=%s" % prefix, - "--with-ssl=openssl", - "OPENSSL_CFLAGS=-I%s" % spec['openssl'].prefix.include, - "OPENSSL_LIBS=-L%s -lssl -lcrypto -lz" % spec['openssl'].prefix.lib) + configure( + "--prefix=%s" % prefix, + "--with-ssl=openssl", + "OPENSSL_CFLAGS=-I%s" % spec['openssl'].prefix.include, + "OPENSSL_LIBS=-L%s -lssl -lcrypto -lz" % spec[ + 'openssl'].prefix.lib) make() make("install") diff --git a/var/spack/repos/builtin/packages/wx/package.py b/var/spack/repos/builtin/packages/wx/package.py index c0008248036..5a80ca1c1f8 100644 --- a/var/spack/repos/builtin/packages/wx/package.py +++ b/var/spack/repos/builtin/packages/wx/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Wx(Package): """wxWidgets is a C++ library that lets developers create applications for Windows, Mac OS X, Linux and other platforms @@ -43,8 +44,8 @@ class Wx(Package): depends_on('gtkplus') def install(self, spec, prefix): - configure("--prefix=%s" % prefix, "--enable-unicode", "--disable-precomp-headers") + configure("--prefix=%s" % prefix, "--enable-unicode", + "--disable-precomp-headers") make(parallel=False) make("install") - diff --git a/var/spack/repos/builtin/packages/wxpropgrid/package.py b/var/spack/repos/builtin/packages/wxpropgrid/package.py index 2283e1acf14..cc9ff445d6b 100644 --- a/var/spack/repos/builtin/packages/wxpropgrid/package.py +++ b/var/spack/repos/builtin/packages/wxpropgrid/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Wxpropgrid(Package): """wxPropertyGrid is a property sheet control for wxWidgets. 
In other words, it is a specialized two-column grid for editing @@ -37,8 +38,8 @@ class Wxpropgrid(Package): depends_on("wx") def install(self, spec, prefix): - configure("--prefix=%s" % prefix, "--with-wxdir=%s" % spec['wx'].prefix.bin, "--enable-unicode") + configure("--prefix=%s" % prefix, "--with-wxdir=%s" % + spec['wx'].prefix.bin, "--enable-unicode") make() make("install") - diff --git a/var/spack/repos/builtin/packages/xcb-proto/package.py b/var/spack/repos/builtin/packages/xcb-proto/package.py index efcbdf0aea9..587983f6bdd 100644 --- a/var/spack/repos/builtin/packages/xcb-proto/package.py +++ b/var/spack/repos/builtin/packages/xcb-proto/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class XcbProto(Package): """Protocol for libxcb""" diff --git a/var/spack/repos/builtin/packages/xorg-util-macros/package.py b/var/spack/repos/builtin/packages/xorg-util-macros/package.py index 963d93442f9..cd50d468261 100644 --- a/var/spack/repos/builtin/packages/xorg-util-macros/package.py +++ b/var/spack/repos/builtin/packages/xorg-util-macros/package.py @@ -24,11 +24,12 @@ ############################################################################## from spack import * + class XorgUtilMacros(Package): - """The util-macros package contains the m4 macros used by all of the Xorg packages.""" + """The m4 macros used by all of the Xorg packages.""" homepage = "http://www.example.com" - url = "http://ftp.x.org/pub/individual/util/util-macros-1.19.0.tar.bz2" + url = "http://ftp.x.org/pub/individual/util/util-macros-1.19.0.tar.bz2" version('1.19.0', '1cf984125e75f8204938d998a8b6c1e1') diff --git a/var/spack/repos/builtin/packages/xproto/package.py b/var/spack/repos/builtin/packages/xproto/package.py index 7be6defb83b..dbceaa15758 100644 --- a/var/spack/repos/builtin/packages/xproto/package.py +++ b/var/spack/repos/builtin/packages/xproto/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Xproto(Package): """The Xorg protocol headers provide the header files required to build the system, and to allow other applications to build against diff --git a/var/spack/repos/builtin/packages/xz/package.py b/var/spack/repos/builtin/packages/xz/package.py index b3ef7808aae..a8ab959a629 100644 --- a/var/spack/repos/builtin/packages/xz/package.py +++ b/var/spack/repos/builtin/packages/xz/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Xz(Package): """XZ Utils is free general-purpose data compression software with high compression ratio. XZ Utils were written for POSIX-like @@ -39,4 +40,3 @@ def install(self, spec, prefix): configure("--prefix=%s" % prefix) make() make("install") - diff --git a/var/spack/repos/builtin/packages/yasm/package.py b/var/spack/repos/builtin/packages/yasm/package.py index e05160c8ea6..f14bdbcee71 100644 --- a/var/spack/repos/builtin/packages/yasm/package.py +++ b/var/spack/repos/builtin/packages/yasm/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Yasm(Package): """Yasm is a complete rewrite of the NASM-2.11.06 assembler. 
It supports the x86 and AMD64 instruction sets, accepts NASM and diff --git a/var/spack/repos/builtin/packages/zeromq/package.py b/var/spack/repos/builtin/packages/zeromq/package.py index 9bdd5861e0a..6a657dc39ca 100644 --- a/var/spack/repos/builtin/packages/zeromq/package.py +++ b/var/spack/repos/builtin/packages/zeromq/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Zeromq(Package): """ The ZMQ networking/concurrency library and core API """ homepage = "http://zguide.zeromq.org/" @@ -38,7 +39,7 @@ class Zeromq(Package): depends_on("libsodium") def install(self, spec, prefix): - configure("--with-libsodium","--prefix=%s" % prefix) + configure("--with-libsodium", "--prefix=%s" % prefix) make() make("install") diff --git a/var/spack/repos/builtin/packages/zfp/package.py b/var/spack/repos/builtin/packages/zfp/package.py index 878b65118f3..a898ab03d3e 100644 --- a/var/spack/repos/builtin/packages/zfp/package.py +++ b/var/spack/repos/builtin/packages/zfp/package.py @@ -24,12 +24,15 @@ ############################################################################## from spack import * + class Zfp(Package): - """zfp is an open source C library for compressed floating-point arrays that supports - very high throughput read and write random acces, target error bounds or bit rates. - Although bit-for-bit lossless compression is not always possible, zfp is usually - accurate to within machine epsilon in near-lossless mode, and is often orders of - magnitude more accurate than other lossy compressors. + """zfp is an open source C library for compressed floating-point arrays + that supports very high throughput read and write random acces, + target error bounds or bit rates. Although bit-for-bit lossless + compression is not always possible, zfp is usually accurate to + within machine epsilon in near-lossless mode, and is often orders + of magnitude more accurate than other lossy compressors. + """ homepage = "http://computation.llnl.gov/projects/floating-point-compression" diff --git a/var/spack/repos/builtin/packages/zlib/package.py b/var/spack/repos/builtin/packages/zlib/package.py index e1cbdc7e281..6d799fb05ab 100644 --- a/var/spack/repos/builtin/packages/zlib/package.py +++ b/var/spack/repos/builtin/packages/zlib/package.py @@ -24,11 +24,10 @@ ############################################################################## from spack import * + class Zlib(Package): - """zlib is designed to be a free, general-purpose, legally unencumbered -- - that is, not covered by any patents -- lossless data-compression library for - use on virtually any computer hardware and operating system. 
- """ + """A free, general-purpose, legally unencumbered lossless + data-compression library.""" homepage = "http://zlib.net" url = "http://zlib.net/zlib-1.2.8.tar.gz" diff --git a/var/spack/repos/builtin/packages/zoltan/package.py b/var/spack/repos/builtin/packages/zoltan/package.py index 841ff3f4a2d..00943959688 100644 --- a/var/spack/repos/builtin/packages/zoltan/package.py +++ b/var/spack/repos/builtin/packages/zoltan/package.py @@ -22,16 +22,22 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import re, os, glob +import re +import os +import glob from spack import * + class Zoltan(Package): - """The Zoltan library is a toolkit of parallel combinatorial algorithms for - parallel, unstructured, and/or adaptive scientific applications. Zoltan's - largest component is a suite of dynamic load-balancing and paritioning - algorithms that increase applications' parallel performance by reducing - idle time. Zoltan also has graph coloring and graph ordering algorithms, - which are useful in task schedulers and parallel preconditioners.""" + """The Zoltan library is a toolkit of parallel combinatorial algorithms + for parallel, unstructured, and/or adaptive scientific + applications. Zoltan's largest component is a suite of dynamic + load-balancing and paritioning algorithms that increase + applications' parallel performance by reducing idle time. Zoltan + also has graph coloring and graph ordering algorithms, which are + useful in task schedulers and parallel preconditioners. + + """ homepage = "http://www.cs.sandia.gov/zoltan" base_url = "http://www.cs.sandia.gov/~kddevin/Zoltan_Distributions" @@ -41,8 +47,10 @@ class Zoltan(Package): version('3.6', '9cce794f7241ecd8dbea36c3d7a880f9') version('3.3', '5eb8f00bda634b25ceefa0122bd18d65') - variant('debug', default=False, description='Builds a debug version of the library') - variant('shared', default=True, description='Builds a shared version of the library') + variant('debug', default=False, + description='Builds a debug version of the library') + variant('shared', default=True, + description='Builds a shared version of the library') variant('fortran', default=True, description='Enable Fortran support') variant('mpi', default=False, description='Enable MPI support') @@ -51,8 +59,11 @@ class Zoltan(Package): def install(self, spec, prefix): config_args = [ - '--enable-f90interface' if '+fortan' in spec else '--disable-f90interface', - '--enable-mpi' if '+mpi' in spec else '--disable-mpi', + '--enable-f90interface' + if '+fortan' in spec else '--disable-f90interface', + + '--enable-mpi' + if '+mpi' in spec else '--disable-mpi', ] config_cflags = [ '-O0' if '+debug' in spec else '-O3', @@ -68,7 +79,8 @@ def install(self, spec, prefix): config_args.append('CC=%s/mpicc' % spec['mpi'].prefix.bin) config_args.append('CXX=%s/mpicxx' % spec['mpi'].prefix.bin) config_args.append('--with-mpi=%s' % spec['mpi'].prefix) - config_args.append('--with-mpi-compilers=%s' % spec['mpi'].prefix.bin) + config_args.append('--with-mpi-compilers=%s' % + spec['mpi'].prefix.bin) # NOTE: Early versions of Zoltan come packaged with a few embedded # library packages (e.g. 
ParMETIS, Scotch), which messes with Spack's
@@ -89,13 +101,15 @@ def install(self, spec, prefix):
         make()
         make('install')
 
-        # NOTE: Unfortunately, Zoltan doesn't provide any configuration options for
-        # the extension of the output library files, so this script must change these
-        # extensions as a post-processing step.
+        # NOTE: Unfortunately, Zoltan doesn't provide any configuration
+        # options for the extension of the output library files, so this
+        # script must change these extensions as a post-processing step.
         if '+shared' in spec:
             for libpath in glob.glob('lib/*.a'):
-                libdir, libname = (os.path.dirname(libpath), os.path.basename(libpath))
-                move(libpath, os.path.join(libdir, re.sub(r'\.a$', '.so', libname)))
+                libdir, libname = (os.path.dirname(libpath),
+                                   os.path.basename(libpath))
+                move(libpath, os.path.join(
+                    libdir, re.sub(r'\.a$', '.so', libname)))
 
         mkdirp(prefix)
         move('include', prefix)
diff --git a/var/spack/repos/builtin/packages/zsh/package.py b/var/spack/repos/builtin/packages/zsh/package.py
index 2c9ed4c6e7a..4c27cd3ec29 100644
--- a/var/spack/repos/builtin/packages/zsh/package.py
+++ b/var/spack/repos/builtin/packages/zsh/package.py
@@ -24,6 +24,7 @@
 ##############################################################################
 from spack import *
 
+
 class Zsh(Package):
     """
     Zsh is a shell designed for interactive use, although it is also a
     powerful

From db59f87e3297dfbeecb3805f8d68ebd85d0b6696 Mon Sep 17 00:00:00 2001
From: Todd Gamblin
Date: Wed, 10 Aug 2016 12:54:42 -0700
Subject: [PATCH 283/284] Update README.md for PEP8

---
 README.md | 21 ++++++++++++++-------
 1 file changed, 14 insertions(+), 7 deletions(-)

diff --git a/README.md b/README.md
index fe00e2af279..5d5ac7dd390 100644
--- a/README.md
+++ b/README.md
@@ -58,17 +58,24 @@ can join it here:
 
 ### Contributions
 
-At the moment, contributing to Spack is relatively simple. Just send us
-a [pull request](https://help.github.com/articles/using-pull-requests/).
+Contributing to Spack is relatively easy. Just send us a
+[pull request](https://help.github.com/articles/using-pull-requests/).
 When you send your request, make ``develop`` the destination branch on
 the [Spack repository](https://github.com/LLNL/spack).
 
-Your contribution will need to pass all the tests run by the `spack test`
-command, as well as the formatting checks in `share/spack/qa/run-flake8`.
-You should run both of these before submitting your pull request, to
-ensure that the online checks succeed.
+Before you send a PR, your code should pass the following checks:
 
-Spack is using a rough approximation of the [Git
+* Your contribution will need to pass the `spack test` command.
+  Run this before submitting your PR.
+
+* Also run the `share/spack/qa/run-flake8` script to check for PEP8 compliance.
+  To encourage contributions and readability by a broad audience,
+  Spack uses the [PEP8](https://www.python.org/dev/peps/pep-0008/) coding
+  standard with [a few exceptions](https://github.com/LLNL/spack/blob/develop/.flake8).
+
+We enforce these guidelines with [Travis CI](https://travis-ci.org/LLNL/spack).
+
+Spack uses a rough approximation of the [Git
 Flow](http://nvie.com/posts/a-successful-git-branching-model/)
 branching model. The ``develop`` branch contains the latest
 contributions, and ``master`` is always tagged and points to the

From 84e331c58618f2c67da00cbc56f51c75bd61af91 Mon Sep 17 00:00:00 2001
From: Todd Gamblin
Date: Wed, 10 Aug 2016 14:05:59 -0700
Subject: [PATCH 284/284] Remove unneeded noqa comments.
--- .../repos/builtin/packages/bzip2/package.py | 30 ++++++++---- .../repos/builtin/packages/dealii/package.py | 26 ++++++---- .../repos/builtin/packages/fenics/package.py | 2 +- .../builtin/packages/graphviz/package.py | 4 +- .../repos/builtin/packages/gromacs/package.py | 4 +- .../builtin/packages/hdf5-blosc/package.py | 6 +-- .../repos/builtin/packages/intel/package.py | 4 +- .../repos/builtin/packages/jdk/package.py | 4 +- .../builtin/packages/launchmon/package.py | 2 +- .../repos/builtin/packages/lmod/package.py | 2 +- .../builtin/packages/openblas/package.py | 9 ++-- .../repos/builtin/packages/openmpi/package.py | 3 +- .../builtin/packages/parmetis/package.py | 4 +- .../repos/builtin/packages/petsc/package.py | 48 +++++++++++++------ .../repos/builtin/packages/plumed/package.py | 2 +- .../builtin/packages/py-pillow/package.py | 6 ++- .../repos/builtin/packages/python/package.py | 3 +- .../builtin/packages/superlu-dist/package.py | 2 +- .../repos/builtin/packages/tar/package.py | 2 +- .../repos/builtin/packages/tbb/package.py | 6 ++- 20 files changed, 109 insertions(+), 60 deletions(-) diff --git a/var/spack/repos/builtin/packages/bzip2/package.py b/var/spack/repos/builtin/packages/bzip2/package.py index 27303293d2e..9e5894a6a84 100644 --- a/var/spack/repos/builtin/packages/bzip2/package.py +++ b/var/spack/repos/builtin/packages/bzip2/package.py @@ -51,15 +51,23 @@ def patch(self): kwargs = {'ignore_absent': False, 'backup': False, 'string': True} mf = FileFilter('Makefile-libbz2_so') - mf.filter('$(CC) -shared -Wl,-soname -Wl,libbz2.so.{0} -o libbz2.so.{1} $(OBJS)'.format(v2, v3), # NOQA ignore=E501 - '$(CC) -dynamiclib -Wl,-install_name -Wl,@rpath/libbz2.{0}.dylib -current_version {1} -compatibility_version {2} -o libbz2.{3}.dylib $(OBJS)'.format(v1, v2, v3, v3), **kwargs) # NOQA ignore=E501 + mf.filter('$(CC) -shared -Wl,-soname -Wl,libbz2.so.{0} -o libbz2.so.{1} $(OBJS)' # noqa + .format(v2, v3), + '$(CC) -dynamiclib -Wl,-install_name -Wl,@rpath/libbz2.{0}.dylib -current_version {1} -compatibility_version {2} -o libbz2.{3}.dylib $(OBJS)' # noqa + .format(v1, v2, v3, v3), + **kwargs) - mf.filter('$(CC) $(CFLAGS) -o bzip2-shared bzip2.c libbz2.so.{0}'.format(v3), # NOQA ignore=E501 - '$(CC) $(CFLAGS) -o bzip2-shared bzip2.c libbz2.{0}.dylib'.format(v3), **kwargs) # NOQA ignore=E501 - mf.filter('rm -f libbz2.so.{0}'.format(v2), - 'rm -f libbz2.{0}.dylib'.format(v2), **kwargs) - mf.filter('ln -s libbz2.so.{0} libbz2.so.{1}'.format(v3, v2), - 'ln -s libbz2.{0}.dylib libbz2.{1}.dylib'.format(v3, v2), **kwargs) # NOQA ignore=E501 + mf.filter( + '$(CC) $(CFLAGS) -o bzip2-shared bzip2.c libbz2.so.{0}'.format(v3), # noqa + '$(CC) $(CFLAGS) -o bzip2-shared bzip2.c libbz2.{0}.dylib' + .format(v3), **kwargs) + mf.filter( + 'rm -f libbz2.so.{0}'.format(v2), + 'rm -f libbz2.{0}.dylib'.format(v2), **kwargs) + mf.filter( + 'ln -s libbz2.so.{0} libbz2.so.{1}'.format(v3, v2), + 'ln -s libbz2.{0}.dylib libbz2.{1}.dylib'.format(v3, v2), + **kwargs) def install(self, spec, prefix): # Build the dynamic library first @@ -73,10 +81,12 @@ def install(self, spec, prefix): v1, v2, v3 = (self.spec.version.up_to(i) for i in (1, 2, 3)) if 'darwin' in self.spec.architecture: lib = 'libbz2.dylib' - lib1, lib2, lib3 = ('libbz2.{0}.dylib'.format(v) for v in (v1, v2, v3)) # NOQA ignore=E501 + lib1, lib2, lib3 = ('libbz2.{0}.dylib'.format(v) + for v in (v1, v2, v3)) else: lib = 'libbz2.so' - lib1, lib2, lib3 = ('libbz2.so.{0}'.format(v) for v in (v1, v2, v3)) # NOQA ignore=E501 + lib1, lib2, lib3 = 
('libbz2.so.{0}'.format(v) + for v in (v1, v2, v3)) install(lib3, join_path(prefix.lib, lib3)) with working_dir(prefix.lib): diff --git a/var/spack/repos/builtin/packages/dealii/package.py b/var/spack/repos/builtin/packages/dealii/package.py index 1411494c6f0..939d8fc0134 100644 --- a/var/spack/repos/builtin/packages/dealii/package.py +++ b/var/spack/repos/builtin/packages/dealii/package.py @@ -174,7 +174,8 @@ def install(self, spec, prefix): # Optional dependencies for which librariy names are the same as CMake # variables: - for library in ('gsl', 'hdf5', 'p4est', 'petsc', 'slepc', 'trilinos', 'metis'): # NOQA: ignore=E501 + for library in ( + 'gsl', 'hdf5', 'p4est', 'petsc', 'slepc', 'trilinos', 'metis'): if library in spec: options.extend([ '-D%s_DIR=%s' % (library.upper(), spec[library].prefix), @@ -305,15 +306,20 @@ def install(self, spec, prefix): print('=== Step-40 Trilinos SuperluDist ====') print('=====================================') # change to direct solvers - filter_file(r'(LA::SolverCG solver\(solver_control\);)', ('TrilinosWrappers::SolverDirect::AdditionalData data(false,"Amesos_Superludist"); TrilinosWrappers::SolverDirect solver(solver_control,data);'), 'step-40.cc') # NOQA: ignore=E501 - filter_file(r'(LA::MPI::PreconditionAMG preconditioner;)', - (''), 'step-40.cc') - filter_file(r'(LA::MPI::PreconditionAMG::AdditionalData data;)', # NOQA: ignore=E501 - (''), 'step-40.cc') - filter_file(r'(preconditioner.initialize\(system_matrix, data\);)', # NOQA: ignore=E501 - (''), 'step-40.cc') - filter_file(r'(solver\.solve \(system_matrix, completely_distributed_solution, system_rhs,)', ('solver.solve (system_matrix, completely_distributed_solution, system_rhs);'), 'step-40.cc') # NOQA: ignore=E501 - filter_file(r'(preconditioner\);)', (''), 'step-40.cc') + filter_file(r'(LA::SolverCG solver\(solver_control\);)', ('TrilinosWrappers::SolverDirect::AdditionalData data(false,"Amesos_Superludist"); TrilinosWrappers::SolverDirect solver(solver_control,data);'), 'step-40.cc') # noqa + filter_file( + r'(LA::MPI::PreconditionAMG preconditioner;)', + (''), 'step-40.cc') + filter_file( + r'(LA::MPI::PreconditionAMG::AdditionalData data;)', + (''), 'step-40.cc') + filter_file( + r'(preconditioner.initialize\(system_matrix, data\);)', + (''), 'step-40.cc') + filter_file( + r'(solver\.solve \(system_matrix, completely_distributed_solution, system_rhs,)', ('solver.solve (system_matrix, completely_distributed_solution, system_rhs);'), 'step-40.cc') # noqa + filter_file( + r'(preconditioner\);)', (''), 'step-40.cc') if '^trilinos+superlu-dist' in spec: make('release') make('run', paralle=False) diff --git a/var/spack/repos/builtin/packages/fenics/package.py b/var/spack/repos/builtin/packages/fenics/package.py index 1762b15b669..eeeefc540dd 100644 --- a/var/spack/repos/builtin/packages/fenics/package.py +++ b/var/spack/repos/builtin/packages/fenics/package.py @@ -36,7 +36,7 @@ class Fenics(Package): homepage = "http://fenicsproject.org/" url = "https://bitbucket.org/fenics-project/dolfin/downloads/dolfin-1.6.0.tar.gz" - base_url = "https://bitbucket.org/fenics-project/{pkg}/downloads/{pkg}-{version}.tar.gz" # NOQA: ignore E501 + base_url = "https://bitbucket.org/fenics-project/{pkg}/downloads/{pkg}-{version}.tar.gz" variant('hdf5', default=True, description='Compile with HDF5') variant('parmetis', default=True, description='Compile with ParMETIS') diff --git a/var/spack/repos/builtin/packages/graphviz/package.py b/var/spack/repos/builtin/packages/graphviz/package.py index 
e5898a6e590..b37121248c7 100644 --- a/var/spack/repos/builtin/packages/graphviz/package.py +++ b/var/spack/repos/builtin/packages/graphviz/package.py @@ -37,7 +37,9 @@ class Graphviz(Package): # related to missing Perl packages. If spack begins support for Perl in the # future, this package can be updated to depend_on('perl') and the # ncecessary devel packages. - variant('perl', default=False, description='Enable if you need the optional Perl language bindings.') # NOQA: ignore=E501 + variant( + 'perl', default=False, + description='Enable if you need the optional Perl language bindings.') parallel = False diff --git a/var/spack/repos/builtin/packages/gromacs/package.py b/var/spack/repos/builtin/packages/gromacs/package.py index 607927fe8b8..d39c9738efe 100644 --- a/var/spack/repos/builtin/packages/gromacs/package.py +++ b/var/spack/repos/builtin/packages/gromacs/package.py @@ -48,7 +48,9 @@ class Gromacs(Package): variant('shared', default=True, description='Enables the build of shared libraries') variant('debug', default=False, description='Enables debug mode') - variant('double', default=False, description='Produces a double precision version of the executables') # NOQA: ignore=E501 + variant( + 'double', default=False, + description='Produces a double precision version of the executables') variant('plumed', default=False, description='Enable PLUMED support') depends_on('mpi', when='+mpi') diff --git a/var/spack/repos/builtin/packages/hdf5-blosc/package.py b/var/spack/repos/builtin/packages/hdf5-blosc/package.py index 89c20f3998d..b9c19dff622 100644 --- a/var/spack/repos/builtin/packages/hdf5-blosc/package.py +++ b/var/spack/repos/builtin/packages/hdf5-blosc/package.py @@ -72,10 +72,10 @@ def install(self, spec, prefix): # TODO: these vars are not used. 
# if "+mpi" in spec["hdf5"]: - # cc = "mpicc" # noqa + # cc = "mpicc" # else: - # cc = "cc" # noqa - # shlibext = "so" if sys.platform != "darwin" else "dylib" # noqa + # cc = "cc" + # shlibext = "so" if sys.platform != "darwin" else "dylib" mkdirp(prefix.include) mkdirp(prefix.lib) diff --git a/var/spack/repos/builtin/packages/intel/package.py b/var/spack/repos/builtin/packages/intel/package.py index d171411946f..26134a12ae9 100644 --- a/var/spack/repos/builtin/packages/intel/package.py +++ b/var/spack/repos/builtin/packages/intel/package.py @@ -83,10 +83,10 @@ class Intel(IntelInstaller): # TODO: can also try the online installer (will download files on demand) version('16.0.2', '1133fb831312eb519f7da897fec223fa', - url="file://%s/parallel_studio_xe_2016_composer_edition_update2.tgz" # NOQA: ignore=E501 + url="file://%s/parallel_studio_xe_2016_composer_edition_update2.tgz" % os.getcwd()) version('16.0.3', '3208eeabee951fc27579177b593cefe9', - url="file://%s/parallel_studio_xe_2016_composer_edition_update3.tgz" # NOQA: ignore=E501 + url="file://%s/parallel_studio_xe_2016_composer_edition_update3.tgz" % os.getcwd()) variant('rpath', default=True, description="Add rpath to .cfg files") diff --git a/var/spack/repos/builtin/packages/jdk/package.py b/var/spack/repos/builtin/packages/jdk/package.py index 39ec39b66d9..63bf6514cbe 100644 --- a/var/spack/repos/builtin/packages/jdk/package.py +++ b/var/spack/repos/builtin/packages/jdk/package.py @@ -35,7 +35,7 @@ class Jdk(Package): """The Java Development Kit (JDK) released by Oracle Corporation in the form of a binary product aimed at Java developers.""" - homepage = "http://www.oracle.com/technetwork/java/javase/downloads/index.html" # noqa: E501 + homepage = "http://www.oracle.com/technetwork/java/javase/downloads/index.html" version('8u66-linux-x64', '88f31f3d642c3287134297b8c10e61bf', url="http://download.oracle.com/otn-pub/java/jdk/8u66-b17/jdk-8u66-linux-x64.tar.gz") @@ -47,7 +47,7 @@ class Jdk(Package): # automate this process, we need to utilize these additional curl # commandline options. # - # See http://stackoverflow.com/questions/10268583/how-to-automate-download-and-installation-of-java-jdk-on-linux # noqa: E501 + # See http://stackoverflow.com/questions/10268583/how-to-automate-download-and-installation-of-java-jdk-on-linux curl_options = [ '-j', # junk cookies '-H', # specify required License Agreement cookie diff --git a/var/spack/repos/builtin/packages/launchmon/package.py b/var/spack/repos/builtin/packages/launchmon/package.py index 8aa6d727278..d7c96a03d87 100644 --- a/var/spack/repos/builtin/packages/launchmon/package.py +++ b/var/spack/repos/builtin/packages/launchmon/package.py @@ -29,7 +29,7 @@ class Launchmon(Package): """Software infrastructure that enables HPC run-time tools to co-locate tool daemons with a parallel job.""" homepage = "https://github.com/LLNL/LaunchMON" - url = "https://github.com/LLNL/LaunchMON/releases/download/v1.0.2/launchmon-v1.0.2.tar.gz" # NOQA: ignore=E501 + url = "https://github.com/LLNL/LaunchMON/releases/download/v1.0.2/launchmon-v1.0.2.tar.gz" version('1.0.2', '8d6ba77a0ec2eff2fde2c5cc8fa7ff7a') diff --git a/var/spack/repos/builtin/packages/lmod/package.py b/var/spack/repos/builtin/packages/lmod/package.py index a3ae4a7f512..69965bc4236 100644 --- a/var/spack/repos/builtin/packages/lmod/package.py +++ b/var/spack/repos/builtin/packages/lmod/package.py @@ -35,7 +35,7 @@ class Lmod(Package): variable. 
Modulefiles for Library packages provide environment variables that specify where the library and header files can be found. """ - homepage = 'https://www.tacc.utexas.edu/research-development/tacc-projects/lmod' # NOQA: ignore=E501 + homepage = 'https://www.tacc.utexas.edu/research-development/tacc-projects/lmod' url = 'https://github.com/TACC/Lmod/archive/6.4.1.tar.gz' version('6.4.5', '14f6c58dbc0a5a75574d795eac2c1e3c') diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py index d09ebd67390..37f7a7005df 100644 --- a/var/spack/repos/builtin/packages/openblas/package.py +++ b/var/spack/repos/builtin/packages/openblas/package.py @@ -37,9 +37,12 @@ class Openblas(Package): version('0.2.16', 'fef46ab92463bdbb1479dcec594ef6dc') version('0.2.15', 'b1190f3d3471685f17cfd1ec1d252ac9') - variant('shared', default=True, description="Build shared libraries as well as static libs.") # NOQA: ignore=E501 - variant('openmp', default=False, description="Enable OpenMP support.") - variant('fpic', default=True, description="Build position independent code") # NOQA: ignore=E501 + variant('shared', default=True, + description="Build shared libraries as well as static libs.") + variant('openmp', default=False, + description="Enable OpenMP support.") + variant('fpic', default=True, + description="Build position independent code") # virtual dependency provides('blas') diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py index 3fcf942f050..b0efe27deff 100644 --- a/var/spack/repos/builtin/packages/openmpi/package.py +++ b/var/spack/repos/builtin/packages/openmpi/package.py @@ -108,7 +108,8 @@ class Openmpi(Package): depends_on('sqlite', when='+sqlite3') def url_for_version(self, version): - return "http://www.open-mpi.org/software/ompi/v%s/downloads/openmpi-%s.tar.bz2" % (version.up_to(2), version) # NOQA: ignore=E501 + return "http://www.open-mpi.org/software/ompi/v%s/downloads/openmpi-%s.tar.bz2" % ( + version.up_to(2), version) def setup_dependent_environment(self, spack_env, run_env, dependent_spec): spack_env.set('MPICC', join_path(self.prefix.bin, 'mpicc')) diff --git a/var/spack/repos/builtin/packages/parmetis/package.py b/var/spack/repos/builtin/packages/parmetis/package.py index d1cfa27f3a1..2750df2bdbf 100644 --- a/var/spack/repos/builtin/packages/parmetis/package.py +++ b/var/spack/repos/builtin/packages/parmetis/package.py @@ -50,9 +50,9 @@ class Parmetis(Package): patch('enable_external_metis.patch') # bug fixes from PETSc developers - # https://bitbucket.org/petsc/pkg-parmetis/commits/1c1a9fd0f408dc4d42c57f5c3ee6ace411eb222b/raw/ # NOQA: ignore=E501 + # https://bitbucket.org/petsc/pkg-parmetis/commits/1c1a9fd0f408dc4d42c57f5c3ee6ace411eb222b/raw/ patch('pkg-parmetis-1c1a9fd0f408dc4d42c57f5c3ee6ace411eb222b.patch') - # https://bitbucket.org/petsc/pkg-parmetis/commits/82409d68aa1d6cbc70740d0f35024aae17f7d5cb/raw/ # NOQA: ignore=E501 + # https://bitbucket.org/petsc/pkg-parmetis/commits/82409d68aa1d6cbc70740d0f35024aae17f7d5cb/raw/ patch('pkg-parmetis-82409d68aa1d6cbc70740d0f35024aae17f7d5cb.patch') def url_for_version(self, version): diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py index 079e0b7af4e..66e1abdf1a9 100644 --- a/var/spack/repos/builtin/packages/petsc/package.py +++ b/var/spack/repos/builtin/packages/petsc/package.py @@ -81,7 +81,7 @@ class Petsc(Package): # Hypre does not support complex 
numbers. # Also PETSc prefer to build it without internal superlu, likely due to # conflict in headers see - # https://bitbucket.org/petsc/petsc/src/90564b43f6b05485163c147b464b5d6d28cde3ef/config/BuildSystem/config/packages/hypre.py # NOQA: ignore=E501 + # https://bitbucket.org/petsc/petsc/src/90564b43f6b05485163c147b464b5d6d28cde3ef/config/BuildSystem/config/packages/hypre.py depends_on('hypre~internal-superlu', when='+hypre+mpi~complex') depends_on('superlu-dist@:4.3', when='@:3.6.4+superlu-dist+mpi') depends_on('superlu-dist@5.0.0:', when='@3.7:+superlu-dist+mpi') @@ -92,17 +92,21 @@ def mpi_dependent_options(self): if '~mpi' in self.spec: compiler_opts = [ '--with-cc=%s' % os.environ['CC'], - '--with-cxx=%s' % (os.environ['CXX'] if self.compiler.cxx is not None else '0'), # NOQA: ignore=E501 - '--with-fc=%s' % (os.environ['FC'] if self.compiler.fc is not None else '0'), # NOQA: ignore=E501 + '--with-cxx=%s' % (os.environ['CXX'] + if self.compiler.cxx is not None else '0'), + '--with-fc=%s' % (os.environ['FC'] + if self.compiler.fc is not None else '0'), '--with-mpi=0' ] - error_message_fmt = '\t{library} support requires "+mpi" to be activated' # NOQA: ignore=E501 + error_message_fmt = \ + '\t{library} support requires "+mpi" to be activated' # If mpi is disabled (~mpi), it's an error to have any of these # enabled. This generates a list of any such errors. - errors = [error_message_fmt.format(library=x) - for x in ('hdf5', 'hypre', 'parmetis', 'mumps', 'superlu-dist') # NOQA: ignore=E501 - if ('+' + x) in self.spec] + errors = [ + error_message_fmt.format(library=x) + for x in ('hdf5', 'hypre', 'parmetis', 'mumps', 'superlu-dist') + if ('+' + x) in self.spec] if errors: errors = ['incompatible variants given'] + errors raise RuntimeError('\n'.join(errors)) @@ -117,8 +121,10 @@ def install(self, spec, prefix): options = ['--with-ssl=0'] options.extend(self.mpi_dependent_options()) options.extend([ - '--with-precision=%s' % ('double' if '+double' in spec else 'single'), # NOQA: ignore=E501 - '--with-scalar-type=%s' % ('complex' if '+complex' in spec else 'real'), # NOQA: ignore=E501 + '--with-precision=%s' % ( + 'double' if '+double' in spec else 'single'), + '--with-scalar-type=%s' % ( + 'complex' if '+complex' in spec else 'real'), '--with-shared-libraries=%s' % ('1' if '+shared' in spec else '0'), '--with-debugging=%s' % ('1' if '+debug' in spec else '0'), '--with-blas-lapack-dir=%s' % spec['lapack'].prefix @@ -127,11 +133,13 @@ def install(self, spec, prefix): for library in ('metis', 'boost', 'hdf5', 'hypre', 'parmetis', 'mumps', 'scalapack'): options.append( - '--with-{library}={value}'.format(library=library, value=('1' if library in spec else '0')) # NOQA: ignore=E501 + '--with-{library}={value}'.format( + library=library, value=('1' if library in spec else '0')) ) if library in spec: options.append( - '--with-{library}-dir={path}'.format(library=library, path=spec[library].prefix) # NOQA: ignore=E501 + '--with-{library}-dir={path}'.format( + library=library, path=spec[library].prefix) ) # PETSc does not pick up SuperluDist from the dir as they look for # superlu_dist_4.1.a @@ -165,13 +173,25 @@ def install(self, spec, prefix): run = Executable(join_path(spec['mpi'].prefix.bin, 'mpirun')) run('ex50', '-da_grid_x', '4', '-da_grid_y', '4') if 'superlu-dist' in spec: - run('ex50', '-da_grid_x', '4', '-da_grid_y', '4', '-pc_type', 'lu', '-pc_factor_mat_solver_package', 'superlu_dist') # NOQA: ignore=E501 + run('ex50', + '-da_grid_x', '4', + '-da_grid_y', '4', + '-pc_type', 'lu', + 
'-pc_factor_mat_solver_package', 'superlu_dist') if 'mumps' in spec: - run('ex50', '-da_grid_x', '4', '-da_grid_y', '4', '-pc_type', 'lu', '-pc_factor_mat_solver_package', 'mumps') # NOQA: ignore=E501 + run('ex50', + '-da_grid_x', '4', + '-da_grid_y', '4', + '-pc_type', 'lu', + '-pc_factor_mat_solver_package', 'mumps') if 'hypre' in spec: - run('ex50', '-da_grid_x', '4', '-da_grid_y', '4', '-pc_type', 'hypre', '-pc_hypre_type', 'boomeramg') # NOQA: ignore=E501 + run('ex50', + '-da_grid_x', '4', + '-da_grid_y', '4', + '-pc_type', 'hypre', + '-pc_hypre_type', 'boomeramg') def setup_dependent_environment(self, spack_env, run_env, dependent_spec): # set up PETSC_DIR for everyone using PETSc package diff --git a/var/spack/repos/builtin/packages/plumed/package.py b/var/spack/repos/builtin/packages/plumed/package.py index abb03e07703..b670b4c2b8a 100644 --- a/var/spack/repos/builtin/packages/plumed/package.py +++ b/var/spack/repos/builtin/packages/plumed/package.py @@ -102,7 +102,7 @@ def install(self, spec, prefix): configure_opts.extend([ '--prefix={0}'.format(prefix), - '--enable-shared={0}'.format('yes' if '+shared' in spec else 'no'), # NOQA: ignore=E501 + '--enable-shared={0}'.format('yes' if '+shared' in spec else 'no'), '--enable-mpi={0}'.format('yes' if '+mpi' in spec else 'no'), '--enable-gsl={0}'.format('yes' if '+gsl' in spec else 'no') ]) diff --git a/var/spack/repos/builtin/packages/py-pillow/package.py b/var/spack/repos/builtin/packages/py-pillow/package.py index aae5727d4e9..1d8b3faa6fa 100644 --- a/var/spack/repos/builtin/packages/py-pillow/package.py +++ b/var/spack/repos/builtin/packages/py-pillow/package.py @@ -58,8 +58,10 @@ class PyPillow(Package): # Spack does not (yet) support these modes of building # variant('webp', default=False, description='Provide the WebP format') - # variant('webpmux', default=False, description='WebP metadata, relies on WebP support') # NOQA: ignore=E501 - # variant('imagequant', default=False, description='Provide improved color quantization') # NOQA: ignore=E501 + # variant('webpmux', default=False, + # description='WebP metadata, relies on WebP support') + # variant('imagequant', default=False, + # description='Provide improved color quantization') # Required dependencies extends('python') diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index 43f600e2f62..c4e67549690 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -118,7 +118,8 @@ def install(self, spec, prefix): config_args.append('--with-wide-unicode') elif spec.satisfies('@3.3:'): # https://docs.python.org/3.3/whatsnew/3.3.html - raise ValueError('+ucs4 variant not compatible with Python 3.3 and beyond') # NOQA: ignore=E501 + raise ValueError( + '+ucs4 variant not compatible with Python 3.3 and beyond') if spec.satisfies('@3:'): config_args.append('--without-ensurepip') diff --git a/var/spack/repos/builtin/packages/superlu-dist/package.py b/var/spack/repos/builtin/packages/superlu-dist/package.py index e51d7224d92..4b3354e379d 100644 --- a/var/spack/repos/builtin/packages/superlu-dist/package.py +++ b/var/spack/repos/builtin/packages/superlu-dist/package.py @@ -57,7 +57,7 @@ def install(self, spec, prefix): 'METISLIB = -L%s -lmetis' % spec['metis'].prefix.lib, 'PARMETISLIB = -L%s -lparmetis' % spec['parmetis'].prefix.lib, 'FLIBS =', - 'LIBS = $(DSUPERLULIB) $(BLASLIB) $(PARMETISLIB) $(METISLIB)', # NOQA: ignore=E501 + 'LIBS = $(DSUPERLULIB) 
$(BLASLIB) $(PARMETISLIB) $(METISLIB)', # noqa 'ARCH = ar', 'ARCHFLAGS = cr', 'RANLIB = true', diff --git a/var/spack/repos/builtin/packages/tar/package.py b/var/spack/repos/builtin/packages/tar/package.py index 7a72278b9ad..f5995cb0076 100644 --- a/var/spack/repos/builtin/packages/tar/package.py +++ b/var/spack/repos/builtin/packages/tar/package.py @@ -35,7 +35,7 @@ class Tar(Package): version('1.28', '6ea3dbea1f2b0409b234048e021a9fd7') # see http://lists.gnu.org/archive/html/bug-tar/2014-08/msg00001.html and - # https://github.com/Homebrew/homebrew-core/commit/aef9a1792de4648d0322b4b04d32287532f046bb # NOQA: ignore=E501 + # https://github.com/Homebrew/homebrew-core/commit/aef9a1792de4648d0322b4b04d32287532f046bb # TODO: when=sys.platform=='darwin' ? patch('gnutar-configure-xattrs.patch', when='@1.28') diff --git a/var/spack/repos/builtin/packages/tbb/package.py b/var/spack/repos/builtin/packages/tbb/package.py index c88b1708165..d13579b44da 100644 --- a/var/spack/repos/builtin/packages/tbb/package.py +++ b/var/spack/repos/builtin/packages/tbb/package.py @@ -35,8 +35,10 @@ class Tbb(Package): homepage = "http://www.threadingbuildingblocks.org/" # Only version-specific URL's work for TBB - version('4.4.4', 'd4cee5e4ca75cab5181834877738619c56afeb71', url='https://www.threadingbuildingblocks.org/sites/default/files/software_releases/source/tbb44_20160413oss_src.tgz') # NOQA: ignore=E501 - version('4.4.3', '80707e277f69d9b20eeebdd7a5f5331137868ce1', url='https://www.threadingbuildingblocks.org/sites/default/files/software_releases/source/tbb44_20160128oss_src_0.tgz') # NOQA: ignore=E501 + version('4.4.4', 'd4cee5e4ca75cab5181834877738619c56afeb71', + url='https://www.threadingbuildingblocks.org/sites/default/files/software_releases/source/tbb44_20160413oss_src.tgz') + version('4.4.3', '80707e277f69d9b20eeebdd7a5f5331137868ce1', + url='https://www.threadingbuildingblocks.org/sites/default/files/software_releases/source/tbb44_20160128oss_src_0.tgz') def coerce_to_spack(self, tbb_build_subdir): for compiler in ["icc", "gcc", "clang"]: