From ec47ab16ce863ee3a73d0f8fe27821d5f198fa40 Mon Sep 17 00:00:00 2001 From: "Gregory L. Lee" Date: Fri, 14 Aug 2015 16:27:51 -0700 Subject: [PATCH 001/121] updated blas and lapack packages --- var/spack/packages/netlib-blas/package.py | 6 +++ var/spack/packages/netlib-lapack/package.py | 59 +++++++++++++++++++++ 2 files changed, 65 insertions(+) create mode 100644 var/spack/packages/netlib-lapack/package.py diff --git a/var/spack/packages/netlib-blas/package.py b/var/spack/packages/netlib-blas/package.py index 0a6cdb04423..85e97323d35 100644 --- a/var/spack/packages/netlib-blas/package.py +++ b/var/spack/packages/netlib-blas/package.py @@ -9,6 +9,8 @@ class NetlibBlas(Package): version('3.5.0', 'b1d3e3e425b2e44a06760ff173104bdf') + variant('fpic', default=False, description="Build with -fpic compiler option") + # virtual dependency provides('blas') @@ -23,6 +25,10 @@ def patch(self): mf.filter('^LOADER.*', 'LOADER = f90') mf.filter('^CC =.*', 'CC = cc') + if '+fpic' in self.spec: + mf.filter('^OPTS.*=.*', 'OPTS = -O2 -frecursive -fpic') + mf.filter('^CFLAGS =.*', 'CFLAGS = -O3 -fpic') + def install(self, spec, prefix): make('blaslib') diff --git a/var/spack/packages/netlib-lapack/package.py b/var/spack/packages/netlib-lapack/package.py new file mode 100644 index 00000000000..fb6b99e27c3 --- /dev/null +++ b/var/spack/packages/netlib-lapack/package.py @@ -0,0 +1,59 @@ +from spack import * + +class NetlibLapack(Package): + """ + LAPACK version 3.X is a comprehensive FORTRAN library that does + linear algebra operations including matrix inversions, least + squared solutions to linear sets of equations, eigenvector + analysis, singular value decomposition, etc. It is a very + comprehensive and reputable package that has found extensive + use in the scientific community. 
+ """ + homepage = "http://www.netlib.org/lapack/" + url = "http://www.netlib.org/lapack/lapack-3.5.0.tgz" + + version('3.5.0', 'b1d3e3e425b2e44a06760ff173104bdf') + version('3.4.2', '61bf1a8a4469d4bdb7604f5897179478') + version('3.4.1', '44c3869c38c8335c2b9c2a8bb276eb55') + version('3.4.0', '02d5706ec03ba885fc246e5fa10d8c70') + version('3.3.1', 'd0d533ec9a5b74933c2a1e84eedc58b4') + + variant('shared', default=False, description="Build shared library version") + + # virtual dependency + provides('lapack') + + # blas is a virtual dependency. + depends_on('blas') + + depends_on('cmake') + + # Doesn't always build correctly in parallel + parallel = False + + @when('^netlib-blas') + def get_blas_libs(self): + blas = self.spec['netlib-blas'] + return [join_path(blas.prefix.lib, 'blas.a')] + + + @when('^atlas') + def get_blas_libs(self): + blas = self.spec['atlas'] + return [join_path(blas.prefix.lib, l) + for l in ('libf77blas.a', 'libatlas.a')] + + + def install(self, spec, prefix): + blas_libs = ";".join(self.get_blas_libs()) + cmake_args = [".", '-DBLAS_LIBRARIES=' + blas_libs] + + if '+shared' in spec: + cmake_args.append('-DBUILD_SHARED_LIBS=ON') + + cmake_args += std_cmake_args + + cmake(*cmake_args) + make() + make("install") + From 92ea86dc36de9543fd37928ec29e257e18362869 Mon Sep 17 00:00:00 2001 From: "Gregory L. 
Lee" Date: Fri, 14 Aug 2015 16:30:35 -0700 Subject: [PATCH 002/121] moved lapack package to netlib-lapack --- var/spack/packages/lapack/package.py | 45 ---------------------------- 1 file changed, 45 deletions(-) delete mode 100644 var/spack/packages/lapack/package.py diff --git a/var/spack/packages/lapack/package.py b/var/spack/packages/lapack/package.py deleted file mode 100644 index d9d37e3e4aa..00000000000 --- a/var/spack/packages/lapack/package.py +++ /dev/null @@ -1,45 +0,0 @@ -from spack import * - -class Lapack(Package): - """ - LAPACK version 3.X is a comprehensive FORTRAN library that does - linear algebra operations including matrix inversions, least - squared solutions to linear sets of equations, eigenvector - analysis, singular value decomposition, etc. It is a very - comprehensive and reputable package that has found extensive - use in the scientific community. - """ - homepage = "http://www.netlib.org/lapack/" - url = "http://www.netlib.org/lapack/lapack-3.5.0.tgz" - - version('3.5.0', 'b1d3e3e425b2e44a06760ff173104bdf') - version('3.4.2', '61bf1a8a4469d4bdb7604f5897179478') - version('3.4.1', '44c3869c38c8335c2b9c2a8bb276eb55') - version('3.4.0', '02d5706ec03ba885fc246e5fa10d8c70') - version('3.3.1', 'd0d533ec9a5b74933c2a1e84eedc58b4') - - # blas is a virtual dependency. - depends_on('blas') - - # Doesn't always build correctly in parallel - parallel = False - - @when('^netlib-blas') - def get_blas_libs(self): - blas = self.spec['netlib-blas'] - return [join_path(blas.prefix.lib, 'blas.a')] - - - @when('^atlas') - def get_blas_libs(self): - blas = self.spec['atlas'] - return [join_path(blas.prefix.lib, l) - for l in ('libf77blas.a', 'libatlas.a')] - - - def install(self, spec, prefix): - blas_libs = ";".join(self.get_blas_libs()) - cmake(".", '-DBLAS_LIBRARIES=' + blas_libs, *std_cmake_args) - make() - make("install") - From bd1cd83d7148a4856720cc7f29a1656a289fe92c Mon Sep 17 00:00:00 2001 From: "Gregory L. 
Lee" Date: Mon, 17 Aug 2015 11:54:14 -0700 Subject: [PATCH 003/121] added lapack and blas deps, fixed gfortran compiler dependency --- var/spack/packages/py-numpy/gnu_fcompiler.patch | 11 +++++++++++ var/spack/packages/py-numpy/package.py | 8 ++++++++ var/spack/packages/py-scipy/package.py | 2 ++ 3 files changed, 21 insertions(+) create mode 100644 var/spack/packages/py-numpy/gnu_fcompiler.patch diff --git a/var/spack/packages/py-numpy/gnu_fcompiler.patch b/var/spack/packages/py-numpy/gnu_fcompiler.patch new file mode 100644 index 00000000000..e4e882b9dfa --- /dev/null +++ b/var/spack/packages/py-numpy/gnu_fcompiler.patch @@ -0,0 +1,11 @@ +--- a/numpy/distutils/fcompiler/gnu.py.orig 2015-08-17 11:31:52.000000000 -0700 ++++ b/numpy/distutils/fcompiler/gnu.py 2015-08-17 11:32:48.000000000 -0700 +@@ -252,7 +252,7 @@ + # GNU Fortran 95 (GCC) 4.2.0 20060218 (experimental) + # GNU Fortran (GCC) 4.3.0 20070316 (experimental) + +- possible_executables = ['gfortran', 'f95'] ++ possible_executables = ['fc', 'f95'] + executables = { + 'version_cmd' : ["", "--version"], + 'compiler_f77' : [None, "-Wall", "-g", "-ffixed-form", diff --git a/var/spack/packages/py-numpy/package.py b/var/spack/packages/py-numpy/package.py index 6534e063d23..76fdd48da7d 100644 --- a/var/spack/packages/py-numpy/package.py +++ b/var/spack/packages/py-numpy/package.py @@ -7,9 +7,17 @@ class PyNumpy(Package): version('1.9.1', '78842b73560ec378142665e712ae4ad9') version('1.9.2', 'a1ed53432dbcd256398898d35bc8e645') + + patch('gnu_fcompiler.patch', when='%gcc') extends('python') depends_on('py-nose') + depends_on('netlib-blas+fpic') + depends_on('netlib-lapack+shared') def install(self, spec, prefix): + with open('site.cfg', 'w') as f: + f.write('[DEFAULT]\n') + f.write('libraries=lapack,blas\n') + f.write('library_dirs=%s/lib:%s/lib\n' % (spec['blas'].prefix, spec['lapack'].prefix)) python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-scipy/package.py 
b/var/spack/packages/py-scipy/package.py index cdcf74534b0..3a1124cc15a 100644 --- a/var/spack/packages/py-scipy/package.py +++ b/var/spack/packages/py-scipy/package.py @@ -11,6 +11,8 @@ class PyScipy(Package): extends('python') depends_on('py-nose') depends_on('py-numpy') + depends_on('blas') + depends_on('lapack') def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) From 40b21d7cbea0695309f4a7ae8b9403a6fad48c0c Mon Sep 17 00:00:00 2001 From: Peer-Timo Bremer Date: Mon, 17 Aug 2015 14:01:23 -0700 Subject: [PATCH 004/121] Initial version of PIDX package --- var/spack/packages/pidx/package.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 var/spack/packages/pidx/package.py diff --git a/var/spack/packages/pidx/package.py b/var/spack/packages/pidx/package.py new file mode 100644 index 00000000000..9b63a84a0f0 --- /dev/null +++ b/var/spack/packages/pidx/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Pidx(Package): + """PIDX Parallel I/O Library""" + + homepage = "http://www.cedmav.com/pidx" + #url = "http://www.example.com/pidx-1.0.tar.gz" + + version('1.0', git='https://github.com/sci-visus/PIDX.git', + commit='6afa1cf71d1c41263296dc049c8fabaf73c296da') + + depends_on("mpi") + + def install(self, spec, prefix): + with working_dir('spack-build', create=True): + cmake('..', *std_cmake_args) + make() + make("install") From 1da56e5290ba7272ea4bd0f18cdf5cae2b83f093 Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Fri, 21 Aug 2015 11:32:12 -0700 Subject: [PATCH 005/121] Added a database of installed packages. No methods use the database so far. Also, a bug fix: Previous version did not remove the staging directory on a failed install This led to spack refusing to uninstall dependencies of the failed install Added to cleanup() to blow away the staging directory on failed install. 
--- lib/spack/spack/database.py | 150 ++++++++++++++++++++++++++++ lib/spack/spack/directory_layout.py | 8 ++ lib/spack/spack/package.py | 12 +++ lib/spack/spack/spec.py | 8 +- 4 files changed, 175 insertions(+), 3 deletions(-) create mode 100644 lib/spack/spack/database.py diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py new file mode 100644 index 00000000000..8ae71a43853 --- /dev/null +++ b/lib/spack/spack/database.py @@ -0,0 +1,150 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import os +import sys +import inspect +import glob +import imp + +from external import yaml +from external.yaml.error import MarkedYAMLError + +import llnl.util.tty as tty +from llnl.util.filesystem import join_path +from llnl.util.lang import * + +import spack.error +import spack.spec +from spack.spec import Spec +from spack.error import SpackError +from spack.virtual import ProviderIndex +from spack.util.naming import mod_to_class, validate_module_name + + +class Database(object): + def __init__(self,file_name="specDB.yaml"): + """ + Create an empty Database + Location defaults to root/specDB.yaml + """ + self.file_name = file_name + self.data = [] + + + def from_yaml(self,stream): + """ + Fill database from YAML + Translate the spec portions from node-dict form to spec from + """ + try: + file = yaml.load(stream) + except MarkedYAMLError, e: + raise SpackYAMLError("error parsing YAML database:", str(e)) + + if file==None: + return + + for sp in file['database']: + spec = Spec.from_node_dict(sp['spec']) + path = sp['path'] + db_entry = {'spec': spec, 'path': path} + self.data.append(db_entry) + + + @staticmethod + def read_database(root): + """Create a Database from the data in the standard location""" + database = Database() + full_path = join_path(root,database.file_name) + if os.path.isfile(full_path): + with open(full_path,'r') as f: + database.from_yaml(f) + else: + with open(full_path,'w+') as f: + database.from_yaml(f) + + return database + + + def write_database_to_yaml(self,stream): + """ + Replace each spec with its dict-node form + Then stream all data to YAML + """ + node_list = [] + for sp in self.data: + node = {} + 
node['spec']=Spec.to_node_dict(sp['spec']) + node['spec'][sp['spec'].name]['hash']=sp['spec'].dag_hash() + node['path']=sp['path'] + node_list.append(node) + return yaml.dump({ 'database' : node_list}, + stream=stream, default_flow_style=False) + + + def write(self,root): + """Write the database to the standard location""" + full_path = join_path(root,self.file_name) + #test for file existence + with open(full_path,'w') as f: + self.write_database_to_yaml(f) + + + @staticmethod + def add(root, spec, path): + """Read the database from the standard location + Add the specified entry as a dict + Write the database back to memory + + TODO: Caching databases + """ + database = Database.read_database(root) + + spec_and_path = {} + spec_and_path['spec']=spec + spec_and_path['path']=path + + database.data.append(spec_and_path) + + database.write(root) + + + @staticmethod + def remove(root, spec): + """ + Reads the database from the standard location + Searches for and removes the specified spec + Writes the database back to memory + + TODO: Caching databases + """ + database = Database.read_database(root) + + for sp in database.data: + #This requires specs w/o dependencies, is that sustainable? 
+ if sp['spec'] == spec: + database.data.remove(sp) + + database.write(root) diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index e61929d8fdd..0bbe6c9d85e 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -37,6 +37,7 @@ from spack.spec import Spec from spack.error import SpackError +from spack.database import Database def _check_concrete(spec): @@ -152,6 +153,8 @@ def remove_install_directory(self, spec): os.rmdir(path) path = os.path.dirname(path) + Database.remove(self.root,spec) + class YamlDirectoryLayout(DirectoryLayout): """Lays out installation directories like this:: @@ -263,6 +266,11 @@ def create_install_directory(self, spec): self.write_spec(spec, spec_file_path) + def add_to_database(self, spec): + """Simply adds a spec to the database""" + Database.add(self.root, spec, self.path_for_spec(spec)) + + @memoized def all_specs(self): if not os.path.isdir(self.root): diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 3507807373e..a425c555a27 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -779,6 +779,15 @@ def cleanup(): "Manually remove this directory to fix:", self.prefix) + if not (keep_prefix and keep_stage): + self.do_clean() + else: + tty.warn("Keeping stage in place despite error.", + "Spack will refuse to uninstall dependencies of this package." + + "Manually remove this directory to fix:", + self.stage.path) + + def real_work(): try: tty.msg("Building %s." % self.name) @@ -808,6 +817,9 @@ def real_work(): log_install_path = spack.install_layout.build_log_path(self.spec) install(log_path, log_install_path) + #Update the database once we know install successful + spack.install_layout.add_to_database(self.spec) + # On successful install, remove the stage. 
if not keep_stage: self.stage.destroy() diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index e1fbb844234..df74b6064e6 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -427,7 +427,6 @@ def __init__(self, spec_like, *dep_like, **kwargs): spec = dep if isinstance(dep, Spec) else Spec(dep) self._add_dependency(spec) - # # Private routines here are called by the parser when building a spec. # @@ -640,7 +639,10 @@ def prefix(self): def dag_hash(self, length=None): - """Return a hash of the entire spec DAG, including connectivity.""" + """ + Return a hash of the entire spec DAG, including connectivity. + Stores the hash iff the spec is concrete. + """ yaml_text = yaml.dump( self.to_node_dict(), default_flow_style=True, width=sys.maxint) sha = hashlib.sha1(yaml_text) @@ -710,7 +712,7 @@ def from_yaml(stream): try: yfile = yaml.load(stream) except MarkedYAMLError, e: - raise SpackYAMLError("error parsing YMAL spec:", str(e)) + raise SpackYAMLError("error parsing YAML spec:", str(e)) for node in yfile['spec']: name = next(iter(node)) From 55f68bb2b05322297c3fc4d9fe265870b9385d4c Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Fri, 21 Aug 2015 13:04:27 -0700 Subject: [PATCH 006/121] Added hashes to the database --- lib/spack/spack/database.py | 31 +++++++++++++++++++------------ 1 file changed, 19 insertions(+), 12 deletions(-) diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index 8ae71a43853..4646530d520 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -48,11 +48,15 @@ def __init__(self,file_name="specDB.yaml"): """ Create an empty Database Location defaults to root/specDB.yaml + The individual data are dicts containing + spec: the top level spec of a package + path: the path to the install of that package + dep_hash: a hash of the dependence DAG for that package """ self.file_name = file_name self.data = [] - + def from_yaml(self,stream): """ Fill database from YAML @@ -69,10 +73,11 
@@ def from_yaml(self,stream): for sp in file['database']: spec = Spec.from_node_dict(sp['spec']) path = sp['path'] - db_entry = {'spec': spec, 'path': path} + dep_hash = sp['hash'] + db_entry = {'spec': spec, 'path': path, 'hash':dep_hash} self.data.append(db_entry) - + @staticmethod def read_database(root): """Create a Database from the data in the standard location""" @@ -87,7 +92,7 @@ def read_database(root): return database - + def write_database_to_yaml(self,stream): """ Replace each spec with its dict-node form @@ -97,7 +102,8 @@ def write_database_to_yaml(self,stream): for sp in self.data: node = {} node['spec']=Spec.to_node_dict(sp['spec']) - node['spec'][sp['spec'].name]['hash']=sp['spec'].dag_hash() +# node['spec'][sp['spec'].name]['hash']=sp['spec'].dag_hash() + node['hash']=sp['hash'] node['path']=sp['path'] node_list.append(node) return yaml.dump({ 'database' : node_list}, @@ -121,13 +127,14 @@ def add(root, spec, path): TODO: Caching databases """ database = Database.read_database(root) - - spec_and_path = {} - spec_and_path['spec']=spec - spec_and_path['path']=path - - database.data.append(spec_and_path) - + + sph = {} + sph['spec']=spec + sph['path']=path + sph['hash']=spec.dag_hash() + + database.data.append(sph) + database.write(root) From fb1874165b420c5a8866bec7ac87a98e97f2b670 Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Fri, 21 Aug 2015 16:42:12 -0700 Subject: [PATCH 007/121] Eliminated all calls that relied on finding all packages in the opt directory Replaced them all with references to the database Implemented caching in the database. The database now only re-reads data if the database file exists and was changed since this file last wrote to it. Added the installed_db field to the spack instance Left the call to all_specs from testdirectory_layout.py for now. 
--- lib/spack/spack/__init__.py | 6 ++ lib/spack/spack/cmd/__init__.py | 2 +- lib/spack/spack/cmd/deactivate.py | 2 +- lib/spack/spack/cmd/extensions.py | 2 +- lib/spack/spack/cmd/find.py | 4 +- lib/spack/spack/cmd/module.py | 4 +- lib/spack/spack/cmd/uninstall.py | 2 +- lib/spack/spack/database.py | 142 ++++++++++++++++++++-------- lib/spack/spack/directory_layout.py | 6 -- lib/spack/spack/package.py | 6 +- lib/spack/spack/packages.py | 42 -------- 11 files changed, 122 insertions(+), 96 deletions(-) diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index caa09eb6e00..4d10bc2da65 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -53,6 +53,12 @@ packages_path = join_path(var_path, "packages") db = PackageDB(packages_path) +# +# Set up the installed packages database +# +from spack.database import Database +installed_db = Database(install_path) + # # Paths to mock files for testing. # diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index b96ac5af510..fd3ef3ed270 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -124,7 +124,7 @@ def elide_list(line_list, max_num=10): def disambiguate_spec(spec): - matching_specs = spack.db.get_installed(spec) + matching_specs = spack.installed_db.get_installed(spec) if not matching_specs: tty.die("Spec '%s' matches no installed packages." 
% spec) diff --git a/lib/spack/spack/cmd/deactivate.py b/lib/spack/spack/cmd/deactivate.py index e44be41029d..1f0e303cdf2 100644 --- a/lib/spack/spack/cmd/deactivate.py +++ b/lib/spack/spack/cmd/deactivate.py @@ -54,7 +54,7 @@ def deactivate(parser, args): if args.all: if pkg.extendable: tty.msg("Deactivating all extensions of %s" % pkg.spec.short_spec) - ext_pkgs = spack.db.installed_extensions_for(spec) + ext_pkgs = spack.installed_db.installed_extensions_for(spec) for ext_pkg in ext_pkgs: ext_pkg.spec.normalize() diff --git a/lib/spack/spack/cmd/extensions.py b/lib/spack/spack/cmd/extensions.py index fc8e6842c3b..e919b1c4fba 100644 --- a/lib/spack/spack/cmd/extensions.py +++ b/lib/spack/spack/cmd/extensions.py @@ -80,7 +80,7 @@ def extensions(parser, args): colify(ext.name for ext in extensions) # List specs of installed extensions. - installed = [s.spec for s in spack.db.installed_extensions_for(spec)] + installed = [s.spec for s in spack.installed_db.installed_extensions_for(spec)] print if not installed: tty.msg("None installed.") diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py index 3c993990b14..6f9072e3116 100644 --- a/lib/spack/spack/cmd/find.py +++ b/lib/spack/spack/cmd/find.py @@ -138,9 +138,9 @@ def find(parser, args): # Get all the specs the user asked for if not query_specs: - specs = set(spack.db.installed_package_specs()) + specs = set(spack.installed_db.installed_package_specs()) else: - results = [set(spack.db.get_installed(qs)) for qs in query_specs] + results = [set(spack.installed_db.get_installed(qs)) for qs in query_specs] specs = set.union(*results) if not args.mode: diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py index 34f0855a50b..215d877bd0a 100644 --- a/lib/spack/spack/cmd/module.py +++ b/lib/spack/spack/cmd/module.py @@ -65,7 +65,7 @@ def module_find(mtype, spec_array): tty.die("You can only pass one spec.") spec = specs[0] - specs = [s for s in spack.db.installed_package_specs() if 
s.satisfies(spec)] + specs = [s for s in spack.installed_db.installed_package_specs() if s.satisfies(spec)] if len(specs) == 0: tty.die("No installed packages match spec %s" % spec) @@ -86,7 +86,7 @@ def module_find(mtype, spec_array): def module_refresh(): """Regenerate all module files for installed packages known to spack (some packages may no longer exist).""" - specs = [s for s in spack.db.installed_known_package_specs()] + specs = [s for s in spack.installed_db.installed_known_package_specs()] for name, cls in module_types.items(): tty.msg("Regenerating %s module files." % name) diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py index aa62510fede..4870712eb69 100644 --- a/lib/spack/spack/cmd/uninstall.py +++ b/lib/spack/spack/cmd/uninstall.py @@ -59,7 +59,7 @@ def uninstall(parser, args): # Fail and ask user to be unambiguous if it doesn't pkgs = [] for spec in specs: - matching_specs = spack.db.get_installed(spec) + matching_specs = spack.installed_db.get_installed(spec) if not args.all and len(matching_specs) > 1: tty.error("%s matches multiple packages:" % spec) print diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index 4646530d520..d027db312ff 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -28,6 +28,9 @@ import glob import imp +import time +import copy + from external import yaml from external.yaml.error import MarkedYAMLError @@ -43,8 +46,18 @@ from spack.util.naming import mod_to_class, validate_module_name +def _autospec(function): + """Decorator that automatically converts the argument of a single-arg + function to a Spec.""" + def converter(self, spec_like, **kwargs): + if not isinstance(spec_like, spack.spec.Spec): + spec_like = spack.spec.Spec(spec_like) + return function(self, spec_like, **kwargs) + return converter + + class Database(object): - def __init__(self,file_name="specDB.yaml"): + def __init__(self,root,file_name="specDB.yaml"): """ Create an empty 
Database Location defaults to root/specDB.yaml @@ -53,13 +66,16 @@ def __init__(self,file_name="specDB.yaml"): path: the path to the install of that package dep_hash: a hash of the dependence DAG for that package """ + self.root = root self.file_name = file_name + self.file_path = join_path(self.root,self.file_name) self.data = [] + self.last_write_time = 0 def from_yaml(self,stream): """ - Fill database from YAML + Fill database from YAML, do not maintain old data Translate the spec portions from node-dict form to spec from """ try: @@ -70,6 +86,7 @@ def from_yaml(self,stream): if file==None: return + self.data = [] for sp in file['database']: spec = Spec.from_node_dict(sp['spec']) path = sp['path'] @@ -78,19 +95,14 @@ def from_yaml(self,stream): self.data.append(db_entry) - @staticmethod - def read_database(root): - """Create a Database from the data in the standard location""" - database = Database() - full_path = join_path(root,database.file_name) - if os.path.isfile(full_path): - with open(full_path,'r') as f: - database.from_yaml(f) + def read_database(self): + """Reread Database from the data in the set location""" + if os.path.isfile(self.file_path): + with open(self.file_path,'r') as f: + self.from_yaml(f) else: - with open(full_path,'w+') as f: - database.from_yaml(f) - - return database + #The file doesn't exist, construct empty data. 
+ self.data = [] def write_database_to_yaml(self,stream): @@ -110,48 +122,104 @@ def write_database_to_yaml(self,stream): stream=stream, default_flow_style=False) - def write(self,root): + def write(self): """Write the database to the standard location""" - full_path = join_path(root,self.file_name) - #test for file existence - with open(full_path,'w') as f: + #creates file if necessary + with open(self.file_path,'w') as f: + self.last_write_time = int(time.time()) self.write_database_to_yaml(f) - @staticmethod - def add(root, spec, path): - """Read the database from the standard location + def is_dirty(self): + """ + Returns true iff the database file exists + and was most recently written to by another spack instance. + """ + return (os.path.isfile(self.file_path) and (os.path.getmtime(self.file_path) > self.last_write_time)) + + +# @_autospec + def add(self, spec, path): + """Re-read the database from the set location if data is dirty Add the specified entry as a dict Write the database back to memory - - TODO: Caching databases """ - database = Database.read_database(root) + if self.is_dirty(): + self.read_database() sph = {} sph['spec']=spec sph['path']=path sph['hash']=spec.dag_hash() - database.data.append(sph) + self.data.append(sph) - database.write(root) + self.write() - @staticmethod - def remove(root, spec): + @_autospec + def remove(self, spec): """ - Reads the database from the standard location + Re-reads the database from the set location if data is dirty Searches for and removes the specified spec Writes the database back to memory - - TODO: Caching databases """ - database = Database.read_database(root) + if self.is_dirty(): + self.read_database() - for sp in database.data: - #This requires specs w/o dependencies, is that sustainable? 
- if sp['spec'] == spec: - database.data.remove(sp) + for sp in self.data: + + if sp['hash'] == spec.dag_hash() and sp['spec'] == Spec.from_node_dict(spec.to_node_dict()): + self.data.remove(sp) + + self.write() + + + @_autospec + def get_installed(self, spec): + """ + Get all the installed specs that satisfy the provided spec constraint + """ + return [s for s in self.installed_package_specs() if s.satisfies(spec)] + + + @_autospec + def installed_extensions_for(self, extendee_spec): + """ + Return the specs of all packages that extend + the given spec + """ + for s in self.installed_package_specs(): + try: + if s.package.extends(extendee_spec): + yield s.package + except UnknownPackageError, e: + continue + #skips unknown packages + #TODO: conditional way to do this instead of catching exceptions + + + def installed_package_specs(self): + """ + Read installed package names from the database + and return their specs + """ + if self.is_dirty(): + self.read_database() + + installed = [] + for sph in self.data: + sph['spec'].normalize() + sph['spec'].concretize() + installed.append(sph['spec']) + return installed + + + def installed_known_package_specs(self): + """ + Read installed package names from the database. 
+ Return only the specs for which the package is known + to this version of spack + """ + return [s for s in self.installed_package_specs() if spack.db.exists(s.name)] - database.write(root) diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index 0bbe6c9d85e..6dc1b0e5508 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -153,7 +153,6 @@ def remove_install_directory(self, spec): os.rmdir(path) path = os.path.dirname(path) - Database.remove(self.root,spec) class YamlDirectoryLayout(DirectoryLayout): @@ -266,11 +265,6 @@ def create_install_directory(self, spec): self.write_spec(spec, spec_file_path) - def add_to_database(self, spec): - """Simply adds a spec to the database""" - Database.add(self.root, spec, self.path_for_spec(spec)) - - @memoized def all_specs(self): if not os.path.isdir(self.root): diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index a425c555a27..acf558d6399 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -565,7 +565,7 @@ def installed_dependents(self): """Return a list of the specs of all installed packages that depend on this one.""" dependents = [] - for spec in spack.db.installed_package_specs(): + for spec in spack.installed_db.installed_package_specs(): if self.name == spec.name: continue for dep in spec.traverse(): @@ -601,7 +601,7 @@ def url_version(self, version): def remove_prefix(self): """Removes the prefix for a package along with any empty parent directories.""" spack.install_layout.remove_install_directory(self.spec) - + spack.installed_db.remove(self.spec) def do_fetch(self): """Creates a stage directory and downloads the taball for this package. 
@@ -818,7 +818,7 @@ def real_work(): install(log_path, log_install_path) #Update the database once we know install successful - spack.install_layout.add_to_database(self.spec) + spack.installed_db.add(self.spec, spack.install_layout.path_for_spec(self.spec)) # On successful install, remove the stage. if not keep_stage: diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py index adfbc26c1d4..2e3e95ca407 100644 --- a/lib/spack/spack/packages.py +++ b/lib/spack/spack/packages.py @@ -95,12 +95,6 @@ def purge(self): self.instances.clear() - @_autospec - def get_installed(self, spec): - """Get all the installed specs that satisfy the provided spec constraint.""" - return [s for s in self.installed_package_specs() if s.satisfies(spec)] - - @_autospec def providers_for(self, vpkg_spec): if self.provider_index is None: @@ -117,19 +111,6 @@ def extensions_for(self, extendee_spec): return [p for p in self.all_packages() if p.extends(extendee_spec)] - @_autospec - def installed_extensions_for(self, extendee_spec): - for s in self.installed_package_specs(): - try: - if s.package.extends(extendee_spec): - yield s.package - except UnknownPackageError, e: - # Skip packages we know nothing about - continue - # TODO: add some conditional way to do this instead of - # catching exceptions. - - def dirname_for_package_name(self, pkg_name): """Get the directory name for a particular package. This is the directory that contains its package.py file.""" @@ -150,29 +131,6 @@ def filename_for_package_name(self, pkg_name): return join_path(pkg_dir, _package_file_name) - def installed_package_specs(self): - """Read installed package names straight from the install directory - layout. - """ - # Get specs from the directory layout but ensure that they're - # all normalized properly. 
- installed = [] - for spec in spack.install_layout.all_specs(): - spec.normalize() - installed.append(spec) - return installed - - - def installed_known_package_specs(self): - """Read installed package names straight from the install - directory layout, but return only specs for which the - package is known to this version of spack. - """ - for spec in spack.install_layout.all_specs(): - if self.exists(spec.name): - yield spec - - @memoized def all_package_names(self): """Generator function for all packages. This looks for From 689dbb8fb6ac9ed8e9e0c16d33d623e1f0a3a72a Mon Sep 17 00:00:00 2001 From: "Gregory L. Lee" Date: Mon, 24 Aug 2015 08:51:41 -0700 Subject: [PATCH 008/121] use filter_file instead of patch --- var/spack/packages/py-numpy/gnu_fcompiler.patch | 11 ----------- var/spack/packages/py-numpy/package.py | 9 +++++++-- 2 files changed, 7 insertions(+), 13 deletions(-) delete mode 100644 var/spack/packages/py-numpy/gnu_fcompiler.patch diff --git a/var/spack/packages/py-numpy/gnu_fcompiler.patch b/var/spack/packages/py-numpy/gnu_fcompiler.patch deleted file mode 100644 index e4e882b9dfa..00000000000 --- a/var/spack/packages/py-numpy/gnu_fcompiler.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- a/numpy/distutils/fcompiler/gnu.py.orig 2015-08-17 11:31:52.000000000 -0700 -+++ b/numpy/distutils/fcompiler/gnu.py 2015-08-17 11:32:48.000000000 -0700 -@@ -252,7 +252,7 @@ - # GNU Fortran 95 (GCC) 4.2.0 20060218 (experimental) - # GNU Fortran (GCC) 4.3.0 20070316 (experimental) - -- possible_executables = ['gfortran', 'f95'] -+ possible_executables = ['fc', 'f95'] - executables = { - 'version_cmd' : ["", "--version"], - 'compiler_f77' : [None, "-Wall", "-g", "-ffixed-form", diff --git a/var/spack/packages/py-numpy/package.py b/var/spack/packages/py-numpy/package.py index 76fdd48da7d..efa109a3e9c 100644 --- a/var/spack/packages/py-numpy/package.py +++ b/var/spack/packages/py-numpy/package.py @@ -8,13 +8,18 @@ class PyNumpy(Package): version('1.9.1', 
'78842b73560ec378142665e712ae4ad9') version('1.9.2', 'a1ed53432dbcd256398898d35bc8e645') - patch('gnu_fcompiler.patch', when='%gcc') - extends('python') depends_on('py-nose') depends_on('netlib-blas+fpic') depends_on('netlib-lapack+shared') + def patch(self): + filter_file( + "possible_executables = \['(gfortran|g77|ifort|efl)", + "possible_executables = ['fc", + "numpy/distutils/fcompiler/gnu.py", + "numpy/distutils/fcompiler/intel.py") + def install(self, spec, prefix): with open('site.cfg', 'w') as f: f.write('[DEFAULT]\n') From 1c9c5a385d4bb24c081a4297daf5dcad08a3c706 Mon Sep 17 00:00:00 2001 From: "Gregory L. Lee" Date: Tue, 25 Aug 2015 11:38:39 -0700 Subject: [PATCH 009/121] added graphviz and dependencies --- var/spack/packages/ghostscript/package.py | 17 +++++++++++++++++ var/spack/packages/graphviz/package.py | 21 +++++++++++++++++++++ var/spack/packages/pkg-config/package.py | 17 +++++++++++++++++ 3 files changed, 55 insertions(+) create mode 100644 var/spack/packages/ghostscript/package.py create mode 100644 var/spack/packages/graphviz/package.py create mode 100644 var/spack/packages/pkg-config/package.py diff --git a/var/spack/packages/ghostscript/package.py b/var/spack/packages/ghostscript/package.py new file mode 100644 index 00000000000..0ab49d425f7 --- /dev/null +++ b/var/spack/packages/ghostscript/package.py @@ -0,0 +1,17 @@ +from spack import * + +class Ghostscript(Package): + """an interpreter for the PostScript language and for PDF. 
""" + homepage = "http://ghostscript.com/" + url = "http://downloads.ghostscript.com/public/ghostscript-9.16.tar.gz" + + version('9.16', '829319325bbdb83f5c81379a8f86f38f') + + parallel = False + + def install(self, spec, prefix): + configure("--prefix=%s" %prefix, "--enable-shared") + + make() + make("install") + diff --git a/var/spack/packages/graphviz/package.py b/var/spack/packages/graphviz/package.py new file mode 100644 index 00000000000..7af7da1881a --- /dev/null +++ b/var/spack/packages/graphviz/package.py @@ -0,0 +1,21 @@ +from spack import * + +class Graphviz(Package): + """Graph Visualization Software""" + homepage = "http://www.graphviz.org" + url = "http://www.graphviz.org/pub/graphviz/stable/SOURCES/graphviz-2.38.0.tar.gz" + + version('2.38.0', '5b6a829b2ac94efcd5fa3c223ed6d3ae') + + parallel = False + + depends_on("swig") + depends_on("python") + depends_on("ghostscript") + + def install(self, spec, prefix): + configure("--prefix=%s" %prefix) + + make() + make("install") + diff --git a/var/spack/packages/pkg-config/package.py b/var/spack/packages/pkg-config/package.py new file mode 100644 index 00000000000..9964c6ce346 --- /dev/null +++ b/var/spack/packages/pkg-config/package.py @@ -0,0 +1,17 @@ +from spack import * + +class PkgConfig(Package): + """pkg-config is a helper tool used when compiling applications and libraries""" + homepage = "http://www.freedesktop.org/wiki/Software/pkg-config/" + url = "http://pkgconfig.freedesktop.org/releases/pkg-config-0.28.tar.gz" + + version('0.28', 'aa3c86e67551adc3ac865160e34a2a0d') + + parallel = False + + def install(self, spec, prefix): + configure("--prefix=%s" %prefix, "--enable-shared") + + make() + make("install") + From 2c3e7a6f87ef5f8b5ba154b5a8b7da7a119f4a67 Mon Sep 17 00:00:00 2001 From: "Gregory L. 
Lee" Date: Tue, 25 Aug 2015 11:48:12 -0700 Subject: [PATCH 010/121] added graphviz dependency and dysect variant to STAT --- var/spack/packages/stat/package.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/var/spack/packages/stat/package.py b/var/spack/packages/stat/package.py index c40d65ae783..5d81e627317 100644 --- a/var/spack/packages/stat/package.py +++ b/var/spack/packages/stat/package.py @@ -9,17 +9,20 @@ class Stat(Package): version('2.1.0', 'ece26beaf057aa9134d62adcdda1ba91') version('2.0.0', 'c7494210b0ba26b577171b92838e1a9b') + variant('dysect', default=False, description="enable DySectAPI") + depends_on('libelf') depends_on('libdwarf') depends_on('dyninst') depends_on('graphlib') + depends_on('graphviz') depends_on('launchmon') depends_on('mrnet') patch('configure_mpicxx.patch', when='@2.1.0') def install(self, spec, prefix): - configure( + configure_args = [ "--enable-gui", "--prefix=%s" % prefix, "--disable-examples", # Examples require MPI: avoid this dependency. @@ -27,7 +30,11 @@ def install(self, spec, prefix): "--with-mrnet=%s" % spec['mrnet'].prefix, "--with-graphlib=%s" % spec['graphlib'].prefix, "--with-stackwalker=%s" % spec['dyninst'].prefix, - "--with-libdwarf=%s" % spec['libdwarf'].prefix) + "--with-libdwarf=%s" % spec['libdwarf'].prefix + ] + if '+dysect' in spec: + configure_args.append('--enable-dysectapi') + configure(*configure_args) make(parallel=False) make("install") From 4a2bd1753a3c0b96c0b38ee9ec909cbf98308125 Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Tue, 25 Aug 2015 15:11:18 -0700 Subject: [PATCH 011/121] Added dependency indices to database, ensuring correctly reconstructed specs from database Began work on file locking, currently commented out. 
--- lib/spack/spack/database.py | 102 +++++++++++++++++++++++++++--------- lib/spack/spack/package.py | 1 + 2 files changed, 78 insertions(+), 25 deletions(-) diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index d027db312ff..7e2c3ac0796 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -30,6 +30,7 @@ import time import copy +import errno from external import yaml from external.yaml.error import MarkedYAMLError @@ -69,6 +70,10 @@ def __init__(self,root,file_name="specDB.yaml"): self.root = root self.file_name = file_name self.file_path = join_path(self.root,self.file_name) + + self.lock_name = "db_lock" + self.lock_path = join_path(self.root,self.lock_name) + self.data = [] self.last_write_time = 0 @@ -86,17 +91,38 @@ def from_yaml(self,stream): if file==None: return - self.data = [] - for sp in file['database']: + data = {} + for index, sp in file['database'].items(): spec = Spec.from_node_dict(sp['spec']) + deps = sp['dependency_indices'] path = sp['path'] dep_hash = sp['hash'] - db_entry = {'spec': spec, 'path': path, 'hash':dep_hash} - self.data.append(db_entry) + db_entry = {'deps':deps, 'spec': spec, 'path': path, 'hash':dep_hash} + data[index] = db_entry + + for sph in data.values(): + for idx in sph['deps']: + sph['spec'].dependencies[data[idx]['spec'].name] = data[idx]['spec'] + + self.data = data.values() def read_database(self): - """Reread Database from the data in the set location""" + """ + Re-read Database from the data in the set location + If the cache is fresh, return immediately. + Implemented with mkdir locking for the database file. + """ + if not self.is_dirty(): + return + """ + while True: + try: + os.mkdir(self.lock_path) + break + except OSError as err: + pass + """ if os.path.isfile(self.file_path): with open(self.file_path,'r') as f: self.from_yaml(f) @@ -104,6 +130,8 @@ def read_database(self): #The file doesn't exist, construct empty data. 
self.data = [] +# os.rmdir(self.lock_path) + def write_database_to_yaml(self,stream): """ @@ -111,24 +139,54 @@ def write_database_to_yaml(self,stream): Then stream all data to YAML """ node_list = [] - for sp in self.data: + spec_list = [sph['spec'] for sph in self.data] + + for sph in self.data: node = {} - node['spec']=Spec.to_node_dict(sp['spec']) -# node['spec'][sp['spec'].name]['hash']=sp['spec'].dag_hash() - node['hash']=sp['hash'] - node['path']=sp['path'] + deps = [] + for name,spec in sph['spec'].dependencies.items(): + deps.append(spec_list.index(spec)) + node['spec']=Spec.to_node_dict(sph['spec']) + node['hash']=sph['hash'] + node['path']=sph['path'] + node['dependency_indices']=deps node_list.append(node) - return yaml.dump({ 'database' : node_list}, + + node_dict = dict(enumerate(node_list)) + return yaml.dump({ 'database' : node_dict}, stream=stream, default_flow_style=False) def write(self): - """Write the database to the standard location""" - #creates file if necessary + """ + Write the database to the standard location + Implements mkdir locking for the database file + """ + """ + while True: + try: + os.mkdir(self.lock_path) + break + except OSError as err: + pass + """ with open(self.file_path,'w') as f: self.last_write_time = int(time.time()) self.write_database_to_yaml(f) + # os.rmdir(self.lock_path) + + + def get_index_of(self, spec): + """ + Returns the index of a spec in the database + If unable to find the spec it returns -1 + """ + for index, sph in enumerate(self.data): + if sph['spec'] == spec: + return index + return -1 + def is_dirty(self): """ @@ -140,12 +198,11 @@ def is_dirty(self): # @_autospec def add(self, spec, path): - """Re-read the database from the set location if data is dirty + """Read the database from the set location Add the specified entry as a dict Write the database back to memory """ - if self.is_dirty(): - self.read_database() + self.read_database() sph = {} sph['spec']=spec @@ -160,16 +217,14 @@ def add(self, 
spec, path): @_autospec def remove(self, spec): """ - Re-reads the database from the set location if data is dirty + Reads the database from the set location Searches for and removes the specified spec Writes the database back to memory """ - if self.is_dirty(): - self.read_database() + self.read_database() for sp in self.data: - - if sp['hash'] == spec.dag_hash() and sp['spec'] == Spec.from_node_dict(spec.to_node_dict()): + if sp['hash'] == spec.dag_hash() and sp['spec'] == spec: self.data.remove(sp) self.write() @@ -204,13 +259,10 @@ def installed_package_specs(self): Read installed package names from the database and return their specs """ - if self.is_dirty(): - self.read_database() + self.read_database() installed = [] for sph in self.data: - sph['spec'].normalize() - sph['spec'].concretize() installed.append(sph['spec']) return installed diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index acf558d6399..929e0c086c2 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -557,6 +557,7 @@ def virtual_dependencies(self, visited=None): @property def installed(self): + print self.prefix return os.path.isdir(self.prefix) From e32c59f805c4e8d9cb23ce9fcf2edcf571ce3949 Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Tue, 25 Aug 2015 15:32:45 -0700 Subject: [PATCH 012/121] Fixed file locking. Fix is slightly ugly (lock integer added) but it gets the job done It avoids having to spin simply on the OSError. 
--- lib/spack/spack/database.py | 51 +++++++++++++++++++++++-------------- 1 file changed, 32 insertions(+), 19 deletions(-) diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index 7e2c3ac0796..5e8bb172b86 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -115,22 +115,29 @@ def read_database(self): """ if not self.is_dirty(): return - """ - while True: + + lock=0 + while lock==0: try: os.mkdir(self.lock_path) - break + lock=1 except OSError as err: pass - """ - if os.path.isfile(self.file_path): - with open(self.file_path,'r') as f: - self.from_yaml(f) - else: - #The file doesn't exist, construct empty data. - self.data = [] -# os.rmdir(self.lock_path) + #The try statement ensures that a failure won't leave the + #database locked to other processes. + try: + if os.path.isfile(self.file_path): + with open(self.file_path,'r') as f: + self.from_yaml(f) + else: + #The file doesn't exist, construct empty data. + self.data = [] + except: + os.rmdir(self.lock_path) + raise + + os.rmdir(self.lock_path) def write_database_to_yaml(self,stream): @@ -162,19 +169,25 @@ def write(self): Write the database to the standard location Implements mkdir locking for the database file """ - """ - while True: + lock=0 + while lock==0: try: os.mkdir(self.lock_path) - break + lock=1 except OSError as err: pass - """ - with open(self.file_path,'w') as f: - self.last_write_time = int(time.time()) - self.write_database_to_yaml(f) - # os.rmdir(self.lock_path) + #The try statement ensures that a failure won't leave the + #database locked to other processes. 
+ try: + with open(self.file_path,'w') as f: + self.last_write_time = int(time.time()) + self.write_database_to_yaml(f) + except: + os.rmdir(self.lock_path) + raise + + os.rmdir(self.lock_path) def get_index_of(self, spec): From ce8df65d7b9162b71be88e6268ee5172deab4cd9 Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Tue, 25 Aug 2015 16:28:55 -0700 Subject: [PATCH 013/121] Eliminated unnecessary differences in pull request --- lib/spack/spack/directory_layout.py | 2 -- lib/spack/spack/package.py | 1 - lib/spack/spack/spec.py | 1 + 3 files changed, 1 insertion(+), 3 deletions(-) diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index 6dc1b0e5508..e61929d8fdd 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -37,7 +37,6 @@ from spack.spec import Spec from spack.error import SpackError -from spack.database import Database def _check_concrete(spec): @@ -154,7 +153,6 @@ def remove_install_directory(self, spec): path = os.path.dirname(path) - class YamlDirectoryLayout(DirectoryLayout): """Lays out installation directories like this:: / diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 929e0c086c2..acf558d6399 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -557,7 +557,6 @@ def virtual_dependencies(self, visited=None): @property def installed(self): - print self.prefix return os.path.isdir(self.prefix) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index df74b6064e6..8050b73b9eb 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -427,6 +427,7 @@ def __init__(self, spec_like, *dep_like, **kwargs): spec = dep if isinstance(dep, Spec) else Spec(dep) self._add_dependency(spec) + # # Private routines here are called by the parser when building a spec. 
# From 9345e7877983d1bde8a4aefbecdc4a8cab181186 Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Tue, 25 Aug 2015 16:31:09 -0700 Subject: [PATCH 014/121] Fixed inaccurate comment in spec.py --- lib/spack/spack/spec.py | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 8050b73b9eb..cde2e168a07 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -642,7 +642,6 @@ def prefix(self): def dag_hash(self, length=None): """ Return a hash of the entire spec DAG, including connectivity. - Stores the hash iff the spec is concrete. """ yaml_text = yaml.dump( self.to_node_dict(), default_flow_style=True, width=sys.maxint) From babbc5bb4581e7a1912c2c6fe582479d6674f3d4 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 26 Aug 2015 23:48:20 -0700 Subject: [PATCH 015/121] Minor fix-ups for PIDX --- var/spack/packages/pidx/package.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/var/spack/packages/pidx/package.py b/var/spack/packages/pidx/package.py index 9b63a84a0f0..81aed62fb10 100644 --- a/var/spack/packages/pidx/package.py +++ b/var/spack/packages/pidx/package.py @@ -1,10 +1,13 @@ from spack import * class Pidx(Package): - """PIDX Parallel I/O Library""" + """PIDX Parallel I/O Library. + + PIDX is an efficient parallel I/O library that reads and writes + multiresolution IDX data files. + """ homepage = "http://www.cedmav.com/pidx" - #url = "http://www.example.com/pidx-1.0.tar.gz" version('1.0', git='https://github.com/sci-visus/PIDX.git', commit='6afa1cf71d1c41263296dc049c8fabaf73c296da') From 6af49d41fdf5fc9802ef63942de74fcb79a8c1f6 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 24 Aug 2015 09:38:39 -0700 Subject: [PATCH 016/121] Add elfutils. 
--- var/spack/packages/elfutils/package.py | 26 ++++++++++++++++++++++++++ var/spack/packages/libelf/package.py | 2 ++ var/spack/packages/mpich/package.py | 1 - var/spack/packages/scorep/package.py | 15 +++++++-------- 4 files changed, 35 insertions(+), 9 deletions(-) create mode 100644 var/spack/packages/elfutils/package.py diff --git a/var/spack/packages/elfutils/package.py b/var/spack/packages/elfutils/package.py new file mode 100644 index 00000000000..926d2345846 --- /dev/null +++ b/var/spack/packages/elfutils/package.py @@ -0,0 +1,26 @@ +from spack import * + +class Elfutils(Package): + """elfutils is a collection of various binary tools such as + eu-objdump, eu-readelf, and other utilities that allow you to + inspect and manipulate ELF files. Refer to Table 5.Tools Included + in elfutils for Red Hat Developer for a complete list of binary + tools that are distributed with the Red Hat Developer Toolset + version of elfutils.""" + + homepage = "https://fedorahosted.org/elfutils/" + + version('0.163', + git='git://git.fedorahosted.org/git/elfutils.git', + tag='elfutils-0.163') + + provides('elf') + + def install(self, spec, prefix): + autoreconf = which('autoreconf') + autoreconf('-if') + + configure('--prefix=%s' % prefix, '--enable-maintainer-mode') + make() + make("install") + diff --git a/var/spack/packages/libelf/package.py b/var/spack/packages/libelf/package.py index bf2fefabd50..9338b8f393e 100644 --- a/var/spack/packages/libelf/package.py +++ b/var/spack/packages/libelf/package.py @@ -36,6 +36,8 @@ class Libelf(Package): version('0.8.13', '4136d7b4c04df68b686570afa26988ac') version('0.8.12', 'e21f8273d9f5f6d43a59878dc274fec7') + provides('elf') + def install(self, spec, prefix): configure("--prefix=" + prefix, "--enable-shared", diff --git a/var/spack/packages/mpich/package.py b/var/spack/packages/mpich/package.py index 6aa8f2eca4b..b6b2dfde214 100644 --- a/var/spack/packages/mpich/package.py +++ b/var/spack/packages/mpich/package.py @@ -38,7 +38,6 @@ 
class Mpich(Package): provides('mpi@:3', when='@3:') provides('mpi@:1', when='@1:') - def setup_dependent_environment(self, module, spec, dep_spec): """For dependencies, make mpicc's use spack wrapper.""" os.environ['MPICH_CC'] = 'cc' diff --git a/var/spack/packages/scorep/package.py b/var/spack/packages/scorep/package.py index 32a772e3dbe..f013bd1cbb7 100644 --- a/var/spack/packages/scorep/package.py +++ b/var/spack/packages/scorep/package.py @@ -1,11 +1,10 @@ # FIXME: Add copyright statement from spack import * -from contextlib import closing class Scorep(Package): - """The Score-P measurement infrastructure is a highly scalable and - easy-to-use tool suite for profiling, event tracing, and online + """The Score-P measurement infrastructure is a highly scalable and + easy-to-use tool suite for profiling, event tracing, and online analysis of HPC applications.""" # FIXME: add a proper url for your package's homepage here. @@ -20,7 +19,7 @@ class Scorep(Package): depends_on("mpi") depends_on("papi") # depends_on("otf2@1.2:1.2.1") # only Score-P 1.2.x - depends_on("otf2") + depends_on("otf2") depends_on("opari2") depends_on("cube@4.2:4.2.3") @@ -53,12 +52,12 @@ def install(self, spec, prefix): # Use a custom compiler configuration, otherwise the score-p # build system messes with spack's compiler settings. 
# Create these three files in the build directory - with closing(open("platform-backend-user-provided", "w")) as backend_file: + with open("platform-backend-user-provided", "w") as backend_file: backend_file.write(self.backend_user_provided) - with closing(open("platform-frontend-user-provided", "w")) as frontend_file: + with open("platform-frontend-user-provided", "w") as frontend_file: frontend_file.write(self.frontend_user_provided) - with closing(open("platform-mpi-user-provided", "w")) as mpi_file: - mpi_file.write(self.mpi_user_provided) + with open("platform-mpi-user-provided", "w") as mpi_file: + mpi_file.write(self.mpi_user_provided) configure_args = ["--prefix=%s" % prefix, "--with-custom-compilers", From c5c9ada7b0efd9cec5cf55c4cf71d76b186513ff Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 27 Aug 2015 02:04:58 -0700 Subject: [PATCH 017/121] Fix for GitHub #95 develop: compiler clang@unknown created for /usr/bin/clang-format https://github.com/scalability-llnl/spack/issues/95 --- lib/spack/spack/compiler.py | 24 +++++++++++++++++++++--- 1 file changed, 21 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py index 646050d267a..1e800a89795 100644 --- a/lib/spack/spack/compiler.py +++ b/lib/spack/spack/compiler.py @@ -227,14 +227,32 @@ def find(cls, *path): for d in dicts: all_keys.update(d) - compilers = [] + compilers = {} for k in all_keys: ver, pre, suf = k + + # Skip compilers with unknown version. + if ver == 'unknown': + continue + paths = tuple(pn[k] if k in pn else None for pn in dicts) spec = spack.spec.CompilerSpec(cls.name, ver) - compilers.append(cls(spec, *paths)) - return compilers + if ver in compilers: + prev = compilers[ver] + + # prefer the one with more compilers. 
+ prev_paths = [prev.cc, prev.cxx, prev.f77, prev.fc] + newcount = len([p for p in paths if p is not None]) + prevcount = len([p for p in prev_paths if p is not None]) + + # Don't add if it's not an improvement over prev compiler. + if newcount <= prevcount: + continue + + compilers[ver] = cls(spec, *paths) + + return list(compilers.values()) def __repr__(self): From f406fcb843edcea359e3fb6103eed560dadc1355 Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Mon, 31 Aug 2015 09:38:38 -0700 Subject: [PATCH 018/121] Fixed several issues from code review Most importantly wrote the Lock, Read_Lock_Instance, and Write_Lock_Instance classes in lock.py Updated the locking in database.py TODO: Lock on larger areas --- lib/spack/llnl/util/lock.py | 136 ++++++++++++++++++++++++++++++++++++ lib/spack/spack/database.py | 130 +++++++++++++--------------------- 2 files changed, 184 insertions(+), 82 deletions(-) create mode 100644 lib/spack/llnl/util/lock.py diff --git a/lib/spack/llnl/util/lock.py b/lib/spack/llnl/util/lock.py new file mode 100644 index 00000000000..05641475eda --- /dev/null +++ b/lib/spack/llnl/util/lock.py @@ -0,0 +1,136 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. 
+# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import os +import fcntl +import errno +import time +import socket + + +class Read_Lock_Instance(object): + """ + A context manager for getting shared access to the object lock + Arguments are lock and timeout (default 5 minutes) + """ + def __init__(self,lock,timeout = 300): + self._lock = lock + self._timeout = timeout + def __enter__(self): + self._lock.acquire_read(self._timeout) + def __exit__(self,type,value,traceback): + self._lock.release_read() + + +class Write_Lock_Instance(object): + """ + A context manager for getting exclusive access to the object lock + Arguments are lock and timeout (default 5 minutes) + """ + def __init__(self,lock,timeout = 300): + self._lock = lock + self._timeout = timeout + def __enter__(self): + self._lock.acquire_write(self._timeout) + def __exit__(self,type,value,traceback): + self._lock.release_write() + + +class Lock(object): + def __init__(self,file_path): + self._file_path = file_path + self._fd = os.open(file_path,os.O_RDWR) + self._reads = 0 + self._writes = 0 + + + def acquire_read(self,timeout): + """ + Implements recursive lock. 
If held in both read and write mode, + the write lock will be maintained until all locks are released + """ + if self._reads == 0 and self._writes == 0: + self._lock(fcntl.LOCK_SH,timeout) + self._reads += 1 + + + def acquire_write(self,timeout): + """ + Implements recursive lock + """ + if self._writes == 0: + self._lock(fcntl.LOCK_EX,timeout) + self._writes += 1 + + + def _lock(self,op,timeout): + """ + The timeout is implemented using nonblocking flock() + to avoid using signals for timing + Write locks store pid and host information to the lock file + Read locks do not store data + """ + total_time = 0 + while total_time < timeout: + try: + fcntl.flock(self._fd, op | fcntl.LOCK_NB) + if op == fcntl.LOCK_EX: + with open(self._file_path,'w') as f: + f.write("pid = "+str(os.getpid())+", host = "+socket.getfqdn()) + return + except IOError as error: + if error.errno == errno.EAGAIN or error.errno == EACCES: + pass + else: + raise + time.sleep(0.1) + total_time += 0.1 + + + def release_read(self): + """ + Assert there is a lock of the right type to release, recursive lock + """ + assert self._reads > 0 + if self._reads == 1 and self._writes == 0: + self._unlock() + self._reads -= 1 + + + def release_write(self): + """ + Assert there is a lock of the right type to release, recursive lock + """ + assert self._writes > 0 + if self._writes == 1 and self._reads == 0: + self._unlock() + self._writes -= 1 + + + def _unlock(self): + """ + Releases the lock regardless of mode. Note that read locks may be + masquerading as write locks at times, but this removes either. 
+ """ + fcntl.flock(self._fd,fcntl.LOCK_UN) diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index 5e8bb172b86..6a429b68a99 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -38,6 +38,7 @@ import llnl.util.tty as tty from llnl.util.filesystem import join_path from llnl.util.lang import * +from llnl.util.lock import * import spack.error import spack.spec @@ -58,7 +59,7 @@ def converter(self, spec_like, **kwargs): class Database(object): - def __init__(self,root,file_name="specDB.yaml"): + def __init__(self,root,file_name="_index.yaml"): """ Create an empty Database Location defaults to root/specDB.yaml @@ -67,28 +68,31 @@ def __init__(self,root,file_name="specDB.yaml"): path: the path to the install of that package dep_hash: a hash of the dependence DAG for that package """ - self.root = root - self.file_name = file_name - self.file_path = join_path(self.root,self.file_name) + self._root = root + self._file_name = file_name + self._file_path = join_path(self._root,self._file_name) - self.lock_name = "db_lock" - self.lock_path = join_path(self.root,self.lock_name) + self._lock_name = "_db_lock" + self._lock_path = join_path(self._root,self._lock_name) + if not os.path.exists(self._lock_path): + open(self._lock_path,'w').close() + self.lock = Lock(self._lock_path) - self.data = [] - self.last_write_time = 0 + self._data = [] + self._last_write_time = 0 - def from_yaml(self,stream): + def _read_from_yaml(self,stream): """ Fill database from YAML, do not maintain old data - Translate the spec portions from node-dict form to spec from + Translate the spec portions from node-dict form to spec form """ try: file = yaml.load(stream) except MarkedYAMLError, e: raise SpackYAMLError("error parsing YAML database:", str(e)) - if file==None: + if file is None: return data = {} @@ -104,51 +108,34 @@ def from_yaml(self,stream): for idx in sph['deps']: sph['spec'].dependencies[data[idx]['spec'].name] = data[idx]['spec'] - self.data = 
data.values() + self._data = data.values() def read_database(self): """ Re-read Database from the data in the set location If the cache is fresh, return immediately. - Implemented with mkdir locking for the database file. """ if not self.is_dirty(): return - lock=0 - while lock==0: - try: - os.mkdir(self.lock_path) - lock=1 - except OSError as err: - pass - - #The try statement ensures that a failure won't leave the - #database locked to other processes. - try: - if os.path.isfile(self.file_path): - with open(self.file_path,'r') as f: - self.from_yaml(f) - else: + if os.path.isfile(self._file_path): + with open(self._file_path,'r') as f: + self._read_from_yaml(f) + else: #The file doesn't exist, construct empty data. - self.data = [] - except: - os.rmdir(self.lock_path) - raise - - os.rmdir(self.lock_path) + self._data = [] - def write_database_to_yaml(self,stream): + def _write_database_to_yaml(self,stream): """ Replace each spec with its dict-node form Then stream all data to YAML """ node_list = [] - spec_list = [sph['spec'] for sph in self.data] + spec_list = [sph['spec'] for sph in self._data] - for sph in self.data: + for sph in self._data: node = {} deps = [] for name,spec in sph['spec'].dependencies.items(): @@ -167,46 +154,23 @@ def write_database_to_yaml(self,stream): def write(self): """ Write the database to the standard location - Implements mkdir locking for the database file + Everywhere that the database is written it is read + within the same lock, so there is no need to refresh + the database within write() """ - lock=0 - while lock==0: - try: - os.mkdir(self.lock_path) - lock=1 - except OSError as err: - pass - - #The try statement ensures that a failure won't leave the - #database locked to other processes. 
- try: - with open(self.file_path,'w') as f: - self.last_write_time = int(time.time()) - self.write_database_to_yaml(f) - except: - os.rmdir(self.lock_path) - raise - - os.rmdir(self.lock_path) - - - def get_index_of(self, spec): - """ - Returns the index of a spec in the database - If unable to find the spec it returns -1 - """ - for index, sph in enumerate(self.data): - if sph['spec'] == spec: - return index - return -1 - + temp_name = os.getpid() + socket.getfqdn() + ".temp" + temp_file = path.join(self._root,temp_name) + with open(self.temp_path,'w') as f: + self._last_write_time = int(time.time()) + self._write_database_to_yaml(f) + os.rename(temp_name,self._file_path) def is_dirty(self): """ Returns true iff the database file exists and was most recently written to by another spack instance. """ - return (os.path.isfile(self.file_path) and (os.path.getmtime(self.file_path) > self.last_write_time)) + return (os.path.isfile(self._file_path) and (os.path.getmtime(self._file_path) > self._last_write_time)) # @_autospec @@ -215,16 +179,15 @@ def add(self, spec, path): Add the specified entry as a dict Write the database back to memory """ - self.read_database() - sph = {} sph['spec']=spec sph['path']=path sph['hash']=spec.dag_hash() - self.data.append(sph) - - self.write() + with Write_Lock_Instance(self.lock,60): + self.read_database() + self._data.append(sph) + self.write() @_autospec @@ -234,13 +197,15 @@ def remove(self, spec): Searches for and removes the specified spec Writes the database back to memory """ - self.read_database() + with Write_Lock_Instance(self.lock,60): + self.read_database() - for sp in self.data: - if sp['hash'] == spec.dag_hash() and sp['spec'] == spec: - self.data.remove(sp) + for sp in self._data: + #Not sure the hash comparison is necessary + if sp['hash'] == spec.dag_hash() and sp['spec'] == spec: + self._data.remove(sp) - self.write() + self.write() @_autospec @@ -272,10 +237,11 @@ def installed_package_specs(self): Read installed 
package names from the database and return their specs """ - self.read_database() + with Read_Lock_Instance(self.lock,60): + self.read_database() installed = [] - for sph in self.data: + for sph in self._data: installed.append(sph['spec']) return installed From c3246ee8ba26909ba73c3587ead9e61342692957 Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Mon, 31 Aug 2015 09:46:55 -0700 Subject: [PATCH 019/121] Removed incorrect stage removal code from cleanup() in do_install() --- lib/spack/spack/package.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index acf558d6399..e64b4278521 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -779,14 +779,6 @@ def cleanup(): "Manually remove this directory to fix:", self.prefix) - if not (keep_prefix and keep_stage): - self.do_clean() - else: - tty.warn("Keeping stage in place despite error.", - "Spack will refuse to uninstall dependencies of this package." + - "Manually remove this directory to fix:", - self.stage.path) - def real_work(): try: From ac088bae6a09c8d2ee7f9deac3e701126ca5dfa4 Mon Sep 17 00:00:00 2001 From: Adam Moody Date: Tue, 1 Sep 2015 16:02:14 -0700 Subject: [PATCH 020/121] update boost --- var/spack/packages/boost/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/packages/boost/package.py b/var/spack/packages/boost/package.py index 254d1afda1d..35824d53a26 100644 --- a/var/spack/packages/boost/package.py +++ b/var/spack/packages/boost/package.py @@ -14,6 +14,10 @@ class Boost(Package): list_url = "http://sourceforge.net/projects/boost/files/boost/" list_depth = 2 + version('1.59.0', '6aa9a5c6a4ca1016edd0ed1178e3cb87') + version('1.58.0', 'b8839650e61e9c1c0a89f371dd475546') + version('1.57.0', '1be49befbdd9a5ce9def2983ba3e7b76') + version('1.56.0', 'a744cf167b05d72335f27c88115f211d') version('1.55.0', 'd6eef4b4cacb2183f2bf265a5a03a354') version('1.54.0', '15cb8c0803064faef0c4ddf5bc5ca279') 
version('1.53.0', 'a00d22605d5dbcfb4c9936a9b35bc4c2') From 9c8e46dc228e096a83a892e5f53424bb3446d8f6 Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Thu, 3 Sep 2015 09:21:19 -0700 Subject: [PATCH 021/121] Added conservative locking to the spack commands that access the database at _index --- lib/spack/spack/cmd/__init__.py | 20 +++++----- lib/spack/spack/cmd/deactivate.py | 12 +++--- lib/spack/spack/cmd/diy.py | 64 ++++++++++++++++--------------- lib/spack/spack/cmd/extensions.py | 4 +- lib/spack/spack/cmd/find.py | 7 +++- lib/spack/spack/cmd/install.py | 22 ++++++----- lib/spack/spack/cmd/uninstall.py | 52 +++++++++++++------------ lib/spack/spack/database.py | 11 ++++-- 8 files changed, 105 insertions(+), 87 deletions(-) diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index fd3ef3ed270..c62d22979ae 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -28,6 +28,7 @@ import llnl.util.tty as tty from llnl.util.lang import attr_setdefault +from llnl.util.lock import * import spack import spack.spec @@ -124,15 +125,16 @@ def elide_list(line_list, max_num=10): def disambiguate_spec(spec): - matching_specs = spack.installed_db.get_installed(spec) - if not matching_specs: - tty.die("Spec '%s' matches no installed packages." % spec) + with Read_Lock_Instance(spack.installed_db.lock,1800): + matching_specs = spack.installed_db.get_installed(spec) + if not matching_specs: + tty.die("Spec '%s' matches no installed packages." % spec) - elif len(matching_specs) > 1: - args = ["%s matches multiple packages." % spec, - "Matching packages:"] - args += [" " + str(s) for s in matching_specs] - args += ["Use a more specific spec."] - tty.die(*args) + elif len(matching_specs) > 1: + args = ["%s matches multiple packages." 
% spec, + "Matching packages:"] + args += [" " + str(s) for s in matching_specs] + args += ["Use a more specific spec."] + tty.die(*args) return matching_specs[0] diff --git a/lib/spack/spack/cmd/deactivate.py b/lib/spack/spack/cmd/deactivate.py index 1f0e303cdf2..015809345a6 100644 --- a/lib/spack/spack/cmd/deactivate.py +++ b/lib/spack/spack/cmd/deactivate.py @@ -24,6 +24,7 @@ ############################################################################## from external import argparse import llnl.util.tty as tty +from llnl.util.lock import * import spack import spack.cmd @@ -54,12 +55,13 @@ def deactivate(parser, args): if args.all: if pkg.extendable: tty.msg("Deactivating all extensions of %s" % pkg.spec.short_spec) - ext_pkgs = spack.installed_db.installed_extensions_for(spec) + with Read_Lock_Instance(spack.installed_db.lock,1800): + ext_pkgs = spack.installed_db.installed_extensions_for(spec) - for ext_pkg in ext_pkgs: - ext_pkg.spec.normalize() - if ext_pkg.activated: - ext_pkg.do_deactivate(force=True) + for ext_pkg in ext_pkgs: + ext_pkg.spec.normalize() + if ext_pkg.activated: + ext_pkg.do_deactivate(force=True) elif pkg.is_extension: if not args.force and not spec.package.activated: diff --git a/lib/spack/spack/cmd/diy.py b/lib/spack/spack/cmd/diy.py index 6e7f10fba63..7403b9e3e80 100644 --- a/lib/spack/spack/cmd/diy.py +++ b/lib/spack/spack/cmd/diy.py @@ -27,6 +27,7 @@ from external import argparse import llnl.util.tty as tty +from llnl.util.lock import * import spack import spack.cmd @@ -54,40 +55,41 @@ def diy(self, args): if not args.spec: tty.die("spack diy requires a package spec argument.") - specs = spack.cmd.parse_specs(args.spec) - if len(specs) > 1: - tty.die("spack diy only takes one spec.") + with Write_Lock_Instance(spack.installed_db.lock,1800): + specs = spack.cmd.parse_specs(args.spec) + if len(specs) > 1: + tty.die("spack diy only takes one spec.") - spec = specs[0] - if not spack.db.exists(spec.name): - tty.warn("No such package: %s" % 
spec.name) - create = tty.get_yes_or_no("Create this package?", default=False) - if not create: - tty.msg("Exiting without creating.") + spec = specs[0] + if not spack.db.exists(spec.name): + tty.warn("No such package: %s" % spec.name) + create = tty.get_yes_or_no("Create this package?", default=False) + if not create: + tty.msg("Exiting without creating.") + sys.exit(1) + else: + tty.msg("Running 'spack edit -f %s'" % spec.name) + edit_package(spec.name, True) + return + + if not spec.version.concrete: + tty.die("spack diy spec must have a single, concrete version.") + + spec.concretize() + package = spack.db.get(spec) + + if package.installed: + tty.error("Already installed in %s" % package.prefix) + tty.msg("Uninstall or try adding a version suffix for this DIY build.") sys.exit(1) - else: - tty.msg("Running 'spack edit -f %s'" % spec.name) - edit_package(spec.name, True) - return - if not spec.version.concrete: - tty.die("spack diy spec must have a single, concrete version.") - - spec.concretize() - package = spack.db.get(spec) - - if package.installed: - tty.error("Already installed in %s" % package.prefix) - tty.msg("Uninstall or try adding a version suffix for this DIY build.") - sys.exit(1) - - # Forces the build to run out of the current directory. - package.stage = DIYStage(os.getcwd()) + # Forces the build to run out of the current directory. + package.stage = DIYStage(os.getcwd()) # TODO: make this an argument, not a global. - spack.do_checksum = False + spack.do_checksum = False - package.do_install( - keep_prefix=args.keep_prefix, - ignore_deps=args.ignore_deps, - keep_stage=True) # don't remove source dir for DIY. + package.do_install( + keep_prefix=args.keep_prefix, + ignore_deps=args.ignore_deps, + keep_stage=True) # don't remove source dir for DIY. 
diff --git a/lib/spack/spack/cmd/extensions.py b/lib/spack/spack/cmd/extensions.py index e919b1c4fba..66211e29a9d 100644 --- a/lib/spack/spack/cmd/extensions.py +++ b/lib/spack/spack/cmd/extensions.py @@ -27,6 +27,7 @@ import llnl.util.tty as tty from llnl.util.tty.colify import colify +from llnl.util.lock import * import spack import spack.cmd @@ -80,7 +81,8 @@ def extensions(parser, args): colify(ext.name for ext in extensions) # List specs of installed extensions. - installed = [s.spec for s in spack.installed_db.installed_extensions_for(spec)] + with Read_Lock_Instance(spack.installed_db.lock,1800): + installed = [s.spec for s in spack.installed_db.installed_extensions_for(spec)] print if not installed: tty.msg("None installed.") diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py index 6f9072e3116..f7fa409ebb2 100644 --- a/lib/spack/spack/cmd/find.py +++ b/lib/spack/spack/cmd/find.py @@ -32,6 +32,7 @@ from llnl.util.tty.colify import * from llnl.util.tty.color import * from llnl.util.lang import * +from llnl.util.lock import * import spack import spack.spec @@ -138,9 +139,11 @@ def find(parser, args): # Get all the specs the user asked for if not query_specs: - specs = set(spack.installed_db.installed_package_specs()) + with Read_Lock_Instance(spack.installed_db.lock,1800): + specs = set(spack.installed_db.installed_package_specs()) else: - results = [set(spack.installed_db.get_installed(qs)) for qs in query_specs] + with Read_Lock_Instance(spack.installed_db.lock,1800): + results = [set(spack.installed_db.get_installed(qs)) for qs in query_specs] specs = set.union(*results) if not args.mode: diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py index acb688a0923..330774b8d95 100644 --- a/lib/spack/spack/cmd/install.py +++ b/lib/spack/spack/cmd/install.py @@ -25,6 +25,7 @@ from external import argparse import llnl.util.tty as tty +from llnl.util.lock import * import spack import spack.cmd @@ -68,13 +69,14 @@ def 
install(parser, args): if args.no_checksum: spack.do_checksum = False # TODO: remove this global. - specs = spack.cmd.parse_specs(args.packages, concretize=True) - for spec in specs: - package = spack.db.get(spec) - package.do_install( - keep_prefix=args.keep_prefix, - keep_stage=args.keep_stage, - ignore_deps=args.ignore_deps, - make_jobs=args.jobs, - verbose=args.verbose, - fake=args.fake) + with Write_Lock_Instance(spack.installed_db.lock,1800): + specs = spack.cmd.parse_specs(args.packages, concretize=True) + for spec in specs: + package = spack.db.get(spec) + package.do_install( + keep_prefix=args.keep_prefix, + keep_stage=args.keep_stage, + ignore_deps=args.ignore_deps, + make_jobs=args.jobs, + verbose=args.verbose, + fake=args.fake) diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py index 4870712eb69..4b0267dac21 100644 --- a/lib/spack/spack/cmd/uninstall.py +++ b/lib/spack/spack/cmd/uninstall.py @@ -27,6 +27,7 @@ import llnl.util.tty as tty from llnl.util.tty.colify import colify +from llnl.util.lock import * import spack import spack.cmd @@ -53,35 +54,36 @@ def uninstall(parser, args): if not args.packages: tty.die("uninstall requires at least one package argument.") - specs = spack.cmd.parse_specs(args.packages) + with Write_Lock_Instance(spack.installed_db.lock,1800): + specs = spack.cmd.parse_specs(args.packages) - # For each spec provided, make sure it refers to only one package. - # Fail and ask user to be unambiguous if it doesn't - pkgs = [] - for spec in specs: - matching_specs = spack.installed_db.get_installed(spec) - if not args.all and len(matching_specs) > 1: - tty.error("%s matches multiple packages:" % spec) - print - display_specs(matching_specs, long=True) - print - print "You can either:" - print " a) Use a more specific spec, or" - print " b) use spack uninstall -a to uninstall ALL matching specs." - sys.exit(1) + # For each spec provided, make sure it refers to only one package. 
+ # Fail and ask user to be unambiguous if it doesn't + pkgs = [] + for spec in specs: + matching_specs = spack.installed_db.get_installed(spec) + if not args.all and len(matching_specs) > 1: + tty.error("%s matches multiple packages:" % spec) + print + display_specs(matching_specs, long=True) + print + print "You can either:" + print " a) Use a more specific spec, or" + print " b) use spack uninstall -a to uninstall ALL matching specs." + sys.exit(1) - if len(matching_specs) == 0: - if args.force: continue - tty.die("%s does not match any installed packages." % spec) + if len(matching_specs) == 0: + if args.force: continue + tty.die("%s does not match any installed packages." % spec) - for s in matching_specs: - try: - # should work if package is known to spack - pkgs.append(s.package) + for s in matching_specs: + try: + # should work if package is known to spack + pkgs.append(s.package) - except spack.packages.UnknownPackageError, e: - # The package.py file has gone away -- but still want to uninstall. - spack.Package(s).do_uninstall(force=True) + except spack.packages.UnknownPackageError, e: + # The package.py file has gone away -- but still want to uninstall. 
+ spack.Package(s).do_uninstall(force=True) # Sort packages to be uninstalled by the number of installed dependents # This ensures we do things in the right order diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index 6a429b68a99..9b759827d30 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -158,12 +158,12 @@ def write(self): within the same lock, so there is no need to refresh the database within write() """ - temp_name = os.getpid() + socket.getfqdn() + ".temp" - temp_file = path.join(self._root,temp_name) - with open(self.temp_path,'w') as f: + temp_name = str(os.getpid()) + socket.getfqdn() + ".temp" + temp_file = join_path(self._root,temp_name) + with open(temp_file,'w') as f: self._last_write_time = int(time.time()) self._write_database_to_yaml(f) - os.rename(temp_name,self._file_path) + os.rename(temp_file,self._file_path) def is_dirty(self): """ @@ -184,6 +184,7 @@ def add(self, spec, path): sph['path']=path sph['hash']=spec.dag_hash() + #Should always already be locked with Write_Lock_Instance(self.lock,60): self.read_database() self._data.append(sph) @@ -197,6 +198,7 @@ def remove(self, spec): Searches for and removes the specified spec Writes the database back to memory """ + #Should always already be locked with Write_Lock_Instance(self.lock,60): self.read_database() @@ -237,6 +239,7 @@ def installed_package_specs(self): Read installed package names from the database and return their specs """ + #Should always already be locked with Read_Lock_Instance(self.lock,60): self.read_database() From b06117f77a71a5c82a07b2ff063c30cb9a9a25c3 Mon Sep 17 00:00:00 2001 From: Rasmus Wriedt Larsen Date: Thu, 3 Sep 2015 14:19:27 -0700 Subject: [PATCH 022/121] Add fish (shell) package --- var/spack/packages/fish/package.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 var/spack/packages/fish/package.py diff --git a/var/spack/packages/fish/package.py b/var/spack/packages/fish/package.py new 
file mode 100644 index 00000000000..12255587057 --- /dev/null +++ b/var/spack/packages/fish/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Fish(Package): + """fish is a smart and user-friendly command line shell for OS X, Linux, and + the rest of the family. + """ + + homepage = "http://fishshell.com/" + url = "http://fishshell.com/files/2.2.0/fish-2.2.0.tar.gz" + list_url = homepage + + version('2.2.0', 'a76339fd14ce2ec229283c53e805faac48c3e99d9e3ede9d82c0554acfc7b77a') + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix) + + make() + make("install") From 99ca0f31531059037a6508aab8ecc62d080f8ce5 Mon Sep 17 00:00:00 2001 From: Justin Too Date: Fri, 11 Sep 2015 20:27:09 +0000 Subject: [PATCH 023/121] (Package) Add Doxygen (v1.8.10) --- var/spack/packages/doxygen/package.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 var/spack/packages/doxygen/package.py diff --git a/var/spack/packages/doxygen/package.py b/var/spack/packages/doxygen/package.py new file mode 100644 index 00000000000..3d4a4e47a78 --- /dev/null +++ b/var/spack/packages/doxygen/package.py @@ -0,0 +1,25 @@ +#------------------------------------------------------------------------------ +# Author: Justin Too +# Date: September 11, 2015 +#------------------------------------------------------------------------------ + +from spack import * + +class Doxygen(Package): + """Doxygen is the de facto standard tool for generating documentation + from annotated C++ sources, but it also supports other popular programming + languages such as C, Objective-C, C#, PHP, Java, Python, IDL (Corba, + Microsoft, and UNO/OpenOffice flavors), Fortran, VHDL, Tcl, and to some extent D.. 
+ """ + homepage = "http://www.stack.nl/~dimitri/doxygen/" + url = "http://ftp.stack.nl/pub/users/dimitri/doxygen-1.8.10.src.tar.gz" + + version('1.8.10', '79767ccd986f12a0f949015efb5f058f') + + depends_on("cmake@2.8.12:") + + def install(self, spec, prefix): + cmake('.', *std_cmake_args) + + make() + make("install") From cd23d2eaa2d06e4a407a4ae88c28d13cbbd7fd1e Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Tue, 15 Sep 2015 14:20:19 -0700 Subject: [PATCH 024/121] Added spack fsck and re-read from glob if the database file does not exist. Allows older versions to smoothly upgrade to the database. --- lib/spack/spack/cmd/fsck.py | 43 +++++++++++++++++++++++++++++++++++++ lib/spack/spack/database.py | 18 ++++++++++++---- 2 files changed, 57 insertions(+), 4 deletions(-) create mode 100644 lib/spack/spack/cmd/fsck.py diff --git a/lib/spack/spack/cmd/fsck.py b/lib/spack/spack/cmd/fsck.py new file mode 100644 index 00000000000..3141a7031d4 --- /dev/null +++ b/lib/spack/spack/cmd/fsck.py @@ -0,0 +1,43 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from external import argparse + +from llnl.util.lock import * + +import spack +import os + +description = "Correct database irregularities" + +#Very basic version of spack fsck +def fsck(parser, args): + with Write_Lock_Instance(spack.installed_db.lock,1800): + #remove database file + if os.path.exists(spack.installed_db._file_path): + os.remove(spack.installed_db._file_path) + #read database + spack.installed_db.read_database() + #write database + spack.installed_db.write() diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index 9b759827d30..680d184f1f5 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -123,8 +123,15 @@ def read_database(self): with open(self._file_path,'r') as f: self._read_from_yaml(f) else: - #The file doesn't exist, construct empty data. + #The file doesn't exist, construct from file paths self._data = [] + specs = spack.install_layout.all_specs() + for spec in specs: + sph = {} + sph['spec']=spec + sph['hash']=spec.dag_hash() + sph['path']=spack.install_layout.path_for_spec(spec) + self._data.append(sph) def _write_database_to_yaml(self,stream): @@ -167,10 +174,13 @@ def write(self): def is_dirty(self): """ - Returns true iff the database file exists - and was most recently written to by another spack instance. + Returns true iff the database file does not exist + or was most recently written to by another spack instance. 
""" - return (os.path.isfile(self._file_path) and (os.path.getmtime(self._file_path) > self._last_write_time)) + if not os.path.isfile(self._file_path): + return True + else: + return (os.path.getmtime(self._file_path) > self._last_write_time) # @_autospec From ccf311c9c6eb958c16e9621d8a2ef7665a4fa4d7 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 17 Sep 2015 00:16:12 -0700 Subject: [PATCH 025/121] Several changes to DB implementation. 1. Database stores a file version, so we can add to it in the future. 2. Database indexed by hashes and not numerical indexes. 3. Specs built by database have consistent hashes and it's checked. 4. minor naming and whitespace changes. --- lib/spack/llnl/util/filesystem.py | 2 +- lib/spack/llnl/util/lock.py | 27 ++- lib/spack/spack/cmd/find.py | 5 +- lib/spack/spack/database.py | 319 ++++++++++++++++++------------ lib/spack/spack/error.py | 4 +- lib/spack/spack/spec.py | 2 +- 6 files changed, 220 insertions(+), 139 deletions(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 029a7536dfb..03f25d3dff7 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -222,7 +222,7 @@ def working_dir(dirname, **kwargs): def touch(path): """Creates an empty file at the specified path.""" - with closing(open(path, 'a')) as file: + with open(path, 'a') as file: os.utime(path, None) diff --git a/lib/spack/llnl/util/lock.py b/lib/spack/llnl/util/lock.py index 05641475eda..bb3b15c9cf7 100644 --- a/lib/spack/llnl/util/lock.py +++ b/lib/spack/llnl/util/lock.py @@ -22,6 +22,7 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## +"""Lock implementation for shared filesystems.""" import os import fcntl import errno @@ -34,11 +35,13 @@ class Read_Lock_Instance(object): A context manager for getting shared access to 
the object lock Arguments are lock and timeout (default 5 minutes) """ - def __init__(self,lock,timeout = 300): + def __init__(self, lock, timeout=300): self._lock = lock self._timeout = timeout + def __enter__(self): self._lock.acquire_read(self._timeout) + def __exit__(self,type,value,traceback): self._lock.release_read() @@ -48,17 +51,21 @@ class Write_Lock_Instance(object): A context manager for getting exclusive access to the object lock Arguments are lock and timeout (default 5 minutes) """ - def __init__(self,lock,timeout = 300): + def __init__(self, lock, timeout=300): self._lock = lock self._timeout = timeout + def __enter__(self): self._lock.acquire_write(self._timeout) + def __exit__(self,type,value,traceback): self._lock.release_write() class Lock(object): - def __init__(self,file_path): + """Distributed file-based lock using ``flock``.""" + + def __init__(self, file_path): self._file_path = file_path self._fd = os.open(file_path,os.O_RDWR) self._reads = 0 @@ -71,20 +78,20 @@ def acquire_read(self,timeout): the write lock will be maintained until all locks are released """ if self._reads == 0 and self._writes == 0: - self._lock(fcntl.LOCK_SH,timeout) + self._lock(fcntl.LOCK_SH, timeout) self._reads += 1 - def acquire_write(self,timeout): + def acquire_write(self, timeout): """ Implements recursive lock """ if self._writes == 0: - self._lock(fcntl.LOCK_EX,timeout) + self._lock(fcntl.LOCK_EX, timeout) self._writes += 1 - def _lock(self,op,timeout): + def _lock(self, op, timeout): """ The timeout is implemented using nonblocking flock() to avoid using signals for timing @@ -96,8 +103,8 @@ def _lock(self,op,timeout): try: fcntl.flock(self._fd, op | fcntl.LOCK_NB) if op == fcntl.LOCK_EX: - with open(self._file_path,'w') as f: - f.write("pid = "+str(os.getpid())+", host = "+socket.getfqdn()) + with open(self._file_path, 'w') as f: + f.write("pid = " + str(os.getpid()) + ", host = " + socket.getfqdn()) return except IOError as error: if error.errno == 
errno.EAGAIN or error.errno == EACCES: @@ -133,4 +140,4 @@ def _unlock(self): Releases the lock regardless of mode. Note that read locks may be masquerading as write locks at times, but this removes either. """ - fcntl.flock(self._fd,fcntl.LOCK_UN) + fcntl.flock(self._fd, fcntl.LOCK_UN) diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py index f7fa409ebb2..2d2b8843689 100644 --- a/lib/spack/spack/cmd/find.py +++ b/lib/spack/spack/cmd/find.py @@ -139,10 +139,11 @@ def find(parser, args): # Get all the specs the user asked for if not query_specs: - with Read_Lock_Instance(spack.installed_db.lock,1800): + with Read_Lock_Instance(spack.installed_db.lock, 1800): specs = set(spack.installed_db.installed_package_specs()) + else: - with Read_Lock_Instance(spack.installed_db.lock,1800): + with Read_Lock_Instance(spack.installed_db.lock, 1800): results = [set(spack.installed_db.get_installed(qs)) for qs in query_specs] specs = set.union(*results) diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index 680d184f1f5..43ba178fed9 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -1,5 +1,5 @@ ############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. 
@@ -23,95 +23,192 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os -import sys -import inspect -import glob -import imp - import time -import copy -import errno from external import yaml -from external.yaml.error import MarkedYAMLError +from external.yaml.error import MarkedYAMLError, YAMLError import llnl.util.tty as tty -from llnl.util.filesystem import join_path -from llnl.util.lang import * +from llnl.util.filesystem import * from llnl.util.lock import * -import spack.error import spack.spec +from spack.version import Version from spack.spec import Spec from spack.error import SpackError -from spack.virtual import ProviderIndex -from spack.util.naming import mod_to_class, validate_module_name + +# DB goes in this directory underneath the root +_db_dirname = '.spack-db' + +# DB version. This is stuck in the DB file to track changes in format. +_db_version = Version('0.9') def _autospec(function): """Decorator that automatically converts the argument of a single-arg function to a Spec.""" - def converter(self, spec_like, **kwargs): + def converter(self, spec_like, *args, **kwargs): if not isinstance(spec_like, spack.spec.Spec): spec_like = spack.spec.Spec(spec_like) - return function(self, spec_like, **kwargs) + return function(self, spec_like, *args, **kwargs) return converter +class InstallRecord(object): + """A record represents one installation in the DB.""" + def __init__(self, spec, path): + self.spec = spec + self.path = path + + def to_dict(self): + return { 'spec' : self.spec.to_node_dict(), + 'path' : self.path } + + @classmethod + def from_dict(cls, d): + return InstallRecord(d['spec'], d['path']) + + class Database(object): - def __init__(self,root,file_name="_index.yaml"): - """ - Create an empty Database - Location defaults to root/specDB.yaml + def __init__(self, root): + """Create an empty Database. 
+ + Location defaults to root/_index.yaml The individual data are dicts containing spec: the top level spec of a package path: the path to the install of that package dep_hash: a hash of the dependence DAG for that package """ self._root = root - self._file_name = file_name - self._file_path = join_path(self._root,self._file_name) - self._lock_name = "_db_lock" - self._lock_path = join_path(self._root,self._lock_name) + # Set up layout of database files. + self._db_dir = join_path(self._root, _db_dirname) + self._index_path = join_path(self._db_dir, 'index.yaml') + self._lock_path = join_path(self._db_dir, 'lock') + + # Create needed directories and files + if not os.path.exists(self._db_dir): + mkdirp(self._db_dir) + if not os.path.exists(self._lock_path): - open(self._lock_path,'w').close() - self.lock = Lock(self._lock_path) + touch(self._lock_path) - self._data = [] + # initialize rest of state. + self.lock = Lock(self._lock_path) + self._data = {} self._last_write_time = 0 - def _read_from_yaml(self,stream): + def _write_to_yaml(self, stream): + """Write out the databsae to a YAML file.""" + # map from per-spec hash code to installation record. + installs = dict((k, v.to_dict()) for k, v in self._data.items()) + + # databaes includes installation list and version. + + # NOTE: this DB version does not handle multiple installs of + # the same spec well. If there are 2 identical specs with + # different paths, it can't differentiate. + # TODO: fix this before we support multiple install locations. + database = { + 'database' : { + 'installs' : installs, + 'version' : str(_db_version) + } + } + + try: + return yaml.dump(database, stream=stream, default_flow_style=False) + except YAMLError as e: + raise SpackYAMLError("error writing YAML database:", str(e)) + + + def _read_spec_from_yaml(self, hash_key, installs): + """Recursively construct a spec from a hash in a YAML database.""" + # TODO: check validity of hash_key records here. 
+ spec_dict = installs[hash_key]['spec'] + + # Build spec from dict first. + spec = Spec.from_node_dict(spec_dict) + + # Add dependencies from other records in the install DB to + # form a full spec. + for dep_hash in spec_dict[spec.name]['dependencies'].values(): + spec._add_dependency(self._read_spec_from_yaml(dep_hash, installs)) + + return spec + + + def _read_from_yaml(self, stream): """ Fill database from YAML, do not maintain old data Translate the spec portions from node-dict form to spec form """ try: - file = yaml.load(stream) - except MarkedYAMLError, e: + if isinstance(stream, basestring): + with open(stream, 'r') as f: + yfile = yaml.load(f) + else: + yfile = yaml.load(stream) + + except MarkedYAMLError as e: raise SpackYAMLError("error parsing YAML database:", str(e)) - if file is None: + if yfile is None: return + def check(cond, msg): + if not cond: raise CorruptDatabaseError(self._index_path, msg) + + check('database' in yfile, "No 'database' attribute in YAML.") + + # High-level file checks. + db = yfile['database'] + check('installs' in db, "No 'installs' in YAML DB.") + check('version' in db, "No 'version' in YAML DB.") + + # TODO: better version check. + version = Version(db['version']) + if version != _db_version: + raise InvalidDatabaseVersionError(_db_version, version) + + # Iterate through database and check each record. 
+ installs = db['installs'] data = {} - for index, sp in file['database'].items(): - spec = Spec.from_node_dict(sp['spec']) - deps = sp['dependency_indices'] - path = sp['path'] - dep_hash = sp['hash'] - db_entry = {'deps':deps, 'spec': spec, 'path': path, 'hash':dep_hash} - data[index] = db_entry + for hash_key, rec in installs.items(): + try: + spec = self._read_spec_from_yaml(hash_key, installs) + spec_hash = spec.dag_hash() + if not spec_hash == hash_key: + tty.warn("Hash mismatch in database: %s -> spec with hash %s" + % (hash_key, spec_hash)) + continue - for sph in data.values(): - for idx in sph['deps']: - sph['spec'].dependencies[data[idx]['spec'].name] = data[idx]['spec'] + data[hash_key] = InstallRecord(spec, rec['path']) - self._data = data.values() + except Exception as e: + tty.warn("Invalid database reecord:", + "file: %s" % self._index_path, + "hash: %s" % hash_key, + "cause: %s" % str(e)) + raise + + self._data = data - def read_database(self): + def reindex(self, directory_layout): + """Build database index from scratch based from a directory layout.""" + with Write_Lock_Instance(self.lock, 60): + data = {} + for spec in directory_layout.all_specs(): + path = directory_layout.path_for_spec(spec) + hash_key = spec.dag_hash() + data[hash_key] = InstallRecord(spec, path) + self._data = data + self.write() + + + def read(self): """ Re-read Database from the data in the set location If the cache is fresh, return immediately. 
@@ -119,43 +216,12 @@ def read_database(self): if not self.is_dirty(): return - if os.path.isfile(self._file_path): - with open(self._file_path,'r') as f: - self._read_from_yaml(f) + if os.path.isfile(self._index_path): + # Read from YAML file if a database exists + self._read_from_yaml(self._index_path) else: - #The file doesn't exist, construct from file paths - self._data = [] - specs = spack.install_layout.all_specs() - for spec in specs: - sph = {} - sph['spec']=spec - sph['hash']=spec.dag_hash() - sph['path']=spack.install_layout.path_for_spec(spec) - self._data.append(sph) - - - def _write_database_to_yaml(self,stream): - """ - Replace each spec with its dict-node form - Then stream all data to YAML - """ - node_list = [] - spec_list = [sph['spec'] for sph in self._data] - - for sph in self._data: - node = {} - deps = [] - for name,spec in sph['spec'].dependencies.items(): - deps.append(spec_list.index(spec)) - node['spec']=Spec.to_node_dict(sph['spec']) - node['hash']=sph['hash'] - node['path']=sph['path'] - node['dependency_indices']=deps - node_list.append(node) - - node_dict = dict(enumerate(node_list)) - return yaml.dump({ 'database' : node_dict}, - stream=stream, default_flow_style=False) + # The file doesn't exist, try to traverse the directory. 
+ self.reindex(spack.install_layout) def write(self): @@ -165,39 +231,42 @@ def write(self): within the same lock, so there is no need to refresh the database within write() """ - temp_name = str(os.getpid()) + socket.getfqdn() + ".temp" - temp_file = join_path(self._root,temp_name) - with open(temp_file,'w') as f: - self._last_write_time = int(time.time()) - self._write_database_to_yaml(f) - os.rename(temp_file,self._file_path) + temp_name = '%s.%s.temp' % (socket.getfqdn(), os.getpid()) + temp_file = join_path(self._db_dir, temp_name) + + # Write a temporary database file them move it into place + try: + with open(temp_file, 'w') as f: + self._last_write_time = int(time.time()) + self._write_to_yaml(f) + os.rename(temp_file, self._index_path) + + except: + # Clean up temp file if something goes wrong. + if os.path.exists(temp_file): + os.remove(temp_file) + raise + def is_dirty(self): """ Returns true iff the database file does not exist or was most recently written to by another spack instance. 
""" - if not os.path.isfile(self._file_path): - return True - else: - return (os.path.getmtime(self._file_path) > self._last_write_time) + return (not os.path.isfile(self._index_path) or + (os.path.getmtime(self._index_path) > self._last_write_time)) -# @_autospec + @_autospec def add(self, spec, path): """Read the database from the set location Add the specified entry as a dict Write the database back to memory """ - sph = {} - sph['spec']=spec - sph['path']=path - sph['hash']=spec.dag_hash() - - #Should always already be locked - with Write_Lock_Instance(self.lock,60): - self.read_database() - self._data.append(sph) + # Should always already be locked + with Write_Lock_Instance(self.lock, 60): + self.read() + self._data[spec.dag_hash()] = InstallRecord(spec, path) self.write() @@ -208,23 +277,18 @@ def remove(self, spec): Searches for and removes the specified spec Writes the database back to memory """ - #Should always already be locked - with Write_Lock_Instance(self.lock,60): - self.read_database() - - for sp in self._data: - #Not sure the hash comparison is necessary - if sp['hash'] == spec.dag_hash() and sp['spec'] == spec: - self._data.remove(sp) - + # Should always already be locked + with Write_Lock_Instance(self.lock, 60): + self.read() + hash_key = spec.dag_hash() + if hash_key in self._data: + del self._data[hash_key] self.write() @_autospec def get_installed(self, spec): - """ - Get all the installed specs that satisfy the provided spec constraint - """ + """Get installed specs that satisfy the provided spec constraint.""" return [s for s in self.installed_package_specs() if s.satisfies(spec)] @@ -238,10 +302,10 @@ def installed_extensions_for(self, extendee_spec): try: if s.package.extends(extendee_spec): yield s.package - except UnknownPackageError, e: + except UnknownPackageError as e: continue - #skips unknown packages - #TODO: conditional way to do this instead of catching exceptions + # skips unknown packages + # TODO: conditional way to do this 
instead of catching exceptions def installed_package_specs(self): @@ -249,14 +313,10 @@ def installed_package_specs(self): Read installed package names from the database and return their specs """ - #Should always already be locked - with Read_Lock_Instance(self.lock,60): - self.read_database() - - installed = [] - for sph in self._data: - installed.append(sph['spec']) - return installed + # Should always already be locked + with Read_Lock_Instance(self.lock, 60): + self.read() + return sorted(rec.spec for rec in self._data.values()) def installed_known_package_specs(self): @@ -265,5 +325,18 @@ def installed_known_package_specs(self): Return only the specs for which the package is known to this version of spack """ - return [s for s in self.installed_package_specs() if spack.db.exists(s.name)] + return [s for s in self.installed_package_specs() + if spack.db.exists(s.name)] + +class CorruptDatabaseError(SpackError): + def __init__(self, path, msg=''): + super(CorruptDatabaseError, self).__init__( + "Spack database is corrupt: %s. 
%s" %(path, msg)) + + +class InvalidDatabaseVersionError(SpackError): + def __init__(self, expected, found): + super(InvalidDatabaseVersionError, self).__init__( + "Expected database version %s but found version %s" + % (expected, found)) diff --git a/lib/spack/spack/error.py b/lib/spack/spack/error.py index bfa7951a473..b3b24e6105b 100644 --- a/lib/spack/spack/error.py +++ b/lib/spack/spack/error.py @@ -55,8 +55,8 @@ def die(self): def __str__(self): msg = self.message - if self.long_message: - msg += "\n %s" % self.long_message + if self._long_message: + msg += "\n %s" % self._long_message return msg class UnsupportedPlatformError(SpackError): diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index cde2e168a07..7b79feb311a 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -2000,4 +2000,4 @@ def __init__(self, provided, required): class SpackYAMLError(spack.error.SpackError): def __init__(self, msg, yaml_error): - super(SpackError, self).__init__(msg, str(yaml_error)) + super(SpackYAMLError, self).__init__(msg, str(yaml_error)) From e17ad6a684b94211e9b4267cca68cc7fdf6ad277 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 17 Sep 2015 01:05:19 -0700 Subject: [PATCH 026/121] Simplify lock context managers. 
--- lib/spack/llnl/util/lock.py | 30 +++++++++++++++++++++--------- lib/spack/spack/cmd/__init__.py | 3 +-- lib/spack/spack/cmd/deactivate.py | 3 +-- lib/spack/spack/cmd/diy.py | 3 +-- lib/spack/spack/cmd/extensions.py | 3 +-- lib/spack/spack/cmd/find.py | 5 ++--- lib/spack/spack/cmd/fsck.py | 17 +++-------------- lib/spack/spack/cmd/install.py | 3 +-- lib/spack/spack/cmd/uninstall.py | 3 +-- lib/spack/spack/database.py | 22 +++++++++++++++++----- 10 files changed, 49 insertions(+), 43 deletions(-) diff --git a/lib/spack/llnl/util/lock.py b/lib/spack/llnl/util/lock.py index bb3b15c9cf7..3cd02befe53 100644 --- a/lib/spack/llnl/util/lock.py +++ b/lib/spack/llnl/util/lock.py @@ -29,13 +29,15 @@ import time import socket +# Default timeout for locks. +DEFAULT_TIMEOUT = 60 + +class _ReadLockContext(object): + """Context manager that takes and releases a read lock. -class Read_Lock_Instance(object): - """ - A context manager for getting shared access to the object lock Arguments are lock and timeout (default 5 minutes) """ - def __init__(self, lock, timeout=300): + def __init__(self, lock, timeout=DEFAULT_TIMEOUT): self._lock = lock self._timeout = timeout @@ -46,12 +48,12 @@ def __exit__(self,type,value,traceback): self._lock.release_read() -class Write_Lock_Instance(object): - """ - A context manager for getting exclusive access to the object lock +class _WriteLockContext(object): + """Context manager that takes and releases a write lock. 
+ Arguments are lock and timeout (default 5 minutes) """ - def __init__(self, lock, timeout=300): + def __init__(self, lock, timeout=DEFAULT_TIMEOUT): self._lock = lock self._timeout = timeout @@ -72,7 +74,17 @@ def __init__(self, file_path): self._writes = 0 - def acquire_read(self,timeout): + def write_lock(self, timeout=DEFAULT_TIMEOUT): + """Convenience method that returns a write lock context.""" + return _WriteLockContext(self, timeout) + + + def read_lock(self, timeout=DEFAULT_TIMEOUT): + """Convenience method that returns a read lock context.""" + return _ReadLockContext(self, timeout) + + + def acquire_read(self, timeout): """ Implements recursive lock. If held in both read and write mode, the write lock will be maintained until all locks are released diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index c62d22979ae..a8e8b1a48b3 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -28,7 +28,6 @@ import llnl.util.tty as tty from llnl.util.lang import attr_setdefault -from llnl.util.lock import * import spack import spack.spec @@ -125,7 +124,7 @@ def elide_list(line_list, max_num=10): def disambiguate_spec(spec): - with Read_Lock_Instance(spack.installed_db.lock,1800): + with spack.installed_db.read_lock(): matching_specs = spack.installed_db.get_installed(spec) if not matching_specs: tty.die("Spec '%s' matches no installed packages." 
% spec) diff --git a/lib/spack/spack/cmd/deactivate.py b/lib/spack/spack/cmd/deactivate.py index 015809345a6..5428e3d2de3 100644 --- a/lib/spack/spack/cmd/deactivate.py +++ b/lib/spack/spack/cmd/deactivate.py @@ -24,7 +24,6 @@ ############################################################################## from external import argparse import llnl.util.tty as tty -from llnl.util.lock import * import spack import spack.cmd @@ -55,7 +54,7 @@ def deactivate(parser, args): if args.all: if pkg.extendable: tty.msg("Deactivating all extensions of %s" % pkg.spec.short_spec) - with Read_Lock_Instance(spack.installed_db.lock,1800): + with spack.installed_db.read_lock(): ext_pkgs = spack.installed_db.installed_extensions_for(spec) for ext_pkg in ext_pkgs: diff --git a/lib/spack/spack/cmd/diy.py b/lib/spack/spack/cmd/diy.py index 7403b9e3e80..6178c9c3e3d 100644 --- a/lib/spack/spack/cmd/diy.py +++ b/lib/spack/spack/cmd/diy.py @@ -27,7 +27,6 @@ from external import argparse import llnl.util.tty as tty -from llnl.util.lock import * import spack import spack.cmd @@ -55,7 +54,7 @@ def diy(self, args): if not args.spec: tty.die("spack diy requires a package spec argument.") - with Write_Lock_Instance(spack.installed_db.lock,1800): + with spack.installed_db.write_lock(): specs = spack.cmd.parse_specs(args.spec) if len(specs) > 1: tty.die("spack diy only takes one spec.") diff --git a/lib/spack/spack/cmd/extensions.py b/lib/spack/spack/cmd/extensions.py index 66211e29a9d..f0f99a26910 100644 --- a/lib/spack/spack/cmd/extensions.py +++ b/lib/spack/spack/cmd/extensions.py @@ -27,7 +27,6 @@ import llnl.util.tty as tty from llnl.util.tty.colify import colify -from llnl.util.lock import * import spack import spack.cmd @@ -81,7 +80,7 @@ def extensions(parser, args): colify(ext.name for ext in extensions) # List specs of installed extensions. 
- with Read_Lock_Instance(spack.installed_db.lock,1800): + with spack.installed_db.read_lock(): installed = [s.spec for s in spack.installed_db.installed_extensions_for(spec)] print if not installed: diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py index 2d2b8843689..e2edd454f4d 100644 --- a/lib/spack/spack/cmd/find.py +++ b/lib/spack/spack/cmd/find.py @@ -32,7 +32,6 @@ from llnl.util.tty.colify import * from llnl.util.tty.color import * from llnl.util.lang import * -from llnl.util.lock import * import spack import spack.spec @@ -139,11 +138,11 @@ def find(parser, args): # Get all the specs the user asked for if not query_specs: - with Read_Lock_Instance(spack.installed_db.lock, 1800): + with spack.installed_db.read_lock(): specs = set(spack.installed_db.installed_package_specs()) else: - with Read_Lock_Instance(spack.installed_db.lock, 1800): + with spack.installed_db.read_lock(): results = [set(spack.installed_db.get_installed(qs)) for qs in query_specs] specs = set.union(*results) diff --git a/lib/spack/spack/cmd/fsck.py b/lib/spack/spack/cmd/fsck.py index 3141a7031d4..9a3c450dcf9 100644 --- a/lib/spack/spack/cmd/fsck.py +++ b/lib/spack/spack/cmd/fsck.py @@ -1,5 +1,5 @@ ############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. 
@@ -23,21 +23,10 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from external import argparse - -from llnl.util.lock import * - import spack -import os description = "Correct database irregularities" -#Very basic version of spack fsck +# Very basic version of spack fsck def fsck(parser, args): - with Write_Lock_Instance(spack.installed_db.lock,1800): - #remove database file - if os.path.exists(spack.installed_db._file_path): - os.remove(spack.installed_db._file_path) - #read database - spack.installed_db.read_database() - #write database - spack.installed_db.write() + spack.installed_db.reindex(spack.install_layout) diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py index 330774b8d95..ada655b937b 100644 --- a/lib/spack/spack/cmd/install.py +++ b/lib/spack/spack/cmd/install.py @@ -25,7 +25,6 @@ from external import argparse import llnl.util.tty as tty -from llnl.util.lock import * import spack import spack.cmd @@ -69,7 +68,7 @@ def install(parser, args): if args.no_checksum: spack.do_checksum = False # TODO: remove this global. 
- with Write_Lock_Instance(spack.installed_db.lock,1800): + with spack.installed_db.write_lock(): specs = spack.cmd.parse_specs(args.packages, concretize=True) for spec in specs: package = spack.db.get(spec) diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py index 4b0267dac21..7425db3ca32 100644 --- a/lib/spack/spack/cmd/uninstall.py +++ b/lib/spack/spack/cmd/uninstall.py @@ -27,7 +27,6 @@ import llnl.util.tty as tty from llnl.util.tty.colify import colify -from llnl.util.lock import * import spack import spack.cmd @@ -54,7 +53,7 @@ def uninstall(parser, args): if not args.packages: tty.die("uninstall requires at least one package argument.") - with Write_Lock_Instance(spack.installed_db.lock,1800): + with spack.installed_db.write_lock(): specs = spack.cmd.parse_specs(args.packages) # For each spec provided, make sure it refers to only one package. diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index 43ba178fed9..cea56eb1b93 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -24,13 +24,14 @@ ############################################################################## import os import time +import socket from external import yaml from external.yaml.error import MarkedYAMLError, YAMLError import llnl.util.tty as tty from llnl.util.filesystem import * -from llnl.util.lock import * +from llnl.util.lock import Lock import spack.spec from spack.version import Version @@ -99,6 +100,16 @@ def __init__(self, root): self._last_write_time = 0 + def write_lock(self): + """Get a write lock context for use in a `with` block.""" + return self.lock.write_lock() + + + def read_lock(self): + """Get a read lock context for use in a `with` block.""" + return self.lock.read_lock() + + def _write_to_yaml(self, stream): """Write out the databsae to a YAML file.""" # map from per-spec hash code to installation record. 
@@ -198,13 +209,14 @@ def check(cond, msg): def reindex(self, directory_layout): """Build database index from scratch based from a directory layout.""" - with Write_Lock_Instance(self.lock, 60): + with self.write_lock(): data = {} for spec in directory_layout.all_specs(): path = directory_layout.path_for_spec(spec) hash_key = spec.dag_hash() data[hash_key] = InstallRecord(spec, path) self._data = data + self.write() @@ -264,7 +276,7 @@ def add(self, spec, path): Write the database back to memory """ # Should always already be locked - with Write_Lock_Instance(self.lock, 60): + with self.write_lock(): self.read() self._data[spec.dag_hash()] = InstallRecord(spec, path) self.write() @@ -278,7 +290,7 @@ def remove(self, spec): Writes the database back to memory """ # Should always already be locked - with Write_Lock_Instance(self.lock, 60): + with self.write_lock(): self.read() hash_key = spec.dag_hash() if hash_key in self._data: @@ -314,7 +326,7 @@ def installed_package_specs(self): and return their specs """ # Should always already be locked - with Read_Lock_Instance(self.lock, 60): + with self.read_lock(): self.read() return sorted(rec.spec for rec in self._data.values()) From fb73979345d16b4912ea1f6da148d35c676a6576 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 17 Sep 2015 16:09:59 -0700 Subject: [PATCH 027/121] Allow custom timeout for database locking. --- lib/spack/spack/database.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index cea56eb1b93..e74217a2627 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -44,6 +44,8 @@ # DB version. This is stuck in the DB file to track changes in format. _db_version = Version('0.9') +# Default timeout for spack database locks is 5 min. 
+_db_lock_timeout = 300 def _autospec(function): """Decorator that automatically converts the argument of a single-arg @@ -100,14 +102,14 @@ def __init__(self, root): self._last_write_time = 0 - def write_lock(self): + def write_lock(self, timeout=_db_lock_timeout): """Get a write lock context for use in a `with` block.""" - return self.lock.write_lock() + return self.lock.write_lock(timeout) - def read_lock(self): + def read_lock(self, timeout=_db_lock_timeout): """Get a read lock context for use in a `with` block.""" - return self.lock.read_lock() + return self.lock.read_lock(timeout) def _write_to_yaml(self, stream): From d0e22b22406c8fb064031dbd4ac887b7a9abbc95 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 18 Sep 2015 11:40:05 -0700 Subject: [PATCH 028/121] Add ref counting to database. This does not handle removal properly yet. --- lib/spack/spack/cmd/__init__.py | 2 +- lib/spack/spack/cmd/find.py | 35 ++++-- lib/spack/spack/cmd/module.py | 4 +- lib/spack/spack/cmd/uninstall.py | 2 +- lib/spack/spack/database.py | 188 ++++++++++++++++++++++++------- lib/spack/spack/package.py | 7 +- 6 files changed, 183 insertions(+), 55 deletions(-) diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index a8e8b1a48b3..d4778b1375a 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -125,7 +125,7 @@ def elide_list(line_list, max_num=10): def disambiguate_spec(spec): with spack.installed_db.read_lock(): - matching_specs = spack.installed_db.get_installed(spec) + matching_specs = spack.installed_db.query(spec) if not matching_specs: tty.die("Spec '%s' matches no installed packages." 
% spec) diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py index e2edd454f4d..6a0c3d11ff5 100644 --- a/lib/spack/spack/cmd/find.py +++ b/lib/spack/spack/cmd/find.py @@ -54,6 +54,16 @@ def setup_parser(subparser): '-L', '--very-long', action='store_true', dest='very_long', help='Show dependency hashes as well as versions.') + subparser.add_argument( + '-u', '--unknown', action='store_true', dest='unknown', + help='Show only specs Spack does not have a package for.') + subparser.add_argument( + '-m', '--missing', action='store_true', dest='missing', + help='Show missing dependencies as well as installed specs.') + subparser.add_argument( + '-M', '--only-missing', action='store_true', dest='only_missing', + help='Show only missing dependencies.') + subparser.add_argument( 'query_specs', nargs=argparse.REMAINDER, help='optional specs to filter results') @@ -113,6 +123,7 @@ def fmt(s): if hashes: string += gray_hash(s, hlen) + ' ' string += s.format('$-_$@$+', color=True) + return string colify(fmt(s) for s in specs) @@ -136,15 +147,23 @@ def find(parser, args): if not query_specs: return - # Get all the specs the user asked for - if not query_specs: - with spack.installed_db.read_lock(): - specs = set(spack.installed_db.installed_package_specs()) + # Set up query arguments. 
+ installed, known = True, any + if args.only_missing: + installed = False + elif args.missing: + installed = any + if args.unknown: + known = False + q_args = { 'installed' : installed, 'known' : known } - else: - with spack.installed_db.read_lock(): - results = [set(spack.installed_db.get_installed(qs)) for qs in query_specs] - specs = set.union(*results) + # Get all the specs the user asked for + with spack.installed_db.read_lock(): + if not query_specs: + specs = set(spack.installed_db.query(**q_args)) + else: + results = [set(spack.installed_db.query(qs, **q_args)) for qs in query_specs] + specs = set.union(*results) if not args.mode: args.mode = 'short' diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py index 215d877bd0a..654b0cb2fa7 100644 --- a/lib/spack/spack/cmd/module.py +++ b/lib/spack/spack/cmd/module.py @@ -65,7 +65,7 @@ def module_find(mtype, spec_array): tty.die("You can only pass one spec.") spec = specs[0] - specs = [s for s in spack.installed_db.installed_package_specs() if s.satisfies(spec)] + specs = spack.installed_db.query(spec) if len(specs) == 0: tty.die("No installed packages match spec %s" % spec) @@ -86,7 +86,7 @@ def module_find(mtype, spec_array): def module_refresh(): """Regenerate all module files for installed packages known to spack (some packages may no longer exist).""" - specs = [s for s in spack.installed_db.installed_known_package_specs()] + specs = [s for s in spack.installed_db.query(installed=True, known=True)] for name, cls in module_types.items(): tty.msg("Regenerating %s module files." 
% name) diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py index 7425db3ca32..7b7c32c0655 100644 --- a/lib/spack/spack/cmd/uninstall.py +++ b/lib/spack/spack/cmd/uninstall.py @@ -60,7 +60,7 @@ def uninstall(parser, args): # Fail and ask user to be unambiguous if it doesn't pkgs = [] for spec in specs: - matching_specs = spack.installed_db.get_installed(spec) + matching_specs = spack.installed_db.query(spec) if not args.all and len(matching_specs) > 1: tty.error("%s matches multiple packages:" % spec) print diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index e74217a2627..1d1c640d663 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -22,6 +22,23 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## +"""Spack's installation tracking database. + +The database serves two purposes: + + 1. It implements a cache on top of a potentially very large Spack + directory hierarchy, speeding up many operations that would + otherwise require filesystem access. + + 2. It will allow us to track external installations as well as lost + packages and their dependencies. + +Prior ot the implementation of this store, a direcotry layout served +as the authoritative database of packages in Spack. This module +provides a cache and a sanity checking mechanism for what is in the +filesystem. + +""" import os import time import socket @@ -58,18 +75,37 @@ def converter(self, spec_like, *args, **kwargs): class InstallRecord(object): - """A record represents one installation in the DB.""" - def __init__(self, spec, path): + """A record represents one installation in the DB. + + The record keeps track of the spec for the installation, its + install path, AND whether or not it is installed. 
We need the + installed flag in case a user either: + + a) blew away a directory, or + b) used spack uninstall -f to get rid of it + + If, in either case, the package was removed but others still + depend on it, we still need to track its spec, so we don't + actually remove from the database until a spec has no installed + dependents left. + + """ + def __init__(self, spec, path, installed): self.spec = spec self.path = path + self.installed = installed + self.ref_count = 0 def to_dict(self): - return { 'spec' : self.spec.to_node_dict(), - 'path' : self.path } + return { 'spec' : self.spec.to_node_dict(), + 'path' : self.path, + 'installed' : self.installed, + 'ref_count' : self.ref_count } @classmethod def from_dict(cls, d): - return InstallRecord(d['spec'], d['path']) + # TODO: check the dict more rigorously. + return InstallRecord(d['spec'], d['path'], d['installed'], d['ref_count']) class Database(object): @@ -136,9 +172,11 @@ def _write_to_yaml(self, stream): raise SpackYAMLError("error writing YAML database:", str(e)) - def _read_spec_from_yaml(self, hash_key, installs): + def _read_spec_from_yaml(self, hash_key, installs, parent_key=None): """Recursively construct a spec from a hash in a YAML database.""" - # TODO: check validity of hash_key records here. + if hash_key not in installs: + parent = read_spec(installs[parent_key]['path']) + spec_dict = installs[hash_key]['spec'] # Build spec from dict first. @@ -147,7 +185,8 @@ def _read_spec_from_yaml(self, hash_key, installs): # Add dependencies from other records in the install DB to # form a full spec. for dep_hash in spec_dict[spec.name]['dependencies'].values(): - spec._add_dependency(self._read_spec_from_yaml(dep_hash, installs)) + child = self._read_spec_from_yaml(dep_hash, installs, hash_key) + spec._add_dependency(child) return spec @@ -175,12 +214,12 @@ def check(cond, msg): check('database' in yfile, "No 'database' attribute in YAML.") - # High-level file checks. 
+ # High-level file checks db = yfile['database'] check('installs' in db, "No 'installs' in YAML DB.") check('version' in db, "No 'version' in YAML DB.") - # TODO: better version check. + # TODO: better version checking semantics. version = Version(db['version']) if version != _db_version: raise InvalidDatabaseVersionError(_db_version, version) @@ -190,14 +229,21 @@ def check(cond, msg): data = {} for hash_key, rec in installs.items(): try: + # This constructs a spec DAG from the list of all installs spec = self._read_spec_from_yaml(hash_key, installs) + + # Validate the spec by ensuring the stored and actual + # hashes are the same. spec_hash = spec.dag_hash() if not spec_hash == hash_key: tty.warn("Hash mismatch in database: %s -> spec with hash %s" % (hash_key, spec_hash)) - continue + continue # TODO: is skipping the right thing to do? - data[hash_key] = InstallRecord(spec, rec['path']) + # Insert the brand new spec in the database. Each + # spec has its own copies of its dependency specs. + # TODO: would a more immmutable spec implementation simplify this? + data[hash_key] = InstallRecord(spec, rec['path'], rec['installed']) except Exception as e: tty.warn("Invalid database reecord:", @@ -213,12 +259,29 @@ def reindex(self, directory_layout): """Build database index from scratch based from a directory layout.""" with self.write_lock(): data = {} + + # Ask the directory layout to traverse the filesystem. for spec in directory_layout.all_specs(): + # Create a spec for each known package and add it. path = directory_layout.path_for_spec(spec) hash_key = spec.dag_hash() - data[hash_key] = InstallRecord(spec, path) + data[hash_key] = InstallRecord(spec, path, True) + + # Recursively examine dependencies and add them, even + # if they are NOT installed. This ensures we know + # about missing dependencies. 
+ for dep in spec.traverse(root=False): + dep_hash = dep.dag_hash() + if dep_hash not in data: + path = directory_layout.path_for_spec(dep) + installed = os.path.isdir(path) + data[dep_hash] = InstallRecord(dep.copy(), path, installed) + data[dep_hash].ref_count += 1 + + # Assuming everything went ok, replace this object's data. self._data = data + # write out, blowing away the old version if necessary self.write() @@ -274,22 +337,37 @@ def is_dirty(self): @_autospec def add(self, spec, path): """Read the database from the set location - Add the specified entry as a dict - Write the database back to memory + + Add the specified entry as a dict, then write the database + back to memory. This assumes that ALL dependencies are already in + the database. Should not be called otherwise. + """ # Should always already be locked with self.write_lock(): self.read() - self._data[spec.dag_hash()] = InstallRecord(spec, path) + self._data[spec.dag_hash()] = InstallRecord(spec, path, True) + + # sanity check the dependencies in case something went + # wrong during install() + # TODO: ensure no races during distributed install. + for dep in spec.traverse(root=False): + assert dep.dag_hash() in self._data + self.write() @_autospec def remove(self, spec): - """ - Reads the database from the set location - Searches for and removes the specified spec - Writes the database back to memory + """Removes a spec from the database. To be called on uninstall. + + Reads the database, then: + + 1. Marks the spec as not installed. + 2. Removes the spec if it has no more dependents. + 3. If removed, recursively updates dependencies' ref counts + and remvoes them if they are no longer needed. 
+ """ # Should always already be locked with self.write_lock(): @@ -300,19 +378,13 @@ def remove(self, spec): self.write() - @_autospec - def get_installed(self, spec): - """Get installed specs that satisfy the provided spec constraint.""" - return [s for s in self.installed_package_specs() if s.satisfies(spec)] - - @_autospec def installed_extensions_for(self, extendee_spec): """ Return the specs of all packages that extend the given spec """ - for s in self.installed_package_specs(): + for s in self.query(): try: if s.package.extends(extendee_spec): yield s.package @@ -322,25 +394,59 @@ def installed_extensions_for(self, extendee_spec): # TODO: conditional way to do this instead of catching exceptions - def installed_package_specs(self): + def query(self, query_spec=any, known=any, installed=True): + """Run a query on the database. + + ``query_spec`` + Queries iterate through specs in the database and return + those that satisfy the supplied ``query_spec``. If + query_spec is `any`, This will match all specs in the + database. If it is a spec, we'll evaluate + ``spec.satisfies(query_spec)``. + + The query can be constrained by two additional attributes: + + ``known`` + Possible values: True, False, any + + Specs that are "known" are those for which Spack can + locate a ``package.py`` file -- i.e., Spack "knows" how to + install them. Specs that are unknown may represent + packages that existed in a previous version of Spack, but + have since either changed their name or been removed. + + ``installed`` + Possible values: True, False, any + + Specs for which a prefix exists are "installed". A spec + that is NOT installed will be in the database if some + other spec depends on it but its installation has gone + away since Spack installed it. + + TODO: Specs are a lot like queries. Should there be a + wildcard spec object, and should specs have attributes + like installed and known that can be queried? Or are + these really special cases that only belong here? 
+ """ - Read installed package names from the database - and return their specs - """ - # Should always already be locked with self.read_lock(): self.read() - return sorted(rec.spec for rec in self._data.values()) + + results = [] + for key, rec in self._data.items(): + if installed is not any and rec.installed != installed: + continue + if known is not any and spack.db.exists(rec.spec.name) != known: + continue + if query_spec is any or rec.spec.satisfies(query_spec): + results.append(rec.spec) + + return sorted(results) - def installed_known_package_specs(self): - """ - Read installed package names from the database. - Return only the specs for which the package is known - to this version of spack - """ - return [s for s in self.installed_package_specs() - if spack.db.exists(s.name)] + def missing(self, spec): + key = spec.dag_hash() + return key in self._data and not self._data[key].installed class CorruptDatabaseError(SpackError): diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index e64b4278521..e6944ce40cd 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -563,9 +563,12 @@ def installed(self): @property def installed_dependents(self): """Return a list of the specs of all installed packages that depend - on this one.""" + on this one. + + TODO: move this method to database.py? 
+ """ dependents = [] - for spec in spack.installed_db.installed_package_specs(): + for spec in spack.installed_db.query(): if self.name == spec.name: continue for dep in spec.traverse(): From e4e22ed17ec92a7be2895070e1d720ba4afb3dcb Mon Sep 17 00:00:00 2001 From: Adam Moody Date: Mon, 21 Sep 2015 16:19:50 -0700 Subject: [PATCH 029/121] add protobuf package --- var/spack/packages/protobuf/package.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 var/spack/packages/protobuf/package.py diff --git a/var/spack/packages/protobuf/package.py b/var/spack/packages/protobuf/package.py new file mode 100644 index 00000000000..34085c7ce95 --- /dev/null +++ b/var/spack/packages/protobuf/package.py @@ -0,0 +1,16 @@ +import os +from spack import * + +class Protobuf(Package): + """Google's data interchange format.""" + + homepage = "https://developers.google.com/protocol-buffers" + url = "https://github.com/google/protobuf/releases/download/v2.5.0/protobuf-2.5.0.tar.bz2" + + version('2.5.0', 'a72001a9067a4c2c4e0e836d0f92ece4') + + def install(self, spec, prefix): + configure("--prefix=" + prefix) + make() + make("check") + make("install") From b5d7cd55ce197bf4e6d476996ab874083efb86a2 Mon Sep 17 00:00:00 2001 From: Adam Moody Date: Mon, 21 Sep 2015 16:30:11 -0700 Subject: [PATCH 030/121] add glog package --- var/spack/packages/glog/package.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 var/spack/packages/glog/package.py diff --git a/var/spack/packages/glog/package.py b/var/spack/packages/glog/package.py new file mode 100644 index 00000000000..d73386b394b --- /dev/null +++ b/var/spack/packages/glog/package.py @@ -0,0 +1,15 @@ +import os +from spack import * + +class Glog(Package): + """C++ implementation of the Google logging module.""" + + homepage = "https://github.com/google/glog" + url = "https://github.com/google/glog/archive/v0.3.3.tar.gz" + + version('0.3.3', 'c1f86af27bd9c73186730aa957607ed0') + + def install(self, spec, 
prefix): + configure("--prefix=" + prefix) + make() + make("install") From 22ba02b22b50107463928878430dbab2b26c39d1 Mon Sep 17 00:00:00 2001 From: Adam Moody Date: Mon, 21 Sep 2015 16:40:23 -0700 Subject: [PATCH 031/121] add gflags package --- var/spack/packages/gflags/package.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 var/spack/packages/gflags/package.py diff --git a/var/spack/packages/gflags/package.py b/var/spack/packages/gflags/package.py new file mode 100644 index 00000000000..62dd80a0944 --- /dev/null +++ b/var/spack/packages/gflags/package.py @@ -0,0 +1,21 @@ +import os +from spack import * + +class Gflags(Package): + """The gflags package contains a C++ library that implements + commandline flags processing. It includes built-in support for + standard types such as string and the ability to define flags + in the source file in which they are used. Online documentation + available at: https://gflags.github.io/gflags/""" + + homepage = "https://gflags.github.io/gflags" + url = "https://github.com/gflags/gflags/archive/v2.1.2.tar.gz" + + version('2.1.2', 'ac432de923f9de1e9780b5254884599f') + + def install(self, spec, prefix): + cmake("-DCMAKE_INSTALL_PREFIX=" + prefix, + "-DBUILD_SHARED_LIBS=ON") + make() + make("test") + make("install") From 30acc6f2f04beb56ef67af385cc8543ceeca82af Mon Sep 17 00:00:00 2001 From: Adam Moody Date: Mon, 21 Sep 2015 16:45:59 -0700 Subject: [PATCH 032/121] add snappy package --- var/spack/packages/snappy/package.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 var/spack/packages/snappy/package.py diff --git a/var/spack/packages/snappy/package.py b/var/spack/packages/snappy/package.py new file mode 100644 index 00000000000..c8f9ceef7da --- /dev/null +++ b/var/spack/packages/snappy/package.py @@ -0,0 +1,15 @@ +import os +from spack import * + +class Snappy(Package): + """A fast compressor/decompressor: https://code.google.com/p/snappy""" + + homepage = 
"https://code.google.com/p/snappy" + url = "https://github.com/google/snappy/releases/download/1.1.3/snappy-1.1.3.tar.gz" + + version('1.1.3', '7358c82f133dc77798e4c2062a749b73') + + def install(self, spec, prefix): + configure("--prefix=" + prefix) + make() + make("install") From 3ade141829bc382cd7d50d02d79967e78d0de147 Mon Sep 17 00:00:00 2001 From: Adam Moody Date: Tue, 22 Sep 2015 11:59:09 -0700 Subject: [PATCH 033/121] add leveldb package --- var/spack/packages/leveldb/package.py | 29 +++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 var/spack/packages/leveldb/package.py diff --git a/var/spack/packages/leveldb/package.py b/var/spack/packages/leveldb/package.py new file mode 100644 index 00000000000..da68a9cbcb2 --- /dev/null +++ b/var/spack/packages/leveldb/package.py @@ -0,0 +1,29 @@ +import os +import glob +from spack import * + +class Leveldb(Package): + """LevelDB is a fast key-value storage library written at Google + that provides an ordered mapping from string keys to string values.""" + + homepage = "https://github.com/google/leveldb" + url = "https://github.com/google/leveldb/archive/v1.18.tar.gz" + + version('1.18', '73770de34a2a5ab34498d2e05b2b7fa0') + + depends_on("snappy") + + def install(self, spec, prefix): + make() + + mkdirp(prefix.include) + mkdirp(prefix.lib) + + cp = which('cp') + + # cp --preserve=links libleveldb.* prefix/lib + args = glob.glob('libleveldb.*') + args.append(prefix + '/lib') + cp('--preserve=links', *args) + + cp('-r', 'include/leveldb', prefix + '/include') From 84823ed828599aa03ba9d6cd49488d975652d242 Mon Sep 17 00:00:00 2001 From: Adam Moody Date: Tue, 22 Sep 2015 12:17:01 -0700 Subject: [PATCH 034/121] add lmdb package --- var/spack/packages/lmdb/package.py | 39 ++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 var/spack/packages/lmdb/package.py diff --git a/var/spack/packages/lmdb/package.py b/var/spack/packages/lmdb/package.py new file mode 100644 index 
00000000000..875b8100c5a --- /dev/null +++ b/var/spack/packages/lmdb/package.py @@ -0,0 +1,39 @@ +import os +from spack import * + +class Lmdb(Package): + """Read-only mirror of official repo on openldap.org. Issues and + pull requests here are ignored. Use OpenLDAP ITS for issues. + http://www.openldap.org/software/repo.html""" + + + homepage = "http://www.openldap.org/software/repo.html" + url = "https://github.com/LMDB/lmdb/archive/LMDB_0.9.16.tar.gz" + + version('0.9.16', '0de89730b8f3f5711c2b3a4ba517b648') + + def install(self, spec, prefix): + os.chdir('libraries/liblmdb') + + make() + + mkdirp(prefix.bin) + mkdirp(prefix + '/man/man1') + mkdirp(prefix.lib) + mkdirp(prefix.include) + + bins = ['mdb_stat', 'mdb_copy', 'mdb_dump', 'mdb_load'] + for f in bins: + install(f, prefix.bin) + + mans = ['mdb_stat.1', 'mdb_copy.1', 'mdb_dump.1', 'mdb_load.1'] + for f in mans: + install(f, prefix + '/man/man1') + + libs = ['liblmdb.a', 'liblmdb.so'] + for f in libs: + install(f, prefix.lib) + + includes = ['lmdb.h'] + for f in includes: + install(f, prefix.include) From 521632b3dc30c81fe4230ef010b972e65d80c47a Mon Sep 17 00:00:00 2001 From: Mario Melara Date: Wed, 23 Sep 2015 16:30:44 -0700 Subject: [PATCH 035/121] Added samtools bioinformatics software --- var/spack/packages/samtools/package.py | 18 +++++++++++++++++ var/spack/packages/samtools/samtools1.2.patch | 20 +++++++++++++++++++ 2 files changed, 38 insertions(+) create mode 100644 var/spack/packages/samtools/package.py create mode 100644 var/spack/packages/samtools/samtools1.2.patch diff --git a/var/spack/packages/samtools/package.py b/var/spack/packages/samtools/package.py new file mode 100644 index 00000000000..72900398d86 --- /dev/null +++ b/var/spack/packages/samtools/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Samtools(Package): + """SAM Tools provide various utilities for manipulating alignments in the SAM format, + including sorting, merging, indexing and generating + alignments in a 
per-position format""" + + homepage = "www.htslib.org" + version('1.2','988ec4c3058a6ceda36503eebecd4122',url = "https://github.com/samtools/samtools/releases/download/1.2/samtools-1.2.tar.bz2") + + depends_on("zlib") + depends_on("mpc") + parallel=False + patch("samtools1.2.patch",level=0) + + def install(self, spec, prefix): + make("prefix=%s" % prefix, "install") + diff --git a/var/spack/packages/samtools/samtools1.2.patch b/var/spack/packages/samtools/samtools1.2.patch new file mode 100644 index 00000000000..ead3ab4e2cb --- /dev/null +++ b/var/spack/packages/samtools/samtools1.2.patch @@ -0,0 +1,20 @@ +--- Makefile 2015-02-03 08:27:34.000000000 -0800 ++++ Makefile.new 2015-07-21 10:38:27.881406892 -0700 +@@ -26,7 +26,7 @@ + CFLAGS = -g -Wall -O2 + LDFLAGS = + LDLIBS = +-DFLAGS= -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_CURSES_LIB=1 ++DFLAGS= -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_CURSES_LIB=0 + LOBJS= bam_aux.o bam.o bam_import.o sam.o \ + sam_header.o bam_plbuf.o + AOBJS= bam_index.o bam_plcmd.o sam_view.o \ +@@ -37,7 +37,7 @@ + faidx.o stats.o stats_isize.o bam_flags.o bam_split.o \ + bam_tview.o bam_tview_curses.o bam_tview_html.o bam_lpileup.o + INCLUDES= -I. 
-I$(HTSDIR) +-LIBCURSES= -lcurses # -lXCurses ++#LIBCURSES= -lcurses # -lXCurses + + prefix = /usr/local + exec_prefix = $(prefix) From 13eb461ec31520c75d6d604038e455468d061a61 Mon Sep 17 00:00:00 2001 From: Mario Melara Date: Thu, 24 Sep 2015 11:21:54 -0700 Subject: [PATCH 036/121] Adding bowtie2 package along with patch --- var/spack/packages/bowtie2/bowtie2-2.5.patch | 16 +++++++++++++ var/spack/packages/bowtie2/package.py | 24 ++++++++++++++++++++ 2 files changed, 40 insertions(+) create mode 100644 var/spack/packages/bowtie2/bowtie2-2.5.patch create mode 100644 var/spack/packages/bowtie2/package.py diff --git a/var/spack/packages/bowtie2/bowtie2-2.5.patch b/var/spack/packages/bowtie2/bowtie2-2.5.patch new file mode 100644 index 00000000000..290be39c732 --- /dev/null +++ b/var/spack/packages/bowtie2/bowtie2-2.5.patch @@ -0,0 +1,16 @@ +--- Makefile 2015-02-26 10:50:00.000000000 -0800 ++++ Makefile.new 2015-07-29 18:03:59.891357399 -0700 +@@ -22,10 +22,10 @@ + # + + INC = +-GCC_PREFIX = $(shell dirname `which gcc`) ++GCC_PREFIX = + GCC_SUFFIX = +-CC = $(GCC_PREFIX)/gcc$(GCC_SUFFIX) +-CPP = $(GCC_PREFIX)/g++$(GCC_SUFFIX) ++CC = cc ++CPP = c++ + CXX = $(CPP) + HEADERS = $(wildcard *.h) + BOWTIE_MM = 1 diff --git a/var/spack/packages/bowtie2/package.py b/var/spack/packages/bowtie2/package.py new file mode 100644 index 00000000000..339aab6598d --- /dev/null +++ b/var/spack/packages/bowtie2/package.py @@ -0,0 +1,24 @@ +from spack import * +from glob import glob +class Bowtie2(Package): + """Description""" + homepage = "bowtie-bio.sourceforge.net/bowtie2/index.shtml" + version('2.2.5','51fa97a862d248d7ee660efc1147c75f', url = "http://downloads.sourceforge.net/project/bowtie-bio/bowtie2/2.2.5/bowtie2-2.2.5-source.zip") + + patch('bowtie2-2.5.patch',when='@2.2.5', level=0) + + def install(self, spec, prefix): + make() + mkdirp(prefix.bin) + for bow in glob("bowtie2*"): + install(bow, prefix.bin) + # install('bowtie2',prefix.bin) + # install('bowtie2-align-l',prefix.bin) 
+ # install('bowtie2-align-s',prefix.bin) + # install('bowtie2-build',prefix.bin) + # install('bowtie2-build-l',prefix.bin) + # install('bowtie2-build-s',prefix.bin) + # install('bowtie2-inspect',prefix.bin) + # install('bowtie2-inspect-l',prefix.bin) + # install('bowtie2-inspect-s',prefix.bin) + From adbd393c390dd78dca6cdb986ae72835dc1bf8b1 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 27 Sep 2015 16:52:38 -0700 Subject: [PATCH 037/121] Remove special characters (@, %, +, ~, etc) from stage name --- lib/spack/spack/package.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 3507807373e..61606d05905 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -438,9 +438,16 @@ def stage(self): raise ValueError("Can only get a stage for a concrete package.") if self._stage is None: + # Construct a mirror path (TODO: get this out of package.py) mp = spack.mirror.mirror_archive_path(self.spec) - self._stage = Stage( - self.fetcher, mirror_path=mp, name=self.spec.short_spec) + + # Construct a path where the stage should build.. + s = self.spec + stage_name = "%s-%s-%s" % (s.name, s.version, s.dag_hash()) + + # Build the stage + self._stage = Stage(self.fetcher, mirror_path=mp, name=stage_name) + return self._stage From 8818f4ac5e9ce8e7669e06fa71ca17b8301bda19 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 27 Sep 2015 16:57:20 -0700 Subject: [PATCH 038/121] Remove enabled variants from install prefix. - these make the prefix too long in many cases. - users can figure out which install is which by querying. 
--- lib/spack/spack/directory_layout.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index e61929d8fdd..85ecc1ce2b1 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -187,14 +187,9 @@ def hidden_file_paths(self): def relative_path_for_spec(self, spec): _check_concrete(spec) - enabled_variants = ( - '-' + v.name for v in spec.variants.values() - if v.enabled) - - dir_name = "%s-%s%s-%s" % ( + dir_name = "%s-%s-%s" % ( spec.name, spec.version, - ''.join(enabled_variants), spec.dag_hash(self.hash_len)) path = join_path( From 5fda7daf57e2362c803d4f2152da93ed270818d9 Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Thu, 17 Sep 2015 11:29:27 -0700 Subject: [PATCH 039/121] an ordered database test --- lib/spack/spack/test/__init__.py | 3 +- lib/spack/spack/test/database.py | 104 +++++++++++++++++++++++++++++++ 2 files changed, 106 insertions(+), 1 deletion(-) create mode 100644 lib/spack/spack/test/database.py diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index 6b3715be6f9..c3b39b76f8a 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -56,7 +56,8 @@ 'spec_yaml', 'optional_deps', 'make_executable', - 'configure_guess'] + 'configure_guess', + 'database'] def list_tests(): diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py new file mode 100644 index 00000000000..a3386bad991 --- /dev/null +++ b/lib/spack/spack/test/database.py @@ -0,0 +1,104 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. 
+# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +""" +These tests check the database is functioning properly, +both in memory and in its file +""" +import unittest + +from llnl.util.lock import * +from llnl.util.filesystem import join_path + +import spack +from spack.database import Database + + +class DatabaseTest(unittest.TestCase): + def setUp(self): + self.original_db = spack.installed_db + spack.installed_db = Database(self.original_db._root,"_test_index.yaml") + self.file_path = join_path(self.original_db._root,"_test_index.yaml") + if os.path.exists(self.file_path): + os.remove(self.file_path) + + def tearDown(self): + spack.installed_db = self.original_db + os.remove(self.file_path) + + def _test_read_from_install_tree(self): + specs = spack.install_layout.all_specs() + spack.installed_db.read_database() + spack.installed_db.write() + for sph in spack.installed_db._data: + self.assertTrue(sph['spec'] in specs) + self.assertEqual(len(specs),len(spack.installed_db._data)) + + def _test_remove_and_add(self): + specs = spack.install_layout.all_specs() + 
spack.installed_db.remove(specs[len(specs)-1]) + for sph in spack.installed_db._data: + self.assertTrue(sph['spec'] in specs[:len(specs)-1]) + self.assertEqual(len(specs)-1,len(spack.installed_db._data)) + + spack.installed_db.add(specs[len(specs)-1],"") + for sph in spack.installed_db._data: + self.assertTrue(sph['spec'] in specs) + self.assertEqual(len(specs),len(spack.installed_db._data)) + + def _test_read_from_file(self): + spack.installed_db.read_database() + size = len(spack.installed_db._data) + spack.installed_db._data = spack.installed_db._data[1:] + os.utime(spack.installed_db._file_path,None) + spack.installed_db.read_database() + self.assertEqual(size,len(spack.installed_db._data)) + + specs = spack.install_layout.all_specs() + self.assertEqual(size,len(specs)) + for sph in spack.installed_db._data: + self.assertTrue(sph['spec'] in specs) + + + def _test_write_to_file(self): + spack.installed_db.read_database() + size = len(spack.installed_db._data) + real_data = spack.installed_db._data + spack.installed_db._data = real_data[:size-1] + spack.installed_db.write() + spack.installed_db._data = real_data + os.utime(spack.installed_db._file_path,None) + spack.installed_db.read_database() + self.assertEqual(size-1,len(spack.installed_db._data)) + + specs = spack.install_layout.all_specs() + self.assertEqual(size,len(specs)) + for sph in spack.installed_db._data: + self.assertTrue(sph['spec'] in specs[:size-1]) + + def test_ordered_test(self): + self._test_read_from_install_tree() + self._test_remove_and_add() + self._test_read_from_file() + self._test_write_to_file() From 5563c666758f593b311a2de3e47cf617966a4c25 Mon Sep 17 00:00:00 2001 From: Adam Moody Date: Tue, 29 Sep 2015 18:18:34 -0700 Subject: [PATCH 040/121] update scr to 1.1.8 --- var/spack/packages/scr/package.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/var/spack/packages/scr/package.py b/var/spack/packages/scr/package.py index 74b75a8742e..9fb758f0722 100644 --- 
a/var/spack/packages/scr/package.py +++ b/var/spack/packages/scr/package.py @@ -28,12 +28,14 @@ class Scr(Package): """SCR caches checkpoint data in storage on the compute nodes of a Linux cluster to provide a fast, scalable checkpoint/restart capability for MPI codes""" + homepage = "https://computation.llnl.gov/project/scr/" - url = "http://downloads.sourceforge.net/project/scalablecr/releases/scr-1.1-7.tar.gz" depends_on("mpi") +# depends_on("dtcmp") - version('1.1-7', 'a5930e9ab27d1b7049447c2fd7734ebd') + version('1.1-7', 'a5930e9ab27d1b7049447c2fd7734ebd', url='http://downloads.sourceforge.net/project/scalablecr/releases/scr-1.1-7.tar.gz') + version('1.1.8', '6a0f11ad18e27fcfc00a271ff587b06e', url='https://github.com/hpc/scr/releases/download/v1.1.8/scr-1.1.8.tar.gz') def install(self, spec, prefix): configure("--prefix=" + prefix, From 75d30b08fbf09cbbc587891d0df472327d3669b9 Mon Sep 17 00:00:00 2001 From: Mario Melara Date: Thu, 1 Oct 2015 21:51:55 -0700 Subject: [PATCH 041/121] Added current version 3.2 of libffi version 3.1 has a bug that interferes with glib --- var/spack/packages/libffi/package.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/var/spack/packages/libffi/package.py b/var/spack/packages/libffi/package.py index 2c1c4eed4d3..acec0317171 100644 --- a/var/spack/packages/libffi/package.py +++ b/var/spack/packages/libffi/package.py @@ -6,11 +6,12 @@ class Libffi(Package): to call any function specified by a call interface description at run time.""" homepage = "https://sourceware.org/libffi/" - url = "ftp://sourceware.org/pub/libffi/libffi-3.1.tar.gz" - - version('3.1', 'f5898b29bbfd70502831a212d9249d10') + + version('3.2.1','83b89587607e3eb65c70d361f13bab43',url = "ftp://sourceware.org/pub/libffi/libffi-3.2.1.tar.gz") + #version('3.1', 'f5898b29bbfd70502831a212d9249d10',url = "ftp://sourceware.org/pub/libffi/libffi-3.1.tar.gz") # Has a bug $(lib64) instead of ${lib64} in libffi.pc def install(self, spec, prefix): 
configure("--prefix=%s" % prefix) make() make("install") + From 6fcf16b710ef5e8ec8639aa191200661be7be2fd Mon Sep 17 00:00:00 2001 From: "Gregory L. Lee" Date: Mon, 5 Oct 2015 12:59:26 -0700 Subject: [PATCH 042/121] added libxcb version 1.11.1 --- var/spack/packages/libxcb/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/packages/libxcb/package.py b/var/spack/packages/libxcb/package.py index 521cd0d4757..16a5525c0d3 100644 --- a/var/spack/packages/libxcb/package.py +++ b/var/spack/packages/libxcb/package.py @@ -10,7 +10,7 @@ class Libxcb(Package): url = "http://xcb.freedesktop.org/dist/libxcb-1.11.tar.gz" version('1.11', '1698dd837d7e6e94d029dbe8b3a82deb') - + version('1.11.1', '118623c15a96b08622603a71d8789bf3') depends_on("python") depends_on("xcb-proto") From f068d1ba57518d9fcaa0c03ce0f483ec7f2b49f3 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Sat, 26 Sep 2015 10:12:24 +0200 Subject: [PATCH 043/121] GCC : added variants for libelf, binutils, isl. 
gcc@5.0: still has issues --- var/spack/packages/gcc/package.py | 57 ++++++++++++++++++++----------- 1 file changed, 38 insertions(+), 19 deletions(-) diff --git a/var/spack/packages/gcc/package.py b/var/spack/packages/gcc/package.py index 5e3d1a3efac..925c9d35cec 100644 --- a/var/spack/packages/gcc/package.py +++ b/var/spack/packages/gcc/package.py @@ -42,15 +42,19 @@ class Gcc(Package): version('4.7.4', '4c696da46297de6ae77a82797d2abe28') version('4.6.4', 'b407a3d1480c11667f293bfb1f17d1a4') version('4.5.4', '27e459c2566b8209ab064570e1b378f7') + + variant('binutils', default=False, description='Add a dependency on binutils') + variant('libelf', default=False, description='Add a dependency on libelf') + variant('isl', default=True, description='Add a dependency on isl') depends_on("mpfr") depends_on("gmp") depends_on("mpc") # when @4.5: - depends_on("libelf") - depends_on("binutils") + depends_on("libelf", when='+libelf') + depends_on("binutils",when="+binutils") # Save these until we can do optional deps. 
- #depends_on("isl") + depends_on("isl", when='@5.0:+isl') #depends_on("ppl") #depends_on("cloog") @@ -62,23 +66,38 @@ def install(self, spec, prefix): if spec.satisfies("@4.7.1:"): enabled_languages.add('go') + # Generic options to compile GCC + options = ["--prefix=%s" % prefix, + "--libdir=%s/lib64" % prefix, + "--disable-multilib", + "--enable-languages=" + ','.join(enabled_languages), + "--with-mpc=%s" % spec['mpc'].prefix, + "--with-mpfr=%s" % spec['mpfr'].prefix, + "--with-gmp=%s" % spec['gmp'].prefix, + "--enable-lto", + "--with-gnu-ld", + "--with-gnu-as", + "--with-quad"] + # Libelf + if '+libelf' in spec: + libelf_options = ["--with-libelf=%s" % spec['libelf'].prefix] + options.extend(libelf_options) + + # Binutils + if '+binutils' in spec: + binutils_options = ["--with-stage1-ldflags=%s" % self.rpath_args, + "--with-boot-ldflags=%s" % self.rpath_args, + "--with-ld=%s/bin/ld" % spec['binutils'].prefix, + "--with-as=%s/bin/as" % spec['binutils'].prefix] + options.extend(binutils_options) + + # Isl + if spec.satisfies('@5.0:+isl'): + isl_options = ["--with-isl=%s" % spec['isl'].prefix] + options.extend(isl_options) + # Rest of install is straightforward. 
- configure("--prefix=%s" % prefix, - "--libdir=%s/lib64" % prefix, - "--disable-multilib", - "--enable-languages=" + ','.join(enabled_languages), - "--with-mpc=%s" % spec['mpc'].prefix, - "--with-mpfr=%s" % spec['mpfr'].prefix, - "--with-gmp=%s" % spec['gmp'].prefix, - "--with-libelf=%s" % spec['libelf'].prefix, - "--with-stage1-ldflags=%s" % self.rpath_args, - "--with-boot-ldflags=%s" % self.rpath_args, - "--enable-lto", - "--with-gnu-ld", - "--with-ld=%s/bin/ld" % spec['binutils'].prefix, - "--with-gnu-as", - "--with-as=%s/bin/as" % spec['binutils'].prefix, - "--with-quad") + configure(*options) make() make("install") From d44571257a6cf4c236d1aad489f8e6202acbb631 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 6 Oct 2015 15:57:17 +0200 Subject: [PATCH 044/121] spec file : everything is dumped, only link rule is modified --- var/spack/packages/gcc/package.py | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/var/spack/packages/gcc/package.py b/var/spack/packages/gcc/package.py index 925c9d35cec..ddc303bfdb9 100644 --- a/var/spack/packages/gcc/package.py +++ b/var/spack/packages/gcc/package.py @@ -36,8 +36,11 @@ class Gcc(Package): list_url = 'http://open-source-box.org/gcc/' list_depth = 2 + version('5.2.0', 'a51bcfeb3da7dd4c623e27207ed43467') + version('4.9.3', '6f831b4d251872736e8e9cc09746f327') version('4.9.2', '4df8ee253b7f3863ad0b86359cd39c43') version('4.9.1', 'fddf71348546af523353bd43d34919c1') + version('4.8.5', '80d2c2982a3392bb0b89673ff136e223') version('4.8.4', '5a84a30839b2aca22a2d723de2a626ec') version('4.7.4', '4c696da46297de6ae77a82797d2abe28') version('4.6.4', 'b407a3d1480c11667f293bfb1f17d1a4') @@ -120,12 +123,11 @@ def write_rpath_specs(self): gcc = Executable(join_path(self.prefix.bin, 'gcc')) lines = gcc('-dumpspecs', return_output=True).split("\n") - for i, line in enumerate(lines): - if line.startswith("*link:"): - specs_file = join_path(self.spec_dir, 'specs') - with closing(open(specs_file, 
'w')) as out: - out.write(lines[i] + "\n") - out.write("-rpath %s/lib:%s/lib64 \\\n" - % (self.prefix, self.prefix)) - out.write(lines[i+1] + "\n") - set_install_permissions(specs_file) + specs_file = join_path(self.spec_dir, 'specs') + with closing(open(specs_file, 'w')) as out: + for line in lines: + if line.startswith("*link:"): + out.write(line + "\n") + out.write("-rpath %s/lib:%s/lib64 \\\n"% (self.prefix, self.prefix)) + out.write(line + "\n") + set_install_permissions(specs_file) From 17de9a37f1d064e18ff61145196b1cb77ae8b64b Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 6 Oct 2015 18:20:48 +0200 Subject: [PATCH 045/121] gcc : fixed spec file --- var/spack/packages/gcc/package.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/var/spack/packages/gcc/package.py b/var/spack/packages/gcc/package.py index ddc303bfdb9..3d0f2caff51 100644 --- a/var/spack/packages/gcc/package.py +++ b/var/spack/packages/gcc/package.py @@ -122,12 +122,11 @@ def write_rpath_specs(self): return gcc = Executable(join_path(self.prefix.bin, 'gcc')) - lines = gcc('-dumpspecs', return_output=True).split("\n") + lines = gcc('-dumpspecs', return_output=True).strip().split("\n") specs_file = join_path(self.spec_dir, 'specs') with closing(open(specs_file, 'w')) as out: for line in lines: - if line.startswith("*link:"): - out.write(line + "\n") - out.write("-rpath %s/lib:%s/lib64 \\\n"% (self.prefix, self.prefix)) out.write(line + "\n") + if line.startswith("*link:"): + out.write("-rpath %s/lib:%s/lib64 \\\n"% (self.prefix, self.prefix)) set_install_permissions(specs_file) From 58c53738900db52615062387165e1e8ba9801362 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 8 Oct 2015 00:14:44 -0700 Subject: [PATCH 046/121] update warning message for spack -k. - warning said it didn't do checksums; really does. 
--- bin/spack | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/spack b/bin/spack index 5c042edd2d7..127a85f6fe4 100755 --- a/bin/spack +++ b/bin/spack @@ -118,7 +118,7 @@ def main(): # If the user asked for it, don't check ssl certs. if args.insecure: - tty.warn("You asked for --insecure, which does not check SSL certificates or checksums.") + tty.warn("You asked for --insecure, which does not check SSL certificates.") spack.curl.add_default_arg('-k') # Try to load the particular command asked for and run it From 5dd39bfca56e984089236f4383d3ad348e1b0a54 Mon Sep 17 00:00:00 2001 From: "Gregory L. Lee" Date: Thu, 8 Oct 2015 08:12:30 -0700 Subject: [PATCH 047/121] bug fix for github issue 122 --- var/spack/packages/bzip2/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/packages/bzip2/package.py b/var/spack/packages/bzip2/package.py index fbd5869a532..d88336664db 100644 --- a/var/spack/packages/bzip2/package.py +++ b/var/spack/packages/bzip2/package.py @@ -22,8 +22,10 @@ def install(self, spec, prefix): bzip2_exe = join_path(prefix.bin, 'bzip2') install('bzip2-shared', bzip2_exe) - for libfile in glob('libbz2.so*'): + for i, libfile in enumerate(glob('libbz2.so*')): install(libfile, prefix.lib) + if i == 0: + symlink(join_path(prefix.lib, libfile), join_path(prefix.lib, 'libbz2.so')) bunzip2 = join_path(prefix.bin, 'bunzip2') remove(bunzip2) From 2c81875019803af7b7f08b070f37c45e51a3c7d5 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 12 Oct 2015 14:44:41 -0700 Subject: [PATCH 048/121] Fix bug in colify color handling. 
--- lib/spack/llnl/util/tty/colify.py | 18 ++++-------------- 1 file changed, 4 insertions(+), 14 deletions(-) diff --git a/lib/spack/llnl/util/tty/colify.py b/lib/spack/llnl/util/tty/colify.py index 66c52c39687..db928444c74 100644 --- a/lib/spack/llnl/util/tty/colify.py +++ b/lib/spack/llnl/util/tty/colify.py @@ -33,8 +33,7 @@ from StringIO import StringIO from llnl.util.tty import terminal_size -from llnl.util.tty.color import clen - +from llnl.util.tty.color import clen, cextra class ColumnConfig: def __init__(self, cols): @@ -42,7 +41,6 @@ def __init__(self, cols): self.line_length = 0 self.valid = True self.widths = [0] * cols # does not include ansi colors - self.cwidths = [0] * cols # includes ansi colors def __repr__(self): attrs = [(a,getattr(self, a)) for a in dir(self) if not a.startswith("__")] @@ -66,8 +64,6 @@ def config_variable_cols(elts, console_width, padding, cols=0): # Get a bound on the most columns we could possibly have. # 'clen' ignores length of ansi color sequences. lengths = [clen(e) for e in elts] - clengths = [len(e) for e in elts] - max_cols = max(1, console_width / (min(lengths) + padding)) max_cols = min(len(elts), max_cols) @@ -85,7 +81,6 @@ def config_variable_cols(elts, console_width, padding, cols=0): if conf.widths[col] < (length + p): conf.line_length += length + p - conf.widths[col] conf.widths[col] = length + p - conf.cwidths[col] = clengths[i] + p conf.valid = (conf.line_length < console_width) try: @@ -118,7 +113,6 @@ def config_uniform_cols(elts, console_width, padding, cols=0): config = ColumnConfig(cols) config.widths = [max_len] * cols - config.cwidths = [max_clen] * cols return config @@ -147,9 +141,6 @@ def colify(elts, **options): method= Method to use to fit columns. Options are variable or uniform. Variable-width columns are tighter, uniform columns are all the same width and fit less data on the screen. - - len= Function to use for calculating string length. - Useful for ignoring ansi color. Default is 'len'. 
""" # Get keyword arguments or set defaults cols = options.pop("cols", 0) @@ -199,9 +190,6 @@ def colify(elts, **options): raise ValueError("method must be one of: " + allowed_methods) cols = config.cols - formats = ["%%-%ds" % width for width in config.cwidths[:-1]] - formats.append("%s") # last column has no trailing space - rows = (len(elts) + cols - 1) / cols rows_last_col = len(elts) % rows @@ -209,7 +197,9 @@ def colify(elts, **options): output.write(" " * indent) for col in xrange(cols): elt = col * rows + row - output.write(formats[col] % elts[elt]) + width = config.widths[col] + cextra(elts[elt]) + fmt = '%%-%ds' % width + output.write(fmt % elts[elt]) output.write("\n") row += 1 From b7249d66b3b47f710ddbb1719f433b507322b5a3 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Mon, 12 Oct 2015 19:09:11 -0700 Subject: [PATCH 049/121] Adding command testinstall. See "spack testinstall -h" for documentation. Still need to add output formatting (in a commonly parse-able format like Junit or TAP). May want to adjust how the build log is accessed in case of a build failure. --- lib/spack/spack/cmd/testinstall.py | 129 +++++++++++++++++++++++++++++ 1 file changed, 129 insertions(+) create mode 100644 lib/spack/spack/cmd/testinstall.py diff --git a/lib/spack/spack/cmd/testinstall.py b/lib/spack/spack/cmd/testinstall.py new file mode 100644 index 00000000000..d3a2cae3c2a --- /dev/null +++ b/lib/spack/spack/cmd/testinstall.py @@ -0,0 +1,129 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from external import argparse +import xml.etree.ElementTree as ET + +import llnl.util.tty as tty +from llnl.util.filesystem import * + +import spack +import spack.cmd + +description = "Build and install packages" + +def setup_parser(subparser): + #subparser.add_argument( + # '-i', '--ignore-dependencies', action='store_true', dest='ignore_deps', + # help="Do not try to install dependencies of requested packages.") + + subparser.add_argument( + '-j', '--jobs', action='store', type=int, + help="Explicitly set number of make jobs. 
Default is #cpus.") + + #always false for test + #subparser.add_argument( + # '--keep-prefix', action='store_true', dest='keep_prefix', + # help="Don't remove the install prefix if installation fails.") + + #always true for test + #subparser.add_argument( + # '--keep-stage', action='store_true', dest='keep_stage', + # help="Don't remove the build stage if installation succeeds.") + + subparser.add_argument( + '-n', '--no-checksum', action='store_true', dest='no_checksum', + help="Do not check packages against checksum") + subparser.add_argument( + '-v', '--verbose', action='store_true', dest='verbose', + help="Display verbose build output while installing.") + + #subparser.add_argument( + # '--fake', action='store_true', dest='fake', + # help="Fake install. Just remove the prefix and touch a fake file in it.") + + subparser.add_argument( + 'outputdir', help="test output goes in this directory, 1 file per package") + + subparser.add_argument( + 'packages', nargs=argparse.REMAINDER, help="specs of packages to install") + + +class JunitTestResult(object): + def __init__(self): + self.root = Element('testsuite') + self.tests = [] + + def addTest(self, identifier, passed=True, output=None): + self.tests.append((identifier, passed, output)) + + def output(self): + self.root.set('tests', '{0}'.format(len(self.tests))) + + +def testinstall(parser, args): + if not args.packages: + tty.die("install requires at least one package argument") + + if args.jobs is not None: + if args.jobs <= 0: + tty.die("The -j option must be a positive integer!") + + if args.no_checksum: + spack.do_checksum = False # TODO: remove this global. 
+ + print "Output to:", args.outputdir + + specs = spack.cmd.parse_specs(args.packages, concretize=True) + try: + for spec in specs: + #import pdb; pdb.set_trace() + package = spack.db.get(spec) + package.do_install( + keep_prefix=False, + keep_stage=False, + ignore_deps=False, + make_jobs=args.jobs, + verbose=args.verbose, + fake=False) + finally: + for spec in specs: + package = spack.db.get(spec) + #import pdb; pdb.set_trace() + + print spec.name + print spec.version + print spec.dag_hash() + + if package.installed: + installLog = spack.install_layout.build_log_path(spec) + else: + #TODO: search recursively under stage.path instead of only within + # stage.source_path + installLog = join_path(package.stage.source_path, 'spack-build.out') + + with open(installLog, 'rb') as F: + for line in F.readlines()[:10]: + print "\t{0}".format(line.strip()) + From 6cd22e5786dbf40f1261b9b0410bdafbb6dd6f29 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Mon, 12 Oct 2015 20:49:23 -0700 Subject: [PATCH 050/121] 1. Added Junit XML format 2. Specify output to a file vs. a directory 3. Use [1] and [2] to write an XML file tracking success of package installs in Junit XML format --- lib/spack/spack/cmd/testinstall.py | 50 ++++++++++++++++++++---------- 1 file changed, 34 insertions(+), 16 deletions(-) diff --git a/lib/spack/spack/cmd/testinstall.py b/lib/spack/spack/cmd/testinstall.py index d3a2cae3c2a..7ebadbd3444 100644 --- a/lib/spack/spack/cmd/testinstall.py +++ b/lib/spack/spack/cmd/testinstall.py @@ -64,24 +64,43 @@ def setup_parser(subparser): # help="Fake install. 
Just remove the prefix and touch a fake file in it.") subparser.add_argument( - 'outputdir', help="test output goes in this directory, 1 file per package") + 'output', help="test output goes in this file") subparser.add_argument( 'packages', nargs=argparse.REMAINDER, help="specs of packages to install") -class JunitTestResult(object): +class JunitResultFormat(object): def __init__(self): - self.root = Element('testsuite') + self.root = ET.Element('testsuite') self.tests = [] - def addTest(self, identifier, passed=True, output=None): - self.tests.append((identifier, passed, output)) + def addTest(self, buildId, passed=True, buildLog=None): + self.tests.append((buildId, passed, buildLog)) - def output(self): + def writeTo(self, stream): self.root.set('tests', '{0}'.format(len(self.tests))) - - + for buildId, passed, buildLog in self.tests: + testcase = ET.SubElement(self.root, 'testcase') + testcase.set('classname', buildId.name) + testcase.set('name', buildId.stringId()) + if not passed: + failure = ET.SubElement(testcase, 'failure') + failure.set('type', "Build Error") + failure.text = buildLog + ET.ElementTree(self.root).write(stream) + + +class BuildId(object): + def __init__(self, name, version, hashId): + self.name = name + self.version = version + self.hashId = hashId + + def stringId(self): + return "-".join(str(x) for x in (self.name, self.version, self.hashId)) + + def testinstall(parser, args): if not args.packages: tty.die("install requires at least one package argument") @@ -93,8 +112,6 @@ def testinstall(parser, args): if args.no_checksum: spack.do_checksum = False # TODO: remove this global. 
- print "Output to:", args.outputdir - specs = spack.cmd.parse_specs(args.packages, concretize=True) try: for spec in specs: @@ -108,13 +125,12 @@ def testinstall(parser, args): verbose=args.verbose, fake=False) finally: + jrf = JunitResultFormat() for spec in specs: package = spack.db.get(spec) #import pdb; pdb.set_trace() - print spec.name - print spec.version - print spec.dag_hash() + bId = BuildId(spec.name, spec.version, spec.dag_hash()) if package.installed: installLog = spack.install_layout.build_log_path(spec) @@ -124,6 +140,8 @@ def testinstall(parser, args): installLog = join_path(package.stage.source_path, 'spack-build.out') with open(installLog, 'rb') as F: - for line in F.readlines()[:10]: - print "\t{0}".format(line.strip()) - + buildLog = F.read() #TODO: this may not return all output + jrf.addTest(bId, package.installed, buildLog) + + with open(args.output, 'wb') as F: + jrf.writeTo(F) From 9f56d9c807d9d3efc7cde0591bd53d3db404dacc Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Mon, 12 Oct 2015 20:56:03 -0700 Subject: [PATCH 051/121] Don't create test output for any package that was already installed. --- lib/spack/spack/cmd/testinstall.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/lib/spack/spack/cmd/testinstall.py b/lib/spack/spack/cmd/testinstall.py index 7ebadbd3444..04e594c0b85 100644 --- a/lib/spack/spack/cmd/testinstall.py +++ b/lib/spack/spack/cmd/testinstall.py @@ -113,23 +113,23 @@ def testinstall(parser, args): spack.do_checksum = False # TODO: remove this global. 
specs = spack.cmd.parse_specs(args.packages, concretize=True) + newInstalls = list() try: for spec in specs: - #import pdb; pdb.set_trace() package = spack.db.get(spec) - package.do_install( - keep_prefix=False, - keep_stage=False, - ignore_deps=False, - make_jobs=args.jobs, - verbose=args.verbose, - fake=False) + if not package.installed: + newInstalls.append(spec) + package.do_install( + keep_prefix=False, + keep_stage=False, + ignore_deps=False, + make_jobs=args.jobs, + verbose=args.verbose, + fake=False) finally: jrf = JunitResultFormat() - for spec in specs: + for spec in newInstalls: package = spack.db.get(spec) - #import pdb; pdb.set_trace() - bId = BuildId(spec.name, spec.version, spec.dag_hash()) if package.installed: From d16095c8560cbaae1020a8e84494b8877bfe36f5 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 13 Oct 2015 10:35:19 -0700 Subject: [PATCH 052/121] Add forgotten file from previous commit. --- lib/spack/llnl/util/tty/color.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lib/spack/llnl/util/tty/color.py b/lib/spack/llnl/util/tty/color.py index 22080a7b37f..0d09303da09 100644 --- a/lib/spack/llnl/util/tty/color.py +++ b/lib/spack/llnl/util/tty/color.py @@ -158,6 +158,11 @@ def clen(string): return len(re.sub(r'\033[^m]*m', '', string)) +def cextra(string): + """"Length of extra color characters in a string""" + return len(''.join(re.findall(r'\033[^m]*m', string))) + + def cwrite(string, stream=sys.stdout, color=None): """Replace all color expressions in string with ANSI control codes and write the result to the stream. If color is From 1ce6d8b627fac204fc4a4500ea28b4733dd172dd Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Tue, 13 Oct 2015 10:41:47 -0700 Subject: [PATCH 053/121] Add spec YAML format to test output. 
--- lib/spack/spack/cmd/testinstall.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/lib/spack/spack/cmd/testinstall.py b/lib/spack/spack/cmd/testinstall.py index 04e594c0b85..3a9e1e14e16 100644 --- a/lib/spack/spack/cmd/testinstall.py +++ b/lib/spack/spack/cmd/testinstall.py @@ -75,19 +75,19 @@ def __init__(self): self.root = ET.Element('testsuite') self.tests = [] - def addTest(self, buildId, passed=True, buildLog=None): - self.tests.append((buildId, passed, buildLog)) + def addTest(self, buildId, passed=True, buildInfo=None): + self.tests.append((buildId, passed, buildInfo)) def writeTo(self, stream): self.root.set('tests', '{0}'.format(len(self.tests))) - for buildId, passed, buildLog in self.tests: + for buildId, passed, buildInfo in self.tests: testcase = ET.SubElement(self.root, 'testcase') testcase.set('classname', buildId.name) testcase.set('name', buildId.stringId()) if not passed: failure = ET.SubElement(testcase, 'failure') failure.set('type', "Build Error") - failure.text = buildLog + failure.text = buildInfo ET.ElementTree(self.root).write(stream) @@ -133,15 +133,18 @@ def testinstall(parser, args): bId = BuildId(spec.name, spec.version, spec.dag_hash()) if package.installed: - installLog = spack.install_layout.build_log_path(spec) + buildLogPath = spack.install_layout.build_log_path(spec) else: #TODO: search recursively under stage.path instead of only within # stage.source_path - installLog = join_path(package.stage.source_path, 'spack-build.out') + buildLogPath = join_path(package.stage.source_path, 'spack-build.out') - with open(installLog, 'rb') as F: + with open(buildLogPath, 'rb') as F: buildLog = F.read() #TODO: this may not return all output - jrf.addTest(bId, package.installed, buildLog) + #TODO: add the whole build log? it could be several thousand + # lines. It may be better to look for errors. 
+ jrf.addTest(bId, package.installed, buildLogPath + '\n' + + spec.to_yaml() + buildLog) with open(args.output, 'wb') as F: jrf.writeTo(F) From 71dcf8833c263a2f87b8bfcce6f2eaf7a14014a3 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Tue, 13 Oct 2015 19:02:41 -0700 Subject: [PATCH 054/121] Make sure to generate output for dependencies as if they were separate tests: the original intent was to generate output as if each package was a unit test, but I noticed that I was only generating test output for top-level packages. --- lib/spack/spack/cmd/testinstall.py | 71 +++++++++++++++++++++--------- 1 file changed, 51 insertions(+), 20 deletions(-) diff --git a/lib/spack/spack/cmd/testinstall.py b/lib/spack/spack/cmd/testinstall.py index 3a9e1e14e16..6f3514bc346 100644 --- a/lib/spack/spack/cmd/testinstall.py +++ b/lib/spack/spack/cmd/testinstall.py @@ -24,6 +24,7 @@ ############################################################################## from external import argparse import xml.etree.ElementTree as ET +import itertools import llnl.util.tty as tty from llnl.util.filesystem import * @@ -100,7 +101,37 @@ def __init__(self, name, version, hashId): def stringId(self): return "-".join(str(x) for x in (self.name, self.version, self.hashId)) - + +def createTestOutput(spec, handled, output): + if spec in handled: + return handled[spec] + + childSuccesses = list(createTestOutput(dep, handled, output) + for dep in spec.dependencies.itervalues()) + package = spack.db.get(spec) + handled[spec] = package.installed + + if all(childSuccesses): + bId = BuildId(spec.name, spec.version, spec.dag_hash()) + + if package.installed: + buildLogPath = spack.install_layout.build_log_path(spec) + else: + #TODO: search recursively under stage.path instead of only within + # stage.source_path + buildLogPath = join_path(package.stage.source_path, 'spack-build.out') + + with open(buildLogPath, 'rb') as F: + buildLog = F.read() #TODO: this may not return all output + #TODO: add the whole build 
log? it could be several thousand + # lines. It may be better to look for errors. + output.addTest(bId, package.installed, buildLogPath + '\n' + + spec.to_yaml() + buildLog) + #TODO: create a failed test if a dependency didn't install? + + return handled[spec] + + def testinstall(parser, args): if not args.packages: tty.die("install requires at least one package argument") @@ -113,12 +144,17 @@ def testinstall(parser, args): spack.do_checksum = False # TODO: remove this global. specs = spack.cmd.parse_specs(args.packages, concretize=True) - newInstalls = list() + newInstalls = set() + for spec in itertools.chain.from_iterable(spec.traverse() + for spec in specs): + package = spack.db.get(spec) + if not package.installed: + newInstalls.add(spec) + try: for spec in specs: package = spack.db.get(spec) if not package.installed: - newInstalls.append(spec) package.do_install( keep_prefix=False, keep_stage=False, @@ -127,24 +163,19 @@ def testinstall(parser, args): verbose=args.verbose, fake=False) finally: + #TODO: note if multiple packages are specified and a prior one fails, + # it will prevent any of the others from succeeding even if they + # don't share any dependencies. i.e. the results may be strange if + # you run testinstall with >1 top-level package + + #Find all packages that are not a dependency of another package + topLevelNewInstalls = newInstalls - set(itertools.chain.from_iterable( + spec.dependencies for spec in newInstalls)) + jrf = JunitResultFormat() - for spec in newInstalls: - package = spack.db.get(spec) - bId = BuildId(spec.name, spec.version, spec.dag_hash()) - - if package.installed: - buildLogPath = spack.install_layout.build_log_path(spec) - else: - #TODO: search recursively under stage.path instead of only within - # stage.source_path - buildLogPath = join_path(package.stage.source_path, 'spack-build.out') - - with open(buildLogPath, 'rb') as F: - buildLog = F.read() #TODO: this may not return all output - #TODO: add the whole build log? 
it could be several thousand - # lines. It may be better to look for errors. - jrf.addTest(bId, package.installed, buildLogPath + '\n' + - spec.to_yaml() + buildLog) + handled = {} + for spec in topLevelNewInstalls: + createTestOutput(spec, handled, jrf) with open(args.output, 'wb') as F: jrf.writeTo(F) From 9ec4ae83af7f88110e464fca22a9d1377d74b28e Mon Sep 17 00:00:00 2001 From: Ben Boeckel Date: Thu, 15 Oct 2015 09:20:16 -0400 Subject: [PATCH 055/121] netcdf: link to MPI Newer GNU linker refuses to use transitive linking; if netcdf calls MPI_Allreduce, it must link the providing library directly. Also switched to CMake because I don't know autoconf well enough to know how to put MPI stuff into it. --- .../packages/netcdf/netcdf-4.3.3-mpi.patch | 25 ++++++++++++++++ var/spack/packages/netcdf/package.py | 29 +++++++++---------- 2 files changed, 39 insertions(+), 15 deletions(-) create mode 100644 var/spack/packages/netcdf/netcdf-4.3.3-mpi.patch diff --git a/var/spack/packages/netcdf/netcdf-4.3.3-mpi.patch b/var/spack/packages/netcdf/netcdf-4.3.3-mpi.patch new file mode 100644 index 00000000000..46dda5fc9de --- /dev/null +++ b/var/spack/packages/netcdf/netcdf-4.3.3-mpi.patch @@ -0,0 +1,25 @@ +diff -Nur netcdf-4.3.3/CMakeLists.txt netcdf-4.3.3.mpi/CMakeLists.txt +--- netcdf-4.3.3/CMakeLists.txt 2015-02-12 16:44:35.000000000 -0500 ++++ netcdf-4.3.3.mpi/CMakeLists.txt 2015-10-14 16:44:41.176300658 -0400 +@@ -753,6 +753,7 @@ + SET(USE_PARALLEL OFF CACHE BOOL "") + MESSAGE(STATUS "Cannot find HDF5 library built with parallel support. 
Disabling parallel build.") + ELSE() ++ FIND_PACKAGE(MPI REQUIRED) + SET(USE_PARALLEL ON CACHE BOOL "") + SET(STATUS_PARALLEL "ON") + ENDIF() +diff -Nur netcdf-4.3.3/liblib/CMakeLists.txt netcdf-4.3.3.mpi/liblib/CMakeLists.txt +--- netcdf-4.3.3/liblib/CMakeLists.txt 2015-02-12 16:44:35.000000000 -0500 ++++ netcdf-4.3.3.mpi/liblib/CMakeLists.txt 2015-10-14 16:44:57.757793634 -0400 +@@ -71,6 +71,10 @@ + SET(TLL_LIBS ${TLL_LIBS} ${CURL_LIBRARY}) + ENDIF() + ++IF(USE_PARALLEL) ++ SET(TLL_LIBS ${TLL_LIBS} ${MPI_C_LIBRARIES}) ++ENDIF() ++ + IF(USE_HDF4) + SET(TLL_LIBS ${TLL_LIBS} ${HDF4_LIBRARIES}) + ENDIF() diff --git a/var/spack/packages/netcdf/package.py b/var/spack/packages/netcdf/package.py index 34284ea725a..e1e0d836c62 100644 --- a/var/spack/packages/netcdf/package.py +++ b/var/spack/packages/netcdf/package.py @@ -1,28 +1,27 @@ from spack import * class Netcdf(Package): - """NetCDF is a set of software libraries and self-describing, machine-independent - data formats that support the creation, access, and sharing of array-oriented - scientific data.""" + """NetCDF is a set of software libraries and self-describing, machine-independent + data formats that support the creation, access, and sharing of array-oriented + scientific data.""" homepage = "http://www.unidata.ucar.edu/software/netcdf/" url = "ftp://ftp.unidata.ucar.edu/pub/netcdf/netcdf-4.3.3.tar.gz" version('4.3.3', '5fbd0e108a54bd82cb5702a73f56d2ae') + patch('netcdf-4.3.3-mpi.patch') + # Dependencies: - # >HDF5 + # >HDF5 depends_on("hdf5") - def install(self, spec, prefix): - configure( - "--prefix=%s" % prefix, - "--disable-dap", # Disable DAP. - "--disable-shared", # Don't build shared libraries (use static libs). - "CPPFLAGS=-I%s/include" % spec['hdf5'].prefix, # Link HDF5's include dir. - "LDFLAGS=-L%s/lib" % spec['hdf5'].prefix) # Link HDF5's lib dir. 
- - make("install") + def install(self, spec, prefix): + with working_dir('spack-build', create=True): + cmake('..', + "-DCMAKE_INSTALL_PREFIX:PATH=%s" % prefix, + "-DENABLE_DAP:BOOL=OFF", # Disable DAP. + "-DBUILD_SHARED_LIBS:BOOL=OFF") # Don't build shared libraries (use static libs). - # Check the newly installed netcdf package. Currently disabled. - # make("check") + make() + make("install") From 3ce85b22708769ff9d8b2b0ba79e4157dac46d74 Mon Sep 17 00:00:00 2001 From: Ben Boeckel Date: Thu, 15 Oct 2015 09:27:05 -0400 Subject: [PATCH 056/121] spack: split spack_root from prefix A foundation for allowing runtime configuring of the prefix. --- lib/spack/spack/__init__.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index caa09eb6e00..6e8d41895fb 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -27,24 +27,26 @@ from llnl.util.filesystem import * # This lives in $prefix/lib/spack/spack/__file__ -prefix = ancestor(__file__, 4) +spack_root = ancestor(__file__, 4) # The spack script itself -spack_file = join_path(prefix, "bin", "spack") +spack_file = join_path(spack_root, "bin", "spack") # spack directory hierarchy -etc_path = join_path(prefix, "etc") -lib_path = join_path(prefix, "lib", "spack") +lib_path = join_path(spack_root, "lib", "spack") build_env_path = join_path(lib_path, "env") module_path = join_path(lib_path, "spack") compilers_path = join_path(module_path, "compilers") test_path = join_path(module_path, "test") hooks_path = join_path(module_path, "hooks") -var_path = join_path(prefix, "var", "spack") +var_path = join_path(spack_root, "var", "spack") stage_path = join_path(var_path, "stage") +share_path = join_path(spack_root, "share", "spack") + +prefix = spack_root opt_path = join_path(prefix, "opt") install_path = join_path(opt_path, "spack") -share_path = join_path(prefix, "share", "spack") +etc_path = join_path(prefix, "etc") # # 
Set up the packages database. From 0d66362cee9849db77a9f3297aa697f21fe1acdd Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Thu, 15 Oct 2015 10:17:08 -0700 Subject: [PATCH 057/121] Only install 1 top-level package with testinstall. Otherwise if multiple packages are specified and a prior one fails, it will prevent any of the others from succeeding (and generating test output) even if they don't share dependencies. --- lib/spack/spack/cmd/testinstall.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/lib/spack/spack/cmd/testinstall.py b/lib/spack/spack/cmd/testinstall.py index 6f3514bc346..5e5288bfbd2 100644 --- a/lib/spack/spack/cmd/testinstall.py +++ b/lib/spack/spack/cmd/testinstall.py @@ -68,7 +68,7 @@ def setup_parser(subparser): 'output', help="test output goes in this file") subparser.add_argument( - 'packages', nargs=argparse.REMAINDER, help="specs of packages to install") + 'package', help="spec of package to install") class JunitResultFormat(object): @@ -133,8 +133,8 @@ def createTestOutput(spec, handled, output): def testinstall(parser, args): - if not args.packages: - tty.die("install requires at least one package argument") + if not args.package: + tty.die("install requires a package argument") if args.jobs is not None: if args.jobs <= 0: @@ -143,7 +143,8 @@ def testinstall(parser, args): if args.no_checksum: spack.do_checksum = False # TODO: remove this global. - specs = spack.cmd.parse_specs(args.packages, concretize=True) + #TODO: should a single argument be wrapped in a list? 
+ specs = spack.cmd.parse_specs(args.package, concretize=True) newInstalls = set() for spec in itertools.chain.from_iterable(spec.traverse() for spec in specs): @@ -162,12 +163,7 @@ def testinstall(parser, args): make_jobs=args.jobs, verbose=args.verbose, fake=False) - finally: - #TODO: note if multiple packages are specified and a prior one fails, - # it will prevent any of the others from succeeding even if they - # don't share any dependencies. i.e. the results may be strange if - # you run testinstall with >1 top-level package - + finally: #Find all packages that are not a dependency of another package topLevelNewInstalls = newInstalls - set(itertools.chain.from_iterable( spec.dependencies for spec in newInstalls)) From 2ae7839b666592e3acaf370d52f69376b80b28a7 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Thu, 15 Oct 2015 10:26:13 -0700 Subject: [PATCH 058/121] Edit function names to conform to naming conventions. --- lib/spack/spack/cmd/testinstall.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/lib/spack/spack/cmd/testinstall.py b/lib/spack/spack/cmd/testinstall.py index 5e5288bfbd2..ea092727fe6 100644 --- a/lib/spack/spack/cmd/testinstall.py +++ b/lib/spack/spack/cmd/testinstall.py @@ -76,10 +76,10 @@ def __init__(self): self.root = ET.Element('testsuite') self.tests = [] - def addTest(self, buildId, passed=True, buildInfo=None): + def add_test(self, buildId, passed=True, buildInfo=None): self.tests.append((buildId, passed, buildInfo)) - def writeTo(self, stream): + def write_to(self, stream): self.root.set('tests', '{0}'.format(len(self.tests))) for buildId, passed, buildInfo in self.tests: testcase = ET.SubElement(self.root, 'testcase') @@ -102,11 +102,11 @@ def stringId(self): return "-".join(str(x) for x in (self.name, self.version, self.hashId)) -def createTestOutput(spec, handled, output): +def create_test_output(spec, handled, output): if spec in handled: return handled[spec] - childSuccesses = 
list(createTestOutput(dep, handled, output) + childSuccesses = list(create_test_output(dep, handled, output) for dep in spec.dependencies.itervalues()) package = spack.db.get(spec) handled[spec] = package.installed @@ -125,7 +125,7 @@ def createTestOutput(spec, handled, output): buildLog = F.read() #TODO: this may not return all output #TODO: add the whole build log? it could be several thousand # lines. It may be better to look for errors. - output.addTest(bId, package.installed, buildLogPath + '\n' + + output.add_test(bId, package.installed, buildLogPath + '\n' + spec.to_yaml() + buildLog) #TODO: create a failed test if a dependency didn't install? @@ -171,7 +171,7 @@ def testinstall(parser, args): jrf = JunitResultFormat() handled = {} for spec in topLevelNewInstalls: - createTestOutput(spec, handled, jrf) + create_test_output(spec, handled, jrf) with open(args.output, 'wb') as F: - jrf.writeTo(F) + jrf.write_to(F) From e3d703b80ffe442d83879c070e0bbeafa4f4ef20 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Thu, 15 Oct 2015 10:33:39 -0700 Subject: [PATCH 059/121] Change name of file to conform to conventions. 
--- lib/spack/spack/cmd/{testinstall.py => test-install.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename lib/spack/spack/cmd/{testinstall.py => test-install.py} (100%) diff --git a/lib/spack/spack/cmd/testinstall.py b/lib/spack/spack/cmd/test-install.py similarity index 100% rename from lib/spack/spack/cmd/testinstall.py rename to lib/spack/spack/cmd/test-install.py From 11861fb8d717623c4f0014194c81fa86041fbe10 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Thu, 15 Oct 2015 10:35:42 -0700 Subject: [PATCH 060/121] Changing name of file requires changing function name to be invoked as a command --- lib/spack/spack/cmd/test-install.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py index ea092727fe6..eff4bcc46b6 100644 --- a/lib/spack/spack/cmd/test-install.py +++ b/lib/spack/spack/cmd/test-install.py @@ -132,7 +132,7 @@ def create_test_output(spec, handled, output): return handled[spec] -def testinstall(parser, args): +def test_install(parser, args): if not args.package: tty.die("install requires a package argument") From f2b4341ad6d9dc925364214863440dde29d61b50 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Thu, 15 Oct 2015 10:45:03 -0700 Subject: [PATCH 061/121] Always run with verbose output (so eliminate it as an option). Also remove other commented options. 
--- lib/spack/spack/cmd/test-install.py | 23 +---------------------- 1 file changed, 1 insertion(+), 22 deletions(-) diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py index eff4bcc46b6..ec413115c01 100644 --- a/lib/spack/spack/cmd/test-install.py +++ b/lib/spack/spack/cmd/test-install.py @@ -35,34 +35,13 @@ description = "Build and install packages" def setup_parser(subparser): - #subparser.add_argument( - # '-i', '--ignore-dependencies', action='store_true', dest='ignore_deps', - # help="Do not try to install dependencies of requested packages.") - subparser.add_argument( '-j', '--jobs', action='store', type=int, help="Explicitly set number of make jobs. Default is #cpus.") - - #always false for test - #subparser.add_argument( - # '--keep-prefix', action='store_true', dest='keep_prefix', - # help="Don't remove the install prefix if installation fails.") - - #always true for test - #subparser.add_argument( - # '--keep-stage', action='store_true', dest='keep_stage', - # help="Don't remove the build stage if installation succeeds.") subparser.add_argument( '-n', '--no-checksum', action='store_true', dest='no_checksum', help="Do not check packages against checksum") - subparser.add_argument( - '-v', '--verbose', action='store_true', dest='verbose', - help="Display verbose build output while installing.") - - #subparser.add_argument( - # '--fake', action='store_true', dest='fake', - # help="Fake install. Just remove the prefix and touch a fake file in it.") subparser.add_argument( 'output', help="test output goes in this file") @@ -161,7 +140,7 @@ def test_install(parser, args): keep_stage=False, ignore_deps=False, make_jobs=args.jobs, - verbose=args.verbose, + verbose=True, fake=False) finally: #Find all packages that are not a dependency of another package From b9bf0b942c4e2770de65282311876d02d00e7f5b Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Thu, 15 Oct 2015 11:52:08 -0700 Subject: [PATCH 062/121] Use spec.traverse vs. 
recursive function. Also even though I calculated which installs are new (e.g. vs. packages that have already been installed by a previous command) I forgot to make use of that in create_test_output (so I was always generating test output even if a package had been installed before running the test-install command). Note to avoid confusion: the 'handled' variable (removed in this commit) did not serve the same purpose as 'newInstalls': it was originally required because the recursive approach would visit the same dependency twice if more than one package depended on it. --- lib/spack/spack/cmd/test-install.py | 36 +++++++++++++---------------- 1 file changed, 16 insertions(+), 20 deletions(-) diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py index ec413115c01..6652a204fbb 100644 --- a/lib/spack/spack/cmd/test-install.py +++ b/lib/spack/spack/cmd/test-install.py @@ -81,18 +81,21 @@ def stringId(self): return "-".join(str(x) for x in (self.name, self.version, self.hashId)) -def create_test_output(spec, handled, output): - if spec in handled: - return handled[spec] - - childSuccesses = list(create_test_output(dep, handled, output) - for dep in spec.dependencies.itervalues()) - package = spack.db.get(spec) - handled[spec] = package.installed - - if all(childSuccesses): +def create_test_output(topSpec, newInstalls, output): + # Post-order traversal is not strictly required but it makes sense to output + # tests for dependencies first. + for spec in topSpec.traverse(order='post'): + if spec not in newInstalls: + continue + + if not all(spack.db.get(childSpec).installed for childSpec in + spec.dependencies.itervalues()): + #TODO: create a failed test if a dependency didn't install? 
+ continue + bId = BuildId(spec.name, spec.version, spec.dag_hash()) + package = spack.db.get(spec) if package.installed: buildLogPath = spack.install_layout.build_log_path(spec) else: @@ -106,10 +109,7 @@ def create_test_output(spec, handled, output): # lines. It may be better to look for errors. output.add_test(bId, package.installed, buildLogPath + '\n' + spec.to_yaml() + buildLog) - #TODO: create a failed test if a dependency didn't install? - - return handled[spec] - + def test_install(parser, args): if not args.package: @@ -143,14 +143,10 @@ def test_install(parser, args): verbose=True, fake=False) finally: - #Find all packages that are not a dependency of another package - topLevelNewInstalls = newInstalls - set(itertools.chain.from_iterable( - spec.dependencies for spec in newInstalls)) - jrf = JunitResultFormat() handled = {} - for spec in topLevelNewInstalls: - create_test_output(spec, handled, jrf) + for spec in specs: + create_test_output(spec, newInstalls, jrf) with open(args.output, 'wb') as F: jrf.write_to(F) From c985ad7644e00fa2fd5253d6b3f761a2596c6c4b Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Thu, 15 Oct 2015 12:23:56 -0700 Subject: [PATCH 063/121] Update test failure output: don't include the entire build log, just lines which mention errors (or if no such lines can be found, output the last 10 lines from the log). 
--- lib/spack/spack/cmd/test-install.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py index 6652a204fbb..f2c40e57e49 100644 --- a/lib/spack/spack/cmd/test-install.py +++ b/lib/spack/spack/cmd/test-install.py @@ -25,6 +25,7 @@ from external import argparse import xml.etree.ElementTree as ET import itertools +import re import llnl.util.tty as tty from llnl.util.filesystem import * @@ -104,11 +105,15 @@ def create_test_output(topSpec, newInstalls, output): buildLogPath = join_path(package.stage.source_path, 'spack-build.out') with open(buildLogPath, 'rb') as F: - buildLog = F.read() #TODO: this may not return all output - #TODO: add the whole build log? it could be several thousand - # lines. It may be better to look for errors. - output.add_test(bId, package.installed, buildLogPath + '\n' + - spec.to_yaml() + buildLog) + lines = F.readlines() + errMessages = list(line for line in lines if + re.search('error:', line, re.IGNORECASE)) + errOutput = errMessages if errMessages else lines[-10:] + errOutput = '\n'.join(itertools.chain( + [spec.to_yaml(), "Errors:"], errOutput, + ["Build Log:", buildLogPath])) + + output.add_test(bId, package.installed, errOutput) def test_install(parser, args): From 4997f0fe57e65002d8122da05a4f203f51ac4345 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Thu, 15 Oct 2015 12:44:02 -0700 Subject: [PATCH 064/121] Move logic for tracking the build log into package.py (since that is what is managing the build log) and expose as package.build_log_path. 
--- lib/spack/spack/cmd/test-install.py | 11 ++--------- lib/spack/spack/package.py | 8 ++++++++ 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py index f2c40e57e49..85140422394 100644 --- a/lib/spack/spack/cmd/test-install.py +++ b/lib/spack/spack/cmd/test-install.py @@ -97,21 +97,14 @@ def create_test_output(topSpec, newInstalls, output): bId = BuildId(spec.name, spec.version, spec.dag_hash()) package = spack.db.get(spec) - if package.installed: - buildLogPath = spack.install_layout.build_log_path(spec) - else: - #TODO: search recursively under stage.path instead of only within - # stage.source_path - buildLogPath = join_path(package.stage.source_path, 'spack-build.out') - - with open(buildLogPath, 'rb') as F: + with open(package.build_log_path, 'rb') as F: lines = F.readlines() errMessages = list(line for line in lines if re.search('error:', line, re.IGNORECASE)) errOutput = errMessages if errMessages else lines[-10:] errOutput = '\n'.join(itertools.chain( [spec.to_yaml(), "Errors:"], errOutput, - ["Build Log:", buildLogPath])) + ["Build Log:", package.build_log_path])) output.add_test(bId, package.installed, errOutput) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 61606d05905..da19a7c3987 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -863,6 +863,14 @@ def do_install_dependencies(self, **kwargs): dep.package.do_install(**kwargs) + @property + def build_log_path(self): + if self.installed: + return spack.install_layout.build_log_path(spec) + else: + return join_path(self.stage.source_path, 'spack-build.out') + + @property def module(self): """Use this to add variables to the class's module's scope. From e451421db32b5e2059d9da293c70baa4b3374449 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Thu, 15 Oct 2015 19:22:36 -0700 Subject: [PATCH 065/121] 1. 
Specifying the output file path for test-install is now an option (vs. an argument). The default path is [package id].xml in the CWD where test-install is called from. 2. Fixed a bug with package.build_log_path (which was added in this branch). 3. keep_stage for package.do_install is now set. This allows uninstalling and reinstalling packages without (re) downloading them. --- lib/spack/spack/cmd/test-install.py | 32 +++++++++++++++++++---------- lib/spack/spack/package.py | 2 +- 2 files changed, 22 insertions(+), 12 deletions(-) diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py index 85140422394..e2072958106 100644 --- a/lib/spack/spack/cmd/test-install.py +++ b/lib/spack/spack/cmd/test-install.py @@ -26,6 +26,7 @@ import xml.etree.ElementTree as ET import itertools import re +import os import llnl.util.tty as tty from llnl.util.filesystem import * @@ -45,7 +46,7 @@ def setup_parser(subparser): help="Do not check packages against checksum") subparser.add_argument( - 'output', help="test output goes in this file") + '-o', '--output', action='store', help="test output goes in this file") subparser.add_argument( 'package', help="spec of package to install") @@ -73,10 +74,10 @@ def write_to(self, stream): class BuildId(object): - def __init__(self, name, version, hashId): - self.name = name - self.version = version - self.hashId = hashId + def __init__(self, spec): + self.name = spec.name + self.version = spec.version + self.hashId = spec.dag_hash() def stringId(self): return "-".join(str(x) for x in (self.name, self.version, self.hashId)) @@ -94,7 +95,7 @@ def create_test_output(topSpec, newInstalls, output): #TODO: create a failed test if a dependency didn't install? 
continue - bId = BuildId(spec.name, spec.version, spec.dag_hash()) + bId = BuildId(spec) package = spack.db.get(spec) with open(package.build_log_path, 'rb') as F: @@ -120,22 +121,31 @@ def test_install(parser, args): if args.no_checksum: spack.do_checksum = False # TODO: remove this global. - #TODO: should a single argument be wrapped in a list? + # TODO: should a single argument be wrapped in a list? specs = spack.cmd.parse_specs(args.package, concretize=True) + + # There is 1 top-level package + topSpec = iter(specs).next() + newInstalls = set() - for spec in itertools.chain.from_iterable(spec.traverse() - for spec in specs): + for spec in topSpec.traverse(): package = spack.db.get(spec) if not package.installed: newInstalls.add(spec) + if not args.output: + bId = BuildId(topSpec) + outputFpath = join_path(os.getcwd(), "{0}.xml".format(bId.stringId())) + else: + outputFpath = args.output + try: for spec in specs: package = spack.db.get(spec) if not package.installed: package.do_install( keep_prefix=False, - keep_stage=False, + keep_stage=True, ignore_deps=False, make_jobs=args.jobs, verbose=True, @@ -146,5 +156,5 @@ def test_install(parser, args): for spec in specs: create_test_output(spec, newInstalls, jrf) - with open(args.output, 'wb') as F: + with open(outputFpath, 'wb') as F: jrf.write_to(F) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index da19a7c3987..b1257a092fb 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -866,7 +866,7 @@ def do_install_dependencies(self, **kwargs): @property def build_log_path(self): if self.installed: - return spack.install_layout.build_log_path(spec) + return spack.install_layout.build_log_path(self.spec) else: return join_path(self.stage.source_path, 'spack-build.out') From 82ed1bc34397542394ea7fc4a23f3b827546809a Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Thu, 15 Oct 2015 19:38:47 -0700 Subject: [PATCH 066/121] Originally I enforced specifying 1 top-level package with 
the test-install command by having it consume exactly 1 positional argument (i.e. by removing "nargs=argparse.REMAINDER") but this does not work when configuring dependencies of a top-level package (which show up as additional positional args). Instead now there is an explicit check to ensure there is only 1 top-level package. --- lib/spack/spack/cmd/test-install.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py index e2072958106..962af939f27 100644 --- a/lib/spack/spack/cmd/test-install.py +++ b/lib/spack/spack/cmd/test-install.py @@ -49,7 +49,7 @@ def setup_parser(subparser): '-o', '--output', action='store', help="test output goes in this file") subparser.add_argument( - 'package', help="spec of package to install") + 'package', nargs=argparse.REMAINDER, help="spec of package to install") class JunitResultFormat(object): @@ -120,11 +120,10 @@ def test_install(parser, args): if args.no_checksum: spack.do_checksum = False # TODO: remove this global. - - # TODO: should a single argument be wrapped in a list? - specs = spack.cmd.parse_specs(args.package, concretize=True) - # There is 1 top-level package + specs = spack.cmd.parse_specs(args.package, concretize=True) + if len(specs) > 1: + tty.die("Only 1 top-level package can be specified") topSpec = iter(specs).next() newInstalls = set() From 49b91235bb8522a79d5a0b213718af2a6f81f501 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Thu, 15 Oct 2015 19:59:57 -0700 Subject: [PATCH 067/121] Minor edit for clarity (generate output for single top level spec vs. 
iterating through collection of size 1) --- lib/spack/spack/cmd/test-install.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py index 962af939f27..ee9580c1283 100644 --- a/lib/spack/spack/cmd/test-install.py +++ b/lib/spack/spack/cmd/test-install.py @@ -152,8 +152,7 @@ def test_install(parser, args): finally: jrf = JunitResultFormat() handled = {} - for spec in specs: - create_test_output(spec, newInstalls, jrf) + create_test_output(topSpec, newInstalls, jrf) with open(outputFpath, 'wb') as F: jrf.write_to(F) From 6cd976d036cce518d899202baeebc7103a0a6f2a Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Thu, 15 Oct 2015 20:13:08 -0700 Subject: [PATCH 068/121] Better description for test-install command --- lib/spack/spack/cmd/test-install.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py index ee9580c1283..0facf59b943 100644 --- a/lib/spack/spack/cmd/test-install.py +++ b/lib/spack/spack/cmd/test-install.py @@ -34,7 +34,7 @@ import spack import spack.cmd -description = "Build and install packages" +description = "Treat package installations as unit tests and output formatted test results" def setup_parser(subparser): subparser.add_argument( From 39f0f000f89f40a32b9e25d9ba681d6d032d025a Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Thu, 15 Oct 2015 22:02:14 -0700 Subject: [PATCH 069/121] Created unit test for core logic in test-install command. 
--- lib/spack/spack/cmd/test-install.py | 36 +++++--- lib/spack/spack/test/__init__.py | 3 +- lib/spack/spack/test/unit_install.py | 118 +++++++++++++++++++++++++++ 3 files changed, 145 insertions(+), 12 deletions(-) create mode 100644 lib/spack/spack/test/unit_install.py diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py index 0facf59b943..7b37f66967a 100644 --- a/lib/spack/spack/cmd/test-install.py +++ b/lib/spack/spack/cmd/test-install.py @@ -82,8 +82,23 @@ def __init__(self, spec): def stringId(self): return "-".join(str(x) for x in (self.name, self.version, self.hashId)) + def __hash__(self): + return hash((self.name, self.version, self.hashId)) + + def __eq__(self, other): + if not isinstance(other, BuildId): + return False + + return ((self.name, self.version, self.hashId) == + (other.name, other.version, other.hashId)) -def create_test_output(topSpec, newInstalls, output): + +def fetch_log(path): + with open(path, 'rb') as F: + return list(F.readlines()) + + +def create_test_output(topSpec, newInstalls, output, getLogFunc=fetch_log): # Post-order traversal is not strictly required but it makes sense to output # tests for dependencies first. 
for spec in topSpec.traverse(order='post'): @@ -98,17 +113,16 @@ def create_test_output(topSpec, newInstalls, output): bId = BuildId(spec) package = spack.db.get(spec) - with open(package.build_log_path, 'rb') as F: - lines = F.readlines() - errMessages = list(line for line in lines if - re.search('error:', line, re.IGNORECASE)) - errOutput = errMessages if errMessages else lines[-10:] - errOutput = '\n'.join(itertools.chain( - [spec.to_yaml(), "Errors:"], errOutput, - ["Build Log:", package.build_log_path])) - - output.add_test(bId, package.installed, errOutput) + lines = getLogFunc(package.build_log_path) + errMessages = list(line for line in lines if + re.search('error:', line, re.IGNORECASE)) + errOutput = errMessages if errMessages else lines[-10:] + errOutput = '\n'.join(itertools.chain( + [spec.to_yaml(), "Errors:"], errOutput, + ["Build Log:", package.build_log_path])) + output.add_test(bId, package.installed, errOutput) + def test_install(parser, args): if not args.package: diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index 6b3715be6f9..6fd80d10848 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -56,7 +56,8 @@ 'spec_yaml', 'optional_deps', 'make_executable', - 'configure_guess'] + 'configure_guess', + 'unit_install'] def list_tests(): diff --git a/lib/spack/spack/test/unit_install.py b/lib/spack/spack/test/unit_install.py new file mode 100644 index 00000000000..ab7d4902d09 --- /dev/null +++ b/lib/spack/spack/test/unit_install.py @@ -0,0 +1,118 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. 
+# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import unittest +import itertools + +import spack +test_install = __import__("spack.cmd.test-install", + fromlist=["BuildId", "create_test_output"]) + +class MockOutput(object): + def __init__(self): + self.results = {} + + def add_test(self, buildId, passed=True, buildInfo=None): + self.results[buildId] = passed + + def write_to(self, stream): + pass + +class MockSpec(object): + def __init__(self, name, version, hashStr=None): + self.dependencies = {} + self.name = name + self.version = version + self.hash = hashStr if hashStr else hash((name, version)) + + def traverse(self, order=None): + allDeps = itertools.chain.from_iterable(i.traverse() for i in + self.dependencies.itervalues()) + return set(itertools.chain([self], allDeps)) + + def dag_hash(self): + return self.hash + + def to_yaml(self): + return "<<>>".format(test_install.BuildId(self).stringId()) + +class MockPackage(object): + def __init__(self, buildLogPath): + self.installed = False + self.build_log_path = buildLogPath + +specX = MockSpec("X", "1.2.0") +specY = MockSpec("Y", 
"2.3.8") +specX.dependencies['Y'] = specY +pkgX = MockPackage('logX') +pkgY = MockPackage('logY') +bIdX = test_install.BuildId(specX) +bIdY = test_install.BuildId(specY) + +class UnitInstallTest(unittest.TestCase): + """Tests test-install where X->Y""" + + def setUp(self): + super(UnitInstallTest, self).setUp() + + #import pdb; pdb.set_trace() + pkgX.installed = False + pkgY.installed = False + + pkgDb = MockPackageDb({specX:pkgX, specY:pkgY}) + spack.db = pkgDb + + def tearDown(self): + super(UnitInstallTest, self).tearDown() + + def test_installing_both(self): + mo = MockOutput() + + pkgX.installed = True + pkgY.installed = True + test_install.create_test_output(specX, [specX, specY], mo, getLogFunc=test_fetch_log) + + self.assertEqual(mo.results, {bIdX:True, bIdY:True}) + + def test_dependency_already_installed(self): + mo = MockOutput() + + pkgX.installed = True + pkgY.installed = True + test_install.create_test_output(specX, [specX], mo, getLogFunc=test_fetch_log) + + self.assertEqual(mo.results, {bIdX:True}) + +class MockPackageDb(object): + def __init__(self, init=None): + self.specToPkg = {} + if init: + self.specToPkg.update(init) + + def get(self, spec): + return self.specToPkg[spec] + +def test_fetch_log(path): + return [] + From 13e02bb179176218ad05975d848b013728abead9 Mon Sep 17 00:00:00 2001 From: Jeff Squyres Date: Fri, 16 Oct 2015 06:33:03 -0400 Subject: [PATCH 070/121] README.md: trivial typo fix Signed-off-by: Jeff Squyres --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 74d618ed2fe..03a5bc8af09 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,7 @@ can coexist on the same system. Most importantly, Spack is simple. It offers a simple spec syntax so that users can specify versions and configuration options concisely. 
Spack is also simple for package authors: package files are -writtin in pure Python, and specs allow package authors to write a +written in pure Python, and specs allow package authors to write a single build script for many different builds of the same package. See the From f39e40664f8415493b2c5e511fdc608e97009186 Mon Sep 17 00:00:00 2001 From: Jeff Squyres Date: Fri, 16 Oct 2015 15:26:58 -0400 Subject: [PATCH 071/121] openmpi: add v1.10.0 and update v1.8.2->v1.8.8 --- var/spack/packages/openmpi/package.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/var/spack/packages/openmpi/package.py b/var/spack/packages/openmpi/package.py index 7e84cbaf65a..925d390281c 100644 --- a/var/spack/packages/openmpi/package.py +++ b/var/spack/packages/openmpi/package.py @@ -11,8 +11,10 @@ class Openmpi(Package): homepage = "http://www.open-mpi.org" - version('1.8.2', 'ab538ed8e328079d566fc797792e016e', - url='http://www.open-mpi.org/software/ompi/v1.8/downloads/openmpi-1.8.2.tar.gz') + version('1.10.0', '280cf952de68369cebaca886c5ce0304', + url = "http://www.open-mpi.org/software/ompi/v1.10/downloads/openmpi-1.10.0.tar.bz2") + version('1.8.8', '0dab8e602372da1425e9242ae37faf8c', + url = 'http://www.open-mpi.org/software/ompi/v1.8/downloads/openmpi-1.8.8.tar.bz2') version('1.6.5', '03aed2a4aa4d0b27196962a2a65fc475', url = "http://www.open-mpi.org/software/ompi/v1.6/downloads/openmpi-1.6.5.tar.bz2") From 43f58d9f6183725f84c90bdd33a8386a5a9a8a97 Mon Sep 17 00:00:00 2001 From: Jeff Squyres Date: Fri, 16 Oct 2015 15:27:22 -0400 Subject: [PATCH 072/121] openmpi: update the MPI conformance for each Open MPI version --- var/spack/packages/openmpi/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/packages/openmpi/package.py b/var/spack/packages/openmpi/package.py index 925d390281c..f9d00348789 100644 --- a/var/spack/packages/openmpi/package.py +++ b/var/spack/packages/openmpi/package.py @@ -21,7 +21,9 @@ class 
Openmpi(Package): patch('ad_lustre_rwcontig_open_source.patch', when="@1.6.5") patch('llnl-platforms.patch', when="@1.6.5") - provides('mpi@:2') + provides('mpi@:2.2', when='@1.6.5') # Open MPI 1.6.5 supports MPI-2.2 + provides('mpi@:3.0', when='@1.8.8') # Open MPI 1.8.8 supports MPI-3.0 + provides('mpi@:3.0', when='@1.10.0') # Open MPI 1.10.0 supports MPI-3.0 def install(self, spec, prefix): config_args = ["--prefix=%s" % prefix] From 67b94acd42fcb883993d99fc965beae3d0a76a45 Mon Sep 17 00:00:00 2001 From: Jeff Squyres Date: Sat, 17 Oct 2015 07:06:06 -0400 Subject: [PATCH 073/121] mpich: add fine-grained MPI conformance versions Similar to the Open MPI and MVAPICH packages. Signed-off-by: Jeff Squyres --- var/spack/packages/mpich/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/packages/mpich/package.py b/var/spack/packages/mpich/package.py index b6b2dfde214..c25c3eed91a 100644 --- a/var/spack/packages/mpich/package.py +++ b/var/spack/packages/mpich/package.py @@ -35,8 +35,8 @@ class Mpich(Package): version('3.0.4', '9c5d5d4fe1e17dd12153f40bc5b6dbc0') - provides('mpi@:3', when='@3:') - provides('mpi@:1', when='@1:') + provides('mpi@:3.0', when='@3:') + provides('mpi@:1.3', when='@1:') def setup_dependent_environment(self, module, spec, dep_spec): """For dependencies, make mpicc's use spack wrapper.""" From 44ba16a391d75f97bab9ebf0a8f1b609b18aba1d Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 17 Oct 2015 17:03:49 -0700 Subject: [PATCH 074/121] Add libiberty option to binutils --- var/spack/packages/binutils/package.py | 16 ++++++++++++++-- var/spack/packages/gcc/package.py | 2 +- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/var/spack/packages/binutils/package.py b/var/spack/packages/binutils/package.py index 5a3059bbcf5..0c33bf04260 100644 --- a/var/spack/packages/binutils/package.py +++ b/var/spack/packages/binutils/package.py @@ -10,8 +10,20 @@ class Binutils(Package): version('2.23.2', 
'4f8fa651e35ef262edc01d60fb45702e') version('2.20.1', '2b9dc8f2b7dbd5ec5992c6e29de0b764') - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) + variant('libiberty', default=False, description='Also install libiberty.') + def install(self, spec, prefix): + configure_args = [ + '--prefix=%s' % prefix, + '--disable-dependency-tracking', + '--enable-interwork', + '--enable-multilib', + '--enable-64-bit-bfd', + '--enable-targets=all'] + + if '+libiberty' in spec: + configure_args.append('--enable-install-libiberty') + + configure(*configure_args) make() make("install") diff --git a/var/spack/packages/gcc/package.py b/var/spack/packages/gcc/package.py index 5e3d1a3efac..2fc6794b70c 100644 --- a/var/spack/packages/gcc/package.py +++ b/var/spack/packages/gcc/package.py @@ -47,7 +47,7 @@ class Gcc(Package): depends_on("gmp") depends_on("mpc") # when @4.5: depends_on("libelf") - depends_on("binutils") + depends_on("binutils~libiberty") # Save these until we can do optional deps. #depends_on("isl") From 7598612b86c35c062c2b7f4ed2bcee1729e11a03 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 17 Oct 2015 17:34:39 -0700 Subject: [PATCH 075/121] Better info variant output. 
--- lib/spack/spack/cmd/info.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py index c6209523f05..085e4db44d8 100644 --- a/lib/spack/spack/cmd/info.py +++ b/lib/spack/spack/cmd/info.py @@ -65,11 +65,21 @@ def print_text_info(pkg): print "None" else: pad = padder(pkg.variants, 4) + + maxv = max(len(v) for v in sorted(pkg.variants)) + fmt = "%%-%ss%%-10s%%s" % (maxv + 4) + + print " " + fmt % ('Name', 'Default', 'Description') + print for name in sorted(pkg.variants): v = pkg.variants[name] - print " %s%s" % ( - pad(('+' if v.default else '-') + name + ':'), - "\n".join(textwrap.wrap(v.description))) + default = 'on' if v.default else 'off' + + lines = textwrap.wrap(v.description) + lines[1:] = [" " + (" " * maxv) + l for l in lines[1:]] + desc = "\n".join(lines) + + print " " + fmt % (name, default, desc) print print "Dependencies:" From 805fa969517c24c65e123afe9c4304259aff288e Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 17 Oct 2015 17:41:17 -0700 Subject: [PATCH 076/121] Update to link to more contributors. --- README.md | 16 ++-------------- 1 file changed, 2 insertions(+), 14 deletions(-) diff --git a/README.md b/README.md index 03a5bc8af09..98d781564c8 100644 --- a/README.md +++ b/README.md @@ -62,21 +62,9 @@ latest stable release. Authors ---------------- -Spack was written by Todd Gamblin, tgamblin@llnl.gov. +Many thanks go to Spack's [contributors](https://github.com/scalability-llnl/spack/graphs/contributors). -Significant contributions were also made by: - - * David Beckingsale - * David Boehme - * Alfredo Gimenez - * Luc Jaulmes - * Matt Legendre - * Greg Lee - * Adam Moody - * Saravan Pantham - * Joachim Protze - * Bob Robey - * Justin Too +Spack was originally written by Todd Gamblin, tgamblin@llnl.gov. 
Release ---------------- From 3c788147cacfbf48f2eec9f3ee785d0e5d67bed3 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 18 Oct 2015 18:45:32 -0700 Subject: [PATCH 077/121] Add Spack logo. --- README.md | 4 ++-- lib/spack/docs/conf.py | 6 +++--- share/spack/logo/favicon.ico | Bin 0 -> 1150 bytes share/spack/logo/spack-logo-text-64.png | Bin 0 -> 18644 bytes share/spack/logo/spack-logo-white-text-48.png | Bin 0 -> 12201 bytes 5 files changed, 5 insertions(+), 5 deletions(-) create mode 100755 share/spack/logo/favicon.ico create mode 100644 share/spack/logo/spack-logo-text-64.png create mode 100644 share/spack/logo/spack-logo-white-text-48.png diff --git a/README.md b/README.md index 98d781564c8..3a2c535d4e6 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ -Spack -=========== +![image](share/spack/logo/spack-logo-text-64.png "Spack") +============ Spack is a package management tool designed to support multiple versions and configurations of software on a wide variety of platforms diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py index 7303d7fef68..bce9ef0e94f 100644 --- a/lib/spack/docs/conf.py +++ b/lib/spack/docs/conf.py @@ -149,7 +149,7 @@ # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -#html_theme_options = [('show_copyright', False)] +html_theme_options = { 'logo_only' : True } # Add any paths that contain custom themes here, relative to this directory. html_theme_path = ["_themes"] @@ -163,12 +163,12 @@ # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +html_logo = '../../../share/spack/logo/spack-logo-white-text-48.png' # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. 
-#html_favicon = None +html_favicon = '../../../share/spack/logo/favicon.ico' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, diff --git a/share/spack/logo/favicon.ico b/share/spack/logo/favicon.ico new file mode 100755 index 0000000000000000000000000000000000000000..95a67ae5b1a21ec4a5a1a98278fa609a4ac91570 GIT binary patch literal 1150 zcmah|TS!z<6g^TPGBY|ES&&p;rifJJM`}3|HKB8_6J+?v(MEiPjQKX51jSJ!Lz}k} zW-ni;2^ES_m|uNV|IJKg+E4YBLV-}R+WStDnbN)N``CN0b=Fzu93evJ)@lVk_lXNz zg@_VD#8Oa-GQm8(grM4XRWOwQ4IUojHsO-g!kwu-<&iZ34*J%+B#rPGnQ`-ru}(RH zu{x(Q(%)2+f+lkc=-bCRhpgR{V+}Tq$80huW5?(RibYt>N%*Z}LyjO;D_oh{G>U*% zQYn~S!|$+j=q)Z2-epjOYl^ltI#$PB#k%EiRFxTEs<)url7+7NEIe^qaK9-7PAU8! z4)%pP6|W>UY`O6;w%Cx=n&;a!$ebn_@6< zK92O8NJA>=Kf?N7lm3$~N%0`)pR7SH&AG7MhKJruXsge|qqaN@6vp9MZ!Km$)vTGr zPuNuUkM|_+D_#T21xO1nmS&*2xdemPt1x}F5D!|`k7Zz&1gmQ_cJ`B~6k%g_tJD{A^@?CA@yo?xZt{k*3 zt=OCeE_DFZpF=_Yp89Yhyv6!>r$(2{IBP3vFz+h zh2f^#Q}z(c&Q-Qvbq0^cx8M{S0cko!0IKW)zi1ExAq+lUt7(Xs`wcXHox#i<$;C}j zjPkXHW_PgO@wQ^0wR0>B!mw9NVlYMA#aj^HslJgzzg-d!DmKcZ-?AwQK(gxAR?Gpw z?wIE1@AFguoOszLlLrjNjxpS_c*LhMm+%1O^g*F6cZ|3PA=?6gZ@JB0EGS0Nyc~Ne z%MSKu)cWzFN$bI!i#2`V<94*K%Jg+JS|TT5NWDv@EF8UoQ?_l&NiEsQiNWH&?L+U% zCPk3ei`eLwC!Gt1?6F0_@kroCmkg`#cI{wo`C$!54C6dorT$+HNR7tGEH&t{5F7Q*8S+@J!4hbUkH zGlA?1@OF}*Mk(>-e_fDk8O}S*-WBi*{AC8D8p`jAXav&hLw9|L79uZ+8VObAgxdhy zA%~u0;eVC-mP}BJnf+lAi_{R=Pg+V{5Dz#c9g~ceD)}(OP)WoR_N`c2UF@3HlcWRL zIP9?qX@-gy@dYk096Jks-VA*i&=RuJAVP*L-#@V>8cR!^IK!_fFDB3%oo0w>oe9!c%$%R=O(-#)HO{q{nP;Y z4qP@n79qnJ=_B4Da}8=EGU9gTs{^%SF2#3-91WmvGhFy~v25U-ggh}jcxp;y zvL9Adg_PPQ{UrUQ%QVp|oqy?n%j(ECRO^-rR0`yfPve1HL3YP#)2dS(GL9hr^lIr# z5FdyMdm*CKN1ukjOEtcDE?iY(tl+F2?VIFOsV;*B3w!=51LEn`M zPHn|etCBoyn4>SFCiyU5<~P^Y{IOa1x7BY>a(RK$r}?zQp=-)N-Pb5q^j4o>OfWJS z39M;c6S1Gu^qxnPxGX*`zB4}Qig&+#dUtx2zuxZiTNiW21B)|DKn6{Qbp~8U(l-?y zmj%3q{BKU*)P8Z=GuWHiuh>5=uvaHcS4{g9Oy`^MzU@6n{cY%o`O@}ie>3n`>~9)L 
z4oMFQF^LWdgm;*aILA3>EC-fzz&p}xYxwR5$Pi|D@Z&RQM)v8Xwl#fIoKSIX-ATp! zS@F{KYOz|2$|ilA64jRL>jWljCu~~p zkJu;;m<d2ZMTuRr?6YH$D8=N7XQ*&&Db?OdYt^YlyKlZ=rp*#EVlB}_QZ;*jF}=I zH;?YC`*Hnh>HOev z=}8SF2&zH(gIWrb1L-1(zvD*1LGS_%K;fXBp_CkRCUi!>2;e_FOM6F%qVS;^<3D;3 zv9s~kFdX}}>G%fu`heb>%gRU(b}A%|&W*~)B@ieR_{fdP56Ldbf0Xr?g~%#H^w|uW z_T=IXhjC`c*E zH?iJgaQmxZDs|T;?LIKjQd(!!Q|iGm>p{YW@x9e^3J4P37(Ocl88nWuP^?qLSKNhH zllrmc)o5}F-m6MaPA_5RshX|&X&2sn-#q_;-Ym9()tE<&v72Lnw}ex=W87}xR0sE~ zJtHN=ON$maWy_`Mg3G%V4v8NzDYd#+@ zn;!4i$Y&_usRKrB#A2P%YP+hBF|=d*rspWhiONY~<$OYJBEgDu(>ZqImqw9BLyxI# z)7k#;hxQK|+uiS{K4CZxIpRB>tXntL+OI-rKF{8mPwd3*sL`*b9&H)~>OHJa-iDua zm)({<`D}S3`>d@Wc3kOan;U#ynW@_{u7yore!Xn%)I09cHT&ddifNZ*nbgc(+1RaA zZ!f=4@wfCZm?SA@zx%I$>q-9ak=3Q%O{d^_xCG!3k~jq(Sy%Arow`G!Cz9cMCbQWZq90%DQ{y1~$K6Gt*$=t0?>_G@Tcr5E;#zVc@0K#*++X=x`RVwF`1LQ}uZPYj;vw_9 z5xvZ>Eq58S)rCPmPu(`=V}s`ovq#wyN*k3uLXV5LP6w8M_U3);&dMudH>!IiZox0b zPZrlBDTl{u_i7o12@*a5D9@L^3{KYjn|I+?Pu#)=LI0Ukx+)lW008fB|8v3ta`K1( z0C)jAZGBJuk1E0zE>7%bmM-R2?7mK}|Ih${sITz9qLY=U8I`Y-qqB#wuNcjLA%y>x z|3l`Wq53a~r-K-c{zr8xX%}}ZDn52zc1{{`R4OVeQFlvgVNDsi|7QPpCq`rI>FFxW z!QtcM!|ubw?&5C4!6hUl#KFnU!Oi{tAL6}-pR=c#?|Wwt+W#Tsf69@u^008XbM>@y zai;o@Tr+bQFHbQVn*S*JU&sHj)6>rSe`|8~_;0uVImq#!CmdYtoE-m4_CHq9|4@bX zHLW~c9KHS{UtEA&^uM70FXI1J^gkF3{=XRhPvZa0AnoGh>Tc!X@gEI*|E=Kvr2XG` zHFrC!e|7O+7IFPI{{N)?-*|Of7f+Xe?c#1{q3G;s<^E6Ee{=s&cmDqo|1C?D<3CmV ze^mAVXvhDe{%c5aR8fxq)hWbL;cfyu005C&MHwk=U$~P%Bwyp9+1s^m&Wfk9?juA3 zvD8GaFd!*%ovBiuq%BwiEJ=m+#S0mQE1ZNX8~_JTlxzLvI}j0VIE0kwF4j@G#ZpnX z{U&%*%%(1}?o_AA=w3XbV4-E@KZnrjR6F~M#8&ET>z7g!LP%ARm6Z1WIu3!G>>Z{^ z?T;3vA{}_7L{NFpAI0&>KcBi4uMBw%>1SBLFEnsRiaqt;%E4KFh}LK{Ckzsv3W9il z87W#P@KNgD2c5*yN20%D40S!K=|?WTF{uXK)`M;{QW`P|ubk0;4_OSxMTq2|7;*Z&;oz4#<64U zt9&tSW~Jfie9jh1F{M8SF`}9FBS>#6rzx2F08*NiK6hVR9UJ(5x?{c#I%r<}XK^H5 zLu-PJBjPLtA`MZBK>tSiwL&G!7CwY97uWh5*4OrEIcfH!&9em15bNx`ch~Leig_l5 z)+ZZ0^c!=Bpnvv%1>KgXR9aE7m{DxE_8GAV_Zcf#)4)A+W^1YR(ukBZiGT57f`$8=|%FrkmRS{uU6H}M=eiv~k zG{p*)YC(yul`K?dY+~cV&&rtNE9gr^ejX)}j3TuaB3S)z=C%ri7+J3_(X-{Z8jdm0 
zkK@RhMC8vs*L<+zt(Eoh06mI!eMHaCOiho?m6%c--(lw#>e1v$gm@DFAN zz3u80UtW3q_I`R5e-?A^8d{J##}cdE8?j+x`m}+vT~y(~(!geSK6)RbkyXzB6kb3Z3|N~ z;wE0!N3udu)WEAJ8$@V`0#u)o55i@x^B!uqDU(3U2NEnLo0@W87jdL%W`&qH(&H8A zu-Iw{HX*nYnZ?~Ec+hM)zjN2I4x7B^-B>MHKeK#p9lFIh)IvR$9sX$N#@7(82}d{EtT9)9_0~W z^;QgiLHybhYu~G&`mMC0UjU1PiS_i#uhgN_Gv$)gnt?!ferOppYbUBs4VW&0Q4QaPZ)LV`0X(Ra{B2w8F}Tr&{9 z!-sm3Tg3%s7lXUDp^&a&wGN{E@xL*MMd? zx$xqk#jM61W!GhMNpn5&qvgHWGYPs1HJFi#PJay($`Wc@;B=LI%EHtul(Hxw!P;rc zBSD+uv<+x`9tAt_;bHYB5mmke-8S9+_=_@+Ss5kikpQ}*3fTX4BAPmmd zl{U}aMS4WA@{P_Em?aDm^CaqB;7#3S(wxQC!k%PEz z38##9Dkb9t+|H6p)<@9pt`K0^%zW6}*%;y$WM%RR5R-f?yGe~7RXv>hH}NFn(Ny*K z&AwK|R8YEK!}*@>^AWZr`h)|=c6%l{`FCM z8~TaTru6phF!kHYK_IIZ(wDteLMYVfql4B}gV5-Jc%ci^bHz53GRCEEHg|&KJTi#O z!Z5yUfDxG^y~n#DDuqG zz_u!&^Bd{09{f{^poAo2LBsAO?M4ug<;yNuIo|IU4ZecN8qtgtxh3>>&S9dGPaAz_ zpu4_v*&5%igy!dUIf{T&8*XomOmf+E%ZDbLv7z0kae0S=G!IpO6U!aFxs9_-nq#ri zNYs>_=$glK_`_d432f{4HIpV9G+~_;$_Sa{F#Vh@F|2r*V+|R%xcS`yM7w0=qI#ZuK^})b$?kq2Mr8d1}rf3}T2wI*dn`~B_>egj3$E8}>rBR~m zC&9TS0YPmlEzI;kO5vN}-DC4!Rvcg3h_^DFqB%aqw!wzacND?BKL*bikEPUewTjCl zh`Nrv{Vf%RCJ_=vY?6XMxz9nydI=IDI;1!C&XV;`#dQ$T1QqrJ?h~GE+=}mmf2+zh zE2mWi^C&q>^du>eimE)7PRuunLu%r+{k$LPN*^o_GnEX0z{Xk@8saSP^F`1k09|J0 zpMNDy4PaqA$`cLO^ErTL6e*JYmZaB*Nl8=5O)`wAU<}{|AJ#8PERzXWh`$ip)JdNR z#=TM0cMOZ@EfJ|FM?7n;s8!;N;jx&_Uz~pC7$Tjy8 zt20OAgS!~lJ6qCZgY!e|V&x0k_KAcjm6JoAbmp*m{skPG&Z@d;5)Dn$9gG5@{8_%? 
zp#`v;A&+$bMD*;lESSeN$lA$5izIxd7s4(e$yLT3318kyU|U$$SA^E;!Gyw?%y3BW z!7Hh%9`Mm7Usc{{KZU~W6V5Wl9DU%N^`|^9hpfX%J@Irsx*tS>e$*PZtA{*+E|-#z zAK_s11}4>xxgVMc*in9A?q~6|C>l7#6e9@5C9|}~ANClRQ(P1|p75yrwqZ-Qk$B2C z`rg($I(*{3dlR;&XOol(b7&dsr3lhw&q-x@TaMtuu1dwq#L`)gtKmMg+p+{r#J*~I z!0wg;ne#;~$5(J8$A)c!sUM7g$8O$bi(V`XD~>hYa-m^b5B&Q4CH!t|ZU9Y5^y10f zPm-WgM%Wi!`QC;*%(~S0_ai^+-|igN@umHHm8E@>>uQ|mu%?l>kQR=UFlT0OsO=vOU&XepYX!wCWrHa zb&$KtqxMF-e#qtJyAvw79Ts!<7R8O`Xg)j)inhZVwaQH8+t(MTt) zVaI55`iBJndNdNZ=0*ifa%2!lD3}im` zK|54+cDCxU*Z@qKb)ZwpDyiFLq8e$Ax9e8wCe5Bpn5drFnKd!mVXFeIOZfg}egTQGEoYH@yz9tf4X&WsLNTnew`kmrwdL3NMv*PmE)k6#j|_=#U8?nft>`$wZeCI25C2cED7XHB_t zcEpbHCG@45&H<|;ZNmG&0Bi$tPvxbxkodn(5gIG)n=eMFk76{PNnJ{Hy^m;z35;*F z(?8QBHsV;}7;WB_+eXg+&R9&GJ#Ro-Eh?yxU`fWU=w^qg1sjOJeMyia^YfH&L;sns z1EW67-_EFGb{>;#g+Zs)ZbEt{Y`TxYx!KrjlY3Q297aFZ@SSM|JZHY-daxvDWoJ(| zBfI)GKTxMXpbcKUXP@WAaAv`sXqZ3Tc?x~V+c*v1ouk^|%2&G;4iPrcJ@EyYVTgCI z2=+7dZc2c{cHeL9Q>&6?QZq?hh$Gd^4ZZhULbt`WVa*#)a(>4TH|`^_u)XFqj5JcYyY$29U`Ayn>Js?TviDglC_>P)4O;1MTp7#c7>I#w z^4RD|HV5$|bGu&08d+sg8aTS&Pl>N0m}BSJu#l|q>E<+ zkUfe-Y=Dl!c1$iTsKdmGNadsvKGu>KG0G7%GfR!*o6yQPyYmG^znVh9QVT@xfY8M; z{svLRzxxQjE}*ORTJ=%&fpONACQUQL$NgE~nI zdw)yEfVyqO6b)2K2LI?1>QnG7#pR#?|KVm}R`MA?Fo16W&=M!g|IR+#{Dn!C$>&*m z-~usXB@B5Lu|MMHm{-=*TgRPh_KXium0hV!G?}5$aWaiG(<}3?ax<4_(xzQuQU|MZ6>DPuGAkz7w$CK`$7J)&3e5K(D#4+a z!fXP0e3FS09rczrWr~V+5nWe@O6~spN{VNxk=fBB^3vO|xiEh`T#BslvJr>Hxq-)& zA_g8yd`_h#s-+>y{*U1nNE*yL;A2u*xe;_ZUrxjeFZH<}(HtLq#qo}K{ArzA6OKxy za=wH2CvB-zU<=@{v2_;~8Uk^w)A4jW`GO$w8QsbL1t=G`Q+6WMuf%%zp>*QrY3O`9 z%0g)IQ;ENBo;!^*y=5c(Gycj2fAj(M2C>`aSmU4$6}iQqkO8R+vNbuuxw)VZGk}Q* z=mdRV_mHZ^W~J1RH1iX$#S*`3MTp;CRHA5u@!l@z`apW-?Hx7)#Z~r_k*NV6R$!lq zO|Wr__3vXUk|WyDBh_EQVfi+DXMGOtGk=+$AV1t0)>m;wxOt2>e#JA1`(yrnYrN#n&P6>-H~{j^TIquOmZaYi-a4u^-sC~(uU+w7*Fqnt>FLK$fp+2Ygvt6<|_>{ zxOHrh2D#iD?RsFa8nim!`qh3P(_hMIV3w+c-{09xwjdPez=C;OA(V^7kNk&1YrVg z#K;NuOj&<)q677;T1CIu`Eo7`U9l*ei<<+ATqv;BCwQ27mRrLkT3 zuSWzB4kAG-MzR+8E>_8}jV1sG}KGV-^4(qLGCQV{lOu 
z`SyxyIdi}BZo!N{(M~X9qGbGYX$5{VMsq!tLrAv~%XWLKcuDM;aMT9LZ>F012kX`h`AM3aA&%Mk!r6E(uTv6VFfS$Ws zjZzWV6+Qu>*x0#Tvas;g)OiDB@vE_3T zCt_qu3I^H^PLvxVVl4?u3)Fz}>lj>srYgSc;g$`?o(HJ^0x{Hy`x5w?QF+7>joVCF ztN-plvxWHo(bQr^E}6I0?uj}92yim z**AX;yl-K^U(L-VEnI%l0_`d4OScUkWN(G5eB zgG-@{aO%1cms(N(p>LiL*YrpsW2EU{^}Sf_#qk?Q z_H46dO7@rsT5B3^73#@J&+c!Jeah)BF8%nx5Kv-P#E6r)WplsQ?S^T7!rtq+oSk&` z@k8n?UY@Te#eE0g9!a8Q+>!}>1pUjN#OfT~i)ruPu2{8`pjY3X)Z7fU;}5$B3hs$T zy)92#o>h+e8n(>PlcCgw>bQq*8IZyZ&Wt{?tPo{>mvv?rjtyq=bE|4@PGF3=aM;`S zXiLcEkMRbrJY}<1&>vH;6(xRbt)JUlFrzj%^~?b6oq*Uz+A*$-g+r`b;G~{~MLNRu zX=L$M+FxhVT)~g{Jl@6T%F&N_aemqgL1Vdv2PG+{fvTo=SZ$OuxX$Q#@a$WW;xh_j z;;<5@BDL%}n-|Zndrur+!2+|8MZ2NYH-RqmKDP$1mf5`LCX9LU=95kDo=Y=%2fxYr z%W0~~Y=pu`(8mgmam4y}D)zf?+5E*ubxu^OG8!%GRU$QozP!4S4@eYIYNm$DhfUO# zZ)>41$fwFRg+3H)HrIXug>Xr1F{wZvV8}higEgQ>(J$Fovz3@xf7-I%H!#ZyT>Hi* zr0VUc4Cz_!Wg29Tw8J0F(4WgB`>D?W55EY|%hbo{q~X)RwLYo_FEf%~eyLY^6>@+Z z_Q#7Ic$GaX0tgf5S~^z^S8x^vc~Kv`;PvwS<NG2@CO_KRd?3(oT3bG<-nd5uC(w+{&pFl&r9}5}(Alh=np-wlY zt6xtKvi^lR$oC8#4X`f}md&qehDrre0>MpiiwAoPOagQ6oXJo9Nl1%|8w)n_Gj8=# z6LWw@Cl;jf@EdRVpSv1&(#e!}blls^81JJ@HBJ3?F1Ov<5{BBEewLGunxF!wU#YXW zbvd_#rGYO`1Z#G#y$$Qr+w(6t&fJf$6!M3I15j`X6^ul76C*O1hU_Tj?9%}0{reB( zYlHOaiRc%OSh-izIJ{~CfNbVsVw~ldstHI8N4tV4YbHtH9L?o*IsfIzn`+v7S=t%C z26~DtET6J;-D`-vc&DNBrv(jm;x^!B%6ZnAQfbM%<@sY2;oDHVs-y}`-A`R@dGw#d z=S5RP!77|j@48I`IKK+v&K6kusi<>q88`a=bZ$TI2!oZ?2PG3H_0o#?T-ZyY z@yc7cVn`#s5gc;#n>gr=1#JPoQZZ4@DIgB+N|x<-jDZsf!zk-jWMYtXe$fzp_qxcw z8T=@O_VuRHBQBw$naE1yr>QilKQ}eTG6e}-OM-9S`22Trm7~yqiw228=cx->P4P}; z@96~#$O0pTB>+?P+qU}An?yjkW|AJxa}0C%5LkYo%Wv>AOyQs@#c!&GItPm+EUm#5 z7&HE<(Yvwm0Yg<_R6YRM*>RN?Xr z%+3JjmT_}^uj~cscn_sCec!_bUyNPfv07U6@O^p*Dv~(zR@_cTdm7+wTSPFL`xAMz zsJzLMa#T-B*J>j4dE?18CQtMc!S5~iJ-3;@nq6)#wXDblZ1ZYy6r!wLDnG5lygxyv zt_tD!Nr|rG7y_@x6nu2~!9(7BpZEq{Ces0p?ubZtQVJSC-sPx-fcP77en=6ro92Uj zzjH7tM;(M_L_mfFhlKkg<8?j;g>*fdVSSYLD^SJ4*)|kf45r0G>mZLNSS4oo8Iw*` zBREv)!FuVx5H>F)MUhtNz~Z4=As@D=aL$c)O4VFUd)Xc=wJRS0j9-=XQ;*D?vbTQ; 
zs^OgJpp)Vgtn}#4pB6l_V-q}5&@M0Mig8b?Sg=95I^gn&9)Vey*$L+1?5{r5F>CuII!o z@!d=n)ee=mn%)`xuI}Q=E(msHzvVW5eA$)KUXBCeC}lkfhe+%METYxyUhR0u;y)@3uv4q0&@ek0sVOY%`D zlx}J`!OGy5hmoK(wkmzc{7#MtU&aQ+t}#riI$rMA<1iR%8Jpy?{^l%TrY5Spgoh9_ zQen!Zq!TbDdcS|*b503G3uA;g%EXGY%2x`lTtNS~^T3Qz&=G3^Dy3=MXSz+rOWW@3 z9L9VB)PGmWW`C5vAf8DK0$z~)rKmyJYWfKp4|z_GoF}3(1lk^<4hpXRL6S&Ou}`i| z9*^hUeTW@#x`5)FNb&5X=l+!s@L!>rVt61=mayP=qu3d=$^F5BwdPUD+LqmlxTlx& zh)UV%%ZfInD7Er}0ikea6H~fi_=B;QufAmwmr9;ao^I9o!tJ=|`|#XcY2?xGNtV08 zmQ?G&I-m5J^=|t}yWVj;^^^Ba#*`DG8DA{_7L~){Z+6XJ*12RDwBX^K#gkHWB%oKByi zxwdm(eQ*M69s&@|KW0IARt28HP>AX`!7LDpfc?$^8i2v1_WvS13ifikbmK66?^v_$g|liw4GD+(nl`GJyM zVxfcR0mnN*Two>kMk+*`^K3QujM*ZjYY;gPt4zY<}mWJxh;ylB;qK-Ch5z zq03y6q~s{g77fQmoz~u9HWD%S{fS@vT?ik7Pca=d&QK;Q;aJ@OZovF=RHb zE`9UKew+brtoPJ{y0$s!ux_I5TkjuUqI>g#qVSMOg+WO1Ts3q{$b%1YsF1U53chwD zx_Zu5NQXeeS}A+^$h4!JS(Ur@J@@avi~+v#_??;i9|#g-e=ZxNPrYWgvfh-NJ*q8Ii6GY+9Jo$ABY4m6Mk0miHn5 zM$fATP$c{T?&Hf!j`<;SU4P>a=X1|+4!NECL#GAxe$n;mb=`L_#+!k#B4hQ}?(#6a z^hEAVtA$%B?&_}YntS)ofX=^2(vCKY?Ol%-B;haI58+_z%;e3oe9Q8R(G$#W#xpS; zDrqF^?Tnk`_ON2I1_38|pxKF3a~F8#?;nq?`X9I~+fCAyY}eJ>$j)dR&bkd_Z#ox( zY5MjMYuq%cK))&O@?NPXTHgr|YhEnfq2rK7Ao&1K93OHZHQ^liiE%aK~$#gA$2IyBanli><|_B z-tG{Ah&u~P-4tA*28)Fgq%lqdXAeN-x(aKK&?!q@A1($Es?pz7FS(@YQ5tc~2W@_e zyq*@Y!%7q)>E5d`o3kc(M;Z6Hc5Mhs90(_}X zUp~!9F2anjB!MAbTt!}f)8N?1{A1^Qurxdx@Uh9<>#eDmS@;>?ajziKS9fMNARczz$@U(#$ClR1&RB7V6=b1N_AUk128Jxwcf(uTxM=fLw{QmH3K z`im$RRJ9fE@{rBpUhcJ6K@a%HF<0l?ZSXuJXUIUZDDEk7+rf506ZwGJz&e`z21>## z+%3A@6*VtH;5F*AcANcVU0sA)QO{6y?>5pQ;nySvXwnbTZL!G6aBBULPs&LG!A%Q< z?uFqWC(AA(`>$!1m7q@)U9?;-CsLO`J9u3ANIlRR3NybOFpzaEdS2P&c8o2O%a0n7 z6~=O~b$_{s!PErCGo9!!VW|nW>JfQd-XYD30azXE>N?EE<> zDxgLK>`aizO1x&!mHc9jBTw8ixp<)y3~09*{E3Sm(d26T z-4XHhu4I#?iWVOpuNA|%ms)V;k+?(AKh&@xuE3M#dI{o>HUlsJ#KU=qCX|G+8z9J0 za5~dEfijYI(tO~PIx>)K@)>xb60|&}9AL2T{FFemV2R|jiFeM<%52t}Et=nNXdugY z^5t&}S8q>u^B?F4nG`L>eZC7H#x+nMuP{ya$F@1*GaWWMjP~mUO}_c~WNvpCCKpob zu-(;8WCREE=~@$5Om0y*8#C$j{3b}Z`D{R?*4?U^*jrRc7Q6xX5!j^i|QD 
zy-*b+Wq6$PC+5hb5V-_WB38Y?_ypnTsxzm3K~cN6=myV?RF%LoaVd8Fvtgt=^>z5s zG@viNHT33HfR}4jT-sPT*Y%e0(6$@$J6%w#R0RhPCiP6#*5m{lR!mE5)Z7sNkR|CS zbi^Bj@@gm40^!kwx62g|+ipYHc5>bO=qL9t@O^*Jk_!JP@CB9TRqZ2^)^gKCfY93%TqkR8-|qo#&VvNWCd z0y`r30+_$q8NI7B9SRNTP1u+-(vqei5phf75la#rw9|e%r$OFjo&&6#z{H76-nGF! z#uGA?k=uVMXES1C3XjyKZlv<2V$UlL1ahfDGTq6?X;llxgm9?`WS~2!pH11Qd|t!I zlcZw$na%V1Mg4Cz7Q>(q@8O?5zjaf6w>f*{=IXYwdq8q$|16V6&ECqPKGIcR;=$Q@ z)7EI=VnUG}?gUmk#1c5!-}p)@^r1><8M*ZEk71hJXT{a64~b;y|NbSXr6I}S!cS;1 zltQ~p4z_XNj_5bk)JHr?r3(Gojy!p(@J3Izx(P$~WD{x@52Q9Nh}<-KL^Ma=%^JV6ub7nz6Lz;5j4&Yh?3=0R6&;&+ zD^Z(xgGewDnw3nTD!fvX=dtWXZY(p?4gjHJO{YtVwT4xEI==Ru29o%M410OY)#Q=ubzp`jO(g3^ zVWz(U$2TKHTaq4PORNaL9=j>nNN6$qj=E-^_dAz^GK+_DaoX7qur!1H8ICcXg3+p& z;jHSItz?jBPc=L-zR-$NkjFbrf2Ta5)@X@j5Wr0_>Z*Wd)-hd;9^+(zW#Mns-Z9F%hiR=G{*AB6WF`K3`Jk6T_R{Z+rNRtsp zK3p-M?dr&mu~LM7hmM{z z+Rl6QnSfmZW<3i+$lE;XugElj^k7!q`C-lTzJzTZR~F12Cd{8)IBYJqD=#9HZkLqP zz>7+)0NXe6fM=4P77Z(K>2Mf5TKD!0VEDNY=RZ=&uRN#=fA>BPRp z)kIMwVg zmM5t43yw&-0ccM|C`l~8?E9_l+e`^(+y2V^g=cn+oe0(|hpXNIN7R*c`8t+BZ+V0u zczl|lA+_-G9@RHn#WG|(!VDG{`GPPw9YQO1S#>EnbB!HnLq1MBkCIM0AviGJ0~9S!5r|4Z+di>4~YTiam4Gk=7h z5QoV6Qw$ZWY6!V3ci8ZoF*ieSXX$<1#YMFWQG~gIpk*FXZ#ZE>^Y)p`n46z(a&f0Y zc>TAgE};D_6&<%XrK6_sw>n(yvNe>5tPqSR3Zgv4WUKulV6IGA@w6&k;=IO~ex_gy z8VL4{cSCZ-BwP^O=4Q@)81ssq6swCEB+$nwCcJpEmn{e?=SmVC0@_eIy2Si^=N2>4 zf~~LG;B#f-ThE1Nn(b?9Jm`cZfDpM&B3O6lBJ(VIAMgcNKQmS}=qSvRv}|u;6#w~i z$a#R^+vsJ80hEH8oiD&GmpswAiJ1kV~y(TjZhMvS=8yRVMGBOiD_ zF1x;MwD>1+N42j(GVU*u=0A~E)$xA58iP585TsM?eYN=nc_J_=;H?hQxdsSyb1ZnOx9Gz&-;&9j|ncT{~JKTzf3JsUC&+uplCHOf^iDDwF!p+rm=7r(x@s}^Hr zhngQ~3RR(nj}-3(XliD$IUv$L4~MWg0m71c18(q9QwZl~RH-n?9~fTRoH^U}Vmi+{ zrEE~2ty~AK{PFcS(h+)tCEGyk_=#c05C0~`7oGt7mutnOdv#PT%K6uK2im2Em$JFG zjx|?T9ha8mdrf4b`KWK`f{Q#>|aCzi7?|ygeN?b7*>lWj|DgC!|BKzN(|7Ba-JMIH9 zRCkBA{@$ORjgG44^vor>na{-hn~5JzTeykD0!BhjM#`^G2v(HizkS@tv)FXj{4L6O zQ#pEETC1L{_5TD*2ekO0r2N+9Mmvq^Yyvxnc=YM4hS&8qpSZ+f|4<>g=Kl`<*{I_p z_^O!1YYt?A&N3J_e(7m32yjNAO07ypU-_Z0{1Xp9p%`6`W!5M>(dOD^6Y;`w#OIC5 
z3Gj>HXk64VDNW*0PE2RLDj&v)PvcSvcP9A06uzw%2-Sp%%XfhC;B{gcVA8B$>_6*d zzw837Q_+r_5XNDAOa`pq$rqvqobq5k<3<+ncY)63pm(9cw9?t8WT5=hS4UiDda|-i zH?&dTvk1R#Ba$D2(xb2K{|yYq$%re@#A`GtkwzCth)6?$qemW(dOcwrk6KF)9Bc&H z8>JZZg4fFbv8uCd+cgYd-Y&yFM0Smf^BC-fLOs{W1FwX;H@-g^78ez79 zypy)l)cUkerqeOby^67dIKBJ8G{ zJ02{Ph%e<$`F4R1-dT76<&T6s?njuw{QBgfIP{fgh6${f7oupi>jUbP!A0HIAvo4=cG;MD$A~sD!8bs>?jX@c7a^|b)NLi2;*~3_!>8+faYyZE-}P2w&=BA@5^J( zhTB^ggTkjwRY|-Uj8yL7c*0Q;4?<^kLAVXVd_ro+vg7b|zGx}(wj#U|;V+=kWXuA4FLF?JLGy@jMmEdU@{3TitKHt=wg!)IL?t>7|MVK=fPWN{rOrGXp zF!0H>8IC}H$a`ZTHh3VD!%+Wt(4;Z#jl%ra1Hbt|{>l(8fd3ubXTX~We-j+R&_Ma4 zz(-faxkPa4Fy)ehcC3TkI74DyGFc(Z3>Ig9@fX&QNBRi3L%>gG`24L=9`UlBp9{EUQ zUokBnNSCh(d=CEyxcX?umrY2p4pNg6^weW2jzr!sGl&CgiP4Zqs4xU7 zy%RX7+QYR)$7VtC_{883&f+KAgJlTwY0f8S{J={_52&@k45G#u2`K5XKp;RPL1V?i zMnzKKnx4mvq{qBnwhzxhyfM`DhJ3?c91`owgd-o67oUohg%haMtTSGoPMr8~V+R89 zlrN_YNRP&x_cEBzJ|S;(PG~A`jpfT(M}6U@(rYUIUN91Dj?++7TlXj;_VH+Y7>YIc z*+48J@cal$8+fH<7(2L4*(W_L_zD&^!Tw+jXAoh`Aol4+srovksd-dvIsz3uUI|S` zf`9BtDkkyi>U%mNI|fGCn`k6-Ho&s1H{L)gaVJmYr?G9(^6HZgWl~>V#8rN@{m%L{ zk9diI{54i)THeW5L*=#pM7$}z`h2HeB*G)zrWi!!ClM`YT2pi|R*r(%J7B&7b!(vg zl-f%V-i_)6L&1ty1+DSZ2Oq@caVI~#3r}YdKb9p%Q{<|5yp)cRl=BUY9q;>vdayZf}am<X{~-^C1B)><5m6|Rsb#nR}B}mT^0N&Tqy_+v59iyvK3k-Z~#ew zzrmTv5HO8-t*|E&FasV(pd)N!K!cFb!H^tbSJY}G_C(_U2g>fhDJKoWJRE2cMq@Xf zKU~}jdny6SlXA?%t;2KSYh3%8c{B?AT>rn|^P3imgJ3JPN}#R;{y(GzsByfN+~xoP N002ovPDHLkV1jT!k9+_C literal 0 HcmV?d00001 diff --git a/share/spack/logo/spack-logo-white-text-48.png b/share/spack/logo/spack-logo-white-text-48.png new file mode 100644 index 0000000000000000000000000000000000000000..9e60867e81cac2b86fe38b12bd506653682e964b GIT binary patch literal 12201 zcmZ{K19WChv+f&XqKPLH+qN<-5jj&PPo0s~SmMA91s9_UZ*_ybA|{VSD{ zfkHQ!co4C^DEb#aU%f@R8x+uBkK7N@{q=BjxXTK*+l|!sA=S<0!=}Y$<4cdlRsiVD z7qZu01HX+sFt~{OJey+*=1q(Ba4<0XHH}H-56LRz}fr>ehi5W8oF1MU`x4+8b zq3D37Z6rB~d@lz_b@E#~Yi4*GXd{(uJ1-nEM@J!WBi?ecu4otuUsahIdU1x$KpxzQ%aDTfTcu5GjfdbTTq~GEPT4=@g 
z?;uco2|GKpumj67r>8mMc+K*)9_aHJ@A6c6VgV$v4C>!T7)ZvgM_HNWkJ%}xRH%`?Ls z-NZo~;1R$DYLkqALA0d6V? zSu4c9^mhv~ErB`unb`p5d|am?WqsLgpfw~UIQHT_<)VWtUL zq2Iwg{J$oH|ItNU0yKIrSM%V)iuaCf{cy%ehr#Mz-Ii*`gWq<8RKvIR zlyn#Lq!{Ou^^=u;VjZv>p|JQo`xfZU*A%KylnXe*v42gApzBxCiPBN8PO5=e(mmtB zMH=e6i%gVANNPci2>b5;wU<@Srx<8Y9Ejr5uc1d(6S*YRE{RJn8;33# zNQ(7~lq#j(M?unzSd*NOyn`rigxf&e7F$(N3s^~-MW{`N94i2drzA%QO5utMFUxXC zwFtThx`-6~LM*nLQz=bu%}@h&{P2+SU=WXENxDti9jS;bi!o0)O>!dx#xEv0C27-V zQD_r+NqrQ)HG|9b2=>VPh;7&R@&!nWrsgK-1?#2ih3NV1unowOwa5CzHlYGb z1dpp{$qI;Yg#Q${%r@7{)N`*Gt}x<>bL7ANi8~&+BRJ}~gEJ&ERNtrE$KA)-uN(aZ z-OH%+#PaJ~L3mtvTX@th>p{!Z?$j!Km8sTeJ5|CnwH38{0&#*-0$4&+=}$%LdGz^= zQp-}{9HSYznXcKg+3P%gS;SP)lvCzZhBoWx-doT~b!&)g^NZPi-wEGI98M}uC(bt< zMVutoLAGzHR;eSY`>BVlL-i(VUm76Q_SFs>v=|doF2)s%$m+tlaw;m%i|A(f^ViGx zD)fu%RE+av>n71OW9ij7gqk#(`99 zX+6a`p*dZ;nYww~`Xr?>Wi!-yYI!_^ocfKwL$CRF)ybU#_Yn(gY>O%|>nG*pz(Z8~k4a*4%>F- z_jWU`Z4A8ueYm!>P0pO+YQnDC>C3pxV#J|izh(dW65sNB^D7OqG;)l1*id{?+#$}? z7p>r=D&-jEs;1aSy-TypB7ScE5dH{$v`)v)G7p9)-FNVJ$Cvfn#jC@Y#aAFCCuBL? 
z5qv(R7^D&m{}*OBR450?z8iY5Q{#Upz}rOm3_I7o9Cajp4e_!a}1}Ew3QKWU$bpApSEmv?R<|n0{ zs!>~VB=V48Lz$g>KBrWy@7}>X_7B9t;Y% z2+M7E&`qi%SqxZUSe&mL)m50SCJ}4R-0O|)gzf;z)?!aLRXvoS*T*0H&pQep3SOPI z9ATZ-){k3nRZ{d+wU(zVw=^sE$8U0Pn%b1lI+b+Qf9oKdMj1raGZ)u(NL87M&ljEK zpZMTJr5<#gxHX+;tPHI#c5PaE{Q--B7=Yo&N5^gV8h!+sR|r`S=&o&-_f4JVp0suu zdue<9IG4DNnGz2WFBs~La+v3tTjVI_h-dGOh#joi{XJy;zI89+hE{eAV##ZHg5)IP zy51RjBDl;%;9YhJ^(Z~2c+YxhoK_1}%UM`k2x-h`H+B2zwt4LRCj)O1k4_-%)BP^| zAtkkL)L42dW)dSkjVsH^^G0|u;5sY>D~Ma!p1-wzZ#JW{x^iR8C#g#7y2Z9+wd3_o zF{#YBl1r1vBg3QPJn%hY0lzIXQTxo(^tj>Dvg6pM?mcn0BC-?89xaHZ>0aqeC9MoKx(77lj~*S!Sc}HXz!1c>1AP2=tfzmz=O|w&a3|2P|Wce z@ClfZ6(Qi{4)=EBOm1m(u=(hJ`^wC%>iIXPgtL)QwF3aYp#9Up0IBKN0020Lse+2V zip)=LeQQg4T?1=9LwaXRo4;%TfY+J(Z`ab$UYF3>(!$D)+nJB}UkvWQ{eNHvV#0q> z?9KUzRb)VfBG$HsglzPz^o+#(@PvefytW2L+`oQ^{agI+8y~TWy}b=L1A~*36TK4) zy|t||0}~e)7Xu?R12Z$-Uk06>iMg70P|2Fg=0;>Nnf&U5qZvhc&OB-85JG*}@u>IS>|HS?`A82c8_;)S-H6x~f z^ZzIIzxf~&YkTXzaj`Ykm$b4swEb)B-_rl-&i|kBZ(Y0$|19nQSnB_XW5UVfACakc(nlFUB_qdStx?X^6B?i_4#xQ+l|U?|>ZsS$@Y8RQgHJ7#q6e{vNYz*m&CpG;3#q)C!Oyc9&!n3 z3aaPql{Z%F_!Vo~0xA3SX?$H4w=(Pq#&@-~wbt`O)0zVoD<&YMI0R8x2;noJn@yS;x5C2OAisT8#D}3=!lI z$<>7}6o>~)@DG9#&g!Y- zi8J6}mZ!;85HY(erE`;|iU)&VM5<|t%s-ms4h2W-X$Bn5`HB^Gv0G~M-A`qfgVXnI zEd<>M1lC>mrPpF$?(8oj;5pJ1^=~nsd{%CL2DBY@UsTG&$n-ml%Z|yKG~=G*p1>o` zQY?xq_uMqkBZlj1N<7=4xC<=d#9u3Roy4V{z;YGk%qEb7RJg~!DMSXgVMRMB*f+E7 z%*|aBz%2b)DfiAP@OX@u1@fd_WeEy7Or$}fRr6`nrtH3V(O=Ia*CKwNO?Aei^?Sbt zx;D`ST~}1zTMeDQ6>#ug9FD7_`_J*ZkusBU_~-f+-}fu0`4Q68L#^;Eki>dqBUrPf zwCf3VEE@bt8W`q1YLq*iGure({`i~)d@sZoayF^2xR1G?O4w}4kfS{}=ykvovG!mG z4f5~4*E?O|a2|T@i1RadBfA`DOgAzN0&ht+2u?rWm-kQzm|V-fE-*5RVt3|qK*2`o z^cu)PnPe36`i5;d-+GQf>x>ZqDE4X$_8|=k6>Ih)?;t;M?;^h@pEyCk@}T5&aT$T+ zS=q*V@|9+&T1S$0M{bzV0v7gdH$flhXvExT6wkuW{dbm0WsmnulnhRfOs64)8-}ju z-=nwJhw^W=QXY~!?_EPbA1W6odUSsUrV2J9b!0&j^2`s-D*6B7#|_R7T2M&PqF4=Q z&1}~^iId)d-twTNaiY0OO4zSz8-fWzV!tgOu=l8+P4 zS+i)LOmI}{`qfv1r`ob?ljl&}J}>HAkU?@HH*}!-tTY8L2Wh~=uE``}yfp}CH-m6V 
z(|P6jaM;6CT0~yfpU!pb-jua|k>!5Pw}e)l+}8bg;#QQ!D9Mon*M8f>dHJ%oGbEdS z(Ur`&K2@4bz?^$Ge?bU~Z0W0&WD@PZAG3(R)$A20404xO7Yd^iQmrvPYKG9@u+mDB_)gVQI~+exV7yq zlHsb^!9x~Ef`f-u^EPJ|T%>2+XwUHnl4y|+`%np+Y-*u;shmdXu(|$S6jv@7? zMbAL2p~P$1^}xkv!Jkx+apLKkhsA~p$J2D$^oeM+N_`mpEn`3xzYP%L?RN@aj9c*K zv$?WGXe=+RpbWy9W=NjGH$u!GyA~yb$qIj16gL`m7L7It}K`GI9g+C;yY}$>WtyJJjzISd(>0zIGxxw6L)b@ z*tQfD$`rLFVl|%>ejJd$8#wK0Hf`dnx zQ~Lzb^-W)sOcJY79|J)ERIoZ;lp^DDyajUUu0$0eA31mz6JTNl%Syshmk0M6s1nk@OzMvCD3C|NxN2mUd~@Ba#V;Iv@? z`6(^>2h=q}SDk!=@IZRwxu?W8((X0lsWtO<|88=Eww%l0jNS}UTxL~A*JD%1k#TPr&Uy;ja1u%Saa38>DT4$)$$W@$=7HbeIj>c z%HY-S7@#>|Y7 znRICqF^PRGHmm?)j&}wNdL-g4hjBo3rETNDtSuh*_tK7A@~wj{qi<(FRq8rV;2=;| zklvta{Rc=XFT;Dop?9KU7WqekuVsRV(MZ}HS0k=Hj@q z+lTzn-~(7u7dzd$dbuT}@j`7RSE--MCr*-#dv-wQpTc4)=Q*Z)%Z~GKeaT3yy)#U; z8-ixuj4B@Y6v(auP5BeWILt3}M_LmTWyS3Lh4i+@y;#8jbSLK(NlYmt84hDOq z4JqMPPwFJ)=WP;!+tnU~(;0$@SAZp;ZK6oUMRFW!ne^hEYFj$?spOD&c+vS#CP(2fRF66bN$UX`{dLTy zPKB8)V?$IR4Bd8)9^j|-pL3}{VeNC!bJxyWl{M?kJ4y#?316cVPqOmkucUL_$kL_E zpz|ymf=jxQ z)_e)|6XQ3M$1A3{EwZSt(QY}uvhkBJ5A6nagOD9fc#*u%B=#@H!-JnT7c7I@ zZ&=GUH3_G4i_aCGLkSKa49XMq_D^X9+>20?DHCg58(Bw=tEOL?WXCXZu(!u$W&Lyx zzc4kJua2HKj44@d_zqlL0;BBxf+8Hkl9b#0+#GF<@tdMnYo*0kNu&fagfMHI-q>`P z{dKNAY7E4qYx*Dg`?kRxvx{QrMNXpWF!xQZE8a^iaueaapijz~FtzZOHlgoFMkuZr z0tzDqH4LS}=JWQ~A8t;n7kiYuxw4VQVZ1#AxiTL1Y{4@W^T#~53byXEXAR>B!-Smp z9oJKzm_dHxE4!PL?33xlUjk3Q*DUyxTb>{~*w=t}aLF$T7B+MAUZO@!%`G zQ)kWZGVf{0q(05oCGU`d1t0;?stq0Z%Wec}R(FTp*?0QVSvF3vo(5%JaG)3-`GDa1+lZs?klrzO ziMvLwQvRs@l2phj{thifc56&*Fk1HX=FC5|8sQSeqN9p+xVSl+;Ngch`E6IsgTD_E zyFTeVdv(?8h)-K!ion;Jovx9AXgmEu89U{T&yv-bW+bb!g^6`B_vYrukMAu^Q&Wyr z9q+0r<5+4Z`HXebM1@V^dMxT4@Qi-NBCKvluIEErAkcXOQjEp~H~2MGRd^b?&#VIp z8MP-@fD%35)f#+bpf;XC?Qkg;>upMBYYh&i?5LTI`IagB{4YnWJ>mc##$B=J)O&~T zXpJOH$f%XTCS^c%T2sD2#%mcWRTn*WqZnw}7VjjbMSa}N2Su&sx+Nj?SLP)9V@=lHzm6fYqT$| zOpe?AgVxg$3JS+PHYX-u2(K~Fe-Y+8IAS#jxUnC#g^|yCdR@cW|zEPtBmk`i-MP5stMq&NUxEWZw>@23M39jV5$1ORHV@?QU4=ZIbZhZVv#&3^UZh*u?%qznsX#a zJb^w)?jB*n>U>hEHIoNCue;u7hfm{s>(H(uf{ 
zTw+^f8rlT?JBVmh51f<`zf&ib|18)p8hRZTRu`yoQgbm&1}bdci1Ev8GPTmEZDU6W z?egb6>}yO}wl$wlZH&A>ti_BU-+|Q@MI;|S@E7Erc1$_u;3Ni{(dyPk++hx*&=^35 zlf%T^;3#~__6x|j1YgHn!&>`(24^N{0249Mc_J}SrvZHdcz^*yO{BmsfU_+aL_~5$ ze7jyBE^_VF3BH!endP7@tKqASu%T)V;XqJ=b=%##qyEFS!wy2cv&tqs4kp#uv{q(L z=7LCGZz!07?9rh@niYKkyWMgw=Tx4ahj#U#l}bVazi+We@oyCN%7(pBkt}q@f z=(&?=)HJmfLwkr?!EY%TOKb~?Gj6VveTrE%*j>2TR_1qFyKC)0y14N49N(xEd2Q59 zOx#yUXFC<+r*aX2rcm1y9G^o+s1lMlCL|V7Cs~K3gDFDuWY0ax&)Kf}bQM7cU?kzr zt0Lj}CqRG1_4o|CFc>9&4WB9=T@AG~4VW7Z1tLvDE2K6E)qMZVWSVcO& zM<#tHj8>hrgtCGxtq9QZ8{fk`Fy37mtwclXCm<&~NtbEvdFb|&8u3RTTO{5ypRHkE z9Hj{^mYt_#bzAEHjo@&!#uCkYA7xO z(eAMHN2DE5l9>yjZm!#y(if|gDur1rV&=*chau*we&u|H1_J%@Cbi<={WRZ~IQUw9 zkl|-z0`WI3w4Tn{Fmj}c{dMDaE5reDU(^gfZ|VqoLx~gOM6fy%(wRug`oSDcmnJD| z9YH&>lA8eHX6Au*jb{=hqj7A_5_Ikxj>}+;SNO6Omoz8DM75p_x7b=ioEcDlULjLB z#*5t)a>E)^!DC2dAJ+jn!kP7r;53NXenk_fm-iIFsacOwTt`%?sBk& zQp*O4dsqLEobU7|0Znrm%gZiq%YF>3oFSj2P7aj8%mlqdz!aeZ;P zKFq|j2C;}=G?AzO)ZA#fApQNv*zNBDkk9J$t*~M029>+EDBjZ4KDH@k(d}070J0t#^#(^al?b@p3UqarHr5+`ECNgffW&3@at&n8Cpw;AzE$N zys~3FT;lIleHt7&Ao02y`{{2oH09*eB6%_f-PirB5?W>Q_m#!M8EtQDwS&VhI4sH1-M%&abXAd5+Ji^}~_0!IQmn=Z;AfG#D0Ny^m*znC9J5<$UjKf1hw z`E5PKd;xulrfEKi>J}L5$-eaE47zbN-owASdqt<-W}lN#_e)o7Mn*+bBJZq}gKHUo zLHxvnaz+A4B#dHZ>vC^C!|pa6X`#ATdRyrWq~R4U90G(NBTdlvV4f3#xG!49hwu{* ztvA*)sU)XyxFW{2p>OG7y?xJ9=7lw7>zR+!(wZq3=4mOrn2XVeD_4pD3NzUJ#@cuEL#3L~T_R^7!(&8Yqmuv&nG>prO> z8@T96&{eJu;M*^-2|-`P?n)vByY8?V9O3s=kHmp^h8*nyn?|HUL+#T8svB%M^>Bd} z!|L=_g?8VIzo*oTvG~8@fGb^*x?j18$5U%NCoyN0SFtMMhin@EDBiATYvob+gbmkm zTNwH@2!YbDLP<_^{Z#83?r7nakQFXzIed|AE8GYao&e6s&0O4UtbOLZ#r*3Qc;ZiA zj}*6vL1^JGb%UK7no;BUS0g^kqq-Hpagqa&Y}2Sks;SH6)K)3)fzoF)sXhX`M1kW! 
zteUg}5Rc|4EWmH@Jh5}#F4i#A4v?1G#Dlx|ckCYbm!K|s(gs^C8<&PFqVz#9A4#Re zozpd-dAcD=7n*`z=y(zPR-ACT)*UH|;t@dz)z8wWg69P+G>NRt?R61dgAz^OZt=7W2m)oi z>=n$$*_|*)lq&Mz!0weC?x8TI6QYjuY4dYAN1~-IgKmGIwrsa5kc9T^ODzz%Cjsmo z=(cZ$=(t}`!)Kf?TEt;7g(rVcm2Of#Ssb*mAU2y(^8*m6!p!{i;4y8$`vGqvtQ`rF zG;`nxLpqV_Y#X}}G8PkQ+^mr{nUhPO*64JoITKg5GHG=kYj}|_bjkU+Z|iiWnxSd8 zQJhFS-^Y6s8(0#~tq6@;IsFadLywTlcky0ClR5Jnw>=J?19jkhXvv!tXw0z!g4>ik z+P}?t$^K}8mQr&fQ0)99p*&Z|T_DGuVI-i8Y}9|8Ou|#(z_Y)R5V(*kmE)p|6xHbdU`nyZ{&+x(@578_ookAfAFIiAsFd?T2ar%*an;{J_I? zLMaXpxCDW8JPglUr4aJRp1}p~bf&uvaJnkhnnDd1g8h!><+4Nd`(PIkPWD=zTCGs@ zAMPHnLv#2AJnSXZ155MxaW0>45}mqE5uCLz|S1;&T=cIDe06( zsKcO7Dv*wC1+LTS2mz+*x4isMR95~x(gJ2HEQa4@eZ?}#2c*>c`-Vj%-E=)xTliZJ z*_zZ#*$s`w8IChCNLsUjzTmrcw#s4|<-opB@XhRQ&Au$UvhRZc(m-|<_W~;4T9$Ed zl@@5fZ@xJ~jHje8pJf{_GR+M)Ks{xv${XwVs;O0VxPX0>l+RdkJ+dk2w90Mp=)?tU zQyS?QLp(P)T?m%;o#KSa=|t^tH3}-A>nh-z*b?`+Nyj8uRrX-y$W}@SlKW1&F#D#x z;=9`@63B9(ywU~(xb_5Fc*Dl%aM6s}4nDWF@KcE(*$*-FKKWeO3nWpt5>!)dHCTRG z`(iejiD>kPHhun@BBgO@Aw($dLmblwt4(ED_>eJ!J&)cE8Q$Z^x7gX2san?`%3lb1 z@v{BHc!DkTk(gbxAETzNKi5WIi869lU8fE&KK$Bzq2hx0>k90VN7Z}HmZdPvIq&DN zH(oabQsW4|{qhqU9Lmf=i&r8Dof<_!U`*e9D8(w>-+-3)n9S)KcT-FVm}RRo3x}33 zLE10tJkzO_Z4}aVXb5E3DEV8H{)yLL=I)_$ukO}QQ(v|fq@Q-SR`QU-VkP8^iyaQF zI>|3+I8hUCsN)5m?OJR3tQc=?De(%st{omw5Voh*GNzxBEzXV*RapwP{IIk)@F9@C zJk;H0lOwl`qj;@eVn{@KEly3P&;zmuem#c3ju7HX6XSnEAYI2rCWW9EYxk?Ok zBCCQS6luO;Y`o%=s0z0Z+$N5B98qkLFxe5%OnZs%8KQ(JP4w_6x?g)O?IPwDz85>D zM;OpL_&3oYmrsIDMBKQCGxzR@4S0fnK zZN9#8R8${hfp8TFDjR=3t>Z<0ohP=iXHwee$<{$+);_87 z^zR`nf(>tR+03Ivm3OIz4dhNe6<7fk(mDXW3fgb4SY-UbLz18z^@_3sxw!5J76|6$ zd5aUI+;}Mx0gTdhj40VdCk^5~&B`3leYKHUhbVPI$c!D$zlFSlut9H60F5Ymr1`a) zO05d1xXJ`sY|Cn=?j4pBtXM^T_eT9a*IModLL6%`+GkGy)kc;xM)| zSdY7Xfg>?2v^9--<<}MQ$7U+oQ*zlK)9dd2?! z=0&L7628I=EtU8@phhS+g^R~6P_HIqC*~uyu@?c4S@~T2)ow&V8`2|}vmizGg;AkS zCy%g`atb=zU_~k}p~Mk71?nxpq1cK>JGtmWA+sSzC4;)I*HMIGB(^gERPPlo`N9fP zvM8&&EMHk+rA9%8+oX;F@M)0JNT60d%gkC1OqUfVu=%OYI?2kZVJ?*FshC@@CHqoS z;G0mVfj;%>l@t&;MCHxKPZ_eAB3%%(! 
z1ZtXAv5Rc<7m(lOLT3PTxHWjZcT}I>!!;jK(IPRM-d2TaQS-}z)zD3dOS(~Cc?_B5 zw~KW2ue(H2txGRtkDLBWH9f4l@@ZG$;e<3qK7K#CBD%r@I|&2I?q8TXiwUz~&z_16 zbQS5qsCan1U{$jeGk_c-QBFo#-Gz}W?4Bqp;10WbThV literal 0 HcmV?d00001 From f4460894beea13c2d028338503c9cef5608e05b6 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 25 Sep 2015 10:44:47 +0200 Subject: [PATCH 078/121] hdf5 : updated version mpich : updated version trilinos : created stub --- var/spack/packages/hdf5/package.py | 3 +- var/spack/packages/mpich/package.py | 5 ++++ var/spack/packages/trilinos/package.py | 40 ++++++++++++++++++++++++++ 3 files changed, 47 insertions(+), 1 deletion(-) create mode 100644 var/spack/packages/trilinos/package.py diff --git a/var/spack/packages/hdf5/package.py b/var/spack/packages/hdf5/package.py index 992dd8ec700..15e0ef93388 100644 --- a/var/spack/packages/hdf5/package.py +++ b/var/spack/packages/hdf5/package.py @@ -10,7 +10,8 @@ class Hdf5(Package): url = "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-1.8.13/src/hdf5-1.8.13.tar.gz" list_url = "http://www.hdfgroup.org/ftp/HDF5/releases" list_depth = 3 - + + version('1.8.15', '03cccb5b33dbe975fdcd8ae9dc021f24') version('1.8.13', 'c03426e9e77d7766944654280b467289') depends_on("mpi") diff --git a/var/spack/packages/mpich/package.py b/var/spack/packages/mpich/package.py index b6b2dfde214..315fc4a5a32 100644 --- a/var/spack/packages/mpich/package.py +++ b/var/spack/packages/mpich/package.py @@ -33,6 +33,11 @@ class Mpich(Package): list_url = "http://www.mpich.org/static/downloads/" list_depth = 2 + version('3.1.4', '2ab544607986486562e076b83937bba2') + version('3.1.3', '93cb17f91ac758cbf9174ecb03563778') + version('3.1.2', '7fbf4b81dcb74b07ae85939d1ceee7f1') + version('3.1.1', '40dc408b1e03cc36d80209baaa2d32b7') + version('3.1', '5643dd176499bfb7d25079aaff25f2ec') version('3.0.4', '9c5d5d4fe1e17dd12153f40bc5b6dbc0') provides('mpi@:3', when='@3:') diff --git a/var/spack/packages/trilinos/package.py b/var/spack/packages/trilinos/package.py 
new file mode 100644 index 00000000000..2fa4f3cc847 --- /dev/null +++ b/var/spack/packages/trilinos/package.py @@ -0,0 +1,40 @@ +# FIXME: +# This is a template package file for Spack. We've conveniently +# put "FIXME" labels next to all the things you'll want to change. +# +# Once you've edited all the FIXME's, delete this whole message, +# save this file, and test out your package like this: +# +# spack install trilinos +# +# You can always get back here to change things with: +# +# spack edit trilinos +# +# See the spack documentation for more information on building +# packages. +# +from spack import * + +class Trilinos(Package): + """FIXME: put a proper description of your package here.""" + # FIXME: add a proper url for your package's homepage here. + homepage = "http://www.example.com" + url = "http://trilinos.csbsju.edu/download/files/trilinos-12.2.1-Source.tar.gz" + + version('12.2.1' , '6161926ea247863c690e927687f83be9') + version('12.0.1' , 'bd99741d047471e127b8296b2ec08017') + version('11.14.3', '2f4f83f8333e4233c57d0f01c4b57426') + version('11.14.2', 'a43590cf896c677890d75bfe75bc6254') + version('11.14.1', '40febc57f76668be8b6a77b7607bb67f') + + # FIXME: Add dependencies if this package requires them. + # depends_on("foo") + + def install(self, spec, prefix): + # FIXME: Modify the configure line to suit your build system here. 
+ cmake('.', *std_cmake_args) + + # FIXME: Add logic to build and install here + make() + make("install") From ff0d039a1fea0b713455c38ab4df6a5cbe0d4783 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Thu, 15 Oct 2015 08:48:39 +0200 Subject: [PATCH 079/121] trilinos : prototype for the whole package --- var/spack/packages/trilinos/package.py | 53 +++++++++++++++++++------- 1 file changed, 39 insertions(+), 14 deletions(-) diff --git a/var/spack/packages/trilinos/package.py b/var/spack/packages/trilinos/package.py index 2fa4f3cc847..6f7cc253ddc 100644 --- a/var/spack/packages/trilinos/package.py +++ b/var/spack/packages/trilinos/package.py @@ -16,25 +16,50 @@ # from spack import * -class Trilinos(Package): - """FIXME: put a proper description of your package here.""" - # FIXME: add a proper url for your package's homepage here. - homepage = "http://www.example.com" - url = "http://trilinos.csbsju.edu/download/files/trilinos-12.2.1-Source.tar.gz" - version('12.2.1' , '6161926ea247863c690e927687f83be9') - version('12.0.1' , 'bd99741d047471e127b8296b2ec08017') +class Trilinos(Package): + """ + The Trilinos Project is an effort to develop algorithms and enabling technologies within an object-oriented + software framework for the solution of large-scale, complex multi-physics engineering and scientific problems. + A unique design feature of Trilinos is its focus on packages. + """ + homepage = "https://trilinos.org/" + url = "http://trilinos.csbsju.edu/download/files/trilinos-12.2.1-Source.tar.gz" + + version('12.2.1', '6161926ea247863c690e927687f83be9') + version('12.0.1', 'bd99741d047471e127b8296b2ec08017') version('11.14.3', '2f4f83f8333e4233c57d0f01c4b57426') version('11.14.2', 'a43590cf896c677890d75bfe75bc6254') version('11.14.1', '40febc57f76668be8b6a77b7607bb67f') - # FIXME: Add dependencies if this package requires them. 
- # depends_on("foo") + variant('mpi', default=True, description='Add a dependency on MPI and enables MPI dependent packages') + + # Everything should be compiled with -fpic + depends_on('blas') + depends_on('lapack') + depends_on('boost') + depends_on('netcdf') + depends_on('matio') + depends_on('glm') + depends_on('swig') + depends_on('mpi', when='+mpi') def install(self, spec, prefix): - # FIXME: Modify the configure line to suit your build system here. - cmake('.', *std_cmake_args) - # FIXME: Add logic to build and install here - make() - make("install") + options = [ + '-DTrilinos_ENABLE_ALL_PACKAGES:BOOL=ON', + '-DTrilinos_ENABLE_TESTS:BOOL=ON', + '-DBUILD_SHARED_LIBS:BOOL=ON', + '-DBLAS_LIBRARY_DIRS:PATH=%s' % spec['blas'].prefix, + '-DLAPACK_LIBRARY_DIRS:PATH=%s' % spec['lapack'].prefix + ] + if '+mpi' in spec: + mpi_options = ['-DTPL_ENABLE_MPI:BOOL=ON'] + options.extend(mpi_options) + + # -DCMAKE_INSTALL_PREFIX and all the likes... + options.extend(std_cmake_args) + with working_dir('spack-build', create=True): + cmake('..', *options) + make() + make('install') From 3a4bdf1856685ae63116793e532d45aa2ee7fd28 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 16 Oct 2015 13:02:27 +0200 Subject: [PATCH 080/121] trilinos : first version that compiles on bellatrix --- var/spack/packages/swig/package.py | 2 ++ var/spack/packages/trilinos/package.py | 19 ++----------------- 2 files changed, 4 insertions(+), 17 deletions(-) diff --git a/var/spack/packages/swig/package.py b/var/spack/packages/swig/package.py index d7a3d815b96..ee536d7063d 100644 --- a/var/spack/packages/swig/package.py +++ b/var/spack/packages/swig/package.py @@ -38,6 +38,8 @@ class Swig(Package): version('3.0.2', '62f9b0d010cef36a13a010dc530d0d41') + depends_on('pcre') + def install(self, spec, prefix): configure("--prefix=%s" % prefix) make() diff --git a/var/spack/packages/trilinos/package.py b/var/spack/packages/trilinos/package.py index 6f7cc253ddc..7c43f796a4e 100644 --- 
a/var/spack/packages/trilinos/package.py +++ b/var/spack/packages/trilinos/package.py @@ -1,19 +1,3 @@ -# FIXME: -# This is a template package file for Spack. We've conveniently -# put "FIXME" labels next to all the things you'll want to change. -# -# Once you've edited all the FIXME's, delete this whole message, -# save this file, and test out your package like this: -# -# spack install trilinos -# -# You can always get back here to change things with: -# -# spack edit trilinos -# -# See the spack documentation for more information on building -# packages. -# from spack import * @@ -48,7 +32,8 @@ def install(self, spec, prefix): options = [ '-DTrilinos_ENABLE_ALL_PACKAGES:BOOL=ON', - '-DTrilinos_ENABLE_TESTS:BOOL=ON', + '-DTrilinos_ENABLE_TESTS:BOOL=OFF', + '-DTrilinos_ENABLE_EXAMPLES:BOOL=OFF', '-DBUILD_SHARED_LIBS:BOOL=ON', '-DBLAS_LIBRARY_DIRS:PATH=%s' % spec['blas'].prefix, '-DLAPACK_LIBRARY_DIRS:PATH=%s' % spec['lapack'].prefix From aab7102b9bcf2b35461ef3816f874f53d9eef976 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 14 Oct 2015 15:52:46 +0200 Subject: [PATCH 081/121] matio : added basic configuration for the package --- var/spack/packages/matio/package.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 var/spack/packages/matio/package.py diff --git a/var/spack/packages/matio/package.py b/var/spack/packages/matio/package.py new file mode 100644 index 00000000000..12cfb809261 --- /dev/null +++ b/var/spack/packages/matio/package.py @@ -0,0 +1,15 @@ +from spack import * + + +class Matio(Package): + """matio is an C library for reading and writing Matlab MAT files""" + homepage = "http://sourceforge.net/projects/matio/" + url = "http://downloads.sourceforge.net/project/matio/matio/1.5.2/matio-1.5.2.tar.gz" + + version('1.5.2', '85b007b99916c63791f28398f6a4c6f1') + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix) + + make() + make("install") From ec5560374c218bfb1f16934259108ebc305cf838 Mon Sep 17 
00:00:00 2001 From: Massimiliano Culpo Date: Wed, 14 Oct 2015 16:13:56 +0200 Subject: [PATCH 082/121] glm : added basic configuration for the package --- var/spack/packages/glm/package.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 var/spack/packages/glm/package.py diff --git a/var/spack/packages/glm/package.py b/var/spack/packages/glm/package.py new file mode 100644 index 00000000000..d00c301b4ce --- /dev/null +++ b/var/spack/packages/glm/package.py @@ -0,0 +1,19 @@ +from spack import * + + +class Glm(Package): + """ + OpenGL Mathematics (GLM) is a header only C++ mathematics library for graphics software based on + the OpenGL Shading Language (GLSL) specification. + """ + + homepage = "https://github.com/g-truc/glm" + url = "https://github.com/g-truc/glm/archive/0.9.7.1.tar.gz" + + version('0.9.7.1', '61af6639cdf652d1cdd7117190afced8') + + def install(self, spec, prefix): + with working_dir('spack-build', create=True): + cmake('..', *std_cmake_args) + make() + make("install") From 7e2f3cff443dd1ae634eb375fd06ee41df149989 Mon Sep 17 00:00:00 2001 From: David Beckingsale Date: Mon, 19 Oct 2015 15:41:46 -0700 Subject: [PATCH 083/121] Added ncdu package --- var/spack/packages/ncdu/package.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 var/spack/packages/ncdu/package.py diff --git a/var/spack/packages/ncdu/package.py b/var/spack/packages/ncdu/package.py new file mode 100644 index 00000000000..234f9730d61 --- /dev/null +++ b/var/spack/packages/ncdu/package.py @@ -0,0 +1,28 @@ +from spack import * + +class Ncdu(Package): + """ + Ncdu is a disk usage analyzer with an ncurses interface. It is designed + to find space hogs on a remote server where you don't have an entire + gaphical setup available, but it is a useful tool even on regular desktop + systems. Ncdu aims to be fast, simple and easy to use, and should be able + to run in any minimal POSIX-like environment with ncurses installed. 
+ """ + + homepage = "http://dev.yorhel.nl/ncdu" + url = "http://dev.yorhel.nl/download/ncdu-1.11.tar.gz" + + version('1.11', '9e44240a5356b029f05f0e70a63c4d12') + version('1.10', '7535decc8d54eca811493e82d4bfab2d') + version('1.9' , '93258079db897d28bb8890e2db89b1fb') + version('1.8' , '94d7a821f8a0d7ba8ef3dd926226f7d5') + version('1.7' , '172047c29d232724cc62e773e82e592a') + + depends_on("ncurses") + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix, + '--with-ncurses=%s' % spec['ncurses']) + + make() + make("install") From e0e79a2cb3cda043bf1fb456c58ceff8d4e9375d Mon Sep 17 00:00:00 2001 From: Kevin Huck Date: Wed, 21 Oct 2015 11:56:01 -0700 Subject: [PATCH 084/121] Adding package for activeharmony. --- var/spack/packages/activeharmony/package.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 var/spack/packages/activeharmony/package.py diff --git a/var/spack/packages/activeharmony/package.py b/var/spack/packages/activeharmony/package.py new file mode 100644 index 00000000000..45dcc7c0e8e --- /dev/null +++ b/var/spack/packages/activeharmony/package.py @@ -0,0 +1,15 @@ +from spack import * + +class Activeharmony(Package): + """Active Harmony: a framework for auto-tuning (the automated search for values to improve the performance of a target application).""" + homepage = "http://www.dyninst.org/harmony" + url = "http://www.dyninst.org/sites/default/files/downloads/harmony/ah-4.5.tar.gz" + + version('4.5', 'caee5b864716d376e2c25d739251b2a9') + + def install(self, spec, prefix): + make("CFLAGS=-O3") + make("install", 'PREFIX=%s' % prefix) + +from spack import * + From c082abfb6574c1bd2610990aeb799ff6d7c853a0 Mon Sep 17 00:00:00 2001 From: Kevin Huck Date: Wed, 21 Oct 2015 18:32:57 -0700 Subject: [PATCH 085/121] Modified the binutils package to use --enable-shared. 
Required by tools that use binutils and get this error (or equivalent): "relocation R_X86_64_32S against `bfd_errmsgs' can not be used when making a shared object; recompile with -fPIC" --- var/spack/packages/binutils/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/packages/binutils/package.py b/var/spack/packages/binutils/package.py index 0c33bf04260..cac0a0407f9 100644 --- a/var/spack/packages/binutils/package.py +++ b/var/spack/packages/binutils/package.py @@ -18,6 +18,7 @@ def install(self, spec, prefix): '--disable-dependency-tracking', '--enable-interwork', '--enable-multilib', + '--enable-shared', '--enable-64-bit-bfd', '--enable-targets=all'] From fb9eca4226e1026b3dae1c43c2190bf6ea2c56ec Mon Sep 17 00:00:00 2001 From: Kevin Huck Date: Wed, 21 Oct 2015 19:35:31 -0700 Subject: [PATCH 086/121] Added an APEX package, but it is still using the git master instead of a release. --- var/spack/packages/apex/package.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 var/spack/packages/apex/package.py diff --git a/var/spack/packages/apex/package.py b/var/spack/packages/apex/package.py new file mode 100644 index 00000000000..086694044ef --- /dev/null +++ b/var/spack/packages/apex/package.py @@ -0,0 +1,30 @@ +from spack import * +from spack.util.environment import * + +class Apex(Package): + homepage = "http://github.com/khuck/xpress-apex" + #url = "http://github.com/khuck/xpress-apex/archive/v0.1-release-candidate.tar.gz" + url = "http://github.com/khuck/xpress-apex" + + #version('0.1', '6e039c224387348296739f6bf360d081') + version('master', branch='master', git='https://github.com/khuck/xpress-apex.git') + + depends_on("binutils+libiberty") + depends_on("boost@1.54:") + depends_on("cmake@2.8.12:") + depends_on("activeharmony@4.5:") + + def install(self, spec, prefix): + + path=get_path("PATH") + path.remove(spec["binutils"].prefix.bin) + path_set("PATH", path) + with working_dir("build", create=True): + 
cmake('-DBOOST_ROOT=%s' % spec['boost'].prefix, + '-DUSE_BFD=TRUE', + '-DBFD_ROOT=%s' % spec['binutils'].prefix, + '-DUSE_ACTIVEHARMONY=TRUE', + '-DACTIVEHARMONY_ROOT=%s' % spec['activeharmony'].prefix, + '..', *std_cmake_args) + make() + make("install") From 2a61ff07948701ce681c02377e3e5bae58e25b74 Mon Sep 17 00:00:00 2001 From: Kevin Huck Date: Thu, 22 Oct 2015 03:57:33 -0700 Subject: [PATCH 087/121] Changing git checkout from "master" to commit d2e66ddde689120472fc57fc546d8cd80aab745c --- var/spack/packages/apex/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/packages/apex/package.py b/var/spack/packages/apex/package.py index 086694044ef..7cd4cad1686 100644 --- a/var/spack/packages/apex/package.py +++ b/var/spack/packages/apex/package.py @@ -7,7 +7,8 @@ class Apex(Package): url = "http://github.com/khuck/xpress-apex" #version('0.1', '6e039c224387348296739f6bf360d081') - version('master', branch='master', git='https://github.com/khuck/xpress-apex.git') + #version('master', branch='master', git='https://github.com/khuck/xpress-apex.git') + version('10-21-2015', git='https://github.com/khuck/xpress-apex.git', commit='d2e66ddde689120472fc57fc546d8cd80aab745c') depends_on("binutils+libiberty") depends_on("boost@1.54:") From 24838a62b44110fa386e404e0e41689a5182e911 Mon Sep 17 00:00:00 2001 From: Kevin Huck Date: Thu, 22 Oct 2015 06:02:56 -0700 Subject: [PATCH 088/121] Changed date formatting. 
--- var/spack/packages/apex/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/packages/apex/package.py b/var/spack/packages/apex/package.py index 7cd4cad1686..08fecade69c 100644 --- a/var/spack/packages/apex/package.py +++ b/var/spack/packages/apex/package.py @@ -8,7 +8,7 @@ class Apex(Package): #version('0.1', '6e039c224387348296739f6bf360d081') #version('master', branch='master', git='https://github.com/khuck/xpress-apex.git') - version('10-21-2015', git='https://github.com/khuck/xpress-apex.git', commit='d2e66ddde689120472fc57fc546d8cd80aab745c') + version('2015-10-21', git='https://github.com/khuck/xpress-apex.git', commit='d2e66ddde689120472fc57fc546d8cd80aab745c') depends_on("binutils+libiberty") depends_on("boost@1.54:") From e6a44bd8ac40bfb93e51a99d3bfde476937207e1 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Thu, 22 Oct 2015 15:55:43 +0200 Subject: [PATCH 089/121] GCC : removed dependency on libelf. Removed isl variant --- var/spack/packages/gcc/package.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/var/spack/packages/gcc/package.py b/var/spack/packages/gcc/package.py index 0e076f58e5c..4088b9f9d41 100644 --- a/var/spack/packages/gcc/package.py +++ b/var/spack/packages/gcc/package.py @@ -36,6 +36,8 @@ class Gcc(Package): list_url = 'http://open-source-box.org/gcc/' list_depth = 2 + DEPENDS_ON_ISL_PREDICATE = '@5.0:' + version('5.2.0', 'a51bcfeb3da7dd4c623e27207ed43467') version('4.9.3', '6f831b4d251872736e8e9cc09746f327') version('4.9.2', '4df8ee253b7f3863ad0b86359cd39c43') @@ -47,15 +49,14 @@ class Gcc(Package): version('4.5.4', '27e459c2566b8209ab064570e1b378f7') variant('binutils', default=False, description='Add a dependency on binutils') - + depends_on("mpfr") depends_on("gmp") depends_on("mpc") # when @4.5: - depends_on("libelf") depends_on("binutils~libiberty", when="+binutils") # Save these until we can do optional deps. 
- depends_on("isl", when='@5.0:') + depends_on("isl", when=DEPENDS_ON_ISL_PREDICATE) #depends_on("ppl") #depends_on("cloog") @@ -79,11 +80,6 @@ def install(self, spec, prefix): "--with-gnu-ld", "--with-gnu-as", "--with-quad"] - # Libelf - if '+libelf' in spec: - libelf_options = ["--with-libelf=%s" % spec['libelf'].prefix] - options.extend(libelf_options) - # Binutils if '+binutils' in spec: binutils_options = ["--with-stage1-ldflags=%s" % self.rpath_args, @@ -93,7 +89,7 @@ def install(self, spec, prefix): options.extend(binutils_options) # Isl - if spec.satisfies('@5.0:+isl'): + if spec.satisfies(Gcc.DEPENDS_ON_ISL_PREDICATE): isl_options = ["--with-isl=%s" % spec['isl'].prefix] options.extend(isl_options) From caf93cc4080703b80d5c622370b8094c37bf014f Mon Sep 17 00:00:00 2001 From: Kevin Huck Date: Thu, 22 Oct 2015 08:03:50 -0700 Subject: [PATCH 090/121] Adding package for OMPT support in the LLVM/Clang OpenMP runtime, which gives OMPT support for applications built with GCC, Intel or LLVM/Clang. --- var/spack/packages/ompt-openmp/package.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 var/spack/packages/ompt-openmp/package.py diff --git a/var/spack/packages/ompt-openmp/package.py b/var/spack/packages/ompt-openmp/package.py new file mode 100644 index 00000000000..5d380ebd775 --- /dev/null +++ b/var/spack/packages/ompt-openmp/package.py @@ -0,0 +1,23 @@ +from spack import * + +class OmptOpenmp(Package): + """LLVM/Clang OpenMP runtime with OMPT support. This is a fork of the OpenMPToolsInterface/LLVM-openmp fork of the official LLVM OpenMP mirror. 
This library provides a drop-in replacement of the OpenMP runtimes for GCC, Intel and LLVM/Clang.""" + homepage = "https://github.com/OpenMPToolsInterface/LLVM-openmp" + url = "http://github.com/khuck/LLVM-openmp/archive/v0.1-spack.tar.gz" + + version('spack', '35227b2726e377faa433fc841226e036') + + # depends_on("foo") + + def install(self, spec, prefix): + with working_dir("runtime/build", create=True): + + # FIXME: Modify the configure line to suit your build system here. + cmake('-DCMAKE_C_COMPILER=%s' % self.compiler.cc, + '-DCMAKE_CXX_COMPILER=%s' % self.compiler.cxx, + '-DCMAKE_INSTALL_PREFIX=%s' % prefix, + '..', *std_cmake_args) + + # FIXME: Add logic to build and install here + make() + make("install") From 7cb557cba77e637ac528205865688af9fe350494 Mon Sep 17 00:00:00 2001 From: Kevin Huck Date: Thu, 22 Oct 2015 08:06:31 -0700 Subject: [PATCH 091/121] Adding dependency on ompt-openmp, now that it is a package. --- var/spack/packages/apex/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/packages/apex/package.py b/var/spack/packages/apex/package.py index 08fecade69c..6404d5208aa 100644 --- a/var/spack/packages/apex/package.py +++ b/var/spack/packages/apex/package.py @@ -14,6 +14,7 @@ class Apex(Package): depends_on("boost@1.54:") depends_on("cmake@2.8.12:") depends_on("activeharmony@4.5:") + depends_on("ompt-openmp") def install(self, spec, prefix): @@ -26,6 +27,8 @@ def install(self, spec, prefix): '-DBFD_ROOT=%s' % spec['binutils'].prefix, '-DUSE_ACTIVEHARMONY=TRUE', '-DACTIVEHARMONY_ROOT=%s' % spec['activeharmony'].prefix, + '-DUSE_OMPT=TRUE', + '-DOMPT_ROOT=%s' % spec['ompt-openmp'].prefix, '..', *std_cmake_args) make() make("install") From 41c01b523f2af4e60ed38e7a097991b55647be9f Mon Sep 17 00:00:00 2001 From: David Beckingsale Date: Thu, 22 Oct 2015 09:13:20 -0700 Subject: [PATCH 092/121] Filter wrapper data to use correct compilers in OpenMPI --- var/spack/packages/openmpi/package.py | 45 +++++++++++++++++++++++++++ 1 file 
changed, 45 insertions(+) diff --git a/var/spack/packages/openmpi/package.py b/var/spack/packages/openmpi/package.py index 7e84cbaf65a..d2d095979e1 100644 --- a/var/spack/packages/openmpi/package.py +++ b/var/spack/packages/openmpi/package.py @@ -1,5 +1,8 @@ +import os + from spack import * + class Openmpi(Package): """Open MPI is a project combining technologies and resources from several other projects (FT-MPI, LA-MPI, LAM/MPI, and PACX-MPI) @@ -21,6 +24,14 @@ class Openmpi(Package): provides('mpi@:2') + + def setup_dependent_environment(self, module, spec, dep_spec): + """For dependencies, make mpicc's use spack wrapper.""" + os.environ['OMPI_CC'] = 'cc' + os.environ['OMPI_CXX'] = 'c++' + os.environ['OMPI_FC'] = 'f90' + + def install(self, spec, prefix): config_args = ["--prefix=%s" % prefix] @@ -39,3 +50,37 @@ def install(self, spec, prefix): configure(*config_args) make() make("install") + + self.filter_compilers() + + + def filter_compilers(self): + """Run after install to make the MPI compilers use the + compilers that Spack built the package with. + + If this isn't done, they'll have CC, CXX, F77, and FC set + to Spack's generic cc, c++, f77, and f90. We want them to + be bound to whatever compiler they were built with. 
+ """ + kwargs = { 'ignore_absent' : True, 'backup' : False, 'string' : False } + dir = os.path.join(self.prefix, 'share/openmpi/') + + cc_wrappers = ['mpicc-vt-wrapper-data.txt', 'mpicc-wrapper-data.txt', + 'ortecc-wrapper-data.txt', 'shmemcc-wrapper-data.txt'] + + cxx_wrappers = ['mpic++-vt-wrapper-data.txt', 'mpic++-wrapper-data.txt'] + + fc_wrappers = ['mpifort-vt-wrapper-data.txt', + 'mpifort-wrapper-data.txt', 'shmemfort-wrapper-data.txt'] + + for wrapper in cc_wrappers: + filter_file('compiler=.*', 'compiler=%s' % self.compiler.cc, + os.path.join(dir, wrapper), **kwargs) + + for wrapper in cxx_wrappers: + filter_file('compiler=.*', 'compiler=%s' % self.compiler.cxx, + os.path.join(dir, wrapper), **kwargs) + + for wrapper in fc_wrappers: + filter_file('compiler=.*', 'compiler=%s' % self.compiler.fc, + os.path.join(dir, wrapper), **kwargs) From 9f496e5efa0559dc1eccf26e20963eaf6755c00f Mon Sep 17 00:00:00 2001 From: David Beckingsale Date: Thu, 22 Oct 2015 09:50:02 -0700 Subject: [PATCH 093/121] Replace compiler in f90 and f77 wrappers if they exist --- var/spack/packages/openmpi/package.py | 25 ++++++++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/var/spack/packages/openmpi/package.py b/var/spack/packages/openmpi/package.py index d2d095979e1..c48d0557e5e 100644 --- a/var/spack/packages/openmpi/package.py +++ b/var/spack/packages/openmpi/package.py @@ -30,6 +30,7 @@ def setup_dependent_environment(self, module, spec, dep_spec): os.environ['OMPI_CC'] = 'cc' os.environ['OMPI_CXX'] = 'c++' os.environ['OMPI_FC'] = 'f90' + os.environ['OMPI_F77'] = 'f77' def install(self, spec, prefix): @@ -58,8 +59,8 @@ def filter_compilers(self): """Run after install to make the MPI compilers use the compilers that Spack built the package with. - If this isn't done, they'll have CC, CXX, F77, and FC set - to Spack's generic cc, c++, f77, and f90. 
We want them to + If this isn't done, they'll have CC, CXX and FC set + to Spack's generic cc, c++ and f90. We want them to be bound to whatever compiler they were built with. """ kwargs = { 'ignore_absent' : True, 'backup' : False, 'string' : False } @@ -68,7 +69,8 @@ def filter_compilers(self): cc_wrappers = ['mpicc-vt-wrapper-data.txt', 'mpicc-wrapper-data.txt', 'ortecc-wrapper-data.txt', 'shmemcc-wrapper-data.txt'] - cxx_wrappers = ['mpic++-vt-wrapper-data.txt', 'mpic++-wrapper-data.txt'] + cxx_wrappers = ['mpic++-vt-wrapper-data.txt', 'mpic++-wrapper-data.txt', + 'ortec++-wrapper-data.txt'] fc_wrappers = ['mpifort-vt-wrapper-data.txt', 'mpifort-wrapper-data.txt', 'shmemfort-wrapper-data.txt'] @@ -84,3 +86,20 @@ def filter_compilers(self): for wrapper in fc_wrappers: filter_file('compiler=.*', 'compiler=%s' % self.compiler.fc, os.path.join(dir, wrapper), **kwargs) + + # These are symlinks in newer versions, so check that here + f77_wrappers = ['mpif77-vt-wrapper-data.txt', 'mpif77-wrapper-data.txt'] + f90_wrappers = ['mpif90-vt-wrapper-data.txt', 'mpif90-wrapper-data.txt'] + + for wrapper in f77_wrappers: + path = os.path.join(dir, wrapper) + if not os.path.islink(path): + filter_file('compiler=.*', 'compiler=%s' % self.compiler.f77, + path, **kwargs) + for wrapper in f90_wrappers: + path = os.path.join(dir, wrapper) + if not os.path.islink(path): + filter_file('compiler=.*', 'compiler=%s' % self.compiler.fc, + path, **kwargs) + + From 246423b4b472ae0d53488c33fbca9faa035402d7 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Thu, 22 Oct 2015 16:00:03 -0700 Subject: [PATCH 094/121] Generate test results (designated as skipped) for parents of failed dependencies --- lib/spack/spack/cmd/test-install.py | 58 +++++++++++++++++++--------- lib/spack/spack/test/unit_install.py | 11 +++--- 2 files changed, 45 insertions(+), 24 deletions(-) diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py index 7b37f66967a..406a6d7d33c 100644 --- 
a/lib/spack/spack/cmd/test-install.py +++ b/lib/spack/spack/cmd/test-install.py @@ -57,22 +57,32 @@ def __init__(self): self.root = ET.Element('testsuite') self.tests = [] - def add_test(self, buildId, passed=True, buildInfo=None): - self.tests.append((buildId, passed, buildInfo)) + def add_test(self, buildId, testResult, buildInfo=None): + self.tests.append((buildId, testResult, buildInfo)) def write_to(self, stream): self.root.set('tests', '{0}'.format(len(self.tests))) - for buildId, passed, buildInfo in self.tests: + for buildId, testResult, buildInfo in self.tests: testcase = ET.SubElement(self.root, 'testcase') testcase.set('classname', buildId.name) testcase.set('name', buildId.stringId()) - if not passed: + if testResult == TestResult.FAILED: failure = ET.SubElement(testcase, 'failure') failure.set('type', "Build Error") failure.text = buildInfo + elif testResult == TestResult.SKIPPED: + skipped = ET.SubElement(testcase, 'skipped') + skipped.set('type', "Skipped Build") + skipped.text = buildInfo ET.ElementTree(self.root).write(stream) +class TestResult(object): + PASSED = 0 + FAILED = 1 + SKIPPED = 2 + + class BuildId(object): def __init__(self, spec): self.name = spec.name @@ -94,6 +104,8 @@ def __eq__(self, other): def fetch_log(path): + if not os.path.exists(path): + return list() with open(path, 'rb') as F: return list(F.readlines()) @@ -105,23 +117,31 @@ def create_test_output(topSpec, newInstalls, output, getLogFunc=fetch_log): if spec not in newInstalls: continue - if not all(spack.db.get(childSpec).installed for childSpec in - spec.dependencies.itervalues()): - #TODO: create a failed test if a dependency didn't install? 
- continue - - bId = BuildId(spec) - + failedDeps = set(childSpec for childSpec in + spec.dependencies.itervalues() if not + spack.db.get(childSpec).installed) package = spack.db.get(spec) - lines = getLogFunc(package.build_log_path) - errMessages = list(line for line in lines if - re.search('error:', line, re.IGNORECASE)) - errOutput = errMessages if errMessages else lines[-10:] - errOutput = '\n'.join(itertools.chain( - [spec.to_yaml(), "Errors:"], errOutput, - ["Build Log:", package.build_log_path])) + if failedDeps: + result = TestResult.SKIPPED + dep = iter(failedDeps).next() + depBID = BuildId(dep) + errOutput = "Skipped due to failed dependency: {0}".format( + depBID.stringId()) + elif not package.installed: + result = TestResult.FAILED + lines = getLogFunc(package.build_log_path) + errMessages = list(line for line in lines if + re.search('error:', line, re.IGNORECASE)) + errOutput = errMessages if errMessages else lines[-10:] + errOutput = '\n'.join(itertools.chain( + [spec.to_yaml(), "Errors:"], errOutput, + ["Build Log:", package.build_log_path])) + else: + result = TestResult.PASSED + errOutput = None - output.add_test(bId, package.installed, errOutput) + bId = BuildId(spec) + output.add_test(bId, result, errOutput) def test_install(parser, args): diff --git a/lib/spack/spack/test/unit_install.py b/lib/spack/spack/test/unit_install.py index ab7d4902d09..2a94f8fec0a 100644 --- a/lib/spack/spack/test/unit_install.py +++ b/lib/spack/spack/test/unit_install.py @@ -27,7 +27,7 @@ import spack test_install = __import__("spack.cmd.test-install", - fromlist=["BuildId", "create_test_output"]) + fromlist=["BuildId", "create_test_output", "TestResult"]) class MockOutput(object): def __init__(self): @@ -75,8 +75,7 @@ class UnitInstallTest(unittest.TestCase): def setUp(self): super(UnitInstallTest, self).setUp() - - #import pdb; pdb.set_trace() + pkgX.installed = False pkgY.installed = False @@ -93,7 +92,9 @@ def test_installing_both(self): pkgY.installed = True 
test_install.create_test_output(specX, [specX, specY], mo, getLogFunc=test_fetch_log) - self.assertEqual(mo.results, {bIdX:True, bIdY:True}) + self.assertEqual(mo.results, + {bIdX:test_install.TestResult.PASSED, + bIdY:test_install.TestResult.PASSED}) def test_dependency_already_installed(self): mo = MockOutput() @@ -102,7 +103,7 @@ def test_dependency_already_installed(self): pkgY.installed = True test_install.create_test_output(specX, [specX], mo, getLogFunc=test_fetch_log) - self.assertEqual(mo.results, {bIdX:True}) + self.assertEqual(mo.results, {bIdX:test_install.TestResult.PASSED}) class MockPackageDb(object): def __init__(self, init=None): From ea872f8098af3525f0d3e9e0d2fd2efa41466e87 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Thu, 22 Oct 2015 17:44:16 -0700 Subject: [PATCH 095/121] 1. Added CommandError exception to build_environment 2. The parent of a failed child process in build_environment.fork no longer calls sys.exit - instead it raises a CommandError (from [1]) 3. test-install command now attempts to install all packages even if one fails --- lib/spack/spack/build_environment.py | 6 ++++- lib/spack/spack/cmd/test-install.py | 35 +++++++++++++++++----------- 2 files changed, 27 insertions(+), 14 deletions(-) diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index a133faa6295..0d179f563bd 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -296,4 +296,8 @@ def child_fun(): # message. Just make the parent exit with an error code. 
pid, returncode = os.waitpid(pid, 0) if returncode != 0: - sys.exit(1) + raise CommandError(returncode) + + +class CommandError(StandardError): + pass diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py index 406a6d7d33c..aeb90ae7336 100644 --- a/lib/spack/spack/cmd/test-install.py +++ b/lib/spack/spack/cmd/test-install.py @@ -32,6 +32,7 @@ from llnl.util.filesystem import * import spack +from spack.build_environment import CommandError import spack.cmd description = "Treat package installations as unit tests and output formatted test results" @@ -107,7 +108,12 @@ def fetch_log(path): if not os.path.exists(path): return list() with open(path, 'rb') as F: - return list(F.readlines()) + return list(line.strip() for line in F.readlines()) + + +def failed_dependencies(spec): + return set(childSpec for childSpec in spec.dependencies.itervalues() if not + spack.db.get(childSpec).installed) def create_test_output(topSpec, newInstalls, output, getLogFunc=fetch_log): @@ -117,9 +123,7 @@ def create_test_output(topSpec, newInstalls, output, getLogFunc=fetch_log): if spec not in newInstalls: continue - failedDeps = set(childSpec for childSpec in - spec.dependencies.itervalues() if not - spack.db.get(childSpec).installed) + failedDeps = failed_dependencies(spec) package = spack.db.get(spec) if failedDeps: result = TestResult.SKIPPED @@ -172,10 +176,13 @@ def test_install(parser, args): else: outputFpath = args.output - try: - for spec in specs: - package = spack.db.get(spec) - if not package.installed: + for spec in topSpec.traverse(order='post'): + # Calling do_install for the top-level package would be sufficient but + # this attempts to keep going if any package fails (other packages which + # are not dependents may succeed) + package = spack.db.get(spec) + if (not failed_dependencies(spec)) and (not package.installed): + try: package.do_install( keep_prefix=False, keep_stage=True, @@ -183,10 +190,12 @@ def test_install(parser, args): 
make_jobs=args.jobs, verbose=True, fake=False) - finally: - jrf = JunitResultFormat() - handled = {} - create_test_output(topSpec, newInstalls, jrf) + except CommandError: + pass + + jrf = JunitResultFormat() + handled = {} + create_test_output(topSpec, newInstalls, jrf) - with open(outputFpath, 'wb') as F: + with open(outputFpath, 'wb') as F: jrf.write_to(F) From e812c35689c4bc202ee33a14ac7680d87746e34a Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 23 Oct 2015 10:11:38 +0200 Subject: [PATCH 096/121] GCC : removed binutils variant --- var/spack/packages/gcc/package.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/var/spack/packages/gcc/package.py b/var/spack/packages/gcc/package.py index 4088b9f9d41..a49a1348aac 100644 --- a/var/spack/packages/gcc/package.py +++ b/var/spack/packages/gcc/package.py @@ -48,12 +48,10 @@ class Gcc(Package): version('4.6.4', 'b407a3d1480c11667f293bfb1f17d1a4') version('4.5.4', '27e459c2566b8209ab064570e1b378f7') - variant('binutils', default=False, description='Add a dependency on binutils') - depends_on("mpfr") depends_on("gmp") depends_on("mpc") # when @4.5: - depends_on("binutils~libiberty", when="+binutils") + depends_on("binutils~libiberty") # Save these until we can do optional deps. 
depends_on("isl", when=DEPENDS_ON_ISL_PREDICATE) @@ -81,13 +79,11 @@ def install(self, spec, prefix): "--with-gnu-as", "--with-quad"] # Binutils - if '+binutils' in spec: - binutils_options = ["--with-stage1-ldflags=%s" % self.rpath_args, - "--with-boot-ldflags=%s" % self.rpath_args, - "--with-ld=%s/bin/ld" % spec['binutils'].prefix, - "--with-as=%s/bin/as" % spec['binutils'].prefix] - options.extend(binutils_options) - + binutils_options = ["--with-stage1-ldflags=%s" % self.rpath_args, + "--with-boot-ldflags=%s" % self.rpath_args, + "--with-ld=%s/bin/ld" % spec['binutils'].prefix, + "--with-as=%s/bin/as" % spec['binutils'].prefix] + options.extend(binutils_options) # Isl if spec.satisfies(Gcc.DEPENDS_ON_ISL_PREDICATE): isl_options = ["--with-isl=%s" % spec['isl'].prefix] From d76c9236236747a2a19a10941b1efd497f0202e0 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Fri, 23 Oct 2015 16:18:06 -0700 Subject: [PATCH 097/121] 1. Rename CommandError -> InstallError 2. InstallError now subclasses SpackError vs. StandardError (so it is now handled by the spack shell script) --- lib/spack/spack/build_environment.py | 8 +++++--- lib/spack/spack/cmd/test-install.py | 4 ++-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 0d179f563bd..620ad5be9e0 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -36,6 +36,7 @@ import spack import spack.compilers as compilers +from spack.error import SpackError from spack.util.executable import Executable, which from spack.util.environment import * @@ -296,8 +297,9 @@ def child_fun(): # message. Just make the parent exit with an error code. pid, returncode = os.waitpid(pid, 0) if returncode != 0: - raise CommandError(returncode) + raise InstallError("Installation process had nonzero exit code." 
+ .format(str(returncode))) -class CommandError(StandardError): - pass +class InstallError(SpackError): + """Raised when a package fails to install""" diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py index aeb90ae7336..a9f9331fcb7 100644 --- a/lib/spack/spack/cmd/test-install.py +++ b/lib/spack/spack/cmd/test-install.py @@ -32,7 +32,7 @@ from llnl.util.filesystem import * import spack -from spack.build_environment import CommandError +from spack.build_environment import InstallError import spack.cmd description = "Treat package installations as unit tests and output formatted test results" @@ -190,7 +190,7 @@ def test_install(parser, args): make_jobs=args.jobs, verbose=True, fake=False) - except CommandError: + except InstallError: pass jrf = JunitResultFormat() From cc0ee3dc29516d6ceb49c4144fa3cb75d0120b0d Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Fri, 23 Oct 2015 20:56:06 -0700 Subject: [PATCH 098/121] The HTML number conversion regex operating against a byte string will only convert individual bytes, so therefore incorrectly converts utf-8 encoded characters. Decoding byte strings to unicode objects results in correct HTML number encodings. 
--- lib/spack/spack/cmd/test-install.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py index a9f9331fcb7..d9165192278 100644 --- a/lib/spack/spack/cmd/test-install.py +++ b/lib/spack/spack/cmd/test-install.py @@ -27,6 +27,7 @@ import itertools import re import os +import codecs import llnl.util.tty as tty from llnl.util.filesystem import * @@ -107,7 +108,7 @@ def __eq__(self, other): def fetch_log(path): if not os.path.exists(path): return list() - with open(path, 'rb') as F: + with codecs.open(path, 'rb', 'utf-8') as F: return list(line.strip() for line in F.readlines()) From 908a93a470cafcb3fad8f7412e438e7f5b939d04 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 24 Oct 2015 19:54:52 -0700 Subject: [PATCH 099/121] Add a multiprocess Barrier class to use for testing parallel code. --- lib/spack/spack/util/multiproc.py | 50 ++++++++++++++++++++++++++++++- 1 file changed, 49 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/util/multiproc.py b/lib/spack/spack/util/multiproc.py index 9e045a090f4..21cd6f543d0 100644 --- a/lib/spack/spack/util/multiproc.py +++ b/lib/spack/spack/util/multiproc.py @@ -27,9 +27,11 @@ than multiprocessing.Pool.apply() can. For example, apply() will fail to pickle functions if they're passed indirectly as parameters. """ -from multiprocessing import Process, Pipe +from multiprocessing import Process, Pipe, Semaphore, Value from itertools import izip +__all__ = ['spawn', 'parmap', 'Barrier'] + def spawn(f): def fun(pipe,x): pipe.send(f(x)) @@ -43,3 +45,49 @@ def parmap(f,X): [p.join() for p in proc] return [p.recv() for (p,c) in pipe] + +class Barrier: + """Simple reusable semaphore barrier. + + Python 2.6 doesn't have multiprocessing barriers so we implement this. + + See http://greenteapress.com/semaphores/downey08semaphores.pdf, p. 41. 
+ """ + def __init__(self, n, timeout=None): + self.n = n + self.to = timeout + self.count = Value('i', 0) + self.mutex = Semaphore(1) + self.turnstile1 = Semaphore(0) + self.turnstile2 = Semaphore(1) + + + def wait(self): + if not self.mutex.acquire(timeout=self.to): + raise BarrierTimeoutError() + self.count.value += 1 + if self.count.value == self.n: + if not self.turnstile2.acquire(timeout=self.to): + raise BarrierTimeoutError() + self.turnstile1.release() + self.mutex.release() + + if not self.turnstile1.acquire(timeout=self.to): + raise BarrierTimeoutError() + self.turnstile1.release() + + if not self.mutex.acquire(timeout=self.to): + raise BarrierTimeoutError() + self.count.value -= 1 + if self.count.value == 0: + if not self.turnstile1.acquire(timeout=self.to): + raise BarrierTimeoutError() + self.turnstile2.release() + self.mutex.release() + + if not self.turnstile2.acquire(timeout=self.to): + raise BarrierTimeoutError() + self.turnstile2.release() + + +class BarrierTimeoutError: pass From ead8ac58c6ecde1b8bd32e9f651483b57c7e3bd5 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 24 Oct 2015 19:55:22 -0700 Subject: [PATCH 100/121] Working Lock class, now uses POSIX fcntl locks, extensive unit test. - llnl.util.lock now uses fcntl.lockf instead of flock - purported to have more NFS compatibility. - Added an extensive test case for locks. - tests acquiring, releasing, upgrading, timeouts, shared, & exclusive cases. 
--- lib/spack/llnl/util/lock.py | 179 ++++++++++----------- lib/spack/spack/test/__init__.py | 3 +- lib/spack/spack/test/lock.py | 264 +++++++++++++++++++++++++++++++ 3 files changed, 356 insertions(+), 90 deletions(-) create mode 100644 lib/spack/spack/test/lock.py diff --git a/lib/spack/llnl/util/lock.py b/lib/spack/llnl/util/lock.py index 3cd02befe53..6e49bf74e6c 100644 --- a/lib/spack/llnl/util/lock.py +++ b/lib/spack/llnl/util/lock.py @@ -1,5 +1,5 @@ ############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. @@ -22,134 +22,135 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -"""Lock implementation for shared filesystems.""" import os import fcntl import errno import time import socket -# Default timeout for locks. -DEFAULT_TIMEOUT = 60 +# Default timeout in seconds, after which locks will raise exceptions. +_default_timeout = 60 -class _ReadLockContext(object): - """Context manager that takes and releases a read lock. - - Arguments are lock and timeout (default 5 minutes) - """ - def __init__(self, lock, timeout=DEFAULT_TIMEOUT): - self._lock = lock - self._timeout = timeout - - def __enter__(self): - self._lock.acquire_read(self._timeout) - - def __exit__(self,type,value,traceback): - self._lock.release_read() - - -class _WriteLockContext(object): - """Context manager that takes and releases a write lock. 
- - Arguments are lock and timeout (default 5 minutes) - """ - def __init__(self, lock, timeout=DEFAULT_TIMEOUT): - self._lock = lock - self._timeout = timeout - - def __enter__(self): - self._lock.acquire_write(self._timeout) - - def __exit__(self,type,value,traceback): - self._lock.release_write() +# Sleep time per iteration in spin loop (in seconds) +_sleep_time = 1e-5 class Lock(object): - """Distributed file-based lock using ``flock``.""" - - def __init__(self, file_path): + def __init__(self,file_path): self._file_path = file_path - self._fd = os.open(file_path,os.O_RDWR) + self._fd = None self._reads = 0 self._writes = 0 - def write_lock(self, timeout=DEFAULT_TIMEOUT): - """Convenience method that returns a write lock context.""" - return _WriteLockContext(self, timeout) + def _lock(self, op, timeout): + """This takes a lock using POSIX locks (``fcntl.lockf``). + The lock is implemented as a spin lock using a nonblocking + call to lockf(). - def read_lock(self, timeout=DEFAULT_TIMEOUT): - """Convenience method that returns a read lock context.""" - return _ReadLockContext(self, timeout) + On acquiring an exclusive lock, the lock writes this process's + pid and host to the lock file, in case the holding process + needs to be killed later. - - def acquire_read(self, timeout): + If the lock times out, it raises a ``LockError``. """ - Implements recursive lock. 
If held in both read and write mode, - the write lock will be maintained until all locks are released + start_time = time.time() + while (time.time() - start_time) < timeout: + try: + if self._fd is None: + self._fd = os.open(self._file_path, os.O_RDWR) + + fcntl.lockf(self._fd, op | fcntl.LOCK_NB) + if op == fcntl.LOCK_EX: + os.write(self._fd, "pid=%s,host=%s" % (os.getpid(), socket.getfqdn())) + return + + except IOError as error: + if error.errno == errno.EAGAIN or error.errno == errno.EACCES: + pass + else: + raise + time.sleep(_sleep_time) + + raise LockError("Timed out waiting for lock.") + + + def _unlock(self): + """Releases a lock using POSIX locks (``fcntl.lockf``) + + Releases the lock regardless of mode. Note that read locks may + be masquerading as write locks, but this removes either. + + """ + fcntl.lockf(self._fd,fcntl.LOCK_UN) + os.close(self._fd) + self._fd = None + + + def acquire_read(self, timeout=_default_timeout): + """Acquires a recursive, shared lock for reading. + + Read and write locks can be acquired and released in arbitrary + order, but the POSIX lock is held until all local read and + write locks are released. + """ if self._reads == 0 and self._writes == 0: self._lock(fcntl.LOCK_SH, timeout) self._reads += 1 - def acquire_write(self, timeout): - """ - Implements recursive lock + def acquire_write(self, timeout=_default_timeout): + """Acquires a recursive, exclusive lock for writing. + + Read and write locks can be acquired and released in arbitrary + order, but the POSIX lock is held until all local read and + write locks are released. 
""" if self._writes == 0: self._lock(fcntl.LOCK_EX, timeout) self._writes += 1 - def _lock(self, op, timeout): - """ - The timeout is implemented using nonblocking flock() - to avoid using signals for timing - Write locks store pid and host information to the lock file - Read locks do not store data - """ - total_time = 0 - while total_time < timeout: - try: - fcntl.flock(self._fd, op | fcntl.LOCK_NB) - if op == fcntl.LOCK_EX: - with open(self._file_path, 'w') as f: - f.write("pid = " + str(os.getpid()) + ", host = " + socket.getfqdn()) - return - except IOError as error: - if error.errno == errno.EAGAIN or error.errno == EACCES: - pass - else: - raise - time.sleep(0.1) - total_time += 0.1 - - def release_read(self): - """ - Assert there is a lock of the right type to release, recursive lock + """Releases a read lock. + + Returns True if the last recursive lock was released, False if + there are still outstanding locks. + + Does limited correctness checking: if a read lock is released + when none are held, this will raise an assertion error. + """ assert self._reads > 0 - if self._reads == 1 and self._writes == 0: - self._unlock() + self._reads -= 1 + if self._reads == 0 and self._writes == 0: + self._unlock() + return True + return False def release_write(self): - """ - Assert there is a lock of the right type to release, recursive lock + """Releases a write lock. + + Returns True if the last recursive lock was released, False if + there are still outstanding locks. + + Does limited correctness checking: if a read lock is released + when none are held, this will raise an assertion error. + """ assert self._writes > 0 - if self._writes == 1 and self._reads == 0: - self._unlock() + self._writes -= 1 + if self._writes == 0 and self._reads == 0: + self._unlock() + return True + return False - def _unlock(self): - """ - Releases the lock regardless of mode. Note that read locks may be - masquerading as write locks at times, but this removes either. 
- """ - fcntl.flock(self._fd, fcntl.LOCK_UN) +class LockError(Exception): + """Raised when an attempt to acquire a lock times out.""" + pass diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index c3b39b76f8a..84419781e2f 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -57,6 +57,7 @@ 'optional_deps', 'make_executable', 'configure_guess', + 'lock', 'database'] @@ -77,7 +78,7 @@ def run(names, verbose=False): if test not in test_names: tty.error("%s is not a valid spack test name." % test, "Valid names are:") - colify(test_names, indent=4) + colify(sorted(test_names), indent=4) sys.exit(1) runner = unittest.TextTestRunner(verbosity=verbosity) diff --git a/lib/spack/spack/test/lock.py b/lib/spack/spack/test/lock.py new file mode 100644 index 00000000000..2e7440bbbc3 --- /dev/null +++ b/lib/spack/spack/test/lock.py @@ -0,0 +1,264 @@ +############################################################################## +# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +""" +These tests ensure that our lock works correctly. +""" +import unittest +import os +import tempfile +import shutil +from multiprocessing import Process + +from llnl.util.lock import * +from llnl.util.filesystem import join_path, touch + +from spack.util.multiproc import Barrier + +# This is the longest a failed test will take, as the barriers will +# time out and raise an exception. +barrier_timeout = 5 + + +def order_processes(*functions): + """Order some processes using simple barrier synchronization.""" + b = Barrier(len(functions), timeout=barrier_timeout) + procs = [Process(target=f, args=(b,)) for f in functions] + for p in procs: p.start() + for p in procs: p.join() + + +class LockTest(unittest.TestCase): + + def setUp(self): + self.tempdir = tempfile.mkdtemp() + self.lock_path = join_path(self.tempdir, 'lockfile') + touch(self.lock_path) + + + def tearDown(self): + shutil.rmtree(self.tempdir, ignore_errors=True) + + + # + # Process snippets below can be composed into tests. + # + def acquire_write(self, barrier): + lock = Lock(self.lock_path) + lock.acquire_write() # grab exclusive lock + barrier.wait() + barrier.wait() # hold the lock until exception raises in other procs. + + def acquire_read(self, barrier): + lock = Lock(self.lock_path) + lock.acquire_read() # grab shared lock + barrier.wait() + barrier.wait() # hold the lock until exception raises in other procs. 
+ + def timeout_write(self, barrier): + lock = Lock(self.lock_path) + barrier.wait() # wait for lock acquire in first process + self.assertRaises(LockError, lock.acquire_write, 0.1) + barrier.wait() + + def timeout_read(self, barrier): + lock = Lock(self.lock_path) + barrier.wait() # wait for lock acquire in first process + self.assertRaises(LockError, lock.acquire_read, 0.1) + barrier.wait() + + + # + # Test that exclusive locks on other processes time out when an + # exclusive lock is held. + # + def test_write_lock_timeout_on_write(self): + order_processes(self.acquire_write, self.timeout_write) + + def test_write_lock_timeout_on_write_2(self): + order_processes(self.acquire_write, self.timeout_write, self.timeout_write) + + def test_write_lock_timeout_on_write_3(self): + order_processes(self.acquire_write, self.timeout_write, self.timeout_write, self.timeout_write) + + + # + # Test that shared locks on other processes time out when an + # exclusive lock is held. + # + def test_read_lock_timeout_on_write(self): + order_processes(self.acquire_write, self.timeout_read) + + def test_read_lock_timeout_on_write_2(self): + order_processes(self.acquire_write, self.timeout_read, self.timeout_read) + + def test_read_lock_timeout_on_write_3(self): + order_processes(self.acquire_write, self.timeout_read, self.timeout_read, self.timeout_read) + + + # + # Test that exclusive locks time out when shared locks are held. + # + def test_write_lock_timeout_on_read(self): + order_processes(self.acquire_read, self.timeout_write) + + def test_write_lock_timeout_on_read_2(self): + order_processes(self.acquire_read, self.timeout_write, self.timeout_write) + + def test_write_lock_timeout_on_read_3(self): + order_processes(self.acquire_read, self.timeout_write, self.timeout_write, self.timeout_write) + + + # + # Test that exclusive locks time while lots of shared locks are held. 
+ # + def test_write_lock_timeout_with_multiple_readers_2_1(self): + order_processes(self.acquire_read, self.acquire_read, self.timeout_write) + + def test_write_lock_timeout_with_multiple_readers_2_2(self): + order_processes(self.acquire_read, self.acquire_read, self.timeout_write, self.timeout_write) + + def test_write_lock_timeout_with_multiple_readers_3_1(self): + order_processes(self.acquire_read, self.acquire_read, self.acquire_read, self.timeout_write) + + def test_write_lock_timeout_with_multiple_readers_3_2(self): + order_processes(self.acquire_read, self.acquire_read, self.acquire_read, self.timeout_write, self.timeout_write) + + + # + # Longer test case that ensures locks are reusable. Ordering is + # enforced by barriers throughout -- steps are shown with numbers. + # + def test_complex_acquire_and_release_chain(self): + def p1(barrier): + lock = Lock(self.lock_path) + + lock.acquire_write() + barrier.wait() # ---------------------------------------- 1 + # others test timeout + barrier.wait() # ---------------------------------------- 2 + lock.release_write() # release and others acquire read + barrier.wait() # ---------------------------------------- 3 + self.assertRaises(LockError, lock.acquire_write, 0.1) + lock.acquire_read() + barrier.wait() # ---------------------------------------- 4 + lock.release_read() + barrier.wait() # ---------------------------------------- 5 + + # p2 upgrades read to write + barrier.wait() # ---------------------------------------- 6 + self.assertRaises(LockError, lock.acquire_write, 0.1) + self.assertRaises(LockError, lock.acquire_read, 0.1) + barrier.wait() # ---------------------------------------- 7 + # p2 releases write and read + barrier.wait() # ---------------------------------------- 8 + + # p3 acquires read + barrier.wait() # ---------------------------------------- 9 + # p3 upgrades read to write + barrier.wait() # ---------------------------------------- 10 + self.assertRaises(LockError, lock.acquire_write, 
0.1) + self.assertRaises(LockError, lock.acquire_read, 0.1) + barrier.wait() # ---------------------------------------- 11 + # p3 releases locks + barrier.wait() # ---------------------------------------- 12 + lock.acquire_read() + barrier.wait() # ---------------------------------------- 13 + lock.release_read() + + + def p2(barrier): + lock = Lock(self.lock_path) + + # p1 acquires write + barrier.wait() # ---------------------------------------- 1 + self.assertRaises(LockError, lock.acquire_write, 0.1) + self.assertRaises(LockError, lock.acquire_read, 0.1) + barrier.wait() # ---------------------------------------- 2 + lock.acquire_read() + barrier.wait() # ---------------------------------------- 3 + # p1 tests shared read + barrier.wait() # ---------------------------------------- 4 + # others release reads + barrier.wait() # ---------------------------------------- 5 + + lock.acquire_write() # upgrade read to write + barrier.wait() # ---------------------------------------- 6 + # others test timeout + barrier.wait() # ---------------------------------------- 7 + lock.release_write() # release read AND write (need both) + lock.release_read() + barrier.wait() # ---------------------------------------- 8 + + # p3 acquires read + barrier.wait() # ---------------------------------------- 9 + # p3 upgrades read to write + barrier.wait() # ---------------------------------------- 10 + self.assertRaises(LockError, lock.acquire_write, 0.1) + self.assertRaises(LockError, lock.acquire_read, 0.1) + barrier.wait() # ---------------------------------------- 11 + # p3 releases locks + barrier.wait() # ---------------------------------------- 12 + lock.acquire_read() + barrier.wait() # ---------------------------------------- 13 + lock.release_read() + + + def p3(barrier): + lock = Lock(self.lock_path) + + # p1 acquires write + barrier.wait() # ---------------------------------------- 1 + self.assertRaises(LockError, lock.acquire_write, 0.1) + self.assertRaises(LockError, 
lock.acquire_read, 0.1) + barrier.wait() # ---------------------------------------- 2 + lock.acquire_read() + barrier.wait() # ---------------------------------------- 3 + # p1 tests shared read + barrier.wait() # ---------------------------------------- 4 + lock.release_read() + barrier.wait() # ---------------------------------------- 5 + + # p2 upgrades read to write + barrier.wait() # ---------------------------------------- 6 + self.assertRaises(LockError, lock.acquire_write, 0.1) + self.assertRaises(LockError, lock.acquire_read, 0.1) + barrier.wait() # ---------------------------------------- 7 + # p2 releases write & read + barrier.wait() # ---------------------------------------- 8 + + lock.acquire_read() + barrier.wait() # ---------------------------------------- 9 + lock.acquire_write() + barrier.wait() # ---------------------------------------- 10 + # others test timeout + barrier.wait() # ---------------------------------------- 11 + lock.release_read() # release read AND write in opposite + lock.release_write() # order from before on p2 + barrier.wait() # ---------------------------------------- 12 + lock.acquire_read() + barrier.wait() # ---------------------------------------- 13 + lock.release_read() + + order_processes(p1, p2, p3) From 6a16040462b49acb356236a5f8114b055d680851 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Mon, 26 Oct 2015 11:58:52 -0700 Subject: [PATCH 101/121] Automatically create a 'test-output' directory in the current directory if no output path is specified. Test output files are placed in this directory. Furthermore the filenames now have the prefix "test" (but otherwise are the string representation of the spec ID as before). 
--- lib/spack/spack/cmd/test-install.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py index d9165192278..76602474a41 100644 --- a/lib/spack/spack/cmd/test-install.py +++ b/lib/spack/spack/cmd/test-install.py @@ -173,7 +173,10 @@ def test_install(parser, args): if not args.output: bId = BuildId(topSpec) - outputFpath = join_path(os.getcwd(), "{0}.xml".format(bId.stringId())) + outputDir = join_path(os.getcwd(), "test-output") + if not os.path.exists(outputDir): + os.mkdir(outputDir) + outputFpath = join_path(outputDir, "test-{0}.xml".format(bId.stringId())) else: outputFpath = args.output From 9576860f8c97360eddaffe316450ed2f3b87e876 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Mon, 26 Oct 2015 14:27:44 -0700 Subject: [PATCH 102/121] Making SpackError reference consistent. --- lib/spack/spack/build_environment.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 620ad5be9e0..dac25d99401 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -36,7 +36,6 @@ import spack import spack.compilers as compilers -from spack.error import SpackError from spack.util.executable import Executable, which from spack.util.environment import * @@ -301,5 +300,5 @@ def child_fun(): .format(str(returncode))) -class InstallError(SpackError): +class InstallError(spack.error.SpackError): """Raised when a package fails to install""" From cd2a23384f92fa075c8a40382d3dc8c3dc516f86 Mon Sep 17 00:00:00 2001 From: Paul Dapolito Date: Mon, 26 Oct 2015 14:28:56 -0700 Subject: [PATCH 103/121] Added v5.2.2 for xz library --- var/spack/packages/xz/package.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/var/spack/packages/xz/package.py b/var/spack/packages/xz/package.py index 88c5793018c..ba6c9733a7e 100644 --- 
a/var/spack/packages/xz/package.py +++ b/var/spack/packages/xz/package.py @@ -8,9 +8,13 @@ class Xz(Package): homepage = "http://tukaani.org/xz/" url = "http://tukaani.org/xz/xz-5.2.0.tar.bz2" - version('5.2.0', '867cc8611760240ebf3440bd6e170bb9') - + version('5.2.0', '867cc8611760240ebf3440bd6e170bb9', + url = 'http://tukaani.org/xz/xz-5.2.0.tar.bz2') + version('5.2.2', 'f90c9a0c8b259aee2234c4e0d7fd70af', + url = 'http://tukaani.org/xz/xz-5.2.2.tar.bz2') + def install(self, spec, prefix): configure("--prefix=%s" % prefix) make() make("install") + From 9d90cb69622dbc6a22eaf2fbf796dd27a3e2def2 Mon Sep 17 00:00:00 2001 From: Ben Boeckel Date: Thu, 15 Oct 2015 09:18:16 -0400 Subject: [PATCH 104/121] python: use the setdefault method on dict It allows more concise code and skips some key lookups. --- lib/spack/llnl/util/lang.py | 5 +---- lib/spack/spack/directives.py | 10 ++++------ lib/spack/spack/virtual.py | 8 ++------ 3 files changed, 7 insertions(+), 16 deletions(-) diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py index 9e1bef18ca0..156ee34c9ef 100644 --- a/lib/spack/llnl/util/lang.py +++ b/lib/spack/llnl/util/lang.py @@ -87,10 +87,7 @@ def index_by(objects, *funcs): result = {} for o in objects: key = f(o) - if key not in result: - result[key] = [o] - else: - result[key].append(o) + result.setdefault(key, []).append(o) for key, objects in result.items(): result[key] = index_by(objects, *funcs[1:]) diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py index 9297d6dac36..78039ac6f95 100644 --- a/lib/spack/spack/directives.py +++ b/lib/spack/spack/directives.py @@ -239,12 +239,10 @@ def patch(pkg, url_or_filename, level=1, when=None): when = pkg.name when_spec = parse_anonymous_spec(when, pkg.name) - if when_spec not in pkg.patches: - pkg.patches[when_spec] = [Patch(pkg.name, url_or_filename, level)] - else: - # if this spec is identical to some other, then append this - # patch to the existing list. 
- pkg.patches[when_spec].append(Patch(pkg.name, url_or_filename, level)) + cur_patches = pkg.patches.setdefault(when_spec, []) + # if this spec is identical to some other, then append this + # patch to the existing list. + cur_patches.append(Patch(pkg.name, url_or_filename, level)) @directive('variants') diff --git a/lib/spack/spack/virtual.py b/lib/spack/spack/virtual.py index fa070e6bd5f..c77b259d611 100644 --- a/lib/spack/spack/virtual.py +++ b/lib/spack/spack/virtual.py @@ -73,10 +73,8 @@ def update(self, spec): for provided_spec, provider_spec in pkg.provided.iteritems(): if provider_spec.satisfies(spec, deps=False): provided_name = provided_spec.name - if provided_name not in self.providers: - self.providers[provided_name] = {} - provider_map = self.providers[provided_name] + provider_map = self.providers.setdefault(provided_name, {}) if not provided_spec in provider_map: provider_map[provided_spec] = set() @@ -133,9 +131,7 @@ def _cross_provider_maps(self, lmap, rmap): if lp_spec.name == rp_spec.name: try: const = lp_spec.copy().constrain(rp_spec,deps=False) - if constrained not in result: - result[constrained] = set() - result[constrained].add(const) + result.setdefault(constrained, set()).add(const) except spack.spec.UnsatisfiableSpecError: continue return result From 3b554c709bb1dc3704939964ac1265ccf8597718 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Mon, 26 Oct 2015 15:26:08 -0700 Subject: [PATCH 105/121] Fetch errors were also terminating runs of test-install with system exit, so stage.fetch() was updated to raise a FetchError instead of calling tty.die(). Output is the same for spack install in case of a fetch error. 
--- lib/spack/spack/cmd/test-install.py | 6 ++++++ lib/spack/spack/stage.py | 3 ++- lib/spack/spack/test/unit_install.py | 2 ++ 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py index 76602474a41..58ab40aa7b0 100644 --- a/lib/spack/spack/cmd/test-install.py +++ b/lib/spack/spack/cmd/test-install.py @@ -34,6 +34,7 @@ import spack from spack.build_environment import InstallError +from spack.fetch_strategy import FetchError import spack.cmd description = "Treat package installations as unit tests and output formatted test results" @@ -132,6 +133,9 @@ def create_test_output(topSpec, newInstalls, output, getLogFunc=fetch_log): depBID = BuildId(dep) errOutput = "Skipped due to failed dependency: {0}".format( depBID.stringId()) + elif (not package.installed) and (not package.stage.archive_file): + result = TestResult.FAILED + errOutput = "Failure to fetch package resources." elif not package.installed: result = TestResult.FAILED lines = getLogFunc(package.build_log_path) @@ -196,6 +200,8 @@ def test_install(parser, args): fake=False) except InstallError: pass + except FetchError: + pass jrf = JunitResultFormat() handled = {} diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 008c5f04298..78930ecb5b2 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -261,7 +261,8 @@ def fetch(self): tty.debug(e) continue else: - tty.die("All fetchers failed for %s" % self.name) + errMessage = "All fetchers failed for %s" % self.name + raise fs.FetchError(errMessage, None) def check(self): diff --git a/lib/spack/spack/test/unit_install.py b/lib/spack/spack/test/unit_install.py index 2a94f8fec0a..c4b9092f051 100644 --- a/lib/spack/spack/test/unit_install.py +++ b/lib/spack/spack/test/unit_install.py @@ -105,6 +105,8 @@ def test_dependency_already_installed(self): self.assertEqual(mo.results, {bIdX:test_install.TestResult.PASSED}) + #TODO: add test(s) where Y fails 
to install + class MockPackageDb(object): def __init__(self, init=None): self.specToPkg = {} From 17a58ee0a95ac6b15cec3731f93262a6378e7d6a Mon Sep 17 00:00:00 2001 From: Ben Boeckel Date: Mon, 26 Oct 2015 18:31:25 -0400 Subject: [PATCH 106/121] architecture: use uname if available --- lib/spack/spack/architecture.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py index 0c4b605e910..e0d42c2077d 100644 --- a/lib/spack/spack/architecture.py +++ b/lib/spack/spack/architecture.py @@ -24,6 +24,7 @@ ############################################################################## import os import platform as py_platform +import subprocess from llnl.util.lang import memoized @@ -69,12 +70,24 @@ def get_mac_sys_type(): Version(mac_ver).up_to(2), py_platform.machine()) +def get_sys_type_from_uname(): + """Return the architecture from uname.""" + try: + arch_proc = subprocess.Popen(['uname', '-i'], + stdout=subprocess.PIPE) + arch, _ = arch_proc.communicate() + return arch.strip() + except: + return None + + @memoized def sys_type(): """Returns a SysType for the current machine.""" methods = [get_sys_type_from_spack_globals, get_sys_type_from_environment, - get_mac_sys_type] + get_mac_sys_type, + get_sys_type_from_uname] # search for a method that doesn't return None sys_type = None From 6c9b10f73dc2c5bf2281d6434d3aaa5cf2ded056 Mon Sep 17 00:00:00 2001 From: Ben Boeckel Date: Mon, 26 Oct 2015 18:54:13 -0400 Subject: [PATCH 107/121] architecture: remove custom mac_type method --- lib/spack/spack/architecture.py | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py index e0d42c2077d..05ac5d6f35f 100644 --- a/lib/spack/spack/architecture.py +++ b/lib/spack/spack/architecture.py @@ -23,14 +23,12 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA 
############################################################################## import os -import platform as py_platform import subprocess from llnl.util.lang import memoized import spack import spack.error as serr -from spack.version import Version class InvalidSysTypeError(serr.SpackError): @@ -60,16 +58,6 @@ def get_sys_type_from_environment(): return os.environ.get('SYS_TYPE') -def get_mac_sys_type(): - """Return a Mac OS SYS_TYPE or None if this isn't a mac.""" - mac_ver = py_platform.mac_ver()[0] - if not mac_ver: - return None - - return "macosx_%s_%s" % ( - Version(mac_ver).up_to(2), py_platform.machine()) - - def get_sys_type_from_uname(): """Return the architecture from uname.""" try: @@ -86,7 +74,6 @@ def sys_type(): """Returns a SysType for the current machine.""" methods = [get_sys_type_from_spack_globals, get_sys_type_from_environment, - get_mac_sys_type, get_sys_type_from_uname] # search for a method that doesn't return None From 8b22a4f4f883b6e79360d3ec487f2e97279126a0 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Mon, 26 Oct 2015 15:45:23 +0100 Subject: [PATCH 108/121] llvm : updated versions --- var/spack/packages/llvm/package.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/var/spack/packages/llvm/package.py b/var/spack/packages/llvm/package.py index 9d2be690bb1..a1939aadacc 100644 --- a/var/spack/packages/llvm/package.py +++ b/var/spack/packages/llvm/package.py @@ -24,6 +24,7 @@ ############################################################################## from spack import * + class Llvm(Package): """The LLVM Project is a collection of modular and reusable compiler and toolchain technologies. Despite its name, LLVM has little to do with @@ -31,14 +32,12 @@ class Llvm(Package): that can be used to build them. The name "LLVM" itself is not an acronym; it is the full name of the project. 
""" - homepage = "http://llvm.org/" - list_url = "http://llvm.org/releases/download.html" + homepage = 'http://llvm.org/' + url = 'http://llvm.org/releases/3.7.0/llvm-3.7.0.src.tar.xz' + version('3.7.0', 'b98b9495e5655a672d6cb83e1a180f8e', url='http://llvm.org/releases/3.7.0/llvm-3.7.0.src.tar.xz') + version('3.6.2', '0c1ee3597d75280dee603bae9cbf5cc2', url='http://llvm.org/releases/3.6.2/llvm-3.6.2.src.tar.xz') version('3.5.1', '2d3d8004f38852aa679e5945b8ce0b14', url='http://llvm.org/releases/3.5.1/llvm-3.5.1.src.tar.xz') - version('3.4.2', 'a20669f75967440de949ac3b1bad439c', url='http://llvm.org/releases/3.4.2/llvm-3.4.2.src.tar.gz') - version('3.0', 'a8e5f5f1c1adebae7b4a654c376a6005', url='http://llvm.org/releases/3.0/llvm-3.0.tar.gz') - version('2.9', '793138412d2af2c7c7f54615f8943771', url='http://llvm.org/releases/2.9/llvm-2.9.tgz') - version('2.8', '220d361b4d17051ff4bb21c64abe05ba', url='http://llvm.org/releases/2.8/llvm-2.8.tgz') def install(self, spec, prefix): env['CXXFLAGS'] = self.compiler.cxx11_flag From 3913c10afb12a24ce48b65584de0663cfa66794a Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Mon, 26 Oct 2015 16:58:40 +0100 Subject: [PATCH 109/121] clang : updated versions and dependencies llvm : added dependency on 'python@2.7:' for better portability --- var/spack/packages/clang/package.py | 13 +++++++++---- var/spack/packages/llvm/package.py | 6 +++--- 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/var/spack/packages/clang/package.py b/var/spack/packages/clang/package.py index 4f10385dbda..4f977bf9a44 100644 --- a/var/spack/packages/clang/package.py +++ b/var/spack/packages/clang/package.py @@ -28,11 +28,16 @@ class Clang(Package): """The goal of the Clang project is to create a new C, C++, Objective C and Objective C++ front-end for the LLVM compiler. 
""" - homepage = "http://clang.llvm.org" - list_url = "http://llvm.org/releases/download.html" + homepage = 'http://clang.llvm.org' + url = 'http://llvm.org/releases/3.7.0/cfe-3.7.0.src.tar.xz' - depends_on("llvm") - version('3.4.2', '87945973b7c73038871c5f849a818588', url='http://llvm.org/releases/3.4.2/cfe-3.4.2.src.tar.xz') + depends_on('llvm@3.7.0', when='@3.7.0') + depends_on('llvm@3.6.2', when='@3.6.2') + depends_on('llvm@3.5.1', when='@3.5.1') + + version('3.7.0', '8f9d27335e7331cf0a4711e952f21f01', url='http://llvm.org/releases/3.7.0/cfe-3.7.0.src.tar.xz') + version('3.6.2', 'ff862793682f714bb7862325b9c06e20', url='http://llvm.org/releases/3.6.2/cfe-3.6.2.src.tar.xz') + version('3.5.1', '93f9532f8f7e6f1d8e5c1116907051cb', url='http://llvm.org/releases/3.5.1/cfe-3.5.1.src.tar.xz') def install(self, spec, prefix): env['CXXFLAGS'] = self.compiler.cxx11_flag diff --git a/var/spack/packages/llvm/package.py b/var/spack/packages/llvm/package.py index a1939aadacc..a6759c3033e 100644 --- a/var/spack/packages/llvm/package.py +++ b/var/spack/packages/llvm/package.py @@ -39,15 +39,15 @@ class Llvm(Package): version('3.6.2', '0c1ee3597d75280dee603bae9cbf5cc2', url='http://llvm.org/releases/3.6.2/llvm-3.6.2.src.tar.xz') version('3.5.1', '2d3d8004f38852aa679e5945b8ce0b14', url='http://llvm.org/releases/3.5.1/llvm-3.5.1.src.tar.xz') + depends_on('python@2.7:') + def install(self, spec, prefix): env['CXXFLAGS'] = self.compiler.cxx11_flag with working_dir('spack-build', create=True): cmake('..', '-DLLVM_REQUIRES_RTTI=1', - '-DPYTHON_EXECUTABLE=/usr/bin/python', - '-DPYTHON_INCLUDE_DIR=/usr/include/python2.6', - '-DPYTHON_LIBRARY=/usr/lib64/libpython2.6.so', + '-DPYTHON_EXECUTABLE=%s/bin/python' % spec['python'].prefix, *std_cmake_args) make() make("install") From af7b96c14a555805a50f13a1fa1343650db184ab Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 27 Oct 2015 00:35:06 -0700 Subject: [PATCH 110/121] Lock acquires return True/False depending on whether they got POSIX 
lock. --- lib/spack/llnl/util/lock.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/lib/spack/llnl/util/lock.py b/lib/spack/llnl/util/lock.py index 6e49bf74e6c..dcca37687e7 100644 --- a/lib/spack/llnl/util/lock.py +++ b/lib/spack/llnl/util/lock.py @@ -95,10 +95,15 @@ def acquire_read(self, timeout=_default_timeout): order, but the POSIX lock is held until all local read and write locks are released. + Returns True if it is the first acquire and actually acquires + the POSIX lock, False if it is a nested transaction. + """ - if self._reads == 0 and self._writes == 0: - self._lock(fcntl.LOCK_SH, timeout) self._reads += 1 + if self._reads == 1 and self._writes == 0: + self._lock(fcntl.LOCK_SH, timeout) + return True + return False def acquire_write(self, timeout=_default_timeout): @@ -107,10 +112,16 @@ def acquire_write(self, timeout=_default_timeout): Read and write locks can be acquired and released in arbitrary order, but the POSIX lock is held until all local read and write locks are released. + + Returns True if it is the first acquire and actually acquires + the POSIX lock, False if it is a nested transaction. 
+ """ - if self._writes == 0: - self._lock(fcntl.LOCK_EX, timeout) self._writes += 1 + if self._writes == 1: + self._lock(fcntl.LOCK_EX, timeout) + return True + return False def release_read(self): From e8f5a85ffd039c3c63045c0e06a41b98d98008d3 Mon Sep 17 00:00:00 2001 From: Ben Boeckel Date: Tue, 27 Oct 2015 13:50:59 -0400 Subject: [PATCH 111/121] ncurses: update package --- var/spack/packages/ncurses/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/packages/ncurses/package.py b/var/spack/packages/ncurses/package.py index 8f5763bfdd2..cc180bbae1b 100644 --- a/var/spack/packages/ncurses/package.py +++ b/var/spack/packages/ncurses/package.py @@ -11,6 +11,8 @@ class Ncurses(Package): version('5.9', '8cb9c412e5f2d96bc6f459aa8c6282a1', url='http://ftp.gnu.org/pub/gnu/ncurses/ncurses-5.9.tar.gz') + version('6.0', 'ee13d052e1ead260d7c28071f46eefb1', + url='http://ftp.gnu.org/pub/gnu/ncurses/ncurses-6.0.tar.gz') def install(self, spec, prefix): configure("--prefix=%s" % prefix, From b88da83a121fd9fa1a29494d3c15e4c7848fc14e Mon Sep 17 00:00:00 2001 From: Ben Boeckel Date: Tue, 27 Oct 2015 13:51:23 -0400 Subject: [PATCH 112/121] dbus: disable systemd Not necessary in spack. Also forcefully installs outside of the prefix. 
--- var/spack/packages/dbus/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/packages/dbus/package.py b/var/spack/packages/dbus/package.py index f7f394498ce..f7c302d611a 100644 --- a/var/spack/packages/dbus/package.py +++ b/var/spack/packages/dbus/package.py @@ -20,7 +20,9 @@ class Dbus(Package): version('1.8.2', 'd6f709bbec0a022a1847c7caec9d6068') def install(self, spec, prefix): - configure("--prefix=%s" % prefix) + configure( + "--prefix=%s" % prefix, + "--disable-systemd") make() make("install") From ddbfc403034c1ed98590088889687ff23f222aab Mon Sep 17 00:00:00 2001 From: Ben Boeckel Date: Tue, 27 Oct 2015 09:59:47 -0400 Subject: [PATCH 113/121] paraview: add package A first attempt; builds successfully. Some dependencies have hard-coded dependencies where they shouldn't (e.g., hdf5 -> mpi, libxml -> python), but that's a different rabbit hole. --- var/spack/packages/paraview/package.py | 72 ++++++++++++++++++++++++++ 1 file changed, 72 insertions(+) create mode 100644 var/spack/packages/paraview/package.py diff --git a/var/spack/packages/paraview/package.py b/var/spack/packages/paraview/package.py new file mode 100644 index 00000000000..a0ff812ca23 --- /dev/null +++ b/var/spack/packages/paraview/package.py @@ -0,0 +1,72 @@ +from spack import * + +class Paraview(Package): + homepage = 'http://www.paraview.org' + url = 'http://www.paraview.org/files/v4.4/ParaView-v4.4.0-source.tar.gz' + + version('4.4.0', 'fa1569857dd680ebb4d7ff89c2227378', url='http://www.paraview.org/files/v4.4/ParaView-v4.4.0-source.tar.gz') + + variant('python', default=False, description='Enable Python support') + variant('matplotlib', default=False, description='Enable Matplotlib support') + variant('numpy', default=False, description='Enable NumPy support') + + variant('tcl', default=False, description='Enable TCL support') + + variant('mpi', default=False, description='Enable MPI support') + + variant('osmesa', default=False, description='Enable 
OSMesa support') + variant('qt', default=False, description='Enable Qt support') + + depends_on('python', when='+python') + depends_on('py-numpy', when='+python+numpy') + depends_on('py-matplotlib', when='+python+matplotlib') + depends_on('tcl', when='+tcl') + depends_on('mpi', when='+mpi') + depends_on('qt', when='+qt') + + depends_on('bzip2') + depends_on('freetype') + depends_on('hdf5') # drags in mpi + depends_on('jpeg') + depends_on('libpng') + depends_on('libtiff') + #depends_on('libxml2') # drags in python + depends_on('netcdf') + #depends_on('protobuf') # version mismatches? + #depends_on('sqlite') # external version not supported + depends_on('zlib') + + def install(self, spec, prefix): + with working_dir('spack-build', create=True): + def feature_to_bool(feature, on='ON', off='OFF'): + if feature in spec: + return on + return off + + def nfeature_to_bool(feature): + return feature_to_bool(feature, on='OFF', off='ON') + + feature_args = std_cmake_args[:] + feature_args.append('-DPARAVIEW_BUILD_QT_GUI:BOOL=%s' % feature_to_bool('+qt')) + feature_args.append('-DPARAVIEW_ENABLE_PYTHON:BOOL=%s' % feature_to_bool('+python')) + feature_args.append('-DPARAVIEW_USE_MPI:BOOL=%s' % feature_to_bool('+mpi')) + feature_args.append('-DVTK_ENABLE_TCL_WRAPPING:BOOL=%s' % feature_to_bool('+tcl')) + feature_args.append('-DVTK_OPENGL_HAS_OSMESA:BOOL=%s' % feature_to_bool('+osmesa')) + feature_args.append('-DVTK_USE_X:BOOL=%s' % nfeature_to_bool('+osmesa')) + feature_args.append('-DVTK_RENDERING_BACKEND:STRING=%s' % feature_to_bool('+opengl2', 'OpenGL2', 'OpenGL')) + + feature_args.extend(std_cmake_args) + + cmake('..', + '-DCMAKE_INSTALL_PREFIX:PATH=%s' % prefix, + '-DBUILD_TESTING:BOOL=OFF', + '-DVTK_USER_SYSTEM_FREETYPE:BOOL=ON', + '-DVTK_USER_SYSTEM_HDF5:BOOL=ON', + '-DVTK_USER_SYSTEM_JPEG:BOOL=ON', + #'-DVTK_USER_SYSTEM_LIBXML2:BOOL=ON', + '-DVTK_USER_SYSTEM_NETCDF:BOOL=ON', + '-DVTK_USER_SYSTEM_TIFF:BOOL=ON', + '-DVTK_USER_SYSTEM_ZLIB:BOOL=ON', + *feature_args) + make() 
+ make('install') From 50d0a2643bbef81ec2e97da209ae1974b6b77993 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Tue, 27 Oct 2015 13:34:46 -0700 Subject: [PATCH 114/121] Not all package stages have an archive file (e.g. source code repos) but all of them do have a source_path: use this instead to check whether the package resources were successfully retrieved. --- lib/spack/spack/cmd/test-install.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py index 58ab40aa7b0..68b761d5dc8 100644 --- a/lib/spack/spack/cmd/test-install.py +++ b/lib/spack/spack/cmd/test-install.py @@ -133,7 +133,7 @@ def create_test_output(topSpec, newInstalls, output, getLogFunc=fetch_log): depBID = BuildId(dep) errOutput = "Skipped due to failed dependency: {0}".format( depBID.stringId()) - elif (not package.installed) and (not package.stage.archive_file): + elif (not package.installed) and (not package.stage.source_path): result = TestResult.FAILED errOutput = "Failure to fetch package resources." elif not package.installed: From bf8479bec6311f28cd9e18c580e23794001cbf23 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 27 Oct 2015 16:29:14 -0700 Subject: [PATCH 115/121] Fix stupid lock bug. - Code simplification ignored case where exception was raised. - If LockError was raised, read and write counts were incremented erroneously. - updated lock test. --- lib/spack/llnl/util/lock.py | 40 ++++++++++++++++++------------- lib/spack/spack/package.py | 8 ++++--- lib/spack/spack/test/lock.py | 46 +++++++++++++++++++----------------- 3 files changed, 53 insertions(+), 41 deletions(-) diff --git a/lib/spack/llnl/util/lock.py b/lib/spack/llnl/util/lock.py index dcca37687e7..ac3684bd557 100644 --- a/lib/spack/llnl/util/lock.py +++ b/lib/spack/llnl/util/lock.py @@ -99,11 +99,13 @@ def acquire_read(self, timeout=_default_timeout): the POSIX lock, False if it is a nested transaction. 
""" - self._reads += 1 - if self._reads == 1 and self._writes == 0: - self._lock(fcntl.LOCK_SH, timeout) + if self._reads == 0 and self._writes == 0: + self._lock(fcntl.LOCK_SH, timeout) # can raise LockError. + self._reads += 1 return True - return False + else: + self._reads += 1 + return False def acquire_write(self, timeout=_default_timeout): @@ -117,11 +119,13 @@ def acquire_write(self, timeout=_default_timeout): the POSIX lock, False if it is a nested transaction. """ - self._writes += 1 - if self._writes == 1: - self._lock(fcntl.LOCK_EX, timeout) + if self._writes == 0: + self._lock(fcntl.LOCK_EX, timeout) # can raise LockError. + self._writes += 1 return True - return False + else: + self._writes += 1 + return False def release_read(self): @@ -136,11 +140,13 @@ def release_read(self): """ assert self._reads > 0 - self._reads -= 1 - if self._reads == 0 and self._writes == 0: - self._unlock() + if self._reads == 1 and self._writes == 0: + self._unlock() # can raise LockError. + self._reads -= 1 return True - return False + else: + self._reads -= 1 + return False def release_write(self): @@ -155,11 +161,13 @@ def release_write(self): """ assert self._writes > 0 - self._writes -= 1 - if self._writes == 0 and self._reads == 0: - self._unlock() + if self._writes == 1 and self._reads == 0: + self._unlock() # can raise LockError. + self._writes -= 1 return True - return False + else: + self._writes -= 1 + return False class LockError(Exception): diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index e6944ce40cd..2957257b1a3 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -606,6 +606,7 @@ def remove_prefix(self): spack.install_layout.remove_install_directory(self.spec) spack.installed_db.remove(self.spec) + def do_fetch(self): """Creates a stage directory and downloads the taball for this package. Working directory will be set to the stage directory. 
@@ -812,9 +813,6 @@ def real_work(): log_install_path = spack.install_layout.build_log_path(self.spec) install(log_path, log_install_path) - #Update the database once we know install successful - spack.installed_db.add(self.spec, spack.install_layout.path_for_spec(self.spec)) - # On successful install, remove the stage. if not keep_stage: self.stage.destroy() @@ -845,6 +843,10 @@ def real_work(): # Do the build. spack.build_environment.fork(self, real_work) + # note: PARENT of the build process adds the new package to + # the database, so that we don't need to re-read from file. + spack.installed_db.add(self.spec, spack.install_layout.path_for_spec(self.spec)) + # Once everything else is done, run post install hooks spack.hooks.post_install(self) diff --git a/lib/spack/spack/test/lock.py b/lib/spack/spack/test/lock.py index 2e7440bbbc3..5664e71b037 100644 --- a/lib/spack/spack/test/lock.py +++ b/lib/spack/spack/test/lock.py @@ -41,14 +41,6 @@ barrier_timeout = 5 -def order_processes(*functions): - """Order some processes using simple barrier synchronization.""" - b = Barrier(len(functions), timeout=barrier_timeout) - procs = [Process(target=f, args=(b,)) for f in functions] - for p in procs: p.start() - for p in procs: p.join() - - class LockTest(unittest.TestCase): def setUp(self): @@ -61,6 +53,16 @@ def tearDown(self): shutil.rmtree(self.tempdir, ignore_errors=True) + def multiproc_test(self, *functions): + """Order some processes using simple barrier synchronization.""" + b = Barrier(len(functions), timeout=barrier_timeout) + procs = [Process(target=f, args=(b,)) for f in functions] + for p in procs: p.start() + for p in procs: + p.join() + self.assertEqual(p.exitcode, 0) + + # # Process snippets below can be composed into tests. # @@ -94,13 +96,13 @@ def timeout_read(self, barrier): # exclusive lock is held. 
# def test_write_lock_timeout_on_write(self): - order_processes(self.acquire_write, self.timeout_write) + self.multiproc_test(self.acquire_write, self.timeout_write) def test_write_lock_timeout_on_write_2(self): - order_processes(self.acquire_write, self.timeout_write, self.timeout_write) + self.multiproc_test(self.acquire_write, self.timeout_write, self.timeout_write) def test_write_lock_timeout_on_write_3(self): - order_processes(self.acquire_write, self.timeout_write, self.timeout_write, self.timeout_write) + self.multiproc_test(self.acquire_write, self.timeout_write, self.timeout_write, self.timeout_write) # @@ -108,42 +110,42 @@ def test_write_lock_timeout_on_write_3(self): # exclusive lock is held. # def test_read_lock_timeout_on_write(self): - order_processes(self.acquire_write, self.timeout_read) + self.multiproc_test(self.acquire_write, self.timeout_read) def test_read_lock_timeout_on_write_2(self): - order_processes(self.acquire_write, self.timeout_read, self.timeout_read) + self.multiproc_test(self.acquire_write, self.timeout_read, self.timeout_read) def test_read_lock_timeout_on_write_3(self): - order_processes(self.acquire_write, self.timeout_read, self.timeout_read, self.timeout_read) + self.multiproc_test(self.acquire_write, self.timeout_read, self.timeout_read, self.timeout_read) # # Test that exclusive locks time out when shared locks are held. 
# def test_write_lock_timeout_on_read(self): - order_processes(self.acquire_read, self.timeout_write) + self.multiproc_test(self.acquire_read, self.timeout_write) def test_write_lock_timeout_on_read_2(self): - order_processes(self.acquire_read, self.timeout_write, self.timeout_write) + self.multiproc_test(self.acquire_read, self.timeout_write, self.timeout_write) def test_write_lock_timeout_on_read_3(self): - order_processes(self.acquire_read, self.timeout_write, self.timeout_write, self.timeout_write) + self.multiproc_test(self.acquire_read, self.timeout_write, self.timeout_write, self.timeout_write) # # Test that exclusive locks time while lots of shared locks are held. # def test_write_lock_timeout_with_multiple_readers_2_1(self): - order_processes(self.acquire_read, self.acquire_read, self.timeout_write) + self.multiproc_test(self.acquire_read, self.acquire_read, self.timeout_write) def test_write_lock_timeout_with_multiple_readers_2_2(self): - order_processes(self.acquire_read, self.acquire_read, self.timeout_write, self.timeout_write) + self.multiproc_test(self.acquire_read, self.acquire_read, self.timeout_write, self.timeout_write) def test_write_lock_timeout_with_multiple_readers_3_1(self): - order_processes(self.acquire_read, self.acquire_read, self.acquire_read, self.timeout_write) + self.multiproc_test(self.acquire_read, self.acquire_read, self.acquire_read, self.timeout_write) def test_write_lock_timeout_with_multiple_readers_3_2(self): - order_processes(self.acquire_read, self.acquire_read, self.acquire_read, self.timeout_write, self.timeout_write) + self.multiproc_test(self.acquire_read, self.acquire_read, self.acquire_read, self.timeout_write, self.timeout_write) # @@ -261,4 +263,4 @@ def p3(barrier): barrier.wait() # ---------------------------------------- 13 lock.release_read() - order_processes(p1, p2, p3) + self.multiproc_test(p1, p2, p3) From a58ae0c5d0002a7c6cce606b3308dbf53fc29317 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 27 Oct 
2015 16:36:44 -0700 Subject: [PATCH 116/121] Build database working with simple transaction support; all tests passing. --- lib/spack/llnl/util/tty/color.py | 5 + lib/spack/spack/cmd/__init__.py | 19 +- lib/spack/spack/cmd/deactivate.py | 11 +- lib/spack/spack/cmd/diy.py | 11 +- lib/spack/spack/cmd/extensions.py | 3 +- lib/spack/spack/cmd/find.py | 11 +- lib/spack/spack/cmd/install.py | 8 +- lib/spack/spack/cmd/uninstall.py | 34 +-- lib/spack/spack/database.py | 401 ++++++++++++++++++++-------- lib/spack/spack/directory_layout.py | 3 - lib/spack/spack/package.py | 2 +- lib/spack/spack/test/database.py | 365 ++++++++++++++++++++----- 12 files changed, 644 insertions(+), 229 deletions(-) diff --git a/lib/spack/llnl/util/tty/color.py b/lib/spack/llnl/util/tty/color.py index 22080a7b37f..0d09303da09 100644 --- a/lib/spack/llnl/util/tty/color.py +++ b/lib/spack/llnl/util/tty/color.py @@ -158,6 +158,11 @@ def clen(string): return len(re.sub(r'\033[^m]*m', '', string)) +def cextra(string): + """Length of extra color characters in a string""" + return len(''.join(re.findall(r'\033[^m]*m', string))) + + def cwrite(string, stream=sys.stdout, color=None): """Replace all color expressions in string with ANSI control codes and write the result to the stream. If color is diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index d4778b1375a..6ce6fa0960c 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -124,16 +124,15 @@ def elide_list(line_list, max_num=10): def disambiguate_spec(spec): - with spack.installed_db.read_lock(): - matching_specs = spack.installed_db.query(spec) - if not matching_specs: - tty.die("Spec '%s' matches no installed packages." % spec) + matching_specs = spack.installed_db.query(spec) + if not matching_specs: + tty.die("Spec '%s' matches no installed packages." % spec) - elif len(matching_specs) > 1: - args = ["%s matches multiple packages." 
% spec, - "Matching packages:"] - args += [" " + str(s) for s in matching_specs] - args += ["Use a more specific spec."] - tty.die(*args) + elif len(matching_specs) > 1: + args = ["%s matches multiple packages." % spec, + "Matching packages:"] + args += [" " + str(s) for s in matching_specs] + args += ["Use a more specific spec."] + tty.die(*args) return matching_specs[0] diff --git a/lib/spack/spack/cmd/deactivate.py b/lib/spack/spack/cmd/deactivate.py index 5428e3d2de3..1f0e303cdf2 100644 --- a/lib/spack/spack/cmd/deactivate.py +++ b/lib/spack/spack/cmd/deactivate.py @@ -54,13 +54,12 @@ def deactivate(parser, args): if args.all: if pkg.extendable: tty.msg("Deactivating all extensions of %s" % pkg.spec.short_spec) - with spack.installed_db.read_lock(): - ext_pkgs = spack.installed_db.installed_extensions_for(spec) + ext_pkgs = spack.installed_db.installed_extensions_for(spec) - for ext_pkg in ext_pkgs: - ext_pkg.spec.normalize() - if ext_pkg.activated: - ext_pkg.do_deactivate(force=True) + for ext_pkg in ext_pkgs: + ext_pkg.spec.normalize() + if ext_pkg.activated: + ext_pkg.do_deactivate(force=True) elif pkg.is_extension: if not args.force and not spec.package.activated: diff --git a/lib/spack/spack/cmd/diy.py b/lib/spack/spack/cmd/diy.py index 6178c9c3e3d..f7998720ac4 100644 --- a/lib/spack/spack/cmd/diy.py +++ b/lib/spack/spack/cmd/diy.py @@ -54,11 +54,12 @@ def diy(self, args): if not args.spec: tty.die("spack diy requires a package spec argument.") - with spack.installed_db.write_lock(): - specs = spack.cmd.parse_specs(args.spec) - if len(specs) > 1: - tty.die("spack diy only takes one spec.") + specs = spack.cmd.parse_specs(args.spec) + if len(specs) > 1: + tty.die("spack diy only takes one spec.") + # Take a write lock before checking for existence. 
+ with spack.installed_db.write_lock(): spec = specs[0] if not spack.db.exists(spec.name): tty.warn("No such package: %s" % spec.name) @@ -85,7 +86,7 @@ def diy(self, args): # Forces the build to run out of the current directory. package.stage = DIYStage(os.getcwd()) - # TODO: make this an argument, not a global. + # TODO: make this an argument, not a global. spack.do_checksum = False package.do_install( diff --git a/lib/spack/spack/cmd/extensions.py b/lib/spack/spack/cmd/extensions.py index f0f99a26910..7cadc424b00 100644 --- a/lib/spack/spack/cmd/extensions.py +++ b/lib/spack/spack/cmd/extensions.py @@ -80,8 +80,7 @@ def extensions(parser, args): colify(ext.name for ext in extensions) # List specs of installed extensions. - with spack.installed_db.read_lock(): - installed = [s.spec for s in spack.installed_db.installed_extensions_for(spec)] + installed = [s.spec for s in spack.installed_db.installed_extensions_for(spec)] print if not installed: tty.msg("None installed.") diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py index 6a0c3d11ff5..0b0dd6ef6fc 100644 --- a/lib/spack/spack/cmd/find.py +++ b/lib/spack/spack/cmd/find.py @@ -158,12 +158,11 @@ def find(parser, args): q_args = { 'installed' : installed, 'known' : known } # Get all the specs the user asked for - with spack.installed_db.read_lock(): - if not query_specs: - specs = set(spack.installed_db.query(**q_args)) - else: - results = [set(spack.installed_db.query(qs, **q_args)) for qs in query_specs] - specs = set.union(*results) + if not query_specs: + specs = set(spack.installed_db.query(**q_args)) + else: + results = [set(spack.installed_db.query(qs, **q_args)) for qs in query_specs] + specs = set.union(*results) if not args.mode: args.mode = 'short' diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py index ada655b937b..ba824bd6583 100644 --- a/lib/spack/spack/cmd/install.py +++ b/lib/spack/spack/cmd/install.py @@ -68,10 +68,10 @@ def install(parser, args): if 
args.no_checksum: spack.do_checksum = False # TODO: remove this global. - with spack.installed_db.write_lock(): - specs = spack.cmd.parse_specs(args.packages, concretize=True) - for spec in specs: - package = spack.db.get(spec) + specs = spack.cmd.parse_specs(args.packages, concretize=True) + for spec in specs: + package = spack.db.get(spec) + with spack.installed_db.write_lock(): package.do_install( keep_prefix=args.keep_prefix, keep_stage=args.keep_stage, diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py index 7b7c32c0655..1dae84444ab 100644 --- a/lib/spack/spack/cmd/uninstall.py +++ b/lib/spack/spack/cmd/uninstall.py @@ -84,21 +84,21 @@ def uninstall(parser, args): # The package.py file has gone away -- but still want to uninstall. spack.Package(s).do_uninstall(force=True) - # Sort packages to be uninstalled by the number of installed dependents - # This ensures we do things in the right order - def num_installed_deps(pkg): - return len(pkg.installed_dependents) - pkgs.sort(key=num_installed_deps) + # Sort packages to be uninstalled by the number of installed dependents + # This ensures we do things in the right order + def num_installed_deps(pkg): + return len(pkg.installed_dependents) + pkgs.sort(key=num_installed_deps) - # Uninstall packages in order now. - for pkg in pkgs: - try: - pkg.do_uninstall(force=args.force) - except PackageStillNeededError, e: - tty.error("Will not uninstall %s" % e.spec.format("$_$@$%@$#", color=True)) - print - print "The following packages depend on it:" - display_specs(e.dependents, long=True) - print - print "You can use spack uninstall -f to force this action." - sys.exit(1) + # Uninstall packages in order now. 
+ for pkg in pkgs: + try: + pkg.do_uninstall(force=args.force) + except PackageStillNeededError, e: + tty.error("Will not uninstall %s" % e.spec.format("$_$@$%@$#", color=True)) + print + print "The following packages depend on it:" + display_specs(e.dependents, long=True) + print + print "You can use spack uninstall -f to force this action." + sys.exit(1) diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index 1d1c640d663..9ce00a45e96 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -48,7 +48,7 @@ import llnl.util.tty as tty from llnl.util.filesystem import * -from llnl.util.lock import Lock +from llnl.util.lock import * import spack.spec from spack.version import Version @@ -62,7 +62,8 @@ _db_version = Version('0.9') # Default timeout for spack database locks is 5 min. -_db_lock_timeout = 300 +_db_lock_timeout = 60 + def _autospec(function): """Decorator that automatically converts the argument of a single-arg @@ -90,11 +91,11 @@ class InstallRecord(object): dependents left. """ - def __init__(self, spec, path, installed): + def __init__(self, spec, path, installed, ref_count=0): self.spec = spec self.path = path self.installed = installed - self.ref_count = 0 + self.ref_count = ref_count def to_dict(self): return { 'spec' : self.spec.to_node_dict(), @@ -103,25 +104,42 @@ def to_dict(self): 'ref_count' : self.ref_count } @classmethod - def from_dict(cls, d): - # TODO: check the dict more rigorously. - return InstallRecord(d['spec'], d['path'], d['installed'], d['ref_count']) + def from_dict(cls, spec, dictionary): + d = dictionary + return InstallRecord(spec, d['path'], d['installed'], d['ref_count']) class Database(object): - def __init__(self, root): - """Create an empty Database. + def __init__(self, root, db_dir=None): + """Create a Database for Spack installations under ``root``. + + A Database is a cache of Specs data from ``$prefix/spec.yaml`` + files in Spack installation directories. 
+ + By default, Database files (data and lock files) are stored + under ``root/.spack-db``, which is created if it does not + exist. This is the ``db_dir``. + + The Database will attempt to read an ``index.yaml`` file in + ``db_dir``. If it does not find one, it will be created when + needed by scanning the entire Database root for ``spec.yaml`` + files according to Spack's ``DirectoryLayout``. + + Caller may optionally provide a custom ``db_dir`` parameter + where data will be stored. This is intended to be used for + testing the Database class. - Location defaults to root/_index.yaml - The individual data are dicts containing - spec: the top level spec of a package - path: the path to the install of that package - dep_hash: a hash of the dependence DAG for that package """ - self._root = root + self.root = root - # Set up layout of database files. - self._db_dir = join_path(self._root, _db_dirname) + if db_dir is None: + # If the db_dir is not provided, default to within the db root. + self._db_dir = join_path(self.root, _db_dirname) + else: + # Allow customizing the database directory location for testing. + self._db_dir = db_dir + + # Set up layout of database files within the db dir self._index_path = join_path(self._db_dir, 'index.yaml') self._lock_path = join_path(self._db_dir, 'lock') @@ -135,21 +153,23 @@ def __init__(self, root): # initialize rest of state. 
self.lock = Lock(self._lock_path) self._data = {} - self._last_write_time = 0 - def write_lock(self, timeout=_db_lock_timeout): - """Get a write lock context for use in a `with` block.""" - return self.lock.write_lock(timeout) + def write_transaction(self, timeout=_db_lock_timeout): + """Get a write lock context manager for use in a `with` block.""" + return WriteTransaction(self, self._read, self._write, timeout) - def read_lock(self, timeout=_db_lock_timeout): - """Get a read lock context for use in a `with` block.""" - return self.lock.read_lock(timeout) + def read_transaction(self, timeout=_db_lock_timeout): + """Get a read lock context manager for use in a `with` block.""" + return ReadTransaction(self, self._read, None, timeout) def _write_to_yaml(self, stream): - """Write out the databsae to a YAML file.""" + """Write out the database to a YAML file. + + This function does not do any locking or transactions. + """ # map from per-spec hash code to installation record. installs = dict((k, v.to_dict()) for k, v in self._data.items()) @@ -173,7 +193,10 @@ def _write_to_yaml(self, stream): def _read_spec_from_yaml(self, hash_key, installs, parent_key=None): - """Recursively construct a spec from a hash in a YAML database.""" + """Recursively construct a spec from a hash in a YAML database. + + Does not do any locking. + """ if hash_key not in installs: parent = read_spec(installs[parent_key]['path']) @@ -195,6 +218,8 @@ def _read_from_yaml(self, stream): """ Fill database from YAML, do not maintain old data Translate the spec portions from node-dict form to spec form + + Does not do any locking. """ try: if isinstance(stream, basestring): @@ -243,7 +268,7 @@ def check(cond, msg): # Insert the brand new spec in the database. Each # spec has its own copies of its dependency specs. # TODO: would a more immmutable spec implementation simplify this? 
- data[hash_key] = InstallRecord(spec, rec['path'], rec['installed']) + data[hash_key] = InstallRecord.from_dict(spec, rec) except Exception as e: tty.warn("Invalid database reecord:", @@ -256,57 +281,60 @@ def check(cond, msg): def reindex(self, directory_layout): - """Build database index from scratch based from a directory layout.""" - with self.write_lock(): - data = {} + """Build database index from scratch based from a directory layout. - # Ask the directory layout to traverse the filesystem. - for spec in directory_layout.all_specs(): - # Create a spec for each known package and add it. - path = directory_layout.path_for_spec(spec) - hash_key = spec.dag_hash() - data[hash_key] = InstallRecord(spec, path, True) + Locks the DB if it isn't locked already. - # Recursively examine dependencies and add them, even - # if they are NOT installed. This ensures we know - # about missing dependencies. - for dep in spec.traverse(root=False): - dep_hash = dep.dag_hash() - if dep_hash not in data: - path = directory_layout.path_for_spec(dep) - installed = os.path.isdir(path) - data[dep_hash] = InstallRecord(dep.copy(), path, installed) - data[dep_hash].ref_count += 1 - - # Assuming everything went ok, replace this object's data. - self._data = data - - # write out, blowing away the old version if necessary - self.write() - - - def read(self): """ - Re-read Database from the data in the set location - If the cache is fresh, return immediately. + with self.write_transaction(): + old_data = self._data + try: + self._data = {} + + # Ask the directory layout to traverse the filesystem. + for spec in directory_layout.all_specs(): + # Create a spec for each known package and add it. + path = directory_layout.path_for_spec(spec) + self._add(spec, path, directory_layout) + + self._check_ref_counts() + + except: + # If anything explodes, restore old data, skip write. 
+ self._data = old_data + raise + + + def _check_ref_counts(self): + """Ensure consistency of reference counts in the DB. + + Raise an AssertionError if something is amiss. + + Does no locking. """ - if not self.is_dirty(): - return + counts = {} + for key, rec in self._data.items(): + counts.setdefault(key, 0) + for dep in rec.spec.dependencies.values(): + dep_key = dep.dag_hash() + counts.setdefault(dep_key, 0) + counts[dep_key] += 1 - if os.path.isfile(self._index_path): - # Read from YAML file if a database exists - self._read_from_yaml(self._index_path) - else: - # The file doesn't exist, try to traverse the directory. - self.reindex(spack.install_layout) + for rec in self._data.values(): + key = rec.spec.dag_hash() + expected = counts[key] + found = rec.ref_count + if not expected == found: + raise AssertionError( + "Invalid ref_count: %s: %d (expected %d), in DB %s." + % (key, found, expected, self._index_path)) - def write(self): - """ - Write the database to the standard location - Everywhere that the database is written it is read - within the same lock, so there is no need to refresh - the database within write() + def _write(self): + """Write the in-memory database index to its file path. + + Does no locking. + """ temp_name = '%s.%s.temp' % (socket.getfqdn(), os.getpid()) temp_file = join_path(self._db_dir, temp_name) @@ -314,7 +342,6 @@ def write(self): # Write a temporary database file them move it into place try: with open(temp_file, 'w') as f: - self._last_write_time = int(time.time()) self._write_to_yaml(f) os.rename(temp_file, self._index_path) @@ -325,36 +352,137 @@ def write(self): raise - def is_dirty(self): - """ - Returns true iff the database file does not exist - or was most recently written to by another spack instance. - """ - return (not os.path.isfile(self._index_path) or - (os.path.getmtime(self._index_path) > self._last_write_time)) + def _read(self): + """Re-read Database from the data in the set location. + This does no locking. 
+ """ + if os.path.isfile(self._index_path): + # Read from YAML file if a database exists + self._read_from_yaml(self._index_path) + + else: + # The file doesn't exist, try to traverse the directory. + # reindex() takes its own write lock, so no lock here. + self.reindex(spack.install_layout) + + + def read(self): + with self.read_transaction(): pass + + + def write(self): + with self.write_transaction(): pass + + + def _add(self, spec, path, directory_layout=None): + """Add an install record for spec at path to the database. + + This assumes that the spec is not already installed. It + updates the ref counts on dependencies of the spec in the DB. + + This operation is in-memory, and does not lock the DB. + + """ + key = spec.dag_hash() + if key in self._data: + rec = self._data[key] + rec.installed = True + + # TODO: this overwrites a previous install path (when path != + # self._data[key].path), and the old path still has a + # dependent in the DB. We could consider re-RPATH-ing the + # dependents. This case is probably infrequent and may not be + # worth fixing, but this is where we can discover it. + rec.path = path + + else: + self._data[key] = InstallRecord(spec, path, True) + for dep in spec.dependencies.values(): + self._increment_ref_count(dep, directory_layout) + + + def _increment_ref_count(self, spec, directory_layout=None): + """Recursively examine dependencies and update their DB entries.""" + key = spec.dag_hash() + if key not in self._data: + installed = False + path = None + if directory_layout: + path = directory_layout.path_for_spec(spec) + installed = os.path.isdir(path) + + self._data[key] = InstallRecord(spec.copy(), path, installed) + + for dep in spec.dependencies.values(): + self._increment_ref_count(dep) + + self._data[key].ref_count += 1 @_autospec def add(self, spec, path): - """Read the database from the set location + """Add spec at path to database, locking and reading DB to sync. 
- Add the specified entry as a dict, then write the database - back to memory. This assumes that ALL dependencies are already in - the database. Should not be called otherwise. + ``add()`` will lock and read from the DB on disk. """ - # Should always already be locked - with self.write_lock(): - self.read() - self._data[spec.dag_hash()] = InstallRecord(spec, path, True) + # TODO: ensure that spec is concrete? + # Entire add is transactional. + with self.write_transaction(): + self._add(spec, path) - # sanity check the dependencies in case something went - # wrong during install() - # TODO: ensure no races during distributed install. - for dep in spec.traverse(root=False): - assert dep.dag_hash() in self._data - self.write() + def _get_matching_spec_key(self, spec, **kwargs): + """Get the exact spec OR get a single spec that matches.""" + key = spec.dag_hash() + if not key in self._data: + match = self.query_one(spec, **kwargs) + if match: + return match.dag_hash() + raise KeyError("No such spec in database! %s" % spec) + return key + + + @_autospec + def get_record(self, spec, **kwargs): + key = self._get_matching_spec_key(spec, **kwargs) + return self._data[key] + + + def _decrement_ref_count(self, spec): + key = spec.dag_hash() + + if not key in self._data: + # TODO: print something here? DB is corrupt, but + # not much we can do. + return + + rec = self._data[key] + rec.ref_count -= 1 + + if rec.ref_count == 0 and not rec.installed: + del self._data[key] + for dep in spec.dependencies.values(): + self._decrement_ref_count(dep) + + + def _remove(self, spec): + """Non-locking version of remove(); does real work. + """ + key = self._get_matching_spec_key(spec) + rec = self._data[key] + + if rec.ref_count > 0: + rec.installed = False + return rec.spec + + del self._data[key] + for dep in rec.spec.dependencies.values(): + self._decrement_ref_count(dep) + + # Returns the concrete spec so we know it in the case where a + # query spec was passed in. 
+ return rec.spec @_autospec @@ -369,13 +497,9 @@ def remove(self, spec): and remvoes them if they are no longer needed. """ - # Should always already be locked - with self.write_lock(): - self.read() - hash_key = spec.dag_hash() - if hash_key in self._data: - del self._data[hash_key] - self.write() + # Take a lock around the entire removal. + with self.write_transaction(): + return self._remove(spec) @_autospec @@ -429,24 +553,75 @@ def query(self, query_spec=any, known=any, installed=True): these really special cases that only belong here? """ - with self.read_lock(): - self.read() + with self.read_transaction(): + results = [] + for key, rec in self._data.items(): + if installed is not any and rec.installed != installed: + continue + if known is not any and spack.db.exists(rec.spec.name) != known: + continue + if query_spec is any or rec.spec.satisfies(query_spec): + results.append(rec.spec) - results = [] - for key, rec in self._data.items(): - if installed is not any and rec.installed != installed: - continue - if known is not any and spack.db.exists(rec.spec.name) != known: - continue - if query_spec is any or rec.spec.satisfies(query_spec): - results.append(rec.spec) + return sorted(results) - return sorted(results) + + def query_one(self, query_spec, known=any, installed=True): + """Query for exactly one spec that matches the query spec. + + Raises an assertion error if more than one spec matches the + query. Returns None if no installed package matches. + + """ + concrete_specs = self.query(query_spec, known, installed) + assert len(concrete_specs) <= 1 + return concrete_specs[0] if concrete_specs else None def missing(self, spec): - key = spec.dag_hash() - return key in self._data and not self._data[key].installed + with self.read_transaction(): + key = spec.dag_hash() + return key in self._data and not self._data[key].installed + + +class _Transaction(object): + """Simple nested transaction context manager that uses a file lock. 
+ + This class can trigger actions when the lock is acquired for the + first time and released for the last. + + Timeout for lock is customizable. + """ + def __init__(self, db, acquire_fn=None, release_fn=None, + timeout=_db_lock_timeout): + self._db = db + self._timeout = timeout + self._acquire_fn = acquire_fn + self._release_fn = release_fn + + def __enter__(self): + if self._enter() and self._acquire_fn: + self._acquire_fn() + + def __exit__(self, type, value, traceback): + if self._exit() and self._release_fn: + self._release_fn() + + +class ReadTransaction(_Transaction): + def _enter(self): + return self._db.lock.acquire_read(self._timeout) + + def _exit(self): + return self._db.lock.release_read() + + +class WriteTransaction(_Transaction): + def _enter(self): + return self._db.lock.acquire_write(self._timeout) + + def _exit(self): + return self._db.lock.release_write() class CorruptDatabaseError(SpackError): diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index e61929d8fdd..758ec209dbf 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -32,7 +32,6 @@ from external import yaml import llnl.util.tty as tty -from llnl.util.lang import memoized from llnl.util.filesystem import join_path, mkdirp from spack.spec import Spec @@ -263,7 +262,6 @@ def create_install_directory(self, spec): self.write_spec(spec, spec_file_path) - @memoized def all_specs(self): if not os.path.isdir(self.root): return [] @@ -274,7 +272,6 @@ def all_specs(self): return [self.read_spec(s) for s in spec_files] - @memoized def specs_by_hash(self): by_hash = {} for spec in self.all_specs(): diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 2957257b1a3..b87baf403e4 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -845,7 +845,7 @@ def real_work(): # note: PARENT of the build process adds the new package to # the database, so that we don't need to re-read from 
file. - spack.installed_db.add(self.spec, spack.install_layout.path_for_spec(self.spec)) + spack.installed_db.add(self.spec, self.prefix) # Once everything else is done, run post install hooks spack.hooks.post_install(self) diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index a3386bad991..3c5926e8408 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -1,5 +1,5 @@ ############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. @@ -26,79 +26,320 @@ These tests check the database is functioning properly, both in memory and in its file """ -import unittest +import tempfile +import shutil +import multiprocessing from llnl.util.lock import * from llnl.util.filesystem import join_path import spack from spack.database import Database +from spack.directory_layout import YamlDirectoryLayout +from spack.test.mock_packages_test import * + +from llnl.util.tty.colify import colify + +def _print_ref_counts(): + """Print out all ref counts for the graph used here, for debugging""" + recs = [] + + def add_rec(spec): + cspecs = spack.installed_db.query(spec, installed=any) + + if not cspecs: + recs.append("[ %-7s ] %-20s-" % ('', spec)) + else: + key = cspecs[0].dag_hash() + rec = spack.installed_db.get_record(cspecs[0]) + recs.append("[ %-7s ] %-20s%d" % (key[:7], spec, rec.ref_count)) + + with spack.installed_db.read_transaction(): + add_rec('mpileaks ^mpich') + add_rec('callpath ^mpich') + add_rec('mpich') + + add_rec('mpileaks ^mpich2') + add_rec('callpath ^mpich2') + add_rec('mpich2') + + add_rec('mpileaks ^zmpi') + add_rec('callpath ^zmpi') + add_rec('zmpi') + add_rec('fake') + + add_rec('dyninst') + add_rec('libdwarf') + add_rec('libelf') + + colify(recs, cols=3) + + 
+class DatabaseTest(MockPackagesTest): + + def _mock_install(self, spec): + s = Spec(spec) + pkg = spack.db.get(s.concretized()) + pkg.do_install(fake=True) + + + def _mock_remove(self, spec): + specs = spack.installed_db.query(spec) + assert(len(specs) == 1) + spec = specs[0] + spec.package.do_uninstall(spec) -class DatabaseTest(unittest.TestCase): def setUp(self): - self.original_db = spack.installed_db - spack.installed_db = Database(self.original_db._root,"_test_index.yaml") - self.file_path = join_path(self.original_db._root,"_test_index.yaml") - if os.path.exists(self.file_path): - os.remove(self.file_path) + super(DatabaseTest, self).setUp() + # + # TODO: make the mockup below easier. + # + + # Make a fake install directory + self.install_path = tempfile.mkdtemp() + self.spack_install_path = spack.install_path + spack.install_path = self.install_path + + self.install_layout = YamlDirectoryLayout(self.install_path) + self.spack_install_layout = spack.install_layout + spack.install_layout = self.install_layout + + # Make fake database and fake install directory. + self.installed_db = Database(self.install_path) + self.spack_installed_db = spack.installed_db + spack.installed_db = self.installed_db + + # make a mock database with some packages installed note that + # the ref count for dyninst here will be 3, as it's recycled + # across each install. + # + # Here is what the mock DB looks like: + # + # o mpileaks o mpileaks' o mpileaks'' + # |\ |\ |\ + # | o callpath | o callpath' | o callpath'' + # |/| |/| |/| + # o | mpich o | mpich2 o | zmpi + # | | o | fake + # | | | + # | |______________/ + # | .____________/ + # |/ + # o dyninst + # |\ + # | o libdwarf + # |/ + # o libelf + # + + # Transaction used to avoid repeated writes. 
+ with spack.installed_db.write_transaction(): + self._mock_install('mpileaks ^mpich') + self._mock_install('mpileaks ^mpich2') + self._mock_install('mpileaks ^zmpi') + def tearDown(self): - spack.installed_db = self.original_db - os.remove(self.file_path) - - def _test_read_from_install_tree(self): - specs = spack.install_layout.all_specs() - spack.installed_db.read_database() - spack.installed_db.write() - for sph in spack.installed_db._data: - self.assertTrue(sph['spec'] in specs) - self.assertEqual(len(specs),len(spack.installed_db._data)) - - def _test_remove_and_add(self): - specs = spack.install_layout.all_specs() - spack.installed_db.remove(specs[len(specs)-1]) - for sph in spack.installed_db._data: - self.assertTrue(sph['spec'] in specs[:len(specs)-1]) - self.assertEqual(len(specs)-1,len(spack.installed_db._data)) - - spack.installed_db.add(specs[len(specs)-1],"") - for sph in spack.installed_db._data: - self.assertTrue(sph['spec'] in specs) - self.assertEqual(len(specs),len(spack.installed_db._data)) - - def _test_read_from_file(self): - spack.installed_db.read_database() - size = len(spack.installed_db._data) - spack.installed_db._data = spack.installed_db._data[1:] - os.utime(spack.installed_db._file_path,None) - spack.installed_db.read_database() - self.assertEqual(size,len(spack.installed_db._data)) - - specs = spack.install_layout.all_specs() - self.assertEqual(size,len(specs)) - for sph in spack.installed_db._data: - self.assertTrue(sph['spec'] in specs) + super(DatabaseTest, self).tearDown() + shutil.rmtree(self.install_path) + spack.install_path = self.spack_install_path + spack.install_layout = self.spack_install_layout + spack.installed_db = self.spack_installed_db - def _test_write_to_file(self): - spack.installed_db.read_database() - size = len(spack.installed_db._data) - real_data = spack.installed_db._data - spack.installed_db._data = real_data[:size-1] - spack.installed_db.write() - spack.installed_db._data = real_data - 
os.utime(spack.installed_db._file_path,None) - spack.installed_db.read_database() - self.assertEqual(size-1,len(spack.installed_db._data)) + def test_010_all_install_sanity(self): + """Ensure that the install layout reflects what we think it does.""" + all_specs = spack.install_layout.all_specs() + self.assertEqual(len(all_specs), 13) - specs = spack.install_layout.all_specs() - self.assertEqual(size,len(specs)) - for sph in spack.installed_db._data: - self.assertTrue(sph['spec'] in specs[:size-1]) + # query specs with multiple configurations + mpileaks_specs = [s for s in all_specs if s.satisfies('mpileaks')] + callpath_specs = [s for s in all_specs if s.satisfies('callpath')] + mpi_specs = [s for s in all_specs if s.satisfies('mpi')] - def test_ordered_test(self): - self._test_read_from_install_tree() - self._test_remove_and_add() - self._test_read_from_file() - self._test_write_to_file() + self.assertEqual(len(mpileaks_specs), 3) + self.assertEqual(len(callpath_specs), 3) + self.assertEqual(len(mpi_specs), 3) + + # query specs with single configurations + dyninst_specs = [s for s in all_specs if s.satisfies('dyninst')] + libdwarf_specs = [s for s in all_specs if s.satisfies('libdwarf')] + libelf_specs = [s for s in all_specs if s.satisfies('libelf')] + + self.assertEqual(len(dyninst_specs), 1) + self.assertEqual(len(libdwarf_specs), 1) + self.assertEqual(len(libelf_specs), 1) + + # Query by dependency + self.assertEqual(len([s for s in all_specs if s.satisfies('mpileaks ^mpich')]), 1) + self.assertEqual(len([s for s in all_specs if s.satisfies('mpileaks ^mpich2')]), 1) + self.assertEqual(len([s for s in all_specs if s.satisfies('mpileaks ^zmpi')]), 1) + + + def test_015_write_and_read(self): + # write and read DB + with spack.installed_db.write_transaction(): + specs = spack.installed_db.query() + recs = [spack.installed_db.get_record(s) for s in specs] + spack.installed_db.write() + spack.installed_db.read() + + for spec, rec in zip(specs, recs): + new_rec = 
spack.installed_db.get_record(spec) + self.assertEqual(new_rec.ref_count, rec.ref_count) + self.assertEqual(new_rec.spec, rec.spec) + self.assertEqual(new_rec.path, rec.path) + self.assertEqual(new_rec.installed, rec.installed) + + + def _check_db_sanity(self): + """Utiilty function to check db against install layout.""" + expected = sorted(spack.install_layout.all_specs()) + actual = sorted(self.installed_db.query()) + + self.assertEqual(len(expected), len(actual)) + for e, a in zip(expected, actual): + self.assertEqual(e, a) + + + def test_020_db_sanity(self): + """Make sure query() returns what's actually in the db.""" + self._check_db_sanity() + + + def test_030_db_sanity_from_another_process(self): + def read_and_modify(): + self._check_db_sanity() # check that other process can read DB + with self.installed_db.write_transaction(): + self._mock_remove('mpileaks ^zmpi') + + p = multiprocessing.Process(target=read_and_modify, args=()) + p.start() + p.join() + + # ensure child process change is visible in parent process + with self.installed_db.read_transaction(): + self.assertEqual(len(self.installed_db.query('mpileaks ^zmpi')), 0) + + + def test_040_ref_counts(self): + """Ensure that we got ref counts right when we read the DB.""" + self.installed_db._check_ref_counts() + + + def test_050_basic_query(self): + """Ensure that querying the database is consistent with what is installed.""" + # query everything + self.assertEqual(len(spack.installed_db.query()), 13) + + # query specs with multiple configurations + mpileaks_specs = self.installed_db.query('mpileaks') + callpath_specs = self.installed_db.query('callpath') + mpi_specs = self.installed_db.query('mpi') + + self.assertEqual(len(mpileaks_specs), 3) + self.assertEqual(len(callpath_specs), 3) + self.assertEqual(len(mpi_specs), 3) + + # query specs with single configurations + dyninst_specs = self.installed_db.query('dyninst') + libdwarf_specs = self.installed_db.query('libdwarf') + libelf_specs = 
self.installed_db.query('libelf') + + self.assertEqual(len(dyninst_specs), 1) + self.assertEqual(len(libdwarf_specs), 1) + self.assertEqual(len(libelf_specs), 1) + + # Query by dependency + self.assertEqual(len(self.installed_db.query('mpileaks ^mpich')), 1) + self.assertEqual(len(self.installed_db.query('mpileaks ^mpich2')), 1) + self.assertEqual(len(self.installed_db.query('mpileaks ^zmpi')), 1) + + + def _check_remove_and_add_package(self, spec): + """Remove a spec from the DB, then add it and make sure everything's + still ok once it is added. This checks that it was + removed, that it's back when added again, and that ref + counts are consistent. + """ + original = self.installed_db.query() + self.installed_db._check_ref_counts() + + # Remove spec + concrete_spec = self.installed_db.remove(spec) + self.installed_db._check_ref_counts() + remaining = self.installed_db.query() + + # ensure spec we removed is gone + self.assertEqual(len(original) - 1, len(remaining)) + self.assertTrue(all(s in original for s in remaining)) + self.assertTrue(concrete_spec not in remaining) + + # add it back and make sure everything is ok. + self.installed_db.add(concrete_spec, "") + installed = self.installed_db.query() + self.assertEqual(len(installed), len(original)) + + # sanity check against direcory layout and check ref counts. 
+ self._check_db_sanity() + self.installed_db._check_ref_counts() + + + def test_060_remove_and_add_root_package(self): + self._check_remove_and_add_package('mpileaks ^mpich') + + + def test_070_remove_and_add_dependency_package(self): + self._check_remove_and_add_package('dyninst') + + + def test_080_root_ref_counts(self): + rec = self.installed_db.get_record('mpileaks ^mpich') + + # Remove a top-level spec from the DB + self.installed_db.remove('mpileaks ^mpich') + + # record no longer in DB + self.assertEqual(self.installed_db.query('mpileaks ^mpich', installed=any), []) + + # record's deps have updated ref_counts + self.assertEqual(self.installed_db.get_record('callpath ^mpich').ref_count, 0) + self.assertEqual(self.installed_db.get_record('mpich').ref_count, 1) + + # put the spec back + self.installed_db.add(rec.spec, rec.path) + + # record is present again + self.assertEqual(len(self.installed_db.query('mpileaks ^mpich', installed=any)), 1) + + # dependencies have ref counts updated + self.assertEqual(self.installed_db.get_record('callpath ^mpich').ref_count, 1) + self.assertEqual(self.installed_db.get_record('mpich').ref_count, 2) + + + def test_090_non_root_ref_counts(self): + mpileaks_mpich_rec = self.installed_db.get_record('mpileaks ^mpich') + callpath_mpich_rec = self.installed_db.get_record('callpath ^mpich') + + # "force remove" a non-root spec from the DB + self.installed_db.remove('callpath ^mpich') + + # record still in DB but marked uninstalled + self.assertEqual(self.installed_db.query('callpath ^mpich', installed=True), []) + self.assertEqual(len(self.installed_db.query('callpath ^mpich', installed=any)), 1) + + # record and its deps have same ref_counts + self.assertEqual(self.installed_db.get_record('callpath ^mpich', installed=any).ref_count, 1) + self.assertEqual(self.installed_db.get_record('mpich').ref_count, 2) + + # remove only dependent of uninstalled callpath record + self.installed_db.remove('mpileaks ^mpich') + + # record and parent 
are completely gone. + self.assertEqual(self.installed_db.query('mpileaks ^mpich', installed=any), []) + self.assertEqual(self.installed_db.query('callpath ^mpich', installed=any), []) + + # mpich ref count updated properly. + mpich_rec = self.installed_db.get_record('mpich') + self.assertEqual(mpich_rec.ref_count, 0) From 21e9d5c5c3d9384e15339ac7cf03a5bf639f8e4d Mon Sep 17 00:00:00 2001 From: miller86 Date: Tue, 27 Oct 2015 16:51:00 -0700 Subject: [PATCH 117/121] generalize one of the cases to handle many more kinds of formatting options --- lib/spack/spack/url.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py index 58838306af4..6adbfe156dd 100644 --- a/lib/spack/spack/url.py +++ b/lib/spack/spack/url.py @@ -209,8 +209,8 @@ def parse_version_offset(path): # e.g. foobar-4.5.1 (r'-((\d+\.)*\d+)$', stem), - # e.g. foobar-4.5.1b - (r'-((\d+\.)*\d+\-?([a-z]|rc|RC|tp|TP)\d*)$', stem), + # e.g. foobar-4.5.1b, foobar4.5RC, foobar.v4.5.1b + (r'[-._]?v?((\d+\.)*\d+[-._]?([a-z]|rc|RC|tp|TP?)\d*)$', stem), # e.g. foobar-4.5.0-beta1, or foobar-4.50-beta (r'-((\d+\.)*\d+-beta(\d+)?)$', stem), From 6db2a580081f0afdacd48ed6163bfbe6baaeb62f Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 30 Oct 2015 07:40:45 +0100 Subject: [PATCH 118/121] Fixed typo : write_lock() -> write_transaction() --- lib/spack/spack/cmd/diy.py | 2 +- lib/spack/spack/cmd/install.py | 2 +- lib/spack/spack/cmd/uninstall.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/cmd/diy.py b/lib/spack/spack/cmd/diy.py index f7998720ac4..9f8a6d39db3 100644 --- a/lib/spack/spack/cmd/diy.py +++ b/lib/spack/spack/cmd/diy.py @@ -59,7 +59,7 @@ def diy(self, args): tty.die("spack diy only takes one spec.") # Take a write lock before checking for existence. 
- with spack.installed_db.write_lock(): + with spack.installed_db.write_transaction(): spec = specs[0] if not spack.db.exists(spec.name): tty.warn("No such package: %s" % spec.name) diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py index ba824bd6583..836a6260c87 100644 --- a/lib/spack/spack/cmd/install.py +++ b/lib/spack/spack/cmd/install.py @@ -71,7 +71,7 @@ def install(parser, args): specs = spack.cmd.parse_specs(args.packages, concretize=True) for spec in specs: package = spack.db.get(spec) - with spack.installed_db.write_lock(): + with spack.installed_db.write_transaction(): package.do_install( keep_prefix=args.keep_prefix, keep_stage=args.keep_stage, diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py index 1dae84444ab..e80f2d2636d 100644 --- a/lib/spack/spack/cmd/uninstall.py +++ b/lib/spack/spack/cmd/uninstall.py @@ -53,7 +53,7 @@ def uninstall(parser, args): if not args.packages: tty.die("uninstall requires at least one package argument.") - with spack.installed_db.write_lock(): + with spack.installed_db.write_transaction(): specs = spack.cmd.parse_specs(args.packages) # For each spec provided, make sure it refers to only one package. From 339da1da3d7f034595344f72f5d95e3fea0087f5 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 4 Nov 2015 07:46:17 -0800 Subject: [PATCH 119/121] Make architecture reflect OS *and* machine. Use Python's platform module. 
--- lib/spack/spack/architecture.py | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py index 05ac5d6f35f..6c874e30be0 100644 --- a/lib/spack/spack/architecture.py +++ b/lib/spack/spack/architecture.py @@ -23,7 +23,8 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os -import subprocess +import re +import platform from llnl.util.lang import memoized @@ -58,15 +59,11 @@ def get_sys_type_from_environment(): return os.environ.get('SYS_TYPE') -def get_sys_type_from_uname(): - """Return the architecture from uname.""" - try: - arch_proc = subprocess.Popen(['uname', '-i'], - stdout=subprocess.PIPE) - arch, _ = arch_proc.communicate() - return arch.strip() - except: - return None +def get_sys_type_from_platform(): + """Return the architecture from Python's platform module.""" + sys_type = platform.system() + '-' + platform.machine() + sys_type = re.sub(r'[^\w-]', '_', sys_type) + return sys_type.lower() @memoized @@ -74,7 +71,7 @@ def sys_type(): """Returns a SysType for the current machine.""" methods = [get_sys_type_from_spack_globals, get_sys_type_from_environment, - get_sys_type_from_uname] + get_sys_type_from_platform] # search for a method that doesn't return None sys_type = None From 0d993947ee5a4c29451cf0ee40f47519e1c3c4d9 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 4 Nov 2015 16:44:33 -0800 Subject: [PATCH 120/121] Fix SPACK-93, SPACK-94, GitHub #150 - `remove_prefix` was modified to remove from the DB, but the package may not have been added to the DB yet when `remove_prefix` is called from `cleanup`. - Made `remove_prefix` a pure utility function (it just removes the prefix) - Added `installed_db.remove()` call only after the `remove_prefix` in `uninstall`. 
--- lib/spack/spack/cmd/{fsck.py => reindex.py} | 5 ++--- lib/spack/spack/database.py | 19 +++++-------------- lib/spack/spack/package.py | 4 ++-- lib/spack/spack/test/database.py | 11 +++++++++-- 4 files changed, 18 insertions(+), 21 deletions(-) rename lib/spack/spack/cmd/{fsck.py => reindex.py} (92%) diff --git a/lib/spack/spack/cmd/fsck.py b/lib/spack/spack/cmd/reindex.py similarity index 92% rename from lib/spack/spack/cmd/fsck.py rename to lib/spack/spack/cmd/reindex.py index 9a3c450dcf9..b584729ea45 100644 --- a/lib/spack/spack/cmd/fsck.py +++ b/lib/spack/spack/cmd/reindex.py @@ -25,8 +25,7 @@ from external import argparse import spack -description = "Correct database irregularities" +description = "Rebuild Spack's package database." -# Very basic version of spack fsck -def fsck(parser, args): +def reindex(parser, args): spack.installed_db.reindex(spack.install_layout) diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index 9ce00a45e96..e0c14a04553 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -93,8 +93,8 @@ class InstallRecord(object): """ def __init__(self, spec, path, installed, ref_count=0): self.spec = spec - self.path = path - self.installed = installed + self.path = str(path) + self.installed = bool(installed) self.ref_count = ref_count def to_dict(self): @@ -173,7 +173,7 @@ def _write_to_yaml(self, stream): # map from per-spec hash code to installation record. installs = dict((k, v.to_dict()) for k, v in self._data.items()) - # databaes includes installation list and version. + # database includes installation list and version. # NOTE: this DB version does not handle multiple installs of # the same spec well. If there are 2 identical specs with @@ -336,15 +336,14 @@ def _write(self): Does no locking. 
""" - temp_name = '%s.%s.temp' % (socket.getfqdn(), os.getpid()) - temp_file = join_path(self._db_dir, temp_name) + temp_file = self._index_path + ( + '.%s.%s.temp' % (socket.getfqdn(), os.getpid())) # Write a temporary database file them move it into place try: with open(temp_file, 'w') as f: self._write_to_yaml(f) os.rename(temp_file, self._index_path) - except: # Clean up temp file if something goes wrong. if os.path.exists(temp_file): @@ -367,14 +366,6 @@ def _read(self): self.reindex(spack.install_layout) - def read(self): - with self.read_transaction(): pass - - - def write(self): - with self.write_transaction(): pass - - def _add(self, spec, path, directory_layout=None): """Add an install record for spec at path to the database. diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index b15d4b2040c..c631a35bf30 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -611,7 +611,6 @@ def url_version(self, version): def remove_prefix(self): """Removes the prefix for a package along with any empty parent directories.""" spack.install_layout.remove_install_directory(self.spec) - spack.installed_db.remove(self.spec) def do_fetch(self): @@ -877,7 +876,7 @@ def build_log_path(self): if self.installed: return spack.install_layout.build_log_path(self.spec) else: - return join_path(self.stage.source_path, 'spack-build.out') + return join_path(self.stage.source_path, 'spack-build.out') @property @@ -934,6 +933,7 @@ def do_uninstall(self, force=False): # Uninstalling in Spack only requires removing the prefix. self.remove_prefix() + spack.installed_db.remove(self.spec) tty.msg("Successfully uninstalled %s." 
% self.spec.short_spec) # Once everything else is done, run post install hooks diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index 3c5926e8408..8416143f2d2 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -148,6 +148,15 @@ def tearDown(self): spack.installed_db = self.spack_installed_db + def test_005_db_exists(self): + """Make sure db cache file exists after creating.""" + index_file = join_path(self.install_path, '.spack-db', 'index.yaml') + lock_file = join_path(self.install_path, '.spack-db', 'lock') + + self.assertTrue(os.path.exists(index_file)) + self.assertTrue(os.path.exists(lock_file)) + + def test_010_all_install_sanity(self): """Ensure that the install layout reflects what we think it does.""" all_specs = spack.install_layout.all_specs() @@ -182,8 +191,6 @@ def test_015_write_and_read(self): with spack.installed_db.write_transaction(): specs = spack.installed_db.query() recs = [spack.installed_db.get_record(s) for s in specs] - spack.installed_db.write() - spack.installed_db.read() for spec, rec in zip(specs, recs): new_rec = spack.installed_db.get_record(spec) From 1c4ef2996219a00e774baafa558880c83ae3d859 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 4 Nov 2015 17:55:43 -0800 Subject: [PATCH 121/121] Add Sphinx package --- var/spack/packages/py-sphinx/package.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 var/spack/packages/py-sphinx/package.py diff --git a/var/spack/packages/py-sphinx/package.py b/var/spack/packages/py-sphinx/package.py new file mode 100644 index 00000000000..ec2e89a098d --- /dev/null +++ b/var/spack/packages/py-sphinx/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PySphinx(Package): + """Sphinx Documentation Generator.""" + homepage = "http://sphinx-doc.org" + url = "https://pypi.python.org/packages/source/S/Sphinx/Sphinx-1.3.1.tar.gz" + + version('1.3.1', '8786a194acf9673464c5455b11fd4332') + + extends('python') + 
+ def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix)