Merge branch 'features/python-modules' into develop
commit 4af85441db
@@ -23,7 +23,7 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 __all__ = ['set_install_permissions', 'install', 'expand_user', 'working_dir',
-           'touch', 'mkdirp', 'force_remove', 'join_path', 'ancestor',
+           'touch', 'touchp', 'mkdirp', 'force_remove', 'join_path', 'ancestor',
            'can_access', 'filter_file', 'change_sed_delimiter', 'is_exe']

 import os
@@ -204,6 +204,12 @@ def touch(path):
     os.utime(path, None)


+def touchp(path):
+    """Like touch, but creates any parent directories needed for the file."""
+    mkdirp(os.path.dirname(path))
+    touch(path)
+
+
 def join_path(prefix, *args):
     path = str(prefix)
     for elt in args:
@@ -291,6 +291,37 @@ def foo(self, **kwargs):
                 % (next(kwargs.iterkeys()), fun.__name__))


+def match_predicate(*args):
+    """Utility function for making string matching predicates.
+
+    Each arg can be a:
+      - regex
+      - list or tuple of regexes
+      - predicate that takes a string.
+
+    This returns a predicate that is true if:
+      - any arg regex matches
+      - any regex in a list or tuple of regexes matches.
+      - any predicate in args matches.
+    """
+    def match(string):
+        for arg in args:
+            if isinstance(arg, basestring):
+                if re.search(arg, string):
+                    return True
+            elif isinstance(arg, list) or isinstance(arg, tuple):
+                if any(re.search(i, string) for i in arg):
+                    return True
+            elif callable(arg):
+                if arg(string):
+                    return True
+            else:
+                raise ValueError("args to match_predicate must be regex, "
+                                 "list of regexes, or callable.")
+        return False
+    return match
+
+
 class RequiredAttributeError(ValueError):
     def __init__(self, message):
         super(RequiredAttributeError, self).__init__(message)
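A quick usage sketch of the new predicate helper; the file names below are invented for illustration and are not part of the commit:

# Build an "ignore" predicate from the three kinds of arguments the helper accepts:
# a single regex, a list of regexes, and an arbitrary callable.
ignore = match_predicate(
    r'\.pyc$',                               # single regex
    [r'^build/', r'^dist/'],                 # list of regexes
    lambda name: name.startswith('.git'))    # callable predicate

assert ignore('spack/util/executable.pyc')       # matches the first regex
assert ignore('.gitignore')                      # matches the callable
assert not ignore('lib/spack/spack/package.py')  # nothing matches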
lib/spack/llnl/util/link_tree.py (new file, 197 lines)
@@ -0,0 +1,197 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""LinkTree class for setting up trees of symbolic links."""
__all__ = ['LinkTree']

import os
import shutil
from llnl.util.filesystem import *

empty_file_name = '.spack-empty'


def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
    """Traverse two filesystem trees simultaneously.

    Walks the LinkTree directory in pre or post order.  Yields each
    file in the source directory with a matching path from the dest
    directory, along with whether the file is a directory.
    e.g., for this tree::

        root/
          a/
            file1
            file2
          b/
            file3

    When called on dest, this yields::

        ('root',         'dest')
        ('root/a',       'dest/a')
        ('root/a/file1', 'dest/a/file1')
        ('root/a/file2', 'dest/a/file2')
        ('root/b',       'dest/b')
        ('root/b/file3', 'dest/b/file3')

    Optional args:

    order=[pre|post] -- Whether to do pre- or post-order traversal.

    ignore=<predicate> -- Predicate indicating which files to ignore.

    follow_nonexisting -- Whether to descend into directories in
                          src that do not exist in dest. Default True.

    follow_links -- Whether to descend into symlinks in src.

    """
    follow_nonexisting = kwargs.get('follow_nonexisting', True)
    follow_links = kwargs.get('follow_link', False)

    # Yield in pre or post order?
    order = kwargs.get('order', 'pre')
    if order not in ('pre', 'post'):
        raise ValueError("Order must be 'pre' or 'post'.")

    # List of relative paths to ignore under the src root.
    ignore = kwargs.get('ignore', lambda filename: False)

    # Don't descend into ignored directories
    if ignore(rel_path):
        return

    source_path = os.path.join(source_root, rel_path)
    dest_path   = os.path.join(dest_root, rel_path)

    # preorder yields directories before children
    if order == 'pre':
        yield (source_path, dest_path)

    for f in os.listdir(source_path):
        source_child = os.path.join(source_path, f)
        dest_child   = os.path.join(dest_path, f)
        rel_child    = os.path.join(rel_path, f)

        # Treat as a directory
        if os.path.isdir(source_child) and (
            follow_links or not os.path.islink(source_child)):

            # When follow_nonexisting isn't set, don't descend into dirs
            # in source that do not exist in dest
            if follow_nonexisting or os.path.exists(dest_child):
                tuples = traverse_tree(source_root, dest_root, rel_child, **kwargs)
                for t in tuples: yield t

        # Treat as a file.
        elif not ignore(os.path.join(rel_path, f)):
            yield (source_child, dest_child)

    if order == 'post':
        yield (source_path, dest_path)


class LinkTree(object):
    """Class to create trees of symbolic links from a source directory.

    LinkTree objects are constructed with a source root.  Their
    methods allow you to create and delete trees of symbolic links
    back to the source tree in specific destination directories.
    Trees comprise symlinks only to files; directories are never
    symlinked to, to prevent the source directory from ever being
    modified.

    """
    def __init__(self, source_root):
        if not os.path.exists(source_root):
            raise IOError("No such file or directory: '%s'", source_root)

        self._root = source_root


    def find_conflict(self, dest_root, **kwargs):
        """Returns the first file in dest that conflicts with src"""
        kwargs['follow_nonexisting'] = False
        for src, dest in traverse_tree(self._root, dest_root, **kwargs):
            if os.path.isdir(src):
                if os.path.exists(dest) and not os.path.isdir(dest):
                    return dest
            elif os.path.exists(dest):
                return dest
        return None


    def merge(self, dest_root, **kwargs):
        """Link all files in src into dest, creating directories if necessary."""
        kwargs['order'] = 'pre'
        for src, dest in traverse_tree(self._root, dest_root, **kwargs):
            if os.path.isdir(src):
                if not os.path.exists(dest):
                    mkdirp(dest)
                    continue

                if not os.path.isdir(dest):
                    raise ValueError("File blocks directory: %s" % dest)

                # mark empty directories so they aren't removed on unmerge.
                if not os.listdir(dest):
                    marker = os.path.join(dest, empty_file_name)
                    touch(marker)

            else:
                assert(not os.path.exists(dest))
                os.symlink(src, dest)


    def unmerge(self, dest_root, **kwargs):
        """Unlink all files in dest that exist in src.

        Unlinks directories in dest if they are empty.

        """
        kwargs['order'] = 'post'
        for src, dest in traverse_tree(self._root, dest_root, **kwargs):
            if os.path.isdir(src):
                # Skip non-existing links.
                if not os.path.exists(dest):
                    continue

                if not os.path.isdir(dest):
                    raise ValueError("File blocks directory: %s" % dest)

                # remove directory if it is empty.
                if not os.listdir(dest):
                    shutil.rmtree(dest, ignore_errors=True)

                # remove empty dir marker if present.
                marker = os.path.join(dest, empty_file_name)
                if os.path.exists(marker):
                    os.remove(marker)

            elif os.path.exists(dest):
                if not os.path.islink(dest):
                    raise ValueError("%s is not a link tree!" % dest)
                os.remove(dest)
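A minimal sketch of how the new class is meant to be driven; the prefixes below are hypothetical and this snippet is not part of the commit:

from llnl.util.link_tree import LinkTree

# Symlink an extension's installed files into a target prefix, then take
# them down again.  Only files are symlinked; directories are created.
tree = LinkTree('/opt/spack/py-nose/prefix')   # hypothetical source prefix
dest = '/opt/spack/python/prefix'              # hypothetical destination prefix

conflict = tree.find_conflict(dest)
if conflict:
    raise RuntimeError("would overwrite %s" % conflict)

tree.merge(dest)     # create directories and symlinks under dest
# ... later ...
tree.unmerge(dest)   # remove the symlinks and any now-empty directories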
@@ -138,7 +138,7 @@
 # should live. This file is overloaded for spack core vs. for packages.
 #
 __all__ = ['Package', 'Version', 'when', 'ver']
-from spack.package import Package
+from spack.package import Package, ExtensionConflictError
 from spack.version import Version, ver
 from spack.multimethod import when
@@ -28,6 +28,7 @@
 calls you can make from within the install() function.
 """
 import os
+import sys
 import shutil
 import multiprocessing
 import platform
@@ -207,3 +208,63 @@ def setup_package(pkg):
     set_compiler_environment_variables(pkg)
     set_build_environment_variables(pkg)
     set_module_variables_for_package(pkg)
+
+    # Allow dependencies to set up environment as well.
+    for dep_spec in pkg.spec.traverse(root=False):
+        dep_spec.package.setup_dependent_environment(
+            pkg.module, dep_spec, pkg.spec)
+
+
+def fork(pkg, function):
+    """Fork a child process to do part of a spack build.
+
+    Arguments:
+
+    pkg -- pkg whose environment we should set up the
+           forked process for.
+    function -- arg-less function to run in the child process.
+
+    Usage:
+       def child_fun():
+           # do stuff
+       build_env.fork(pkg, child_fun)
+
+    Forked processes are run with the build environment set up by
+    spack.build_environment.  This allows package authors to have
+    full control over the environment, etc. without affecting
+    other builds that might be executed in the same spack call.
+
+    If something goes wrong, the child process is expected to print
+    the error and the parent process will exit with error as
+    well. If things go well, the child exits and the parent
+    carries on.
+    """
+    try:
+        pid = os.fork()
+    except OSError, e:
+        raise InstallError("Unable to fork build process: %s" % e)
+
+    if pid == 0:
+        # Give the child process the package's build environment.
+        setup_package(pkg)
+
+        try:
+            # call the forked function.
+            function()
+
+            # Use os._exit here to avoid raising a SystemExit exception,
+            # which interferes with unit tests.
+            os._exit(0)
+        except:
+            # Child doesn't raise or return to main spack code.
+            # Just runs default exception handler and exits.
+            sys.excepthook(*sys.exc_info())
+            os._exit(1)
+
+    else:
+        # Parent process just waits for the child to complete. If the
+        # child exited badly, assume it already printed an appropriate
+        # message. Just make the parent exit with an error code.
+        pid, returncode = os.waitpid(pid, 0)
+        if returncode != 0:
+            sys.exit(1)
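For reference, a stripped-down, standalone sketch of the same fork-and-wait pattern; the names are illustrative and, as one assumption worth noting, the status word returned by os.waitpid is decoded here with os.WEXITSTATUS:

import os
import sys

def run_in_child(work):
    """Run work() in a forked child; make the parent exit non-zero if it fails."""
    pid = os.fork()
    if pid == 0:                      # child process
        try:
            work()
            os._exit(0)               # skip SystemExit so test harnesses aren't disturbed
        except:
            sys.excepthook(*sys.exc_info())
            os._exit(1)
    else:                             # parent process
        _, status = os.waitpid(pid, 0)
        if os.WEXITSTATUS(status) != 0:
            sys.exit(1)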
@@ -121,3 +121,18 @@ def elide_list(line_list, max_num=10):
         return line_list[:max_num-1] + ['...'] + line_list[-1:]
     else:
         return line_list


+def disambiguate_spec(spec):
+    matching_specs = spack.db.get_installed(spec)
+    if not matching_specs:
+        tty.die("Spec '%s' matches no installed packages." % spec)
+
+    elif len(matching_specs) > 1:
+        args = ["%s matches multiple packages." % spec,
+                "Matching packages:"]
+        args += ["  " + str(s) for s in matching_specs]
+        args += ["Use a more specific spec."]
+        tty.die(*args)
+
+    return matching_specs[0]
lib/spack/spack/cmd/activate.py (new file, 56 lines)
@@ -0,0 +1,56 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# (Same LLNL/LGPL license header as above.)
##############################################################################
from external import argparse
import llnl.util.tty as tty
import spack
import spack.cmd

description = "Activate a package extension."

def setup_parser(subparser):
    subparser.add_argument(
        '-f', '--force', action='store_true',
        help="Activate without first activating dependencies.")
    subparser.add_argument(
        'spec', nargs=argparse.REMAINDER, help="spec of package extension to activate.")


def activate(parser, args):
    specs = spack.cmd.parse_specs(args.spec, concretize=True)
    if len(specs) != 1:
        tty.die("activate requires one spec.  %d given." % len(specs))

    # TODO: remove this hack when DAG info is stored in dir layout.
    # This ensures the ext spec is always normalized properly.
    spack.db.get(specs[0])

    spec = spack.cmd.disambiguate_spec(specs[0])
    if not spec.package.is_extension:
        tty.die("%s is not an extension." % spec.name)

    if spec.package.activated:
        tty.die("Package %s is already activated." % specs[0].short_spec)

    spec.package.do_activate()
@@ -23,6 +23,7 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 from external import argparse
+import subprocess

 import llnl.util.tty as tty
lib/spack/spack/cmd/deactivate.py (new file, 99 lines)
@@ -0,0 +1,99 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# (Same LLNL/LGPL license header as above.)
##############################################################################
from external import argparse
import llnl.util.tty as tty

import spack
import spack.cmd
from spack.graph import topological_sort

description = "Deactivate a package extension."

def setup_parser(subparser):
    subparser.add_argument(
        '-f', '--force', action='store_true',
        help="Run deactivation even if spec is NOT currently activated.")
    subparser.add_argument(
        '-a', '--all', action='store_true',
        help="Deactivate all extensions of an extendable package, or "
             "deactivate an extension AND its dependencies.")
    subparser.add_argument(
        'spec', nargs=argparse.REMAINDER, help="spec of package extension to deactivate.")


def deactivate(parser, args):
    specs = spack.cmd.parse_specs(args.spec, concretize=True)
    if len(specs) != 1:
        tty.die("deactivate requires one spec.  %d given." % len(specs))

    # TODO: remove this hack when DAG info is stored properly.
    # This ensures the ext spec is always normalized properly.
    spack.db.get(specs[0])

    spec = spack.cmd.disambiguate_spec(specs[0])
    pkg = spec.package

    if args.all:
        if pkg.extendable:
            tty.msg("Deactivating all extensions of %s" % pkg.spec.short_spec)
            ext_pkgs = spack.db.installed_extensions_for(spec)
            for ext_pkg in ext_pkgs:
                ext_pkg.spec.normalize()
                if ext_pkg.activated:
                    ext_pkg.do_deactivate(force=True)

        elif pkg.is_extension:
            # TODO: store DAG info properly (see above)
            spec.normalize()

            tty.msg("Deactivating %s and all dependencies." % pkg.spec.short_spec)

            topo_order = topological_sort(spec)
            index = spec.index()

            for name in topo_order:
                espec = index[name]
                epkg = espec.package

                # TODO: store DAG info properly (see above)
                epkg.spec.normalize()

                if epkg.extends(pkg.extendee_spec):
                    if epkg.activated or args.force:
                        epkg.do_deactivate(force=args.force)

        else:
            tty.die("spack deactivate --all requires an extendable package or an extension.")

    else:
        if not pkg.is_extension:
            tty.die("spack deactivate requires an extension.",
                    "Did you mean 'spack deactivate --all'?")

        if not args.force and not spec.package.activated:
            tty.die("Package %s is not activated." % specs[0].short_spec)

        spec.package.do_deactivate(force=args.force)
lib/spack/spack/cmd/extensions.py (new file, 98 lines)
@@ -0,0 +1,98 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# (Same LLNL/LGPL license header as above.)
##############################################################################
import sys
from external import argparse

import llnl.util.tty as tty
from llnl.util.tty.colify import colify

import spack
import spack.cmd
import spack.cmd.find

description = "List extensions for package."

def setup_parser(subparser):
    format_group = subparser.add_mutually_exclusive_group()
    format_group.add_argument(
        '-l', '--long', action='store_const', dest='mode', const='long',
        help='Show dependency hashes as well as versions.')
    format_group.add_argument(
        '-p', '--paths', action='store_const', dest='mode', const='paths',
        help='Show paths to extension install directories')
    format_group.add_argument(
        '-d', '--deps', action='store_const', dest='mode', const='deps',
        help='Show full dependency DAG of extensions')

    subparser.add_argument(
        'spec', nargs=argparse.REMAINDER, help='Spec of package to list extensions for')


def extensions(parser, args):
    if not args.spec:
        tty.die("extensions requires a package spec.")

    # Checks
    spec = spack.cmd.parse_specs(args.spec)
    if len(spec) > 1:
        tty.die("Can only list extensions for one package.")

    if not spec[0].package.extendable:
        tty.die("%s is not an extendable package." % spec[0].name)

    spec = spack.cmd.disambiguate_spec(spec[0])

    if not spec.package.extendable:
        tty.die("%s does not have extensions." % spec.short_spec)

    if not args.mode:
        args.mode = 'short'

    # List package names of extensions
    extensions = spack.db.extensions_for(spec)
    if not extensions:
        tty.msg("%s has no extensions." % spec.cshort_spec)
        return
    tty.msg(spec.cshort_spec)
    tty.msg("%d extensions:" % len(extensions))
    colify(ext.name for ext in extensions)

    # List specs of installed extensions.
    installed = [s.spec for s in spack.db.installed_extensions_for(spec)]
    print
    if not installed:
        tty.msg("None installed.")
        return
    tty.msg("%d installed:" % len(installed))
    spack.cmd.find.display_specs(installed, mode=args.mode)

    # List specs of activated extensions.
    activated = spack.install_layout.extension_map(spec)
    print
    if not activated:
        tty.msg("None activated.")
        return
    tty.msg("%d currently activated:" % len(activated))
    spack.cmd.find.display_specs(activated.values(), mode=args.mode)
@@ -41,13 +41,13 @@
 def setup_parser(subparser):
     format_group = subparser.add_mutually_exclusive_group()
     format_group.add_argument(
-        '-l', '--long', action='store_true', dest='long',
+        '-l', '--long', action='store_const', dest='mode', const='long',
         help='Show dependency hashes as well as versions.')
     format_group.add_argument(
-        '-p', '--paths', action='store_true', dest='paths',
+        '-p', '--paths', action='store_const', dest='mode', const='paths',
         help='Show paths to package install directories')
     format_group.add_argument(
-        '-d', '--deps', action='store_true', dest='full_deps',
+        '-d', '--deps', action='store_const', dest='mode', const='deps',
         help='Show full dependency DAG of installed packages')

     subparser.add_argument(
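The switch from three independent booleans to a single mode value relies on argparse's store_const action in a mutually exclusive group; a small self-contained sketch of that pattern (the option names are just for illustration):

import argparse

parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group()
group.add_argument('-l', '--long',  action='store_const', dest='mode', const='long')
group.add_argument('-p', '--paths', action='store_const', dest='mode', const='paths')
group.add_argument('-d', '--deps',  action='store_const', dest='mode', const='deps')

args = parser.parse_args(['--paths'])
print(args.mode)    # 'paths'; with no flag given, args.mode is None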
@@ -55,6 +55,50 @@ def setup_parser(subparser):
         help='optional specs to filter results')


+def display_specs(specs, **kwargs):
+    mode = kwargs.get('mode', 'short')
+
+    # Make a dict with specs keyed by architecture and compiler.
+    index = index_by(specs, ('architecture', 'compiler'))
+
+    # Traverse the index and print out each package
+    for i, (architecture, compiler) in enumerate(sorted(index)):
+        if i > 0: print
+
+        header = "%s{%s} / %s{%s}" % (
+            spack.spec.architecture_color, architecture,
+            spack.spec.compiler_color, compiler)
+        tty.hline(colorize(header), char='-')
+
+        specs = index[(architecture,compiler)]
+        specs.sort()
+
+        abbreviated = [s.format('$_$@$+', color=True) for s in specs]
+        if mode == 'paths':
+            # Print one spec per line along with prefix path
+            width = max(len(s) for s in abbreviated)
+            width += 2
+            format = "    %-{}s%s".format(width)
+
+            for abbrv, spec in zip(abbreviated, specs):
+                print format % (abbrv, spec.prefix)
+
+        elif mode == 'deps':
+            for spec in specs:
+                print spec.tree(indent=4, format='$_$@$+$#', color=True),
+
+        elif mode in ('short', 'long'):
+            fmt = '$-_$@$+'
+            if mode == 'long':
+                fmt += '$#'
+            colify(s.format(fmt, color=True) for s in specs)
+
+        else:
+            raise ValueError(
+                "Invalid mode for display_specs: %s. Must be one of (paths, deps, short)." % mode)
+
+
 def find(parser, args):
     # Filter out specs that don't exist.
     query_specs = spack.cmd.parse_specs(args.query_specs)
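display_specs builds its 'paths' layout with a format string whose field width is computed at runtime; a tiny sketch of that trick, with invented names and prefixes:

names    = ['python@2.7.8', 'py-nose@1.3.4']
prefixes = ['/opt/spack/python', '/opt/spack/py-nose']

width = max(len(n) for n in names) + 2
fmt = "    %-{}s%s".format(width)     # e.g. becomes "    %-15s%s"

for name, prefix in zip(names, prefixes):
    print(fmt % (name, prefix))       # left-justified name, then its install prefix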
@@ -76,36 +120,10 @@ def find(parser, args):
     results = [set(spack.db.get_installed(qs)) for qs in query_specs]
     specs = set.union(*results)

-    # Make a dict with specs keyed by architecture and compiler.
-    index = index_by(specs, ('architecture', 'compiler'))
-
-    # Traverse the index and print out each package
-    for i, (architecture, compiler) in enumerate(sorted(index)):
-        if i > 0: print
-
-        header = "%s{%s} / %s{%s}" % (
-            spack.spec.architecture_color, architecture,
-            spack.spec.compiler_color, compiler)
-        tty.hline(colorize(header), char='-')
-
-        specs = index[(architecture,compiler)]
-        specs.sort()
-
-        abbreviated = [s.format('$_$@$+', color=True) for s in specs]
-        if args.paths:
-            # Print one spec per line along with prefix path
-            width = max(len(s) for s in abbreviated)
-            width += 2
-            format = "    %-{}s%s".format(width)
-
-            for abbrv, spec in zip(abbreviated, specs):
-                print format % (abbrv, spec.prefix)
-
-        elif args.full_deps:
-            for spec in specs:
-                print spec.tree(indent=4, format='$_$@$+', color=True),
-        else:
-            fmt = '$-_$@$+'
-            if args.long:
-                fmt += '$#'
-            colify(s.format(fmt, color=True) for s in specs)
+    if not args.mode:
+        args.mode = 'short'
+
+    if sys.stdout.isatty():
+        tty.msg("%d installed packages." % len(specs))
+    display_specs(specs, mode=args.mode)
@@ -77,24 +77,16 @@ def location(parser, args):
         tty.die("You must supply a spec.")
     if len(specs) != 1:
         tty.die("Too many specs.  Supply only one.")
-    spec = specs[0]

     if args.install_dir:
         # install_dir command matches against installed specs.
-        matching_specs = spack.db.get_installed(spec)
-        if not matching_specs:
-            tty.die("Spec '%s' matches no installed packages." % spec)
-
-        elif len(matching_specs) > 1:
-            args = ["%s matches multiple packages." % spec,
-                    "Matching packages:"]
-            args += ["  " + str(s) for s in matching_specs]
-            args += ["Use a more specific spec."]
-            tty.die(*args)
-
-        print matching_specs[0].prefix
-
-    elif args.package_dir:
+        spec = spack.cmd.disambiguate_spec(specs[0])
+        print spec.prefix
+
+    else:
+        spec = specs[0]
+
+        if args.package_dir:
             # This one just needs the spec name.
             print join_path(spack.db.root, spec.name)

@@ -111,3 +103,4 @@ def location(parser, args):
             tty.die("Build directory does not exist yet. Run this to create it:",
                     "spack stage " + " ".join(args.spec))
         print pkg.stage.source_path
@@ -65,7 +65,6 @@ def uninstall(parser, args):
                     " b) use a more specific spec."]
             tty.die(*args)
-
         if len(matching_specs) == 0:
             tty.die("%s does not match any installed packages." % spec)
@@ -27,9 +27,11 @@
 import exceptions
 import hashlib
 import shutil
+import tempfile
 from contextlib import closing

 import llnl.util.tty as tty
+from llnl.util.lang import memoized
 from llnl.util.filesystem import join_path, mkdirp

 import spack
|
|||||||
self.root = root
|
self.root = root
|
||||||
|
|
||||||
|
|
||||||
|
@property
|
||||||
|
def hidden_file_paths(self):
|
||||||
|
"""Return a list of hidden files used by the directory layout.
|
||||||
|
|
||||||
|
Paths are relative to the root of an install directory.
|
||||||
|
|
||||||
|
If the directory layout uses no hidden files to maintain
|
||||||
|
state, this should return an empty container, e.g. [] or (,).
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
|
||||||
def all_specs(self):
|
def all_specs(self):
|
||||||
"""To be implemented by subclasses to traverse all specs for which there is
|
"""To be implemented by subclasses to traverse all specs for which there is
|
||||||
a directory within the root.
|
a directory within the root.
|
||||||
@@ -71,6 +86,42 @@ def make_path_for_spec(self, spec):
         raise NotImplementedError()


+    def extension_map(self, spec):
+        """Get a dict of currently installed extension packages for a spec.
+
+        Dict maps { name : extension_spec }
+        Modifying dict does not affect internals of this layout.
+        """
+        raise NotImplementedError()
+
+
+    def check_extension_conflict(self, spec, ext_spec):
+        """Ensure that ext_spec can be activated in spec.
+
+        If not, raise ExtensionAlreadyInstalledError or
+        ExtensionConflictError.
+        """
+        raise NotImplementedError()
+
+
+    def check_activated(self, spec, ext_spec):
+        """Ensure that ext_spec can be removed from spec.
+
+        If not, raise NoSuchExtensionError.
+        """
+        raise NotImplementedError()
+
+
+    def add_extension(self, spec, ext_spec):
+        """Add to the list of currently installed extensions."""
+        raise NotImplementedError()
+
+
+    def remove_extension(self, spec, ext_spec):
+        """Remove from the list of currently installed extensions."""
+        raise NotImplementedError()
+
+
     def path_for_spec(self, spec):
         """Return an absolute path from the root to a directory for the spec."""
         _check_concrete(spec)
@@ -81,12 +132,17 @@ def path_for_spec(self, spec):


     def remove_path_for_spec(self, spec):
-        """Removes a prefix and any empty parent directories from the root."""
+        """Removes a prefix and any empty parent directories from the root.
+
+        Raises RemoveFailedError if something goes wrong.
+        """
         path = self.path_for_spec(spec)
         assert(path.startswith(self.root))

         if os.path.exists(path):
-            shutil.rmtree(path, True)
+            try:
+                shutil.rmtree(path)
+            except exceptions.OSError, e:
+                raise RemoveFailedError(spec, path, e)

         path = os.path.dirname(path)
         while path != self.root:
@@ -135,8 +191,17 @@ def __init__(self, root, **kwargs):
            to make each hash unique.
         """
         spec_file_name = kwargs.get('spec_file_name', '.spec')
+        extension_file_name = kwargs.get('extension_file_name', '.extensions')
         super(SpecHashDirectoryLayout, self).__init__(root)
         self.spec_file_name = spec_file_name
+        self.extension_file_name = extension_file_name
+
+        # Cache of already written/read extension maps.
+        self._extension_maps = {}
+
+    @property
+    def hidden_file_paths(self):
+        return ('.spec', '.extensions')


     def relative_path_for_spec(self, spec):
@@ -159,6 +224,9 @@ def read_spec(self, path):

         if all(spack.db.exists(s.name) for s in spec.traverse()):
             copy = spec.copy()
+
+            # TODO: It takes a lot of time to normalize every spec on read.
+            # TODO: Storing graph info with spec files would fix this.
             copy.normalize()
             if copy.concrete:
                 return copy   # These are specs spack still understands.
@@ -212,17 +280,116 @@ def make_path_for_spec(self, spec):
             self.write_spec(spec, spec_file_path)


+    @memoized
     def all_specs(self):
         if not os.path.isdir(self.root):
-            return
+            return []

+        specs = []
         for path in traverse_dirs_at_depth(self.root, 3):
             arch, compiler, last_dir = path
             spec_file_path = join_path(
                 self.root, arch, compiler, last_dir, self.spec_file_name)
             if os.path.exists(spec_file_path):
                 spec = self.read_spec(spec_file_path)
-                yield spec
+                specs.append(spec)
+        return specs
+
+
+    def extension_file_path(self, spec):
+        """Gets full path to an installed package's extension file"""
+        _check_concrete(spec)
+        return join_path(self.path_for_spec(spec), self.extension_file_name)
+
+
+    def _extension_map(self, spec):
+        """Get a dict<name -> spec> for all extensions currently
+           installed for this package."""
+        _check_concrete(spec)
+
+        if not spec in self._extension_maps:
+            path = self.extension_file_path(spec)
+            if not os.path.exists(path):
+                self._extension_maps[spec] = {}
+
+            else:
+                exts = {}
+                with closing(open(path)) as ext_file:
+                    for line in ext_file:
+                        try:
+                            spec = Spec(line.strip())
+                            exts[spec.name] = spec
+                        except spack.error.SpackError, e:
+                            # TODO: do something better here -- should be
+                            # resilient to corrupt files.
+                            raise InvalidExtensionSpecError(str(e))
+                self._extension_maps[spec] = exts
+
+        return self._extension_maps[spec]
+
+
+    def extension_map(self, spec):
+        """Defensive copying version of _extension_map() for external API."""
+        return self._extension_map(spec).copy()
+
+
+    def check_extension_conflict(self, spec, ext_spec):
+        exts = self._extension_map(spec)
+        if ext_spec.name in exts:
+            installed_spec = exts[ext_spec.name]
+            if ext_spec == installed_spec:
+                raise ExtensionAlreadyInstalledError(spec, ext_spec)
+            else:
+                raise ExtensionConflictError(spec, ext_spec, installed_spec)
+
+
+    def check_activated(self, spec, ext_spec):
+        exts = self._extension_map(spec)
+        if (not ext_spec.name in exts) or (ext_spec != exts[ext_spec.name]):
+            raise NoSuchExtensionError(spec, ext_spec)
+
+
+    def _write_extensions(self, spec, extensions):
+        path = self.extension_file_path(spec)
+
+        # Create a temp file in the same directory as the actual file.
+        dirname, basename = os.path.split(path)
+        tmp = tempfile.NamedTemporaryFile(
+            prefix=basename, dir=dirname, delete=False)
+
+        # Write temp file.
+        with closing(tmp):
+            for extension in sorted(extensions.values()):
+                tmp.write("%s\n" % extension)
+
+        # Atomic update by moving tmpfile on top of old one.
+        os.rename(tmp.name, path)
+
+
+    def add_extension(self, spec, ext_spec):
+        _check_concrete(spec)
+        _check_concrete(ext_spec)
+
+        # Check whether it's already installed or if it's a conflict.
+        exts = self._extension_map(spec)
+        self.check_extension_conflict(spec, ext_spec)
+
+        # do the actual adding.
+        exts[ext_spec.name] = ext_spec
+        self._write_extensions(spec, exts)
+
+
+    def remove_extension(self, spec, ext_spec):
+        _check_concrete(spec)
+        _check_concrete(ext_spec)
+
+        # Make sure it's installed before removing.
+        exts = self._extension_map(spec)
+        self.check_activated(spec, ext_spec)
+
+        # do the actual removing.
+        del exts[ext_spec.name]
+        self._write_extensions(spec, exts)
+
+
 class DirectoryLayoutError(SpackError):
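_write_extensions uses the usual write-to-temp-then-rename idiom so readers never see a partially written extensions file; a generic, self-contained sketch of that idiom (the function and file names are illustrative, not part of the commit):

import os
import tempfile
from contextlib import closing

def atomic_write_lines(path, lines):
    """Rewrite path with the given lines so the update is atomic on POSIX filesystems."""
    dirname, basename = os.path.split(path)
    # The temp file must live in the same directory (same filesystem)
    # for os.rename to be an atomic replacement.
    tmp = tempfile.NamedTemporaryFile(
        mode='w', prefix=basename, dir=dirname, delete=False)
    with closing(tmp):
        for line in lines:
            tmp.write("%s\n" % line)
    os.rename(tmp.name, path)    # replaces the old file in one step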
@@ -239,6 +406,15 @@ def __init__(self, installed_spec, new_spec):
             % installed_spec, new_spec)


+class RemoveFailedError(DirectoryLayoutError):
+    """Raised when a DirectoryLayout cannot remove an install prefix."""
+    def __init__(self, installed_spec, prefix, error):
+        super(RemoveFailedError, self).__init__(
+            'Could not remove prefix %s for %s : %s'
+            % prefix, installed_spec.short_spec, error)
+        self.cause = error
+
+
 class InconsistentInstallDirectoryError(DirectoryLayoutError):
     """Raised when a package seems to be installed to the wrong place."""
     def __init__(self, message):
|
|||||||
def __init__(self, path):
|
def __init__(self, path):
|
||||||
super(InstallDirectoryAlreadyExistsError, self).__init__(
|
super(InstallDirectoryAlreadyExistsError, self).__init__(
|
||||||
"Install path %s already exists!")
|
"Install path %s already exists!")
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidExtensionSpecError(DirectoryLayoutError):
|
||||||
|
"""Raised when an extension file has a bad spec in it."""
|
||||||
|
def __init__(self, message):
|
||||||
|
super(InvalidExtensionSpecError, self).__init__(message)
|
||||||
|
|
||||||
|
|
||||||
|
class ExtensionAlreadyInstalledError(DirectoryLayoutError):
|
||||||
|
"""Raised when an extension is added to a package that already has it."""
|
||||||
|
def __init__(self, spec, ext_spec):
|
||||||
|
super(ExtensionAlreadyInstalledError, self).__init__(
|
||||||
|
"%s is already installed in %s" % (ext_spec.short_spec, spec.short_spec))
|
||||||
|
|
||||||
|
|
||||||
|
class ExtensionConflictError(DirectoryLayoutError):
|
||||||
|
"""Raised when an extension is added to a package that already has it."""
|
||||||
|
def __init__(self, spec, ext_spec, conflict):
|
||||||
|
super(ExtensionConflictError, self).__init__(
|
||||||
|
"%s cannot be installed in %s because it conflicts with %s."% (
|
||||||
|
ext_spec.short_spec, spec.short_spec, conflict.short_spec))
|
||||||
|
|
||||||
|
|
||||||
|
class NoSuchExtensionError(DirectoryLayoutError):
|
||||||
|
"""Raised when an extension isn't there on deactivate."""
|
||||||
|
def __init__(self, spec, ext_spec):
|
||||||
|
super(NoSuchExtensionError, self).__init__(
|
||||||
|
"%s cannot be removed from %s because it's not activated."% (
|
||||||
|
ext_spec.short_spec, spec.short_spec))
|
||||||
|
|
||||||
|
|
||||||
|
@@ -31,7 +31,9 @@

    Currently the following hooks are supported:

+      * pre_install()
       * post_install()
+      * pre_uninstall()
       * post_uninstall()

    This can be used to implement support for things like module
@@ -70,5 +72,8 @@ def __call__(self, pkg):
 #
 # Define some functions that can be called to fire off hooks.
 #
+pre_install    = HookRunner('pre_install')
 post_install   = HookRunner('post_install')
+
+pre_uninstall  = HookRunner('pre_uninstall')
 post_uninstall = HookRunner('post_uninstall')
lib/spack/spack/hooks/extensions.py (new file, 36 lines)
@@ -0,0 +1,36 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# (Same LLNL/LGPL license header as above.)
##############################################################################

import spack


def pre_uninstall(pkg):
    # Need to do this b/c uninstall does not automatically do it.
    # TODO: store full graph info in stored .spec file.
    pkg.spec.normalize()

    if pkg.is_extension:
        if pkg.activated:
            pkg.do_deactivate(force=True)
@@ -49,6 +49,7 @@
 import re
 import textwrap
 import shutil
+from glob import glob
 from contextlib import closing

 import llnl.util.tty as tty
@@ -123,6 +124,13 @@ def add_path(path_name, directory):
             if os.path.isdir(directory):
                 add_path(var, directory)

+        # Add python path unless it's an actual python installation
+        # TODO: is there a better way to do this?
+        if self.spec.name != 'python':
+            site_packages = glob(join_path(self.spec.prefix.lib, "python*/site-packages"))
+            if site_packages:
+                add_path('PYTHONPATH', site_packages[0])
+
         # short description is just the package + version
         # TODO: maybe packages can optionally provide it.
         self.short_description = self.spec.format("$_ $@")
@@ -45,6 +45,7 @@
 from StringIO import StringIO

 import llnl.util.tty as tty
+from llnl.util.link_tree import LinkTree
 from llnl.util.filesystem import *
 from llnl.util.lang import *
@@ -320,12 +321,21 @@ class SomePackage(Package):
     """Patches to apply to newly expanded source, if any."""
     patches = {}

+    """Specs of package this one extends, or None.
+
+    Currently, packages can extend at most one other package.
+    """
+    extendees = {}
+
     #
     # These are default values for instance variables.
     #
     """By default we build in parallel.  Subclasses can override this."""
     parallel = True

+    """Most packages are NOT extendable. Set to True if you want extensions."""
+    extendable = False
+
+
     def __init__(self, spec):
         # this determines how the package should be built.
@@ -395,6 +405,9 @@ def ensure_has_dict(attr_name):
         self._fetch_time = 0.0
         self._total_time = 0.0

+        if self.is_extension:
+            spack.db.get(self.extendee_spec)._check_extendable()
+

     @property
     def version(self):
@@ -481,6 +494,47 @@ def fetcher(self, f):
         self._fetcher = f


+    @property
+    def extendee_spec(self):
+        """Spec of the extendee of this package, or None if it is not an extension."""
+        if not self.extendees:
+            return None
+        name = next(iter(self.extendees))
+        if not name in self.spec:
+            spec, kwargs = self.extendees[name]
+            return spec
+
+        # Need to do this to get the concrete version of the spec
+        return self.spec[name]
+
+
+    @property
+    def extendee_args(self):
+        """Keyword arguments stored for the extendee of this package, or None
+           if it is not an extension."""
+        if not self.extendees:
+            return None
+        name = next(iter(self.extendees))
+        return self.extendees[name][1]
+
+
+    @property
+    def is_extension(self):
+        return len(self.extendees) > 0
+
+
+    def extends(self, spec):
+        return (spec.name in self.extendees and
+                spec.satisfies(self.extendees[spec.name][0]))
+
+
+    @property
+    def activated(self):
+        if not self.is_extension:
+            raise ValueError("activated called on package that is not an extension.")
+        exts = spack.install_layout.extension_map(self.extendee_spec)
+        return (self.name in exts) and (exts[self.name] == self.spec)
+
+
     def preorder_traversal(self, visited=None, **kwargs):
         """This does a preorder traversal of the package's dependence DAG."""
         virtual = kwargs.get("virtual", False)
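For orientation, the shape of the data these new properties consume, with hypothetical values; the directive that actually populates extendees is outside this diff:

# extendees maps extendee name -> (Spec it must satisfy, kwargs stored with it)
extendees = {'python': (Spec('python'), {})}   # hypothetical example

# Given a package class with that attribute:
#   is_extension  -> True, because extendees is non-empty
#   extendee_spec -> self.spec['python'] once concrete, else the stored Spec
#   extendee_args -> {}   (the stored kwargs)
#   extends(s)    -> True when s.name == 'python' and s satisfies the stored Spec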
@@ -741,34 +795,27 @@ def do_install(self, **kwargs):
         tty.msg("Installing %s" % self.name)
 
         if not ignore_deps:
-            self.do_install_dependencies()
+            self.do_install_dependencies(**kwargs)
 
         start_time = time.time()
         if not fake_install:
             self.do_patch()
 
-        # Fork a child process to do the build. This allows each
-        # package authors to have full control over their environment,
-        # etc. without offecting other builds that might be executed
-        # in the same spack call.
-        try:
-            pid = os.fork()
-        except OSError, e:
-            raise InstallError("Unable to fork build process: %s" % e)
-
-        if pid == 0:
-            try:
-                tty.msg("Building %s." % self.name)
-
         # create the install directory. The install layout
         # handles this in case so that it can use whatever
         # package naming scheme it likes.
         spack.install_layout.make_path_for_spec(self.spec)
 
+        def real_work():
+            try:
+                tty.msg("Building %s." % self.name)
+
+                # Run the pre-install hook in the child process after
+                # the directory is created.
+                spack.hooks.pre_install(self)
+
                 # Set up process's build environment before running install.
                 self.stage.chdir_to_source()
-                build_env.setup_package(self)
 
                 if fake_install:
                     self.do_fake_install()
                 else:
@@ -776,10 +823,7 @@ def do_install(self, **kwargs):
                     self.install(self.spec, self.prefix)
 
                 # Ensure that something was actually installed.
-                if not os.listdir(self.prefix):
-                    raise InstallError(
-                        "Install failed for %s. Nothing was installed!"
-                        % self.name)
+                self._sanity_check_install()
 
                 # On successful install, remove the stage.
                 if not keep_stage:
@@ -790,14 +834,10 @@ def do_install(self, **kwargs):
                 build_time = self._total_time - self._fetch_time
 
                 tty.msg("Successfully installed %s." % self.name,
-                        "Fetch: %.2f sec. Build: %.2f sec. Total: %.2f sec."
-                        % (self._fetch_time, build_time, self._total_time))
+                        "Fetch: %s. Build: %s. Total: %s."
+                        % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)))
                 print_pkg(self.prefix)
 
-                # Use os._exit here to avoid raising a SystemExit exception,
-                # which interferes with unit tests.
-                os._exit(0)
-
             except:
                 if not keep_prefix:
                     # If anything goes wrong, remove the install prefix
@@ -807,28 +847,26 @@ def do_install(self, **kwargs):
                               "Spack will think this package is installed." +
                               "Manually remove this directory to fix:",
                               self.prefix)
+                raise
 
-            # Child doesn't raise or return to main spack code.
-            # Just runs default exception handler and exits.
-            sys.excepthook(*sys.exc_info())
-            os._exit(1)
-
-        # Parent process just waits for the child to complete. If the
-        # child exited badly, assume it already printed an appropriate
-        # message. Just make the parent exit with an error code.
-        pid, returncode = os.waitpid(pid, 0)
-        if returncode != 0:
-            sys.exit(1)
-
+        build_env.fork(self, real_work)
 
         # Once everything else is done, run post install hooks
         spack.hooks.post_install(self)
 
 
-    def do_install_dependencies(self):
+    def _sanity_check_install(self):
+        installed = set(os.listdir(self.prefix))
+        installed.difference_update(spack.install_layout.hidden_file_paths)
+        if not installed:
+            raise InstallError(
+                "Install failed for %s. Nothing was installed!" % self.name)
+
+
+    def do_install_dependencies(self, **kwargs):
         # Pass along paths of dependencies here
         for dep in self.spec.dependencies.values():
-            dep.package.do_install()
+            dep.package.do_install(**kwargs)
 
 
     @property
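
The hunks above replace do_install()'s inline os.fork()/waitpid() plumbing with a single call to build_env.fork(self, real_work). The helper's definition is not part of the hunks shown here; the following is only a rough sketch, assuming it behaves like the removed code (run the function in a forked child, make the parent fail when the child does) and that InstallError comes from the surrounding module:

    import os
    import sys

    def fork(pkg, function):
        # Sketch only -- not the actual spack.build_environment.fork().
        try:
            pid = os.fork()
        except OSError, e:
            raise InstallError("Unable to fork build process: %s" % e)

        if pid == 0:
            # Child: run the build function, then exit without returning to
            # the caller (os._exit avoids raising SystemExit, which would
            # interfere with unit tests).
            try:
                function()
                os._exit(0)
            except:
                sys.excepthook(*sys.exc_info())
                os._exit(1)
        else:
            # Parent: wait for the child and turn a bad status into an error.
            pid, returncode = os.waitpid(pid, 0)
            if returncode != 0:
                sys.exit(1)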
@@ -840,6 +878,32 @@ def module(self):
                               fromlist=[self.__class__.__name__])
 
 
+    def setup_dependent_environment(self, module, spec, dependent_spec):
+        """Called before the install() method of dependents.
+
+        Default implementation does nothing, but this can be
+        overridden by an extendable package to set up the install
+        environment for its extensions. This is useful if there are
+        some common steps to installing all extensions for a
+        certain package.
+
+        Some examples:
+
+        1. Installing python modules generally requires PYTHONPATH to
+           point to the lib/pythonX.Y/site-packages directory in the
+           module's install prefix. This could set that variable.
+
+        2. Extensions often need to invoke the 'python' interpreter
+           from the Python installation being extended. This routine
+           can put a 'python' Executable object in the module scope for
+           the extension package to simplify extension installs.
+
+        3. A lot of Qt extensions need QTDIR set. This can be used to do that.
+
+        """
+        pass
+
+
     def install(self, spec, prefix):
         """Package implementations override this with their own build configuration."""
         raise InstallError("Package %s provides no install method!" % self.name)
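
The docstring above only describes the hook; the extendable Python package that would implement it is not part of these hunks. As a purely illustrative sketch (names, paths, and the 'python2.7' directory are assumptions, not taken from the commit), an extendee might override it like so:

    import os
    from spack import *

    class Python(Package):
        extendable = True   # hypothetical; only the hook below matters here

        def setup_dependent_environment(self, module, spec, dependent_spec):
            # Give extension install() code a `python` command to run.
            module.python = Executable(join_path(spec.prefix.bin, 'python'))

            # Point PYTHONPATH at the extension's own site-packages so that
            # `setup.py install --prefix=...` can import what it installs.
            # 'python2.7' stands in for the real versioned directory name.
            os.environ['PYTHONPATH'] = join_path(
                dependent_spec.prefix.lib, 'python2.7', 'site-packages')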
@@ -859,6 +923,10 @@ def do_uninstall(self, **kwargs):
                 "The following installed packages depend on it: %s" %
                 ' '.join(formatted_deps))
 
+        # Pre-uninstall hook runs first.
+        spack.hooks.pre_uninstall(self)
+
+        # Uninstalling in Spack only requires removing the prefix.
         self.remove_prefix()
         tty.msg("Successfully uninstalled %s." % self.spec.short_spec)
 
@@ -866,6 +934,119 @@ def do_uninstall(self, **kwargs):
         spack.hooks.post_uninstall(self)
 
 
+    def _check_extendable(self):
+        if not self.extendable:
+            raise ValueError("Package %s is not extendable!" % self.name)
+
+
+    def _sanity_check_extension(self):
+        if not self.is_extension:
+            raise ValueError("This package is not an extension.")
+        extendee_package = self.extendee_spec.package
+        extendee_package._check_extendable()
+
+        if not extendee_package.installed:
+            raise ValueError("Can only (de)activate extensions for installed packages.")
+        if not self.installed:
+            raise ValueError("Extensions must first be installed.")
+        if not self.extendee_spec.name in self.extendees:
+            raise ValueError("%s does not extend %s!" % (self.name, self.extendee.name))
+
+
+    def do_activate(self, **kwargs):
+        """Called on an extension to invoke the extendee's activate method.
+
+        Commands should call this routine, and should not call
+        activate() directly.
+        """
+        self._sanity_check_extension()
+        force = kwargs.get('force', False)
+
+        # TODO: get rid of this normalize - DAG handling.
+        self.spec.normalize()
+
+        spack.install_layout.check_extension_conflict(self.extendee_spec, self.spec)
+
+        if not force:
+            for spec in self.spec.traverse(root=False):
+                if spec.package.extends(self.extendee_spec):
+                    # TODO: fix this normalize() requirement -- revisit DAG handling.
+                    spec.package.spec.normalize()
+                    if not spec.package.activated:
+                        spec.package.do_activate(**kwargs)
+
+        self.extendee_spec.package.activate(self, **self.extendee_args)
+
+        spack.install_layout.add_extension(self.extendee_spec, self.spec)
+        tty.msg("Activated extension %s for %s."
+                % (self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@")))
+
+
+    def activate(self, extension, **kwargs):
+        """Symlinks all files from the extension into extendee's install dir.
+
+        Package authors can override this method to support other
+        extension mechanisms. Spack internals (commands, hooks, etc.)
+        should call do_activate() method so that proper checks are
+        always executed.
+        """
+        def ignore(filename):
+            return (filename in spack.install_layout.hidden_file_paths or
+                    kwargs.get('ignore', lambda f: False)(filename))
+
+        tree = LinkTree(extension.prefix)
+        conflict = tree.find_conflict(self.prefix, ignore=ignore)
+        if conflict:
+            raise ExtensionConflictError(conflict)
+        tree.merge(self.prefix, ignore=ignore)
+
+
+    def do_deactivate(self, **kwargs):
+        """Called on the extension to invoke extendee's deactivate() method."""
+        self._sanity_check_extension()
+        force = kwargs.get('force', False)
+
+        # Allow a force deactivate to happen. This can unlink
+        # spurious files if something was corrupted.
+        if not force:
+            spack.install_layout.check_activated(self.extendee_spec, self.spec)
+
+        activated = spack.install_layout.extension_map(self.extendee_spec)
+        for name, aspec in activated.items():
+            if aspec != self.spec and self.spec in aspec:
+                raise ActivationError(
+                    "Cannot deactivate %s because %s is activated and depends on it."
+                    % (self.spec.short_spec, aspec.short_spec))
+
+        self.extendee_spec.package.deactivate(self, **self.extendee_args)
+
+        # redundant activation check -- makes SURE the spec is not
+        # still activated even if something was wrong above.
+        if self.activated:
+            spack.install_layout.remove_extension(self.extendee_spec, self.spec)
+
+        tty.msg("Deactivated extension %s for %s."
+                % (self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@")))
+
+
+    def deactivate(self, extension, **kwargs):
+        """Unlinks all files from extension out of this package's install dir.
+
+        Package authors can override this method to support other
+        extension mechanisms. Spack internals (commands, hooks, etc.)
+        should call do_deactivate() method so that proper checks are
+        always executed.
+        """
+        def ignore(filename):
+            return (filename in spack.install_layout.hidden_file_paths or
+                    kwargs.get('ignore', lambda f: False)(filename))
+
+        tree = LinkTree(extension.prefix)
+        tree.unmerge(self.prefix, ignore=ignore)
+
+
     def do_clean(self):
         if self.stage.expanded_archive_path:
             self.stage.chdir_to_source()
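
The activate()/deactivate() pair above implement the default symlink-merge behaviour, and do_activate() forwards extendee_args into them, so whatever keyword arguments a package hands to extends() (such as the ignore patterns used by py-nose and py-h5py later in this commit) end up in the kwargs.get('ignore', ...) lookup. A hedged illustration of an extendee refining that behaviour (the class and predicate here are inventions for the example, not code from this commit):

    class Python(Package):
        extendable = True

        def activate(self, ext_pkg, **kwargs):
            # Keep compiled artifacts out of the merge, then defer to the
            # default LinkTree-based implementation defined above.
            user_ignore = kwargs.get('ignore', lambda f: False)
            kwargs['ignore'] = lambda f: f.endswith('.pyc') or user_ignore(f)
            super(Python, self).activate(ext_pkg, **kwargs)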
@@ -1011,6 +1192,18 @@ def print_pkg(message):
     print message
 
 
+def _hms(seconds):
+    """Convert time in seconds to hours, minutes, seconds."""
+    m, s = divmod(seconds, 60)
+    h, m = divmod(m, 60)
+
+    parts = []
+    if h: parts.append("%dh" % h)
+    if m: parts.append("%dm" % m)
+    if s: parts.append("%.2fs" % s)
+    return ' '.join(parts)
+
+
 class FetchError(spack.error.SpackError):
     """Raised when something goes wrong during fetch."""
     def __init__(self, message, long_msg=None):
@@ -1057,3 +1250,17 @@ class NoURLError(PackageError):
     def __init__(self, cls):
         super(NoURLError, self).__init__(
             "Package %s has no version with a URL." % cls.__name__)
 
 
+class ExtensionError(PackageError): pass
+
+
+class ExtensionConflictError(ExtensionError):
+    def __init__(self, path):
+        super(ExtensionConflictError, self).__init__(
+            "Extension blocked by file: %s" % path)
+
+
+class ActivationError(ExtensionError):
+    def __init__(self, msg, long_msg=None):
+        super(ActivationError, self).__init__(msg, long_msg)
@@ -77,6 +77,8 @@ def get(self, spec, **kwargs):
                 copy = spec.copy()
                 self.instances[copy] = package_class(copy)
             except Exception, e:
+                if spack.debug:
+                    sys.excepthook(*sys.exc_info())
                 raise FailedConstructorError(spec.name, e)
 
         return self.instances[spec]
@@ -110,6 +112,24 @@ def providers_for(self, vpkg_spec):
         return providers
 
 
+    @_autospec
+    def extensions_for(self, extendee_spec):
+        return [p for p in self.all_packages() if p.extends(extendee_spec)]
+
+
+    @_autospec
+    def installed_extensions_for(self, extendee_spec):
+        for s in self.installed_package_specs():
+            try:
+                if s.package.extends(extendee_spec):
+                    yield s.package
+            except UnknownPackageError, e:
+                # Skip packages we know nothing about
+                continue
+            # TODO: add some conditional way to do this instead of
+            # catching exceptions.
+
+
     def dirname_for_package_name(self, pkg_name):
         """Get the directory name for a particular package. This is the
            directory that contains its package.py file."""
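
These two helpers give the rest of Spack a way to ask which packages extend a given spec, over the whole repository or only among installed specs. A hedged usage sketch (the calling command code is assumed, not shown in this hunk; the @_autospec decorator lets a plain string stand in for a Spec):

    import spack

    # Every package in the repository that declares extends('python'):
    for pkg in spack.db.extensions_for('python'):
        print pkg.name

    # Only the extensions that are already installed:
    for pkg in spack.db.installed_extensions_for('python'):
        print pkg.spec.short_spec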
@@ -172,6 +192,7 @@ def all_packages(self):
             yield self.get(name)
 
 
+    @memoized
     def exists(self, pkg_name):
         """Whether a package with the supplied name exists."""
         return os.path.exists(self.filename_for_package_name(pkg_name))
@@ -68,7 +68,7 @@ class Mpileaks(Package):
       spack install mpileaks ^mvapich
       spack install mpileaks ^mpich
 """
-__all__ = [ 'depends_on', 'provides', 'patch', 'version' ]
+__all__ = [ 'depends_on', 'extends', 'provides', 'patch', 'version' ]
 
 import re
 import inspect
@@ -107,8 +107,9 @@ def depends_on(*specs):
     """Adds a dependencies local variable in the locals of
        the calling class, based on args. """
     pkg = get_calling_package_name()
+    clocals = caller_locals()
+    dependencies = clocals.setdefault('dependencies', {})
 
-    dependencies = caller_locals().setdefault('dependencies', {})
     for string in specs:
         for spec in spack.spec.parse(string):
             if pkg == spec.name:
@@ -116,6 +117,34 @@ def depends_on(*specs):
             dependencies[spec.name] = spec
 
 
+def extends(spec, **kwargs):
+    """Same as depends_on, but dependency is symlinked into parent prefix.
+
+    This is for Python and other language modules where the module
+    needs to be installed into the prefix of the Python installation.
+    Spack handles this by installing modules into their own prefix,
+    but allowing ONE module version to be symlinked into a parent
+    Python install at a time.
+
+    Keyword arguments can be passed to extends() so that extension
+    packages can pass parameters to the extendee's extension
+    mechanism.
+
+    """
+    pkg = get_calling_package_name()
+    clocals = caller_locals()
+    dependencies = clocals.setdefault('dependencies', {})
+    extendees = clocals.setdefault('extendees', {})
+    if extendees:
+        raise RelationError("Packages can extend at most one other package.")
+
+    spec = Spec(spec)
+    if pkg == spec.name:
+        raise CircularReferenceError('extends', pkg)
+    dependencies[spec.name] = spec
+    extendees[spec.name] = (spec, kwargs)
+
+
 def provides(*specs, **kwargs):
     """Allows packages to provide a virtual dependency. If a package provides
        'mpi', other packages can declare that they depend on "mpi", and spack
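
The packages added later in this commit (geos and the py-* modules) show real uses of this relation; in a package file it reads just like depends_on(). A small hedged sketch of what one call records in the class-level dicts set up above (the spec strings are only examples):

    # Inside a hypothetical package class body:
    extends('python')             # adds 'python' to both `dependencies`
                                  # and `extendees` for this class
    depends_on('py-setuptools')   # ordinary dependency, unaffected

    # A second extends() call in the same class would raise:
    #   RelationError: Packages can extend at most one other package.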
@@ -552,6 +552,13 @@ def short_spec(self):
         return self.format('$_$@$%@$+$=$#')
 
 
+    @property
+    def cshort_spec(self):
+        """Returns a version of the spec with the dependencies hashed
+           instead of completely enumerated."""
+        return self.format('$_$@$%@$+$=$#', color=True)
+
+
     @property
     def prefix(self):
         return Prefix(spack.install_layout.path_for_spec(self))
@@ -51,7 +51,8 @@
               'hg_fetch',
               'mirror',
               'url_extrapolate',
-              'cc']
+              'cc',
+              'link_tree']
 
 
 def list_tests():

lib/spack/spack/test/link_tree.py (new file)
@@ -0,0 +1,153 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
# (remainder of the standard Spack LGPL license header, LLNL-CODE-647188)
##############################################################################
import os
import unittest
import shutil
import tempfile
from contextlib import closing

from llnl.util.filesystem import *
from llnl.util.link_tree import LinkTree

from spack.stage import Stage


class LinkTreeTest(unittest.TestCase):
    """Tests Spack's LinkTree class."""

    def setUp(self):
        self.stage = Stage('link-tree-test')

        with working_dir(self.stage.path):
            touchp('source/1')
            touchp('source/a/b/2')
            touchp('source/a/b/3')
            touchp('source/c/4')
            touchp('source/c/d/5')
            touchp('source/c/d/6')
            touchp('source/c/d/e/7')

        source_path = os.path.join(self.stage.path, 'source')
        self.link_tree = LinkTree(source_path)


    def tearDown(self):
        if self.stage:
            self.stage.destroy()


    def check_file_link(self, filename):
        self.assertTrue(os.path.isfile(filename))
        self.assertTrue(os.path.islink(filename))


    def check_dir(self, filename):
        self.assertTrue(os.path.isdir(filename))


    def test_merge_to_new_directory(self):
        with working_dir(self.stage.path):
            self.link_tree.merge('dest')

            self.check_file_link('dest/1')
            self.check_file_link('dest/a/b/2')
            self.check_file_link('dest/a/b/3')
            self.check_file_link('dest/c/4')
            self.check_file_link('dest/c/d/5')
            self.check_file_link('dest/c/d/6')
            self.check_file_link('dest/c/d/e/7')

            self.link_tree.unmerge('dest')

            self.assertFalse(os.path.exists('dest'))


    def test_merge_to_existing_directory(self):
        with working_dir(self.stage.path):

            touchp('dest/x')
            touchp('dest/a/b/y')

            self.link_tree.merge('dest')

            self.check_file_link('dest/1')
            self.check_file_link('dest/a/b/2')
            self.check_file_link('dest/a/b/3')
            self.check_file_link('dest/c/4')
            self.check_file_link('dest/c/d/5')
            self.check_file_link('dest/c/d/6')
            self.check_file_link('dest/c/d/e/7')

            self.assertTrue(os.path.isfile('dest/x'))
            self.assertTrue(os.path.isfile('dest/a/b/y'))

            self.link_tree.unmerge('dest')

            self.assertTrue(os.path.isfile('dest/x'))
            self.assertTrue(os.path.isfile('dest/a/b/y'))

            self.assertFalse(os.path.isfile('dest/1'))
            self.assertFalse(os.path.isfile('dest/a/b/2'))
            self.assertFalse(os.path.isfile('dest/a/b/3'))
            self.assertFalse(os.path.isfile('dest/c/4'))
            self.assertFalse(os.path.isfile('dest/c/d/5'))
            self.assertFalse(os.path.isfile('dest/c/d/6'))
            self.assertFalse(os.path.isfile('dest/c/d/e/7'))


    def test_merge_with_empty_directories(self):
        with working_dir(self.stage.path):
            mkdirp('dest/f/g')
            mkdirp('dest/a/b/h')

            self.link_tree.merge('dest')
            self.link_tree.unmerge('dest')

            self.assertFalse(os.path.exists('dest/1'))
            self.assertFalse(os.path.exists('dest/a/b/2'))
            self.assertFalse(os.path.exists('dest/a/b/3'))
            self.assertFalse(os.path.exists('dest/c/4'))
            self.assertFalse(os.path.exists('dest/c/d/5'))
            self.assertFalse(os.path.exists('dest/c/d/6'))
            self.assertFalse(os.path.exists('dest/c/d/e/7'))

            self.assertTrue(os.path.isdir('dest/a/b/h'))
            self.assertTrue(os.path.isdir('dest/f/g'))


    def test_ignore(self):
        with working_dir(self.stage.path):
            touchp('source/.spec')
            touchp('dest/.spec')

            self.link_tree.merge('dest', ignore=lambda x: x == '.spec')
            self.link_tree.unmerge('dest', ignore=lambda x: x == '.spec')

            self.assertFalse(os.path.exists('dest/1'))
            self.assertFalse(os.path.exists('dest/a'))
            self.assertFalse(os.path.exists('dest/c'))

            self.assertTrue(os.path.isfile('source/.spec'))
            self.assertTrue(os.path.isfile('dest/.spec'))

var/spack/packages/R/package.py (new file)
@@ -0,0 +1,33 @@
from spack import *

class R(Package):
    """R is 'GNU S', a freely available language and environment for
       statistical computing and graphics which provides a wide variety
       of statistical and graphical techniques: linear and nonlinear
       modelling, statistical tests, time series analysis, classification,
       clustering, etc. Please consult the R project homepage for further
       information."""
    homepage = "http://www.example.com"
    url      = "http://cran.cnr.berkeley.edu/src/base/R-3/R-3.1.2.tar.gz"

    version('3.1.2', '3af29ec06704cbd08d4ba8d69250ae74')

    depends_on("readline")
    depends_on("ncurses")
    depends_on("icu")
    depends_on("glib")
    depends_on("zlib")
    depends_on("libtiff")
    depends_on("jpeg")
    depends_on("cairo")
    depends_on("pango")
    depends_on("freetype")
    depends_on("tcl")
    depends_on("tk")

    def install(self, spec, prefix):
        configure("--prefix=%s" % prefix,
                  "--enable-R-shlib",
                  "--enable-BLAS-shlib")
        make()
        make("install")

var/spack/packages/geos/package.py (new file)
@@ -0,0 +1,31 @@
from spack import *

class Geos(Package):
    """GEOS (Geometry Engine - Open Source) is a C++ port of the Java
       Topology Suite (JTS). As such, it aims to contain the complete
       functionality of JTS in C++. This includes all the OpenGIS
       Simple Features for SQL spatial predicate functions and spatial
       operators, as well as specific JTS enhanced topology functions."""

    homepage = "http://trac.osgeo.org/geos/"
    url      = "http://download.osgeo.org/geos/geos-3.4.2.tar.bz2"

    version('3.4.2', 'fc5df2d926eb7e67f988a43a92683bae')
    version('3.4.1', '4c930dec44c45c49cd71f3e0931ded7e')
    version('3.4.0', 'e41318fc76b5dc764a69d43ac6b18488')
    version('3.3.9', '4794c20f07721d5011c93efc6ccb8e4e')
    version('3.3.8', '75be476d0831a2d14958fed76ca266de')
    version('3.3.7', '95ab996d22672b067d92c7dee2170460')
    version('3.3.6', '6fadfb941541875f4976f75fb0bbc800')
    version('3.3.5', '2ba61afb7fe2c5ddf642d82d7b16e75b')
    version('3.3.4', '1bb9f14d57ef06ffa41cb1d67acb55a1')
    version('3.3.3', '8454e653d7ecca475153cc88fd1daa26')

    extends('python')
    depends_on('swig')

    def install(self, spec, prefix):
        configure("--prefix=%s" % prefix,
                  "--enable-python")
        make()
        make("install")

@@ -18,12 +18,14 @@ class Hdf5(Package):
 
     # TODO: currently hard-coded to use OpenMPI
     def install(self, spec, prefix):
+
         configure(
             "--prefix=%s" % prefix,
             "--with-zlib=%s" % spec['zlib'].prefix,
             "--enable-parallel",
-            "CC=%s" % spec['openmpi'].prefix.bin + "/mpicc",
-            "CXX=%s" % spec['openmpi'].prefix.bin + "/mpic++")
+            "--enable-shared",
+            "CC=%s" % spec['mpich'].prefix.bin + "/mpicc",
+            "CXX=%s" % spec['mpich'].prefix.bin + "/mpic++")
 
         make()
         make("install")

var/spack/packages/libgcrypt/package.py (new file)
@@ -0,0 +1,19 @@
from spack import *

class Libgcrypt(Package):
    """Libgcrypt is a general purpose cryptographic library based on
       the code from GnuPG. It provides functions for all cryptographic
       building blocks: symmetric ciphers, hash algorithms, MACs, public
       key algorithms, large integer functions, random numbers and a lot
       of supporting functions."""
    homepage = "http://www.gnu.org/software/libgcrypt/"
    url      = "ftp://ftp.gnupg.org/gcrypt/libgcrypt/libgcrypt-1.6.2.tar.bz2"

    version('1.6.2', 'b54395a93cb1e57619943c082da09d5f')

    depends_on("libgpg-error")

    def install(self, spec, prefix):
        configure("--prefix=%s" % prefix)
        make()
        make("install")

var/spack/packages/libgpg-error/package.py (new file)
@@ -0,0 +1,17 @@
from spack import *

class LibgpgError(Package):
    """Libgpg-error is a small library that defines common error
       values for all GnuPG components. Among these are GPG, GPGSM,
       GPGME, GPG-Agent, libgcrypt, Libksba, DirMngr, Pinentry,
       SmartCard Daemon and possibly more in the future."""

    homepage = "https://www.gnupg.org/related_software/libgpg-error"
    url      = "ftp://ftp.gnupg.org/gcrypt/libgpg-error/libgpg-error-1.18.tar.bz2"

    version('1.18', '12312802d2065774b787cbfc22cc04e9')

    def install(self, spec, prefix):
        configure("--prefix=%s" % prefix)
        make()
        make("install")

@@ -9,6 +9,9 @@ class Libxml2(Package):
 
     version('2.9.2', '9e6a9aca9d155737868b3dc5fd82f788')
 
+    depends_on('zlib')
+    depends_on('xz')
+
     def install(self, spec, prefix):
         configure("--prefix=%s" % prefix,
                   "--without-python")

var/spack/packages/libxslt/package.py (new file)
@@ -0,0 +1,24 @@
from spack import *

class Libxslt(Package):
    """Libxslt is the XSLT C library developed for the GNOME
       project. XSLT itself is an XML language to define
       transformation for XML. Libxslt is based on libxml2, the XML C
       library developed for the GNOME project. It also implements
       most of the EXSLT set of processor-portable extensions
       functions and some of Saxon's evaluate and expressions
       extensions."""
    homepage = "http://www.xmlsoft.org/XSLT/index.html"
    url      = "http://xmlsoft.org/sources/libxslt-1.1.28.tar.gz"

    version('1.1.28', '9667bf6f9310b957254fdcf6596600b7')

    depends_on("libxml2")
    depends_on("xz")
    depends_on("zlib")
    depends_on("libgcrypt")

    def install(self, spec, prefix):
        configure("--prefix=%s" % prefix)
        make()
        make("install")
var/spack/packages/py-basemap/package.py
Normal file
20
var/spack/packages/py-basemap/package.py
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
from spack import *
|
||||||
|
import os
|
||||||
|
|
||||||
|
class PyBasemap(Package):
|
||||||
|
"""The matplotlib basemap toolkit is a library for plotting 2D data on maps in Python."""
|
||||||
|
homepage = "http://matplotlib.org/basemap/"
|
||||||
|
url = "https://downloads.sourceforge.net/project/matplotlib/matplotlib-toolkits/basemap-1.0.7/basemap-1.0.7.tar.gz"
|
||||||
|
|
||||||
|
version('1.0.7', '48c0557ced9e2c6e440b28b3caff2de8')
|
||||||
|
|
||||||
|
extends('python')
|
||||||
|
depends_on('py-setuptools')
|
||||||
|
depends_on('py-numpy')
|
||||||
|
depends_on('py-matplotlib')
|
||||||
|
depends_on('py-pil')
|
||||||
|
depends_on("geos")
|
||||||
|
|
||||||
|
def install(self, spec, prefix):
|
||||||
|
env['GEOS_DIR'] = spec['geos'].prefix
|
||||||
|
python('setup.py', 'install', '--prefix=%s' % prefix)
|

var/spack/packages/py-biopython/package.py (new file)
@@ -0,0 +1,15 @@
from spack import *

class PyBiopython(Package):
    """It is a distributed collaborative effort to develop Python libraries and applications which address the needs of current and future work in bioinformatics."""
    homepage = "http://biopython.org/wiki/Main_Page"
    url      = "http://biopython.org/DIST/biopython-1.65.tar.gz"

    version('1.65', '143e7861ade85c0a8b5e2bbdd1da1f67')

    extends('python')
    depends_on('py-mx')
    depends_on('py-numpy')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-cython/package.py (new file)
@@ -0,0 +1,13 @@
from spack import *

class PyCython(Package):
    """The Cython compiler for writing C extensions for the Python language."""
    homepage = "https://pypi.python.org/pypi/cython"
    url      = "https://pypi.python.org/packages/source/C/Cython/Cython-0.21.2.tar.gz"

    version('0.21.2', 'd21adb870c75680dc857cd05d41046a4')

    extends('python')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-dateutil/package.py (new file)
@@ -0,0 +1,14 @@
from spack import *

class PyDateutil(Package):
    """Extensions to the standard Python datetime module."""
    homepage = "https://pypi.python.org/pypi/dateutil"
    url      = "https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.4.0.tar.gz"

    version('2.4.0', '75714163bb96bedd07685cdb2071b8bc')

    extends('python')
    depends_on('py-setuptools')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-epydoc/package.py (new file)
@@ -0,0 +1,13 @@
from spack import *

class PyEpydoc(Package):
    """Epydoc is a tool for generating API documentation for Python modules, based on their docstrings."""
    homepage = "https://pypi.python.org/pypi/epydoc"
    url      = "https://pypi.python.org/packages/source/e/epydoc/epydoc-3.0.1.tar.gz"

    version('3.0.1', '36407974bd5da2af00bf90ca27feeb44')

    extends('python')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-gnuplot/package.py (new file)
@@ -0,0 +1,14 @@
from spack import *

class PyGnuplot(Package):
    """Gnuplot.py is a Python package that allows you to create graphs from within Python using the gnuplot plotting program."""
    homepage = "http://gnuplot-py.sourceforge.net/"
    url      = "http://downloads.sourceforge.net/project/gnuplot-py/Gnuplot-py/1.8/gnuplot-py-1.8.tar.gz"

    version('1.8', 'abd6f571e7aec68ae7db90a5217cd5b1')

    extends('python')
    depends_on('py-numpy')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-h5py/package.py (new file)
@@ -0,0 +1,18 @@
from spack import *
import re

class PyH5py(Package):
    """The h5py package provides both a high- and low-level interface to the HDF5 library from Python."""
    homepage = "https://pypi.python.org/pypi/h5py"
    url      = "https://pypi.python.org/packages/source/h/h5py/h5py-2.4.0.tar.gz"

    version('2.4.0', '80c9a94ae31f84885cc2ebe1323d6758')

    extends('python', ignore=lambda f: re.match(r'cy*', f))
    depends_on('hdf5')
    depends_on('py-numpy')
    depends_on('py-cython')

    def install(self, spec, prefix):
        python('setup.py', 'configure', '--hdf5=%s' % spec['hdf5'].prefix)
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-ipython/package.py (new file)
@@ -0,0 +1,15 @@
from spack import *

class PyIpython(Package):
    """IPython provides a rich toolkit to help you make the most out of using Python interactively."""
    homepage = "https://pypi.python.org/pypi/ipython"
    url      = "https://pypi.python.org/packages/source/i/ipython/ipython-2.3.1.tar.gz"

    version('2.3.1', '2b7085525dac11190bfb45bb8ec8dcbf')

    extends('python')
    depends_on('py-pygments')
    depends_on('py-setuptools')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-libxml2/package.py (new file)
@@ -0,0 +1,15 @@
from spack import *

class PyLibxml2(Package):
    """A Python wrapper around libxml2."""
    homepage = "https://xmlsoft.org/python.html"
    url      = "ftp://xmlsoft.org/libxml2/python/libxml2-python-2.6.21.tar.gz"

    version('2.6.21', '229dd2b3d110a77defeeaa73af83f7f3')

    extends('python')
    depends_on('libxml2')
    depends_on('libxslt')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-matplotlib/package.py (new file)
@@ -0,0 +1,46 @@
from spack import *
import os

class PyMatplotlib(Package):
    """Python plotting package."""
    homepage = "https://pypi.python.org/pypi/matplotlib"
    url      = "https://pypi.python.org/packages/source/m/matplotlib/matplotlib-1.4.2.tar.gz"

    version('1.4.2', '7d22efb6cce475025733c50487bd8898')

    extends('python', ignore=r'bin/nosetests.*$')

    depends_on('py-pyside')
    depends_on('py-ipython')
    depends_on('py-pyparsing')
    depends_on('py-six')
    depends_on('py-dateutil')
    depends_on('py-pytz')
    depends_on('py-nose')
    depends_on('py-numpy')

    depends_on('qt')
    depends_on('bzip2')
    depends_on('tcl')
    depends_on('tk')
    depends_on('qhull')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

        if str(self.version) == '1.4.2':
            # hack to fix configuration file
            config_file = None
            for p, d, f in os.walk(prefix.lib):
                for file in f:
                    if file.find('matplotlibrc') != -1:
                        config_file = join_path(p, 'matplotlibrc')
                        print config_file
            if config_file == None:
                raise InstallError('could not find config file')
            filter_file(r'backend : pyside',
                        'backend : Qt4Agg',
                        config_file)
            filter_file(r'#backend.qt4 : PyQt4',
                        'backend.qt4 : PySide',
                        config_file)

var/spack/packages/py-mpi4py/package.py (new file)
@@ -0,0 +1,14 @@
from spack import *

class PyMpi4py(Package):
    """This package provides Python bindings for the Message Passing Interface (MPI) standard. It is implemented on top of the MPI-1/MPI-2 specification and exposes an API which grounds on the standard MPI-2 C++ bindings."""
    homepage = "https://pypi.python.org/pypi/mpi4py"
    url      = "https://pypi.python.org/packages/source/m/mpi4py/mpi4py-1.3.1.tar.gz"

    version('1.3.1', 'dbe9d22bdc8ed965c23a7ceb6f32fc3c')
    extends('python')
    depends_on('py-setuptools')
    depends_on('mpi')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-mx/package.py (new file)
@@ -0,0 +1,13 @@
from spack import *

class PyMx(Package):
    """The eGenix.com mx Base Distribution for Python is a collection of professional quality software tools which enhance Python's usability in many important areas such as fast text searching, date/time processing and high speed data types."""
    homepage = "http://www.egenix.com/products/python/mxBase/"
    url      = "https://downloads.egenix.com/python/egenix-mx-base-3.2.8.tar.gz"

    version('3.2.8', '9d9d3a25f9dc051a15e97f452413423b')

    extends('python')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-nose/package.py (new file)
@@ -0,0 +1,16 @@
from spack import *

class PyNose(Package):
    """nose extends the test loading and running features of unittest,
       making it easier to write, find and run tests."""

    homepage = "https://pypi.python.org/pypi/nose"
    url      = "https://pypi.python.org/packages/source/n/nose/nose-1.3.4.tar.gz"

    version('1.3.4', '6ed7169887580ddc9a8e16048d38274d')

    extends('python', ignore=r'bin/nosetests.*$')
    depends_on('py-setuptools')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-numpy/package.py (new file)
@@ -0,0 +1,14 @@
from spack import *

class PyNumpy(Package):
    """array processing for numbers, strings, records, and objects."""
    homepage = "https://pypi.python.org/pypi/numpy"
    url      = "https://pypi.python.org/packages/source/n/numpy/numpy-1.9.1.tar.gz"

    version('1.9.1', '78842b73560ec378142665e712ae4ad9')

    extends('python')
    depends_on('py-nose')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-pexpect/package.py (new file)
@@ -0,0 +1,13 @@
from spack import *

class PyPexpect(Package):
    """Pexpect allows easy control of interactive console applications."""
    homepage = "https://pypi.python.org/pypi/pexpect"
    url      = "https://pypi.python.org/packages/source/p/pexpect/pexpect-3.3.tar.gz"

    version('3.3', '0de72541d3f1374b795472fed841dce8')

    extends('python')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-pil/package.py (new file)
@@ -0,0 +1,14 @@
from spack import *

class PyPil(Package):
    """The Python Imaging Library (PIL) adds image processing capabilities to your Python interpreter. This library supports many file formats, and provides powerful image processing and graphics capabilities."""

    homepage = "http://www.pythonware.com/products/pil/"
    url      = "http://effbot.org/media/downloads/Imaging-1.1.7.tar.gz"

    version('1.1.7', 'fc14a54e1ce02a0225be8854bfba478e')

    extends('python')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-pmw/package.py (new file)
@@ -0,0 +1,13 @@
from spack import *

class PyPmw(Package):
    """Pmw is a toolkit for building high-level compound widgets, or megawidgets, constructed using other widgets as component parts."""
    homepage = "https://pypi.python.org/pypi/Pmw"
    url      = "https://pypi.python.org/packages/source/P/Pmw/Pmw-2.0.0.tar.gz"

    version('2.0.0', 'c7c3f26c4f5abaa99807edefee578fc0')

    extends('python')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-pychecker/package.py (new file)
@@ -0,0 +1,13 @@
from spack import *

class PyPychecker(Package):
    """"""
    homepage = "http://pychecker.sourceforge.net/"
    url      = "http://sourceforge.net/projects/pychecker/files/pychecker/0.8.19/pychecker-0.8.19.tar.gz"

    version('0.8.19', 'c37182863dfb09209d6ba4f38fce9d2b')

    extends('python')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-pygments/package.py (new file)
@@ -0,0 +1,14 @@
from spack import *

class PyPygments(Package):
    """Pygments is a syntax highlighting package written in Python."""
    homepage = "https://pypi.python.org/pypi/pygments"
    url      = "https://pypi.python.org/packages/source/P/Pygments/Pygments-2.0.1.tar.gz"

    version('2.0.1', 'e0daf4c14a4fe5b630da765904de4d6c')

    extends('python')
    depends_on('py-setuptools')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-pylint/package.py (new file)
@@ -0,0 +1,16 @@
from spack import *
import re

class PyPylint(Package):
    """array processing for numbers, strings, records, and objects."""
    homepage = "https://pypi.python.org/pypi/pylint"
    url      = "https://pypi.python.org/packages/source/p/pylint/pylint-1.4.1.tar.gz"

    version('1.4.1', 'df7c679bdcce5019389038847e4de622')

    extends('python')
    depends_on('py-nose')
    depends_on('py-setuptools')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-pyparsing/package.py (new file)
@@ -0,0 +1,13 @@
from spack import *

class PyPyparsing(Package):
    """A Python Parsing Module."""
    homepage = "https://pypi.python.org/pypi/pyparsing"
    url      = "https://pypi.python.org/packages/source/p/pyparsing/pyparsing-2.0.3.tar.gz"

    version('2.0.3', '0fe479be09fc2cf005f753d3acc35939')

    extends('python')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-pyqt/package.py (new file)
@@ -0,0 +1,24 @@
from spack import *

class PyPyqt(Package):
    """PyQt is a set of Python v2 and v3 bindings for Digia's Qt
       application framework and runs on all platforms supported by Qt
       including Windows, MacOS/X and Linux."""
    homepage = "http://www.riverbankcomputing.com/software/pyqt/intro"
    url      = "http://sourceforge.net/projects/pyqt/files/PyQt4/PyQt-4.11.3/PyQt-x11-gpl-4.11.3.tar.gz"

    version('4.11.3', '997c3e443165a89a559e0d96b061bf70')

    extends('python')
    depends_on('py-sip')

    # TODO: allow qt5 when conditional deps are supported.
    # TODO: Fix version matching so that @4 works like @:4
    depends_on('qt@:4')

    def install(self, spec, prefix):
        python('configure.py',
               '--confirm-license',
               '--destdir=%s' % site_packages_dir)
        make()
        make('install')

var/spack/packages/py-pyside/package.py (new file)
@@ -0,0 +1,47 @@
from spack import *
import os

class PyPyside(Package):
    """array processing for numbers, strings, records, and objects."""
    homepage = "https://pypi.python.org/pypi/pyside"
    url      = "https://pypi.python.org/packages/source/P/PySide/PySide-1.2.2.tar.gz"

    version('1.2.2', 'c45bc400c8a86d6b35f34c29e379e44d')

    # TODO: make build dependency
    # depends_on("cmake")

    extends('python')
    depends_on('py-setuptools')
    depends_on('qt@:4')

    def patch(self):
        """Undo PySide RPATH handling and add Spack RPATH."""
        # Figure out the special RPATH
        pypkg = self.spec['python'].package
        rpath = self.rpath
        rpath.append(os.path.join(self.prefix, pypkg.site_packages_dir, 'PySide'))

        # Add Spack's standard CMake args to the sub-builds.
        # They're called BY setup.py so we have to patch it.
        filter_file(
            r'OPTION_CMAKE,',
            r'OPTION_CMAKE, ' + (
                '"-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE", '
                '"-DCMAKE_INSTALL_RPATH=%s",' % ':'.join(rpath)),
            'setup.py')

        # PySide tries to patch ELF files to remove RPATHs
        # Disable this and go with the one we set.
        filter_file(
            r'^\s*rpath_cmd\(pyside_path, srcpath\)',
            r'#rpath_cmd(pyside_path, srcpath)',
            'pyside_postinstall.py')


    def install(self, spec, prefix):
        python('setup.py', 'install',
               '--prefix=%s' % prefix,
               '--jobs=%s' % make_jobs)

var/spack/packages/py-pytz/package.py (new file)
@@ -0,0 +1,13 @@
from spack import *

class PyPytz(Package):
    """World timezone definitions, modern and historical."""
    homepage = "https://pypi.python.org/pypi/pytz"
    url      = "https://pypi.python.org/packages/source/p/pytz/pytz-2014.10.tar.gz"

    version('2014.10', 'eb1cb941a20c5b751352c52486aa1dd7')

    extends('python')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-rpy2/package.py (new file)
@@ -0,0 +1,16 @@
from spack import *

class PyRpy2(Package):
    """rpy2 is a redesign and rewrite of rpy. It is providing a low-level interface to R from Python, a proposed high-level interface, including wrappers to graphical libraries, as well as R-like structures and functions."""
    homepage = "https://pypi.python.org/pypi/rpy2"
    url      = "https://pypi.python.org/packages/source/r/rpy2/rpy2-2.5.4.tar.gz"

    version('2.5.4', '115a20ac30883f096da2bdfcab55196d')

    extends('python')
    depends_on('py-setuptools')

    depends_on('R')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-scientificpython/package.py (new file)
@@ -0,0 +1,17 @@
from spack import *

class PyScientificpython(Package):
    """ScientificPython is a collection of Python modules for
       scientific computing. It contains support for geometry,
       mathematical functions, statistics, physical units, IO,
       visualization, and parallelization."""

    homepage = "https://sourcesup.renater.fr/projects/scientific-py/"
    url      = "https://sourcesup.renater.fr/frs/download.php/4411/ScientificPython-2.8.1.tar.gz"

    version('2.8.1', '73ee0df19c7b58cdf2954261f0763c77')

    extends('python')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-scikit-learn/package.py (new file)
@@ -0,0 +1,13 @@
from spack import *

class PyScikitLearn(Package):
    """"""
    homepage = "https://pypi.python.org/pypi/scikit-learn"
    url      = "https://pypi.python.org/packages/source/s/scikit-learn/scikit-learn-0.15.2.tar.gz"

    version('0.15.2', 'd9822ad0238e17b382a3c756ea94fe0d')

    extends('python')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-scipy/package.py (new file)
@@ -0,0 +1,15 @@
from spack import *

class PyScipy(Package):
    """Scientific Library for Python."""
    homepage = "https://pypi.python.org/pypi/scipy"
    url      = "https://pypi.python.org/packages/source/s/scipy/scipy-0.15.0.tar.gz"

    version('0.15.0', '639112f077f0aeb6d80718dc5019dc7a')

    extends('python')
    depends_on('py-nose')
    depends_on('py-numpy')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)

var/spack/packages/py-setuptools/package.py (new file)
@@ -0,0 +1,13 @@
from spack import *

class PySetuptools(Package):
    """Easily download, build, install, upgrade, and uninstall Python packages."""
    homepage = "https://pypi.python.org/pypi/setuptools"
    url      = "https://pypi.python.org/packages/source/s/setuptools/setuptools-11.3.tar.gz"

    version('11.3.1', '01f69212e019a2420c1693fb43593930')

    extends('python')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)
45  var/spack/packages/py-shiboken/package.py  Normal file
@ -0,0 +1,45 @@
from spack import *
import os

class PyShiboken(Package):
    """Shiboken generates bindings for C++ libraries using CPython source code."""
    homepage = "https://shiboken.readthedocs.org/"
    url      = "https://pypi.python.org/packages/source/S/Shiboken/Shiboken-1.2.2.tar.gz"

    version('1.2.2', '345cfebda221f525842e079a6141e555')

    # TODO: make build dependency
    # depends_on("cmake")

    extends('python')
    depends_on("py-setuptools")
    depends_on("libxml2")
    depends_on("qt@:4.8")

    def patch(self):
        """Undo Shiboken RPATH handling and add Spack RPATH."""
        # Add Spack's standard CMake args to the sub-builds.
        # They're called BY setup.py so we have to patch it.
        pypkg = self.spec['python'].package
        rpath = self.rpath
        rpath.append(os.path.join(self.prefix, pypkg.site_packages_dir, 'Shiboken'))

        filter_file(
            r'OPTION_CMAKE,',
            r'OPTION_CMAKE, ' + (
                '"-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE", '
                '"-DCMAKE_INSTALL_RPATH=%s",' % ':'.join(rpath)),
            'setup.py')

        # Shiboken tries to patch ELF files to remove RPATHs
        # Disable this and go with the one we set.
        filter_file(
            r'^\s*rpath_cmd\(shiboken_path, srcpath\)',
            r'#rpath_cmd(shiboken_path, srcpath)',
            'shiboken_postinstall.py')

    def install(self, spec, prefix):
        python('setup.py', 'install',
               '--prefix=%s' % prefix,
               '--jobs=%s' % make_jobs)
20  var/spack/packages/py-sip/package.py  Normal file
@ -0,0 +1,20 @@
from spack import *
import os

class PySip(Package):
    """SIP is a tool that makes it very easy to create Python bindings for C and C++ libraries."""
    homepage = "http://www.riverbankcomputing.com/software/sip/intro"
    url      = "http://sourceforge.net/projects/pyqt/files/sip/sip-4.16.5/sip-4.16.5.tar.gz"

    version('4.16.5', '6d01ea966a53e4c7ae5c5e48c40e49e5')

    extends('python')

    def install(self, spec, prefix):
        python('configure.py',
               '--destdir=%s' % site_packages_dir,
               '--bindir=%s' % spec.prefix.bin,
               '--incdir=%s' % python_include_dir,
               '--sipdir=%s' % os.path.join(spec.prefix.share, 'sip'))
        make()
        make('install')
13  var/spack/packages/py-six/package.py  Normal file
@ -0,0 +1,13 @@
from spack import *

class PySix(Package):
    """Python 2 and 3 compatibility utilities."""
    homepage = "https://pypi.python.org/pypi/six"
    url      = "https://pypi.python.org/packages/source/s/six/six-1.9.0.tar.gz"

    version('1.9.0', '476881ef4012262dfc8adc645ee786c4')

    extends('python')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)
13  var/spack/packages/py-sympy/package.py  Normal file
@ -0,0 +1,13 @@
from spack import *

class PySympy(Package):
    """SymPy is a Python library for symbolic mathematics."""
    homepage = "https://pypi.python.org/pypi/sympy"
    url      = "https://pypi.python.org/packages/source/s/sympy/sympy-0.7.6.tar.gz"

    version('0.7.6', '3d04753974306d8a13830008e17babca')

    extends('python')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)
19  var/spack/packages/py-virtualenv/package.py  Normal file
@ -0,0 +1,19 @@
from spack import *
import os       # needed by clean() below; missing from the file as committed
import shutil

class PyVirtualenv(Package):
    """virtualenv is a tool to create isolated Python environments."""
    homepage = "http://virtualenv.readthedocs.org/projects/virtualenv/"
    url      = "https://pypi.python.org/packages/source/v/virtualenv/virtualenv-1.11.6.tar.gz"

    version('1.11.6', 'f61cdd983d2c4e6aeabb70b1060d6f49')

    extends('python')
    depends_on('py-setuptools')

    def clean(self):
        if os.path.exists('build'):
            shutil.rmtree('build')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)
var/spack/packages/python/package.py
@ -1,10 +1,19 @@
import os
import re
from contextlib import closing
from llnl.util.lang import match_predicate

from spack import *
import spack


class Python(Package):
    """The Python programming language."""
    homepage = "http://www.python.org"
    url      = "http://www.python.org/ftp/python/2.7.8/Python-2.7.8.tar.xz"

    extendable = True

    version('2.7.8', 'd235bdfa75b8396942e360a70487ee00')

    depends_on("openssl")
@ -23,3 +32,126 @@ def install(self, spec, prefix):
                      "--enable-shared")
        make()
        make("install")

    # ========================================================================
    # Set up environment to make install easy for python extensions.
    # ========================================================================

    @property
    def python_lib_dir(self):
        return os.path.join('lib', 'python%d.%d' % self.version[:2])

    @property
    def python_include_dir(self):
        return os.path.join('include', 'python%d.%d' % self.version[:2])

    @property
    def site_packages_dir(self):
        return os.path.join(self.python_lib_dir, 'site-packages')

    def setup_dependent_environment(self, module, spec, ext_spec):
        """Called before python modules' install() methods.

        In most cases, extensions will only need to have one line::

            python('setup.py', 'install', '--prefix=%s' % prefix)
        """
        # Python extension builds can have a global python executable function
        module.python = Executable(join_path(spec.prefix.bin, 'python'))

        # Add variables for lib/pythonX.Y and lib/pythonX.Y/site-packages dirs.
        module.python_lib_dir     = os.path.join(ext_spec.prefix, self.python_lib_dir)
        module.python_include_dir = os.path.join(ext_spec.prefix, self.python_include_dir)
        module.site_packages_dir  = os.path.join(ext_spec.prefix, self.site_packages_dir)

        # Make the site packages directory if it does not exist already.
        mkdirp(module.site_packages_dir)

        # Set PYTHONPATH to include site-packages dir for the
        # extension and any other python extensions it depends on.
        python_paths = []
        for d in ext_spec.traverse():
            if d.package.extends(self.spec):
                python_paths.append(os.path.join(d.prefix, self.site_packages_dir))
        os.environ['PYTHONPATH'] = ':'.join(python_paths)
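The properties and setup_dependent_environment() above are what let the py-* packages in this commit stay so small: each extension's install() runs with a python executable and the python_lib_dir, python_include_dir, and site_packages_dir globals already pointing into its own prefix. A minimal sketch of an extension relying on that, not part of the file being diffed — the package name, URLs, and checksum are made up for illustration, and it simply mirrors the real py-* files above:

    from spack import *

    class PyExample(Package):
        """Hypothetical extension, shown only to illustrate the injected globals."""
        homepage = "https://example.org/py-example"
        url      = "https://example.org/py-example-1.0.tar.gz"

        version('1.0', '00000000000000000000000000000000')  # placeholder checksum

        extends('python')

        def install(self, spec, prefix):
            # `python` is injected by Python.setup_dependent_environment()
            # before this install() runs.
            python('setup.py', 'install', '--prefix=%s' % prefix)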
    # ========================================================================
    # Handle specifics of activating and deactivating python modules.
    # ========================================================================

    def python_ignore(self, ext_pkg, args):
        """Add some ignore files to activate/deactivate args."""
        ignore_arg = args.get('ignore', lambda f: False)

        # Always ignore easy-install.pth, as it needs to be merged.
        patterns = [r'easy-install\.pth$']

        # Ignore pieces of setuptools installed by other packages.
        if ext_pkg.name != 'py-setuptools':
            patterns.append(r'/site\.pyc?$')
            patterns.append(r'setuptools\.pth')
            patterns.append(r'bin/easy_install[^/]*$')
            patterns.append(r'setuptools.*egg$')

        return match_predicate(ignore_arg, patterns)
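The predicate returned here is what activate()/deactivate() below pass as their ignore argument: a file is skipped if the caller's own predicate matches it or if any of the regexes above match it. A rough sketch of the behaviour, not part of the package file, with a trivial ignore_arg and made-up paths:

    ignore = match_predicate(lambda f: False,
                             [r'easy-install\.pth$', r'bin/easy_install[^/]*$'])

    ignore('lib/python2.7/site-packages/easy-install.pth')  # True  - merged by hand instead
    ignore('bin/easy_install-2.7')                          # True  - belongs to py-setuptools
    ignore('lib/python2.7/site-packages/six.py')            # False - linked into the python prefix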
    def write_easy_install_pth(self, exts):
        paths = []
        for ext in sorted(exts.values()):
            ext_site_packages = os.path.join(ext.prefix, self.site_packages_dir)
            easy_pth = "%s/easy-install.pth" % ext_site_packages

            if not os.path.isfile(easy_pth):
                continue

            with closing(open(easy_pth)) as f:
                for line in f:
                    line = line.rstrip()

                    # Skip lines matching these criteria
                    if not line: continue
                    if re.search(r'^(import|#)', line): continue
                    if (ext.name != 'py-setuptools' and
                        re.search(r'setuptools.*egg$', line)): continue

                    paths.append(line)

        site_packages = os.path.join(self.prefix, self.site_packages_dir)
        main_pth = "%s/easy-install.pth" % site_packages

        if not paths:
            if os.path.isfile(main_pth):
                os.remove(main_pth)

        else:
            with closing(open(main_pth, 'w')) as f:
                f.write("import sys; sys.__plen = len(sys.path)\n")
                for path in paths:
                    f.write("%s\n" % path)
                f.write("import sys; new=sys.path[sys.__plen:]; del sys.path[sys.__plen:]; "
                        "p=getattr(sys,'__egginsert',0); sys.path[p:p]=new; sys.__egginsert = p+len(new)\n")
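The merged file this writes is an ordinary setuptools .pth file: the header line, one entry per line copied from each activated extension's own easy-install.pth, and the standard setuptools trailer that splices those entries into sys.path. With two activated extensions it would look roughly like this — the middle entries are placeholders, since they are whatever lines the extensions' own files contained:

    import sys; sys.__plen = len(sys.path)
    ./setuptools-11.3.1-py2.7.egg
    ./six-1.9.0-py2.7.egg
    import sys; new=sys.path[sys.__plen:]; del sys.path[sys.__plen:]; p=getattr(sys,'__egginsert',0); sys.path[p:p]=new; sys.__egginsert = p+len(new)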
    def activate(self, ext_pkg, **args):
        args.update(ignore=self.python_ignore(ext_pkg, args))
        super(Python, self).activate(ext_pkg, **args)

        exts = spack.install_layout.extension_map(self.spec)
        exts[ext_pkg.name] = ext_pkg.spec
        self.write_easy_install_pth(exts)

    def deactivate(self, ext_pkg, **args):
        args.update(ignore=self.python_ignore(ext_pkg, args))
        super(Python, self).deactivate(ext_pkg, **args)

        exts = spack.install_layout.extension_map(self.spec)
        if ext_pkg.name in exts:        # Make deactivate idempotent.
            del exts[ext_pkg.name]
        self.write_easy_install_pth(exts)
27  var/spack/packages/qhull/package.py  Normal file
@ -0,0 +1,27 @@
from spack import *

class Qhull(Package):
    """Qhull computes the convex hull, Delaunay triangulation, Voronoi
       diagram, halfspace intersection about a point, furthest-site
       Delaunay triangulation, and furthest-site Voronoi diagram. The
       source code runs in 2-d, 3-d, 4-d, and higher dimensions. Qhull
       implements the Quickhull algorithm for computing the convex
       hull. It handles roundoff errors from floating point
       arithmetic. It computes volumes, surface areas, and
       approximations to the convex hull.

       Qhull does not support triangulation of non-convex surfaces,
       mesh generation of non-convex objects, medium-sized inputs in
       9-D and higher, alpha shapes, weighted Voronoi diagrams,
       Voronoi volumes, or constrained Delaunay triangulations."""

    homepage = "http://www.qhull.org"

    version('1.0', 'd0f978c0d8dfb2e919caefa56ea2953c',
            url="http://www.qhull.org/download/qhull-2012.1-src.tgz")

    def install(self, spec, prefix):
        with working_dir('spack-build', create=True):
            cmake('..', *std_cmake_args)
            make()
            make("install")
var/spack/packages/qt/package.py
@ -1,3 +1,4 @@
import os
from spack import *

class Qt(Package):
@ -20,6 +21,11 @@ class Qt(Package):
    depends_on("libmng")
    depends_on("jpeg")

    def setup_dependent_environment(self, module, spec, dep_spec):
        """Dependencies of Qt find it using the QTDIR environment variable."""
        os.environ['QTDIR'] = self.prefix

    def patch(self):
        # Fix qmake compilers in the default mkspec
        qmake_conf = 'mkspecs/common/g++-base.conf'
@ -37,8 +43,9 @@ def install(self, spec, prefix):
                  '-fast',
                  '-optimized-qmake',
                  '-no-pch',
                  # phonon required for py-pyqt
                  # '-no-phonon',
                  # '-no-phonon-backend',
                  '-no-openvg')
        make()
        make("install")
22  var/spack/packages/tcl/package.py  Normal file
@ -0,0 +1,22 @@
from spack import *

class Tcl(Package):
    """Tcl (Tool Command Language) is a very powerful but easy to
       learn dynamic programming language, suitable for a very wide
       range of uses, including web and desktop applications,
       networking, administration, testing and many more. Open source
       and business-friendly, Tcl is a mature yet evolving language
       that is truly cross platform, easily deployed and highly
       extensible."""
    homepage = "http://www.tcl.tk"

    version('8.6.3', 'db382feca91754b7f93da16dc4cdad1f',
            url="http://prdownloads.sourceforge.net/tcl/tcl8.6.3-src.tar.gz")

    depends_on('zlib')

    def install(self, spec, prefix):
        with working_dir('unix'):
            configure("--prefix=%s" % prefix)
            make()
            make("install")
22  var/spack/packages/tk/package.py  Normal file
@ -0,0 +1,22 @@
from spack import *

class Tk(Package):
    """Tk is a graphical user interface toolkit that takes developing
       desktop applications to a higher level than conventional
       approaches. Tk is the standard GUI not only for Tcl, but for
       many other dynamic languages, and can produce rich, native
       applications that run unchanged across Windows, Mac OS X, Linux
       and more."""
    homepage = "http://www.tcl.tk"
    url      = "http://prdownloads.sourceforge.net/tcl/tk8.6.3-src.tar.gz"

    version('src', '85ca4dbf4dcc19777fd456f6ee5d0221')

    depends_on("tcl")

    def install(self, spec, prefix):
        with working_dir('unix'):
            configure("--prefix=%s" % prefix,
                      "--with-tcl=%s" % spec['tcl'].prefix.lib)
            make()
            make("install")