Merge develop for v0.8.16 release.
commit 1414480d3a
@@ -8,8 +8,9 @@ Site configuration

 Temporary space
 ----------------------------

-.. warning:: Temporary space configuration will be moved to configuration files.
-   The instructions here are old and refer to ``__init__.py``
+.. warning:: Temporary space configuration will eventually be moved to
+   configuration files, but currently these settings are in
+   ``lib/spack/spack/__init__.py``

 By default, Spack will try to do all of its building in temporary
 space. There are two main reasons for this. First, Spack is designed
lib/spack/env/clang (vendored, 1 deletion)
@@ -1 +0,0 @@
-cc

lib/spack/env/clang++ (vendored, 1 deletion)
@@ -1 +0,0 @@
-cc

lib/spack/env/g++ (vendored, 1 deletion)
@@ -1 +0,0 @@
-cc

lib/spack/env/gcc (vendored, 1 deletion)
@@ -1 +0,0 @@
-cc
lib/spack/llnl/util/filesystem.py
@@ -22,9 +22,10 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-__all__ = ['set_install_permissions', 'install', 'expand_user', 'working_dir',
-           'touch', 'touchp', 'mkdirp', 'force_remove', 'join_path', 'ancestor',
-           'can_access', 'filter_file', 'change_sed_delimiter', 'is_exe']
+__all__ = ['set_install_permissions', 'install', 'install_tree', 'traverse_tree',
+           'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp',
+           'force_remove', 'join_path', 'ancestor', 'can_access', 'filter_file',
+           'change_sed_delimiter', 'is_exe', 'force_symlink']

 import os
 import sys
@@ -140,12 +141,7 @@ def set_install_permissions(path):
         os.chmod(path, 0644)


-def install(src, dest):
-    """Manually install a file to a particular location."""
-    tty.info("Installing %s to %s" % (src, dest))
-    shutil.copy(src, dest)
-    set_install_permissions(dest)
-
-
 def copy_mode(src, dest):
     src_mode = os.stat(src).st_mode
     dest_mode = os.stat(dest).st_mode
     if src_mode | stat.S_IXUSR: dest_mode |= stat.S_IXUSR
@@ -154,6 +150,24 @@ def install(src, dest):
     os.chmod(dest, dest_mode)


+def install(src, dest):
+    """Manually install a file to a particular location."""
+    tty.info("Installing %s to %s" % (src, dest))
+    shutil.copy(src, dest)
+    set_install_permissions(dest)
+    copy_mode(src, dest)
+
+
+def install_tree(src, dest, **kwargs):
+    """Manually install a directory tree to a particular location."""
+    tty.info("Installing %s to %s" % (src, dest))
+    shutil.copytree(src, dest, **kwargs)
+
+    for s, d in traverse_tree(src, dest, follow_nonexisting=False):
+        set_install_permissions(d)
+        copy_mode(s, d)
+
+
 def is_exe(path):
     """True if path is an executable file."""
     return os.path.isfile(path) and os.access(path, os.X_OK)
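Editor's note: for orientation, here is a rough sketch of how a package recipe might use the two helpers added above. The package name and paths are invented for illustration; a real use of install_tree appears in the scotch package later in this diff.

from spack import *

class Mytool(Package):
    """Invented example package, illustrative only."""

    def install(self, spec, prefix):
        make()
        mkdirp(prefix.bin)
        # single file: copy it, then fix its permissions and execute bit
        install('mytool', prefix.bin)
        # whole directory: copytree, then per-file permissions via traverse_tree
        install_tree('share', prefix.share)

Inside a package's install(), install(), install_tree() and mkdirp() are available because build_environment injects them into the package module (see the set_module_variables_for_package change further down).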
@@ -210,6 +224,14 @@ def touchp(path):
     touch(path)


+def force_symlink(src, dest):
+    try:
+        os.symlink(src, dest)
+    except OSError, e:
+        os.remove(dest)
+        os.symlink(src, dest)
+
+
 def join_path(prefix, *args):
     path = str(prefix)
     for elt in args:
@@ -228,3 +250,84 @@ def ancestor(dir, n=1):
 def can_access(file_name):
     """True if we have read/write access to the file."""
     return os.access(file_name, os.R_OK|os.W_OK)
+
+
+def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
+    """Traverse two filesystem trees simultaneously.
+
+    Walks the LinkTree directory in pre or post order.  Yields each
+    file in the source directory with a matching path from the dest
+    directory, along with whether the file is a directory.
+    e.g., for this tree::
+
+        root/
+            a/
+                file1
+                file2
+            b/
+                file3
+
+    When called on dest, this yields::
+
+        ('root', 'dest')
+        ('root/a', 'dest/a')
+        ('root/a/file1', 'dest/a/file1')
+        ('root/a/file2', 'dest/a/file2')
+        ('root/b', 'dest/b')
+        ('root/b/file3', 'dest/b/file3')
+
+    Optional args:
+
+    order=[pre|post] -- Whether to do pre- or post-order traversal.
+
+    ignore=<predicate> -- Predicate indicating which files to ignore.
+
+    follow_nonexisting -- Whether to descend into directories in
+                          src that do not exist in dest. Default True.
+
+    follow_links -- Whether to descend into symlinks in src.
+
+    """
+    follow_nonexisting = kwargs.get('follow_nonexisting', True)
+    follow_links = kwargs.get('follow_link', False)
+
+    # Yield in pre or post order?
+    order = kwargs.get('order', 'pre')
+    if order not in ('pre', 'post'):
+        raise ValueError("Order must be 'pre' or 'post'.")
+
+    # List of relative paths to ignore under the src root.
+    ignore = kwargs.get('ignore', lambda filename: False)
+
+    # Don't descend into ignored directories
+    if ignore(rel_path):
+        return
+
+    source_path = os.path.join(source_root, rel_path)
+    dest_path = os.path.join(dest_root, rel_path)
+
+    # preorder yields directories before children
+    if order == 'pre':
+        yield (source_path, dest_path)
+
+    for f in os.listdir(source_path):
+        source_child = os.path.join(source_path, f)
+        dest_child = os.path.join(dest_path, f)
+        rel_child = os.path.join(rel_path, f)
+
+        # Treat as a directory
+        if os.path.isdir(source_child) and (
+            follow_links or not os.path.islink(source_child)):
+
+            # When follow_nonexisting isn't set, don't descend into dirs
+            # in source that do not exist in dest
+            if follow_nonexisting or os.path.exists(dest_child):
+                tuples = traverse_tree(source_root, dest_root, rel_child, **kwargs)
+                for t in tuples: yield t
+
+        # Treat as a file.
+        elif not ignore(os.path.join(rel_path, f)):
+            yield (source_child, dest_child)
+
+    if order == 'post':
+        yield (source_path, dest_path)
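Editor's note: a minimal usage sketch for traverse_tree, assuming a staged source tree and an existing prefix already on disk. The paths and the ignore predicate are made up; the keyword arguments are the ones documented above.

import os
from llnl.util.filesystem import traverse_tree, set_install_permissions, copy_mode

# Illustrative only: '/tmp/stage' and '/opt/pkg' are placeholder paths.
# Walk both trees in pre-order, skip editor backup files, and fix
# permissions on every file that exists in both trees.
for src, dst in traverse_tree('/tmp/stage', '/opt/pkg', order='pre',
                              ignore=lambda relpath: relpath.endswith('~'),
                              follow_nonexisting=False):
    if os.path.isfile(dst):
        set_install_permissions(dst)
        copy_mode(src, dst)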
lib/spack/llnl/util/link_tree.py
@@ -32,88 +32,6 @@
 empty_file_name = '.spack-empty'


-def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
-    """Traverse two filesystem trees simultaneously.
-
-    Walks the LinkTree directory in pre or post order.  Yields each
-    file in the source directory with a matching path from the dest
-    directory, along with whether the file is a directory.
-    e.g., for this tree::
-
-        root/
-            a/
-                file1
-                file2
-            b/
-                file3
-
-    When called on dest, this yields::
-
-        ('root', 'dest')
-        ('root/a', 'dest/a')
-        ('root/a/file1', 'dest/a/file1')
-        ('root/a/file2', 'dest/a/file2')
-        ('root/b', 'dest/b')
-        ('root/b/file3', 'dest/b/file3')
-
-    Optional args:
-
-    order=[pre|post] -- Whether to do pre- or post-order traversal.
-
-    ignore=<predicate> -- Predicate indicating which files to ignore.
-
-    follow_nonexisting -- Whether to descend into directories in
-                          src that do not exist in dest. Default True.
-
-    follow_links -- Whether to descend into symlinks in src.
-
-    """
-    follow_nonexisting = kwargs.get('follow_nonexisting', True)
-    follow_links = kwargs.get('follow_link', False)
-
-    # Yield in pre or post order?
-    order = kwargs.get('order', 'pre')
-    if order not in ('pre', 'post'):
-        raise ValueError("Order must be 'pre' or 'post'.")
-
-    # List of relative paths to ignore under the src root.
-    ignore = kwargs.get('ignore', lambda filename: False)
-
-    # Don't descend into ignored directories
-    if ignore(rel_path):
-        return
-
-    source_path = os.path.join(source_root, rel_path)
-    dest_path = os.path.join(dest_root, rel_path)
-
-    # preorder yields directories before children
-    if order == 'pre':
-        yield (source_path, dest_path)
-
-    for f in os.listdir(source_path):
-        source_child = os.path.join(source_path, f)
-        dest_child = os.path.join(dest_path, f)
-        rel_child = os.path.join(rel_path, f)
-
-        # Treat as a directory
-        if os.path.isdir(source_child) and (
-            follow_links or not os.path.islink(source_child)):
-
-            # When follow_nonexisting isn't set, don't descend into dirs
-            # in source that do not exist in dest
-            if follow_nonexisting or os.path.exists(dest_child):
-                tuples = traverse_tree(source_root, dest_root, rel_child, **kwargs)
-                for t in tuples: yield t
-
-        # Treat as a file.
-        elif not ignore(os.path.join(rel_path, f)):
-            yield (source_child, dest_child)
-
-    if order == 'post':
-        yield (source_path, dest_path)
-
-
 class LinkTree(object):
     """Class to create trees of symbolic links from a source directory.
lib/spack/spack/build_environment.py
@@ -189,18 +189,19 @@ def set_module_variables_for_package(pkg):
     m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH=%s' % ":".join(get_rpaths(pkg)))

     # Emulate some shell commands for convenience
-    m.pwd = os.getcwd
-    m.cd = os.chdir
-    m.mkdir = os.mkdir
-    m.makedirs = os.makedirs
-    m.remove = os.remove
-    m.removedirs = os.removedirs
-    m.symlink = os.symlink
+    m.pwd = os.getcwd
+    m.cd = os.chdir
+    m.mkdir = os.mkdir
+    m.makedirs = os.makedirs
+    m.remove = os.remove
+    m.removedirs = os.removedirs
+    m.symlink = os.symlink

-    m.mkdirp = mkdirp
-    m.install = install
-    m.rmtree = shutil.rmtree
-    m.move = shutil.move
+    m.mkdirp = mkdirp
+    m.install = install
+    m.install_tree = install_tree
+    m.rmtree = shutil.rmtree
+    m.move = shutil.move

     # Useful directories within the prefix are encapsulated in
     # a Prefix object.
lib/spack/spack/package.py
@@ -62,6 +62,7 @@
 from spack.stage import Stage
 from spack.util.web import get_pages
 from spack.util.compression import allowed_archive, extension
+from spack.util.executable import ProcessError

 """Allowed URL schemes for spack packages."""
 _ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"]
@@ -805,6 +806,16 @@ def do_install(self, **kwargs):
         # package naming scheme it likes.
         spack.install_layout.make_path_for_spec(self.spec)

+        def cleanup():
+            if not keep_prefix:
+                # If anything goes wrong, remove the install prefix
+                self.remove_prefix()
+            else:
+                tty.warn("Keeping install prefix in place despite error.",
+                         "Spack will think this package is installed." +
+                         "Manually remove this directory to fix:",
+                         self.prefix)
+
         def real_work():
             try:
                 tty.msg("Building %s." % self.name)
@@ -837,15 +848,20 @@ def real_work():
                         % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)))
                 print_pkg(self.prefix)

-            except:
-                if not keep_prefix:
-                    # If anything goes wrong, remove the install prefix
-                    self.remove_prefix()
-                else:
-                    tty.warn("Keeping install prefix in place despite error.",
-                             "Spack will think this package is installed." +
-                             "Manually remove this directory to fix:",
-                             self.prefix)
+            except ProcessError, e:
+                # One of the processes returned an error code.
+                # Suppress detailed stack trace here unless in debug mode
+                if spack.debug:
+                    raise e
+                else:
+                    tty.error(e)
+
+                # Still need to clean up b/c there was an error.
+                cleanup()
+
+            except:
+                # other exceptions just clean up and raise.
+                cleanup()
                 raise

         build_env.fork(self, real_work)
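Editor's note: to summarize the new control flow above, build failures that surface as ProcessError are reported without a stack trace unless spack.debug is set, and every failure path now funnels through cleanup(). The following is a simplified, illustrative sketch of that structure, not the actual Spack code; the helper name build_with_cleanup is invented.

import llnl.util.tty as tty
from spack.util.executable import ProcessError

def build_with_cleanup(build, cleanup, debug=False):
    # Illustrative stand-in for the try/except structure in real_work().
    try:
        build()
    except ProcessError, e:
        # A build command exited with an error code.
        if debug:
            raise            # keep the full stack trace in debug mode
        tty.error(e)         # otherwise report the error concisely
        cleanup()            # remove (or keep) the install prefix
    except:
        cleanup()            # any other error: clean up, then re-raise
        raise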
var/spack/packages/samrai/package.py
@@ -7,10 +7,11 @@ class Samrai(Package):
     structured adaptive mesh refinement (SAMR) technology in large-scale parallel
     application development.
     """
-    homepage = "https://computation-rnd.llnl.gov/SAMRAI/confirm.php"
-    url = "https://computation-rnd.llnl.gov/SAMRAI/download/SAMRAI-v3.7.3.tar.gz"
+    homepage = "https://computation.llnl.gov/project/SAMRAI/"
+    url = "https://computation.llnl.gov/project/SAMRAI/download/SAMRAI-v3.9.1.tar.gz"
     list_url = homepage

+    version('3.9.1', '232d04d0c995f5abf20d94350befd0b2')
     version('3.7.3', '12d574eacadf8c9a70f1bb4cd1a69df6')
     version('3.7.2', 'f6a716f171c9fdbf3cb12f71fa6e2737')
     version('3.6.3-beta', 'ef0510bf2893042daedaca434e5ec6ce')

@@ -24,22 +25,25 @@ class Samrai(Package):
     depends_on("mpi")
     depends_on("zlib")
     depends_on("hdf5")
-    depends_on("boost@1.52.0")
+    depends_on("boost")

     # don't build tools with gcc
     patch('no-tool-build.patch', when='%gcc')

-    # TODO: currently hard-coded to use openmpi - be careful!
     def install(self, spec, prefix):
+        mpi = next(m for m in ('openmpi', 'mpich', 'mvapich')
+                   if m in spec)
+
         configure(
             "--prefix=%s" % prefix,
-            "--with-CXX=%s" % spec['openmpi'].prefix.bin + "/mpic++",
-            "--with-CC=%s" % spec['openmpi'].prefix.bin + "/mpicc",
+            "--with-CXX=%s" % spec[mpi].prefix.bin + "/mpic++",
+            "--with-CC=%s" % spec[mpi].prefix.bin + "/mpicc",
             "--with-hdf5=%s" % spec['hdf5'].prefix,
             "--with-boost=%s" % spec['boost'].prefix,
             "--with-zlib=%s" % spec['zlib'].prefix,
-            "--disable-blas",
-            "--disable-lapack",
+            "--without-blas",
+            "--without-lapack",
             "--with-hypre=no",
             "--with-petsc=no",
             "--enable-opt",
var/spack/packages/cram/package.py (new file, 15 lines)
@@ -0,0 +1,15 @@
+from spack import *
+
+class Cram(Package):
+    """Cram runs many small MPI jobs inside one large MPI job."""
+    homepage = "https://github.com/scalability-llnl/cram"
+    url = "http://github.com/scalability-llnl/cram/archive/v1.0.1.tar.gz"
+
+    version('1.0.1', 'c73711e945cf5dc603e44395f6647f5e')
+
+    depends_on("mpi")
+
+    def install(self, spec, prefix):
+        cmake(".", *std_cmake_args)
+        make()
+        make("install")
var/spack/packages/metis/package.py (new file, 32 lines)
@@ -0,0 +1,32 @@
+from spack import *
+
+class Metis(Package):
+    """METIS is a set of serial programs for partitioning graphs,
+       partitioning finite element meshes, and producing fill reducing
+       orderings for sparse matrices. The algorithms implemented in
+       METIS are based on the multilevel recursive-bisection,
+       multilevel k-way, and multi-constraint partitioning schemes
+       developed in our lab."""
+
+    homepage = "http://glaros.dtc.umn.edu/gkhome/metis/metis/overview"
+    url = "http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/metis-5.1.0.tar.gz"
+
+    version('5.1.0', '5465e67079419a69e0116de24fce58fe')
+
+    # FIXME: Add dependencies if this package requires them.
+    # depends_on("foo")
+
+    def patch(self):
+        filter_file(r'#define IDXTYPEWIDTH 32', '#define IDXTYPEWIDTH 64', 'include/metis.h',
+                    string=True)
+
+
+    def install(self, spec, prefix):
+        with working_dir('spack-build', create=True):
+            cmake('..',
+                  '-DGKLIB_PATH=../GKlib',
+                  '-DBUILD_SHARED_LIBS=TRUE',
+                  *std_cmake_args)
+            make()
+            make("install")
var/spack/packages/mpich/package.py
@@ -38,6 +38,15 @@ class Mpich(Package):
     provides('mpi@:3', when='@3:')
     provides('mpi@:1', when='@1:')

+
+    def setup_dependent_environment(self, module, spec, dep_spec):
+        """For dependencies, make mpicc's use spack wrapper."""
+        os.environ['MPICH_CC'] = 'cc'
+        os.environ['MPICH_CXX'] = 'c++'
+        os.environ['MPICH_F77'] = 'f77'
+        os.environ['MPICH_F90'] = 'f90'
+
+
     def install(self, spec, prefix):
         config_args = ["--prefix=" + prefix,
                        "--enable-shared"]
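Editor's note: because setup_dependent_environment() exports MPICH_CC and friends, any dependent package that invokes mpicc/mpic++ from the mpich prefix ends up compiling through Spack's wrappers. A hypothetical dependent recipe, sketched in the same style as the samrai change above; the package name is invented.

from spack import *

class MyMpiApp(Package):
    """Invented example package, illustrative only."""
    depends_on('mpich')

    def install(self, spec, prefix):
        # mpicc/mpic++ pick up MPICH_CC=cc and MPICH_CXX=c++ set above,
        # so the Spack compiler wrappers are used under the hood.
        configure('--prefix=%s' % prefix,
                  'CC=' + spec['mpich'].prefix.bin + '/mpicc',
                  'CXX=' + spec['mpich'].prefix.bin + '/mpic++')
        make()
        make("install")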
var/spack/packages/muster/package.py
@@ -10,7 +10,8 @@ class Muster(Package):
     homepage = "https://github.com/scalability-llnl/muster"
     url = "https://github.com/scalability-llnl/muster/archive/v1.0.tar.gz"

-    version('1.0', '2eec6979a4a36d3a65a792d12969be16')
+    version('1.0.1', 'd709787db7e080447afb6571ac17723c')
+    version('1.0', '2eec6979a4a36d3a65a792d12969be16')

     depends_on("boost")
     depends_on("mpi")
var/spack/packages/otf/package.py (new file, 21 lines)
@@ -0,0 +1,21 @@
+from spack import *
+
+class Otf(Package):
+    """To improve scalability for very large and massively parallel
+       traces the Open Trace Format (OTF) is developed at ZIH as a
+       successor format to the Vampir Trace Format (VTF3)."""
+
+    homepage = "http://tu-dresden.de/die_tu_dresden/zentrale_einrichtungen/zih/forschung/projekte/otf/index_html/document_view?set_language=en"
+    url = "http://wwwpub.zih.tu-dresden.de/%7Emlieber/dcount/dcount.php?package=otf&get=OTF-1.12.5salmon.tar.gz"
+
+    version('1.12.5salmon', 'bf260198633277031330e3356dcb4eec')
+
+    depends_on('zlib')
+
+    def install(self, spec, prefix):
+        configure('--prefix=%s' % prefix,
+                  '--without-vtf3',
+                  '--with-zlib',
+                  '--with-zlibsymbols')
+        make()
+        make("install")
var/spack/packages/ravel/package.py (new file, 23 lines)
@@ -0,0 +1,23 @@
+from spack import *
+
+class Ravel(Package):
+    """Ravel is a parallel communication trace visualization tool that
+       orders events according to logical time."""
+
+    homepage = "https://github.com/scalability-llnl/ravel"
+    url = 'https://github.com/scalability-llnl/ravel/archive/v1.0.0.tar.gz'
+
+    version('1.0.0', 'b25fece58331c2adfcce76c5036485c2')
+
+    # TODO: make this a build dependency
+    depends_on('cmake@2.8.9:')
+
+    depends_on('muster@1.0.1:')
+    depends_on('otf')
+    depends_on('otf2')
+    depends_on('qt@5:')
+
+    def install(self, spec, prefix):
+        cmake('-Wno-dev', *std_cmake_args)
+        make()
+        make("install")
var/spack/packages/scotch/package.py (new file, 40 lines)
@@ -0,0 +1,40 @@
+from spack import *
+import glob
+import os
+
+class Scotch(Package):
+    """Scotch is a software package for graph and mesh/hypergraph
+       partitioning, graph clustering, and sparse matrix ordering."""
+    homepage = "http://www.labri.fr/perso/pelegrin/scotch/"
+    url = "http://gforge.inria.fr/frs/download.php/file/34099/scotch_6.0.3.tar.gz"
+    list_url = "http://gforge.inria.fr/frs/?group_id=248"
+
+    version('6.0.3', '10b0cc0f184de2de99859eafaca83cfc')
+
+    depends_on('mpi')
+
+
+    def patch(self):
+        with working_dir('src/Make.inc'):
+            makefiles = glob.glob('Makefile.inc.x86-64_pc_linux2*')
+            filter_file(r'^CCS\s*=.*$', 'CCS = cc', *makefiles)
+            filter_file(r'^CCD\s*=.*$', 'CCD = cc', *makefiles)
+
+
+    def install(self, spec, prefix):
+        # Currently support gcc and icc on x86_64 (maybe others with
+        # vanilla makefile)
+        makefile = 'Make.inc/Makefile.inc.x86-64_pc_linux2'
+        if spec.satisfies('%icc'):
+            makefile += '.icc'
+
+        with working_dir('src'):
+            force_symlink(makefile, 'Makefile.inc')
+            for app in ('scotch', 'ptscotch'):
+                make(app)
+
+        install_tree('bin', prefix.bin)
+        install_tree('lib', prefix.lib)
+        install_tree('include', prefix.include)
+        install_tree('man/man1', prefix.share_man1)