Merge pull request #1084 from epfl-scitas/packages/openmpi_without_fortran
OpenMPI : reverts part of #1079
commit 76d950b103
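The merge touches two files: Spack's build-environment support code, where the changes are largely line-length re-wrapping plus a few small cleanups, and the OpenMPI package recipe, which carries the user-visible change. Instead of raising InstallError when no Fortran compiler is configured, OpenMPI's new setup_environment warns the user, unsets FC and F77, and records extra configure flags that install() later appends. A minimal, self-contained sketch of that optional-attribute handshake (class and variable names below are illustrative, not Spack APIs):

    # One method records extra configure flags only on the degraded path; the
    # other consumes them defensively with getattr() so the normal path is
    # untouched. Runs standalone; nothing here is the real Spack package.
    class FakeOpenmpi(object):
        def setup_environment(self, have_fortran):
            if not have_fortran:
                self.config_extra = ['--enable-mpi-fortran=none',
                                     '--disable-oshmem-fortran']

        def install(self):
            config_args = ['--prefix=/tmp/openmpi-prefix']   # placeholder prefix
            if getattr(self, 'config_extra', None) is not None:
                config_args.extend(self.config_extra)
            return config_args

    pkg = FakeOpenmpi()
    pkg.setup_environment(have_fortran=False)
    print(pkg.install())
    # ['--prefix=/tmp/openmpi-prefix', '--enable-mpi-fortran=none', '--disable-oshmem-fortran']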
@@ -88,7 +88,6 @@
 dso_suffix = 'dylib' if sys.platform == 'darwin' else 'so'
 
 
-
 class MakeExecutable(Executable):
     """Special callable executable object for make so the user can
     specify parallel or not on a per-invocation basis. Using
@@ -99,6 +98,7 @@ class MakeExecutable(Executable):
     Note that if the SPACK_NO_PARALLEL_MAKE env var is set it overrides
     everything.
     """
+
     def __init__(self, name, jobs):
         super(MakeExecutable, self).__init__(name)
         self.jobs = jobs
@@ -113,6 +113,7 @@ def __call__(self, *args, **kwargs):
 
         return super(MakeExecutable, self).__call__(*args, **kwargs)
 
+
 def load_module(mod):
     """Takes a module name and removes modules until it is possible to
     load that module. It then loads the provided module. Depends on the
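For orientation, MakeExecutable is the object packages invoke as make during a build, and its docstring above advertises per-invocation control over parallelism. A hedged usage sketch follows; the parallel keyword and the translation of self.jobs into a -j flag are assumptions based on that docstring, not something shown in these hunks:

    # Hypothetical usage only; assumes __call__ honors a 'parallel' keyword
    # and turns self.jobs into a -jN argument.
    make = MakeExecutable('make', jobs=8)
    make()                           # e.g. runs: make -j8
    make('install', parallel=False)  # e.g. runs: make install (serial)
    # Per the docstring, SPACK_NO_PARALLEL_MAKE in the environment overrides
    # everything and forces serial builds.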
@@ -129,11 +130,13 @@ def load_module(mod):
     text = modulecmd('show', mod, output=str, error=str).split()
     for i, word in enumerate(text):
         if word == 'conflict':
-            exec(compile(modulecmd('unload', text[i+1], output=str, error=str), '<string>', 'exec'))
+            exec(compile(modulecmd('unload', text[
+                i + 1], output=str, error=str), '<string>', 'exec'))
     # Load the module now that there are no conflicts
     load = modulecmd('load', mod, output=str, error=str)
     exec(compile(load, '<string>', 'exec'))
 
+
 def get_path_from_module(mod):
     """Inspects a TCL module for entries that indicate the absolute path
     at which the library supported by said module can be found.
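The exec(compile(...)) calls above are the usual way to drive the environment-modules tool from Python: run with a python backend, modulecmd prints Python statements that mutate os.environ, and the caller executes them in-process. A rough, self-contained illustration of that round trip (the string below is fake output; the real format depends on the modules installation):

    import os

    # Stand-in for what modulecmd('load', <mod>, output=str, error=str) might
    # return: Python source that edits the environment.
    fake_modulecmd_output = (
        "os.environ['PATH'] = '/opt/fake-gcc/bin:' + os.environ.get('PATH', '')")

    # Same idiom as the patch: compile the returned text, then exec it so the
    # current process picks up the module's changes.
    exec(compile(fake_modulecmd_output, '<string>', 'exec'))
    print(os.environ['PATH'].split(':')[0])   # /opt/fake-gcc/bin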
@@ -171,32 +174,35 @@ def get_path_from_module(mod):
     # Unable to find module path
     return None
 
 
 def set_compiler_environment_variables(pkg, env):
     assert(pkg.spec.concrete)
     compiler = pkg.compiler
     flags = pkg.spec.compiler_flags
 
     # Set compiler variables used by CMake and autotools
-    assert all(key in compiler.link_paths for key in ('cc', 'cxx', 'f77', 'fc'))
+    assert all(key in compiler.link_paths for key in (
+        'cc', 'cxx', 'f77', 'fc'))
 
     # Populate an object with the list of environment modifications
     # and return it
-    # TODO : add additional kwargs for better diagnostics, like requestor, ttyout, ttyerr, etc.
+    # TODO : add additional kwargs for better diagnostics, like requestor,
+    # ttyout, ttyerr, etc.
     link_dir = spack.build_env_path
-    env.set('CC', join_path(link_dir, compiler.link_paths['cc']))
-    env.set('CXX', join_path(link_dir, compiler.link_paths['cxx']))
-    env.set('F77', join_path(link_dir, compiler.link_paths['f77']))
-    env.set('FC', join_path(link_dir, compiler.link_paths['fc']))
 
     # Set SPACK compiler variables so that our wrapper knows what to call
     if compiler.cc:
         env.set('SPACK_CC', compiler.cc)
+        env.set('CC', join_path(link_dir, compiler.link_paths['cc']))
     if compiler.cxx:
         env.set('SPACK_CXX', compiler.cxx)
+        env.set('CXX', join_path(link_dir, compiler.link_paths['cxx']))
     if compiler.f77:
         env.set('SPACK_F77', compiler.f77)
+        env.set('F77', join_path(link_dir, compiler.link_paths['f77']))
     if compiler.fc:
         env.set('SPACK_FC', compiler.fc)
+        env.set('FC', join_path(link_dir, compiler.link_paths['fc']))
 
     # Set SPACK compiler rpath flags so that our wrapper knows what to use
     env.set('SPACK_CC_RPATH_ARG', compiler.cc_rpath_arg)
@@ -233,7 +239,8 @@ def set_build_environment_variables(pkg, env):
     # handled by putting one in the <build_env_path>/case-insensitive
     # directory. Add that to the path too.
    env_paths = []
-    for item in [spack.build_env_path, join_path(spack.build_env_path, pkg.compiler.name)]:
+    compiler_specific = join_path(spack.build_env_path, pkg.compiler.name)
+    for item in [spack.build_env_path, compiler_specific]:
         env_paths.append(item)
         ci = join_path(item, 'case-insensitive')
         if os.path.isdir(ci):
@@ -246,7 +253,8 @@ def set_build_environment_variables(pkg, env):
     # Prefixes of all of the package's dependencies go in SPACK_DEPENDENCIES
     dep_prefixes = [d.prefix for d in pkg.spec.traverse(root=False)]
     env.set_path(SPACK_DEPENDENCIES, dep_prefixes)
-    env.set_path('CMAKE_PREFIX_PATH', dep_prefixes)  # Add dependencies to CMAKE_PREFIX_PATH
+    # Add dependencies to CMAKE_PREFIX_PATH
+    env.set_path('CMAKE_PREFIX_PATH', dep_prefixes)
 
     # Install prefix
     env.set(SPACK_PREFIX, pkg.prefix)
@@ -262,7 +270,8 @@ def set_build_environment_variables(pkg, env):
     env.unset('DYLD_LIBRARY_PATH')
 
     # Add bin directories from dependencies to the PATH for the build.
-    bin_dirs = reversed(filter(os.path.isdir, ['%s/bin' % prefix for prefix in dep_prefixes]))
+    bin_dirs = reversed(
+        filter(os.path.isdir, ['%s/bin' % prefix for prefix in dep_prefixes]))
     for item in bin_dirs:
         env.prepend_path('PATH', item)
 
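A side note on the expression that was just re-wrapped: reversed(filter(...)) relies on Python 2 semantics, where filter() returns a list. Under Python 3, filter() returns an iterator and reversed() raises TypeError, so a version-agnostic spelling would materialize the sequence first. The sketch below is for comparison only, with placeholder prefixes; it is not part of the patch:

    import os

    dep_prefixes = ['/fake/prefix/a', '/fake/prefix/b']   # placeholder values

    # Works on Python 2 and 3 alike: build the list before reversing it.
    bin_dirs = list(reversed(
        [d for d in ('%s/bin' % p for p in dep_prefixes) if os.path.isdir(d)]))
    print(bin_dirs)   # [] here, since the placeholder paths do not exist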
@@ -325,7 +334,8 @@ def set_module_variables_for_package(pkg, module):
 
     # Set up CMake rpath
     m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE')
-    m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH=%s' % ":".join(get_rpaths(pkg)))
+    m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH=%s' %
+                            ":".join(get_rpaths(pkg)))
 
     # Put spack compiler paths in module scope.
     link_dir = spack.build_env_path
@@ -372,7 +382,9 @@ def get_rpaths(pkg):
 
 
 def parent_class_modules(cls):
-    """Get list of super class modules that are all descend from spack.Package"""
+    """
+    Get list of super class modules that are all descend from spack.Package
+    """
     if not issubclass(cls, spack.Package) or issubclass(spack.Package, cls):
         return []
     result = []
@@ -391,6 +403,7 @@ def load_external_modules(pkg):
         if dep.external_module:
             load_module(dep.external_module)
 
+
 def setup_package(pkg):
     """Execute all environment setup routines."""
     spack_env = EnvironmentModifications()
@@ -410,7 +423,8 @@ def setup_package(pkg):
     # throwaway environment, but it is kind of dirty.
     #
     # TODO: Think about how to avoid this fix and do something cleaner.
-    for s in pkg.spec.traverse(): s.package.spec = s
+    for s in pkg.spec.traverse():
+        s.package.spec = s
 
     set_compiler_environment_variables(pkg, spack_env)
     set_build_environment_variables(pkg, spack_env)
@@ -498,7 +512,9 @@ def child_fun():
         # message. Just make the parent exit with an error code.
         pid, returncode = os.waitpid(pid, 0)
         if returncode != 0:
-            raise InstallError("Installation process had nonzero exit code.".format(str(returncode)))
+            message = "Installation process had nonzero exit code : {code}"
+            strcode = str(returncode)
+            raise InstallError(message.format(code=strcode))
 
 
 class InstallError(spack.error.SpackError):
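One detail worth calling out in the hunk just above: the replaced one-liner called .format() on a message that contains no placeholder, so the exit code was silently dropped from the error text. The three-line replacement threads it through a named {code} field. A quick demonstration:

    returncode = 256   # example value

    # Old pattern: no placeholder, so format() returns the string unchanged.
    old = "Installation process had nonzero exit code.".format(str(returncode))

    # New pattern: the code is actually interpolated.
    new = "Installation process had nonzero exit code : {code}".format(
        code=str(returncode))

    print(old)   # Installation process had nonzero exit code.
    print(new)   # Installation process had nonzero exit code : 256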
@@ -24,6 +24,8 @@
 ##############################################################################
 import os
 
+import llnl.util.tty as tty
+
 from spack import *
 
 
@@ -118,6 +120,21 @@ def setup_dependent_package(self, module, dep_spec):
         self.spec.mpifc = join_path(self.prefix.bin, 'mpif90')
         self.spec.mpif77 = join_path(self.prefix.bin, 'mpif77')
 
+    def setup_environment(self, spack_env, run_env):
+        # As of 06/2016 there is no mechanism to specify that packages which
+        # depends on MPI need C or/and Fortran implementation. For now
+        # require both.
+        if (self.compiler.f77 is None) or (self.compiler.fc is None):
+            tty.warn('OpenMPI : FORTRAN compiler not found')
+            tty.warn('OpenMPI : FORTRAN bindings will be disabled')
+            spack_env.unset('FC')
+            spack_env.unset('F77')
+            # Setting an attribute here and using it in the 'install'
+            # method is needed to ensure tty.warn is actually displayed
+            # to user and not redirected to spack-build.out
+            self.config_extra = ['--enable-mpi-fortran=none',
+                                 '--disable-oshmem-fortran']
+
     @property
     def verbs(self):
         # Up through version 1.6, this option was previously named
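The spack_env object handled here is an EnvironmentModifications instance: calls such as unset('FC') are recorded and only applied to the real environment when the build is set up. (The inline comment explains the related choice of warning here rather than in install(), where output is redirected to spack-build.out.) A small stand-in showing the record-now, apply-later idea; TinyEnvMods is an illustration, not the Spack class:

    import os

    class TinyEnvMods(object):
        """Toy stand-in for Spack's EnvironmentModifications."""

        def __init__(self):
            self._ops = []

        def unset(self, name):
            self._ops.append(('unset', name))   # recorded, not applied yet

        def apply_modifications(self):
            for op, name in self._ops:
                if op == 'unset':
                    os.environ.pop(name, None)

    os.environ['FC'] = '/usr/bin/gfortran'   # pretend a wrapper was exported
    mods = TinyEnvMods()
    mods.unset('FC')                          # nothing visible happens here
    mods.apply_modifications()                # now FC disappears
    print('FC' in os.environ)                 # False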
@@ -129,17 +146,14 @@ def verbs(self):
             return 'verbs'
 
     def install(self, spec, prefix):
-        # As of 06/2016 there is no mechanism to specify that packages which
-        # depends on MPI need C or/and Fortran implementation. For now
-        # require both.
-        if (self.compiler.f77 is None) or (self.compiler.fc is None):
-            raise InstallError('OpenMPI requires both C and Fortran ',
-                               'compilers!')
-
         config_args = ["--prefix=%s" % prefix,
                        "--with-hwloc=%s" % spec['hwloc'].prefix,
                        "--enable-shared",
                        "--enable-static"]
+
+        if getattr(self, 'config_extra', None) is not None:
+            config_args.extend(self.config_extra)
+
         # Variant based arguments
         config_args.extend([
             # Schedulers
@@ -170,9 +184,6 @@ def install(self, spec, prefix):
         if self.version == ver("1.6.5") and '+lanl' in spec:
             config_args.append("--with-platform=contrib/platform/lanl/tlcc2/optimized-nopanasas")  # NOQA: ignore=E501
 
-        if not self.compiler.f77 and not self.compiler.fc:
-            config_args.append("--enable-mpi-fortran=no")
-
         configure(*config_args)
         make()
         make("install")
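Putting the two OpenMPI hunks together: on a machine without a Fortran compiler, install() would now assemble roughly the following argument list before calling configure (the prefixes are placeholders and variant-driven flags are omitted):

    # Approximate result of the new logic on a Fortran-less system.
    config_args = [
        "--prefix=/placeholder/opt/openmpi",
        "--with-hwloc=/placeholder/opt/hwloc",
        "--enable-shared",
        "--enable-static",
        "--enable-mpi-fortran=none",     # contributed by self.config_extra
        "--disable-oshmem-fortran",      # contributed by self.config_extra
    ]
    # configure(*config_args); make(); make('install')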