New package version: mfem v4.4 (#29658)

parent 0804637228
commit 34d9d58924
@@ -245,6 +245,12 @@ def setup_build_environment(self, env):
            env.append_flags(
                'CXXFLAGS', '-O2' if '~debug' in spec else '-g')

        if '+rocm' in spec:
            # As of 2022/04/05, the following are set by 'llvm-amdgpu' and
            # override hypre's default flags, so we unset them.
            env.unset('CFLAGS')
            env.unset('CXXFLAGS')

    def build(self, spec, prefix):
        with working_dir("src"):
            make()

@@ -49,6 +49,10 @@ class Mfem(Package, CudaPackage, ROCmPackage):
    # other version.
    version('develop', branch='master')

    version('4.4.0',
            sha256='37250dbef6e97b16dc9ab50973e8d68bc165bb4afcdaf91b3b72c8972c87deef',
            url='https://bit.ly/mfem-4-4', extension='tar.gz')

    version('4.3.0',
            sha256='3a495602121b986049286ea0b23512279cdbdfb43c15c42a1511b521051fbe38',
            url='https://bit.ly/mfem-4-3', extension='tar.gz')

@@ -190,6 +194,9 @@ class Mfem(Package, CudaPackage, ROCmPackage):
    conflicts('+amgx', when='mfem@:4.1')
    conflicts('+amgx', when='~cuda')
    conflicts('+mpi~cuda ^hypre+cuda')
    conflicts('+mpi ^hypre+cuda', when='@:4.2')
    conflicts('+mpi~rocm ^hypre+rocm')
    conflicts('+mpi ^hypre+rocm', when='@:4.3')

    conflicts('+superlu-dist', when='~mpi')
    conflicts('+strumpack', when='~mpi')

@@ -199,13 +206,25 @@ class Mfem(Package, CudaPackage, ROCmPackage):
    conflicts('timer=mpi', when='~mpi')

    # See https://github.com/mfem/mfem/issues/2957
    conflicts('^mpich@4:', when='+mpi')
    conflicts('^mpich@4:', when='@:4.3+mpi')

    depends_on('mpi', when='+mpi')
    depends_on('hypre@2.10.0:2.13', when='@:3.3+mpi')
    depends_on('hypre@:2.20.0', when='@3.4:4.2+mpi')
    depends_on('hypre@:2.23.0', when='@4.3.0+mpi')
    depends_on('hypre', when='+mpi')
    # Propagate 'cuda_arch' to 'hypre' without propagating the '+cuda'
    # variant because we want to allow 'mfem+cuda ^hypre~cuda':
    for sm_ in CudaPackage.cuda_arch_values:
        depends_on('hypre@2.22.1:+cuda cuda_arch={0}'.format(sm_),
                   when='+mpi+cuda cuda_arch={0} ^hypre+cuda'
                   .format(sm_))
    # Propagate 'amdgpu_target' to 'hypre' without propagating the '+rocm'
    # variant because we want to allow 'mfem+rocm ^hypre~rocm':
    for gfx in ROCmPackage.amdgpu_targets:
        depends_on('hypre@2.23.0:+rocm amdgpu_target={0}'.format(gfx),
                   when='+mpi+rocm amdgpu_target={0} ^hypre+rocm'
                   .format(gfx))

    depends_on('metis', when='+metis')
    depends_on('blas', when='+lapack')

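The two loops above are a standard Spack idiom: 'cuda_arch' and 'amdgpu_target' are forwarded to hypre only when the user has already requested a GPU-enabled hypre, so a spec such as 'mfem+cuda ^hypre~cuda' still concretizes. A minimal, hypothetical sketch of the same idiom follows; it assumes Spack's package DSL with the CudaPackage/ROCmPackage mixins, and 'Foo'/'bar' are illustrative names that are not part of this commit.

class Foo(Package, CudaPackage, ROCmPackage):
    """Sketch only: forward the GPU target of this package to its dependency
    'bar' without forcing 'bar+cuda' or 'bar+rocm' on the user."""
    depends_on('bar')
    for sm_ in CudaPackage.cuda_arch_values:
        depends_on('bar+cuda cuda_arch={0}'.format(sm_),
                   when='+cuda cuda_arch={0} ^bar+cuda'.format(sm_))
    for gfx in ROCmPackage.amdgpu_targets:
        depends_on('bar+rocm amdgpu_target={0}'.format(gfx),
                   when='+rocm amdgpu_target={0} ^bar+rocm'.format(gfx))
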
@@ -215,16 +234,19 @@ class Mfem(Package, CudaPackage, ROCmPackage):
    depends_on('sundials@2.7.0+mpi+hypre', when='@:3.3.0+sundials+mpi')
    depends_on('sundials@2.7.0:', when='@3.3.2:+sundials~mpi')
    depends_on('sundials@2.7.0:+mpi+hypre', when='@3.3.2:+sundials+mpi')
    depends_on('sundials@5.0.0:', when='@4.0.1-xsdk:+sundials~mpi')
    depends_on('sundials@5.0.0:+mpi+hypre', when='@4.0.1-xsdk:+sundials+mpi')
    depends_on('sundials@5.0.0:5', when='@4.0.1-xsdk:+sundials~mpi')
    depends_on('sundials@5.0.0:5+mpi+hypre', when='@4.0.1-xsdk:+sundials+mpi')
    for sm_ in CudaPackage.cuda_arch_values:
        depends_on('sundials@5.4.0:+cuda cuda_arch={0}'.format(sm_),
        depends_on('sundials@5.4.0:5+cuda cuda_arch={0}'.format(sm_),
                   when='@4.2.0:+sundials+cuda cuda_arch={0}'.format(sm_))
    depends_on('pumi@2.2.3:', when='@4.2.0:+pumi')
    depends_on('pumi', when='+pumi~shared')
    depends_on('pumi+shared', when='+pumi+shared')
    depends_on('gslib@1.0.5:+mpi', when='+gslib+mpi')
    depends_on('gslib@1.0.5:~mpi~mpiio', when='+gslib~mpi')
    depends_on('pumi@2.2.3:2.2.5', when='@4.2.0:4.3.0+pumi')
    depends_on('pumi@2.2.6:', when='@4.4.0:+pumi')
    depends_on('gslib+mpi', when='+gslib+mpi')
    depends_on('gslib~mpi~mpiio', when='+gslib~mpi')
    depends_on('gslib@1.0.5:1.0.6', when='@:4.2+gslib')
    depends_on('gslib@1.0.7:', when='@4.3.0:+gslib')
    depends_on('suite-sparse', when='+suite-sparse')
    depends_on('superlu-dist', when='+superlu-dist')
    depends_on('strumpack@3.0.0:', when='+strumpack~shared')

@@ -232,14 +254,32 @@ class Mfem(Package, CudaPackage, ROCmPackage):
    for sm_ in CudaPackage.cuda_arch_values:
        depends_on('strumpack+cuda cuda_arch={0}'.format(sm_),
                   when='+strumpack+cuda cuda_arch={0}'.format(sm_))
    for gfx in ROCmPackage.amdgpu_targets:
        depends_on('strumpack+rocm amdgpu_target={0}'.format(gfx),
                   when='+strumpack+rocm amdgpu_target={0}'.format(gfx))
    # The PETSc tests in MFEM will fail if PETSc is not configured with
    # SuiteSparse and MUMPS. On the other hand, if we require the variants
    # '+suite-sparse+mumps' of PETSc, the xsdk package concretization fails.
    # MUMPS (and SuiteSparse in older versions). On the other hand, PETSc built
    # with MUMPS is not strictly required, so we do not require it here.
    depends_on('petsc@3.8:+mpi+double+hypre', when='+petsc')
    depends_on('slepc@3.8.0:', when='+slepc')
    # Recommended when building outside of xsdk:
    # depends_on('petsc@3.8:+mpi+double+hypre+suite-sparse+mumps',
    #            when='+petsc')
    # Propagate 'cuda_arch' to 'petsc'/'slepc' without propagating the '+cuda'
    # variant because we want to allow 'mfem+cuda+petsc ^petsc~cuda':
    for sm_ in CudaPackage.cuda_arch_values:
        depends_on('petsc+cuda cuda_arch={0}'.format(sm_),
                   when='+cuda+petsc cuda_arch={0} ^petsc+cuda'
                   .format(sm_))
        depends_on('slepc+cuda cuda_arch={0}'.format(sm_),
                   when='+cuda+slepc cuda_arch={0} ^petsc+cuda'
                   .format(sm_))
    # Propagate 'amdgpu_target' to 'petsc'/'slepc' without propagating the
    # '+rocm' variant because we want to allow 'mfem+rocm+petsc ^petsc~rocm':
    for gfx in ROCmPackage.amdgpu_targets:
        depends_on('petsc+rocm amdgpu_target={0}'.format(gfx),
                   when='+rocm+petsc amdgpu_target={0} ^petsc+rocm'
                   .format(gfx))
        depends_on('slepc+rocm amdgpu_target={0}'.format(gfx),
                   when='+rocm+slepc amdgpu_target={0} ^petsc+rocm'
                   .format(gfx))
    depends_on('mpfr', when='+mpfr')
    depends_on('netcdf-c@4.1.3:', when='+netcdf')
    depends_on('unwind', when='+libunwind')

@@ -260,12 +300,14 @@ class Mfem(Package, CudaPackage, ROCmPackage):
    conflicts('+strumpack ^strumpack+cuda', when='~cuda')

    depends_on('occa@1.0.8:', when='@:4.1+occa')
    depends_on('occa@1.1.0:', when='@4.2.0:+occa')
    depends_on('occa@1.1.0', when='@4.2.0:+occa')
    depends_on('occa+cuda', when='+occa+cuda')
    # TODO: propagate '+rocm' variant to occa when it is supported

    depends_on('raja@0.10.0:', when='@4.0.1:+raja')
    depends_on('raja@0.7.0:0.9.0', when='@4.0.0+raja')
    depends_on('raja@0.10.0:0.12.1', when='@4.0.1:4.2.0+raja')
    depends_on('raja@0.13.0', when='@4.3.0+raja')
    depends_on('raja@0.14.0:', when='@4.4.0:+raja')
    for sm_ in CudaPackage.cuda_arch_values:
        depends_on('raja+cuda cuda_arch={0}'.format(sm_),
                   when='+raja+cuda cuda_arch={0}'.format(sm_))

@@ -273,8 +315,10 @@ class Mfem(Package, CudaPackage, ROCmPackage):
        depends_on('raja+rocm amdgpu_target={0}'.format(gfx),
                   when='+raja+rocm amdgpu_target={0}'.format(gfx))

    depends_on('libceed@0.6:', when='@:4.1+libceed')
    depends_on('libceed@0.7:', when='@4.2.0:+libceed')
    depends_on('libceed@0.6', when='@:4.1+libceed')
    depends_on('libceed@0.7:0.8', when='@4.2.0+libceed')
    depends_on('libceed@0.8:0.9', when='@4.3.0+libceed')
    depends_on('libceed@0.10.1:', when='@4.4.0:+libceed')
    for sm_ in CudaPackage.cuda_arch_values:
        depends_on('libceed+cuda cuda_arch={0}'.format(sm_),
                   when='+libceed+cuda cuda_arch={0}'.format(sm_))

@@ -282,7 +326,8 @@ class Mfem(Package, CudaPackage, ROCmPackage):
        depends_on('libceed+rocm amdgpu_target={0}'.format(gfx),
                   when='+libceed+rocm amdgpu_target={0}'.format(gfx))

    depends_on('umpire@2.0.0:', when='+umpire')
    depends_on('umpire@2.0.0:2.1.0', when='@:4.3.0+umpire')
    depends_on('umpire@3.0.0:', when='@4.4.0:+umpire')
    for sm_ in CudaPackage.cuda_arch_values:
        depends_on('umpire+cuda cuda_arch={0}'.format(sm_),
                   when='+umpire+cuda cuda_arch={0}'.format(sm_))

@@ -469,12 +514,23 @@ def find_optional_library(name, prefix):
                '-ccbin %s' % (spec['mpi'].mpicxx if '+mpi' in spec
                               else env['CXX'])]
            if self.spec.satisfies('@4.0.0:'):
                cxxflags.append(self.compiler.cxx11_flag)
                if '+cuda' in spec:
                    cxxflags.append('-std=c++11')
                else:
                    cxxflags.append(self.compiler.cxx11_flag)
        # The cxxflags are set by the spack c++ compiler wrapper. We also
        # set CXXFLAGS explicitly, for clarity, and to properly export the
        # cxxflags in the variable MFEM_CXXFLAGS in config.mk.
        options += ['CXXFLAGS=%s' % ' '.join(cxxflags)]

        # Treat any 'CXXFLAGS' in the environment as extra c++ flags which are
        # handled through the 'CPPFLAGS' makefile variable in MFEM. Also, unset
        # 'CXXFLAGS' from the environment to prevent it from overriding the
        # defaults.
        if 'CXXFLAGS' in env:
            options += ['CPPFLAGS=%s' % env['CXXFLAGS']]
            del env['CXXFLAGS']

        if '~static' in spec:
            options += ['STATIC=NO']
        if '+shared' in spec:

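The CPPFLAGS hand-off added above is easy to miss: MFEM's makefile treats CXXFLAGS as a full override of its own compile flags, so extra C++ flags found in the build environment are passed through CPPFLAGS instead, and the environment variable is removed so make never sees both. A small plain-Python illustration of that hand-off (the dict 'env' is a stand-in for the real build environment, not Spack's API):

# Illustration only: keep recipe-chosen flags in CXXFLAGS and route
# externally injected flags through CPPFLAGS so neither clobbers the other.
env = {'CXXFLAGS': '-march=native'}      # flags injected from outside
cxxflags = ['-O3', '-std=c++11']         # flags chosen by the recipe

options = ['CXXFLAGS=%s' % ' '.join(cxxflags)]
if 'CXXFLAGS' in env:
    options += ['CPPFLAGS=%s' % env['CXXFLAGS']]
    del env['CXXFLAGS']                  # make must not see both copies

print(options)  # ['CXXFLAGS=-O3 -std=c++11', 'CPPFLAGS=-march=native']
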
@@ -488,9 +544,17 @@ def find_optional_library(name, prefix):
            # The hypre package always links with 'blas' and 'lapack'.
            all_hypre_libs = hypre.libs + hypre['lapack'].libs + \
                hypre['blas'].libs
            hypre_gpu_libs = ''
            if '+cuda' in hypre:
                hypre_gpu_libs = ' -lcusparse -lcurand'
            elif '+rocm' in hypre:
                hypre_gpu_libs = ' ' + \
                    ld_flags_from_dirs([env['ROCM_PATH'] + '/lib'],
                                       ['rocsparse', 'rocrand'])
            options += [
                'HYPRE_OPT=-I%s' % hypre.prefix.include,
                'HYPRE_LIB=%s' % ld_flags_from_library_list(all_hypre_libs)]
                'HYPRE_LIB=%s%s' %
                (ld_flags_from_library_list(all_hypre_libs), hypre_gpu_libs)]

        if '+metis' in spec:
            options += [

@@ -559,13 +623,20 @@ def find_optional_library(name, prefix):
            if '+butterflypack' in strumpack:
                bp = strumpack['butterflypack']
                sp_opt += ['-I%s' % bp.prefix.include]
                sp_lib += [ld_flags_from_dirs([bp.prefix.lib],
                                              ['dbutterflypack',
                                               'zbutterflypack'])]
                bp_libs = find_libraries(['libdbutterflypack',
                                          'libzbutterflypack'],
                                         bp.prefix,
                                         shared=('+shared' in bp),
                                         recursive=True)
                sp_lib += [ld_flags_from_library_list(bp_libs)]
            if '+zfp' in strumpack:
                zfp = strumpack['zfp']
                sp_opt += ['-I%s' % zfp.prefix.include]
                sp_lib += [ld_flags_from_dirs([zfp.prefix.lib], ['zfp'])]
                zfp_lib = find_libraries('libzfp',
                                         zfp.prefix,
                                         shared=('+shared' in zfp),
                                         recursive=True)
                sp_lib += [ld_flags_from_library_list(zfp_lib)]
            if '+cuda' in strumpack:
                # assuming also ('+cuda' in spec)
                sp_lib += ['-lcusolver', '-lcublas']

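The substitution in this hunk replaces hard-coded '-L<prefix>/lib -l<name>' flags (ld_flags_from_dirs) with a find_libraries lookup, which locates the actual library files and therefore respects whether the dependency was built shared or static and whether it installs under lib/ or lib64/. A hedged sketch of the lookup against a generic dependency spec; the '.ld_flags' property shown at the end is Spack's LibraryList API, whereas the mfem recipe feeds the list to its own ld_flags_from_library_list helper:

# Sketch only: locate a dependency's libraries instead of guessing their path.
dep = spec['zfp']
dep_libs = find_libraries('libzfp',                  # one name or a list of names
                          dep.prefix,                # root of the search
                          shared=('+shared' in dep), # pick .so or .a accordingly
                          recursive=True)            # also look under lib/, lib64/, ...
link_flags = dep_libs.ld_flags  # e.g. '-L<prefix>/lib -lzfp'
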
@@ -682,7 +753,10 @@ def find_optional_library(name, prefix):
                                           ['occa'])]

        if '+raja' in spec:
            options += ['RAJA_OPT=-I%s' % spec['raja'].prefix.include,
            raja_opt = '-I%s' % spec['raja'].prefix.include
            if spec['raja'].satisfies('^camp'):
                raja_opt += ' -I%s' % spec['camp'].prefix.include
            options += ['RAJA_OPT=%s' % raja_opt,
                        'RAJA_LIB=%s' %
                        ld_flags_from_dirs([spec['raja'].prefix.lib],
                                           ['RAJA'])]

@@ -811,6 +885,9 @@ def install(self, spec, prefix):
    def cache_test_sources(self):
        """Copy the example source files after the package is installed to an
        install test subdirectory for use during `spack test run`."""
        # Clean the 'examples' directory -- at least one example is always built
        # and we do not want to cache executables.
        make('examples/clean', parallel=False)
        self.cache_extra_test_sources([self.examples_src_dir,
                                       self.examples_data_dir])

@@ -3,8 +3,12 @@
# Set a compiler to test with, e.g. '%gcc', '%clang', etc.
compiler=''
cuda_arch="70"
rocm_arch="gfx908"

mfem='mfem'${compiler}
spack_jobs=''
# spack_jobs='-j 128'

mfem='mfem@4.4.0'${compiler}
mfem_dev='mfem@develop'${compiler}

backends='+occa+raja+libceed'

@@ -14,47 +18,43 @@ backends_specs='^occa~cuda ^raja~openmp'
hdf5_spec='^hdf5@1.8.19:1.8'
# petsc spec
petsc_spec='^petsc+suite-sparse+mumps'
# strumpack spec without cuda
strumpack_spec='^strumpack~slate~openmp~cuda'
strumpack_cuda_spec='^strumpack~slate~openmp'
petsc_spec_cuda='^petsc+cuda+suite-sparse+mumps'
# strumpack spec without cuda (use @master until version > 6.3.1 is released)
strumpack_spec='^strumpack@master~slate~openmp~cuda'
strumpack_cuda_spec='^strumpack@master~slate~openmp'
strumpack_rocm_spec='^strumpack+rocm~slate~openmp~cuda'

builds=(
    # preferred version:
    ${mfem}
    ${mfem}'~mpi~metis~zlib'
    ${mfem}"$backends"'+superlu-dist+strumpack+suite-sparse+petsc+slepc \
        +sundials+pumi+gslib+mpfr+netcdf+zlib+gnutls+libunwind+conduit \
        '"$backends_specs $petsc_spec $strumpack_spec $hdf5_spec"
    # TODO: add back '+gslib' when the gslib test is fixed.
    # TODO: add back '+slepc' when its build is fixed.
    ${mfem}"$backends"'+superlu-dist+strumpack+suite-sparse+petsc \
        +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \
        '"$backends_specs $strumpack_spec $petsc_spec $hdf5_spec"
    ${mfem}'~mpi \
        '"$backends"'+suite-sparse+sundials+gslib+mpfr+netcdf \
        +zlib+gnutls+libunwind+conduit'" $backends_specs $hdf5_spec"
    # develop version:
        +zlib+gnutls+libunwind+conduit \
        '"$backends_specs $hdf5_spec"' ^sundials~mpi'

    # develop version, shared builds:
    ${mfem_dev}'+shared~static'
    ${mfem_dev}'+shared~static~mpi~metis~zlib'

    # NOTE: Shared build with +gslib works on mac but not on linux
    # FIXME: As of 2020/11/03 the next config fails in PETSc ex5p:
    # ${mfem_dev}'+shared~static \
    # '"$backends"'+superlu-dist+strumpack+suite-sparse+petsc \
    # +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \
    # '"$backends_specs $petsc_spec $strumpack_spec $hdf5_spec"
    # Removing just petsc works:
    # TODO: add back '+gslib' when the gslib test is fixed and the above NOTE
    # is addressed.
    # TODO: add back '+slepc' when its build is fixed.
    ${mfem_dev}'+shared~static \
        '"$backends"'+superlu-dist+strumpack+suite-sparse \
        '"$backends"'+superlu-dist+strumpack+suite-sparse+petsc \
        +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \
        '"$backends_specs $strumpack_spec $hdf5_spec"
    # Removing just strumpack works on linux, fails on mac:
    # ${mfem_dev}'+shared~static \
    # '"$backends"'+superlu-dist+suite-sparse+petsc \
    # +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \
    # '"$backends_specs $petsc_spec $hdf5_spec"
    # Petsc and strumpack: fails on linux and mac in PETSc ex5p:
    # ${mfem_dev}'+shared~static +strumpack+petsc \
    # '$petsc_spec $strumpack_spec"

        '"$backends_specs $strumpack_spec $petsc_spec $hdf5_spec"
    # NOTE: Shared build with +gslib works on mac but not on linux
    # TODO: add back '+gslib' when the above NOTE is addressed.
    ${mfem_dev}'+shared~static~mpi \
        '"$backends"'+suite-sparse+sundials+mpfr+netcdf \
        +zlib+gnutls+libunwind+conduit'" $backends_specs $hdf5_spec"
        +zlib+gnutls+libunwind+conduit \
        '"$backends_specs $hdf5_spec"' ^sundials~mpi'
)

builds2=(

@@ -64,10 +64,11 @@ builds2=(
    ${mfem}'+strumpack'" $strumpack_spec"
    ${mfem}'+suite-sparse~mpi'
    ${mfem}'+suite-sparse'
    ${mfem}'+sundials~mpi'
    ${mfem}'+sundials~mpi ^sundials~mpi'
    ${mfem}'+sundials'
    ${mfem}'+pumi'
    ${mfem}'+gslib'
    # TODO: uncomment the next line when the gslib test is fixed.
    # ${mfem}'+gslib'
    ${mfem}'+netcdf~mpi'
    ${mfem}'+netcdf'
    ${mfem}'+mpfr'

@@ -76,17 +77,21 @@ builds2=(
    ${mfem}'+conduit'
    ${mfem}'+umpire'
    ${mfem}'+petsc'" $petsc_spec"
    ${mfem}'+petsc+slepc'" $petsc_spec"
    # TODO: uncomment the next line when the slepc build is fixed.
    # ${mfem}'+petsc+slepc'" $petsc_spec"
    # TODO: uncomment the next line when the threadsafe build is fixed.
    # ${mfem}'+threadsafe'
    # develop version
    ${mfem_dev}"$backends $backends_specs"
    ${mfem_dev}'+superlu-dist'
    ${mfem_dev}'+strumpack'" $strumpack_spec"
    ${mfem_dev}'+suite-sparse~mpi'
    ${mfem_dev}'+suite-sparse'
    ${mfem_dev}'+sundials~mpi'
    ${mfem_dev}'+sundials~mpi ^sundials~mpi'
    ${mfem_dev}'+sundials'
    ${mfem_dev}'+pumi'
    ${mfem_dev}'+gslib'
    # TODO: uncomment the next line when the gslib test is fixed.
    # ${mfem_dev}'+gslib'
    ${mfem_dev}'+netcdf~mpi'
    ${mfem_dev}'+netcdf'
    ${mfem_dev}'+mpfr'

@@ -96,31 +101,126 @@ builds2=(
    ${mfem_dev}'+umpire'
    ${mfem_dev}'+petsc'" $petsc_spec"
    ${mfem_dev}'+petsc+slepc'" $petsc_spec"
    # TODO: uncomment the next line when the threadsafe build is fixed.
    # ${mfem_dev}'+threadsafe'
)


builds_cuda=(
    # hypre without cuda:
    ${mfem}'+cuda cuda_arch='"${cuda_arch}"

    ${mfem}'+cuda+raja+occa+libceed cuda_arch='"${cuda_arch}"' \
        ^raja+cuda~openmp'
    # hypre with cuda:
    ${mfem}'+cuda cuda_arch='"${cuda_arch} ^hypre+cuda"

    ${mfem}'+cuda+openmp+raja+occa+libceed cuda_arch='"${cuda_arch}"' \
        +superlu-dist+strumpack+suite-sparse+petsc+slepc \
        +sundials+pumi+gslib+mpfr+netcdf+zlib+gnutls+libunwind+conduit \
        ^raja+cuda+openmp'" $strumpack_cuda_spec $petsc_spec $hdf5_spec"
    # hypre with cuda:
    # TODO: restore '+libceed' when the libCEED CUDA unit tests take less time.
    ${mfem}'+cuda+raja+occa cuda_arch='"${cuda_arch}"' \
        ^raja+cuda~openmp ^hypre+cuda'

    # hypre without cuda:
    # TODO: restore '+libceed' when the libCEED CUDA unit tests take less time.
    # TODO: add back '+gslib' when the gslib test is fixed.
    # TODO: restore '+superlu-dist' when the unit test is fixed.
    # TODO: add back "+petsc+slepc $petsc_spec_cuda" when it works.
    ${mfem}'+cuda+openmp+raja+occa cuda_arch='"${cuda_arch}"' \
        +strumpack+suite-sparse \
        +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \
        ^raja+cuda+openmp'" $strumpack_cuda_spec"' \
        '"$hdf5_spec"

    # hypre with cuda:
    # TODO: restore '+libceed' when the libCEED CUDA unit tests take less time.
    # TODO: add back '+gslib' when the gslib test is fixed.
    # TODO: restore '+superlu-dist' when we support it with '^hypre+cuda'.
    # TODO: add back "+strumpack $strumpack_cuda_spec" when it's supported.
    # TODO: add back "+petsc+slepc $petsc_spec_cuda" when it works.
    # TODO: add back "+sundials" when it's supported with '^hypre+cuda'.
    ${mfem}'+cuda+openmp+raja+occa cuda_arch='"${cuda_arch}"' \
        +suite-sparse \
        +pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \
        ^raja+cuda+openmp ^hypre+cuda \
        '"$hdf5_spec"

    #
    # same builds as above with ${mfem_dev}
    #

    # hypre without cuda:
    ${mfem_dev}'+cuda cuda_arch='"${cuda_arch}"

    ${mfem_dev}'+cuda+raja+occa+libceed cuda_arch='"${cuda_arch}"' \
        ^raja+cuda~openmp'
    # hypre with cuda:
    ${mfem_dev}'+cuda cuda_arch='"${cuda_arch} ^hypre+cuda"

    # add '^sundials+hypre' to help the concretizer
    ${mfem_dev}'+cuda+openmp+raja+occa+libceed cuda_arch='"${cuda_arch}"' \
        +superlu-dist+strumpack+suite-sparse+petsc+slepc \
        +sundials+pumi+gslib+mpfr+netcdf+zlib+gnutls+libunwind+conduit \
        ^raja+cuda+openmp'" $strumpack_cuda_spec $petsc_spec"' \
        ^sundials+hypre'" $hdf5_spec"
    # hypre with cuda:
    # TODO: restore '+libceed' when the libCEED CUDA unit tests take less time.
    ${mfem_dev}'+cuda+raja+occa cuda_arch='"${cuda_arch}"' \
        ^raja+cuda~openmp ^hypre+cuda'

    # hypre without cuda:
    # TODO: restore '+libceed' when the libCEED CUDA unit tests take less time.
    # TODO: add back '+gslib' when the gslib test is fixed.
    # TODO: restore '+superlu-dist' when the unit test is fixed.
    # TODO: add back "+petsc+slepc $petsc_spec_cuda" when it works.
    ${mfem_dev}'+cuda+openmp+raja+occa cuda_arch='"${cuda_arch}"' \
        +strumpack+suite-sparse \
        +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \
        ^raja+cuda+openmp'" $strumpack_cuda_spec"' \
        '"$hdf5_spec"

    # hypre with cuda:
    # TODO: restore '+libceed' when the libCEED CUDA unit tests take less time.
    # TODO: add back '+gslib' when the gslib test is fixed.
    # TODO: restore '+superlu-dist' when we support it with '^hypre+cuda'.
    # TODO: add back "+strumpack $strumpack_cuda_spec" when it's supported.
    # TODO: add back "+petsc+slepc $petsc_spec_cuda" when it works.
    # TODO: add back "+sundials" when it's supported with '^hypre+cuda'.
    ${mfem_dev}'+cuda+openmp+raja+occa cuda_arch='"${cuda_arch}"' \
        +suite-sparse \
        +pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \
        ^raja+cuda+openmp ^hypre+cuda \
        '"$hdf5_spec"
)


builds_rocm=(
    # hypre without rocm:
    ${mfem}'+rocm amdgpu_target='"${rocm_arch}"

    # hypre with rocm:
    ${mfem}'+rocm amdgpu_target='"${rocm_arch} ^hypre+rocm"

    # hypre with rocm:
    ${mfem}'+rocm+raja+occa+libceed amdgpu_target='"${rocm_arch}"' \
        ^raja+rocm~openmp ^occa~cuda ^hypre+rocm'

    # hypre without rocm:
    # TODO: add back '+gslib' when the gslib test is fixed.
    # TODO: restore '+superlu-dist' when the unit test is fixed.
    # TODO: add "+petsc+slepc $petsc_spec_rocm" when it is supported.
    ${mfem}'+rocm+openmp+raja+occa+libceed amdgpu_target='"${rocm_arch}"' \
        +strumpack+suite-sparse \
        +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \
        ^raja+rocm~openmp ^occa~cuda'" $strumpack_rocm_spec"' \
        '"$hdf5_spec"

    # hypre with rocm:
    # TODO: add back '+gslib' when the gslib test is fixed.
    # TODO: restore '+superlu-dist' when we support it with '^hypre+rocm'.
    # TODO: add back "+strumpack $strumpack_rocm_spec" when it's supported.
    # TODO: add back "+petsc+slepc $petsc_spec_rocm" when it works.
    # TODO: add back "+sundials" when it's supported with '^hypre+rocm'.
    ${mfem}'+rocm+openmp+raja+occa+libceed amdgpu_target='"${rocm_arch}"' \
        +suite-sparse \
        +pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \
        ^raja+rocm~openmp ^occa~cuda ^hypre+rocm \
        '"$hdf5_spec"

    #
    # same builds as above with ${mfem_dev}
    #

    # TODO
)

@@ -131,6 +231,7 @@ sep='--------------------------------------------------------------------------'

run_builds=("${builds[@]}" "${builds2[@]}")
# run_builds=("${builds_cuda[@]}")
# run_builds=("${builds_rocm[@]}")

for bld in "${run_builds[@]}"; do
    printf "\n%s\n" "${SEP}"
@@ -139,7 +240,7 @@ for bld in "${run_builds[@]}"; do
    eval bbb="\"${bld}\""
    spack spec -I $bbb || exit 1
    printf "%s\n" "${sep}"
    spack install --test=root $bbb || exit 2
    spack install $spack_jobs --test=root $bbb || exit 2
done

# Uninstall all mfem builds:

@@ -199,6 +199,16 @@ def write_makefile_inc(self):
        if using_xlf:
            optf.append('-qfixed')

        # With gfortran >= 10 we need to add '-fallow-argument-mismatch'. This
        # check handles mixed toolchains which are not handled by the method
        # 'flag_handler' defined below.
        # TODO: remove 'flag_handler' since this check covers that case too?
        if os.path.basename(spack_fc) == 'gfortran':
            gfortran = Executable(spack_fc)
            gfort_ver = Version(gfortran('-dumpversion', output=str).strip())
            if gfort_ver >= Version('10'):
                optf.append('-fallow-argument-mismatch')

        # As of version 5.2.0, MUMPS is able to take advantage
        # of the GEMMT BLAS extension. MKL and amdblis are the only
        # known BLAS implementations supported.

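The gfortran check added above probes the compiler binary itself rather than trusting the toolchain named in the spec, which is what makes it work for mixed toolchains. A rough, standalone Python equivalent of the probe is sketched below; it uses subprocess instead of Spack's Executable/Version helpers, and the FC environment variable is only a stand-in for spack_fc:

import os
import subprocess

def needs_allow_argument_mismatch(fc_path):
    """Return True if fc_path looks like gfortran 10 or newer."""
    if os.path.basename(fc_path) != 'gfortran':
        return False
    out = subprocess.run([fc_path, '-dumpversion'],
                         capture_output=True, text=True, check=True)
    major = int(out.stdout.strip().split('.')[0])
    return major >= 10

optf = []
fc = os.environ.get('FC', 'gfortran')
try:
    if needs_allow_argument_mismatch(fc):
        optf.append('-fallow-argument-mismatch')
except (OSError, subprocess.CalledProcessError, ValueError):
    pass  # no usable gfortran found; leave the flag list alone
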
@@ -383,10 +383,24 @@ def configure_options(self):
        options = ['--with-ssl=0',
                   '--download-c2html=0',
                   '--download-sowing=0',
                   '--download-hwloc=0',
                   'CFLAGS=%s' % ' '.join(spec.compiler_flags['cflags']),
                   'FFLAGS=%s' % ' '.join(spec.compiler_flags['fflags']),
                   'CXXFLAGS=%s' % ' '.join(spec.compiler_flags['cxxflags'])]
                   '--download-hwloc=0']
        # If 'cflags', 'fflags', and/or 'cxxflags' are not set, let the PETSc
        # configuration script choose defaults.
        if spec.compiler_flags['cflags']:
            options += [
                'CFLAGS=%s' % ' '.join(spec.compiler_flags['cflags'])]
            if '+debug' not in spec:
                options += ['COPTFLAGS=']
        if spec.compiler_flags['fflags']:
            options += [
                'FFLAGS=%s' % ' '.join(spec.compiler_flags['fflags'])]
            if '+debug' not in spec:
                options += ['FOPTFLAGS=']
        if spec.compiler_flags['cxxflags']:
            options += [
                'CXXFLAGS=%s' % ' '.join(spec.compiler_flags['cxxflags'])]
            if '+debug' not in spec:
                options += ['CXXOPTFLAGS=']
        options.extend(self.mpi_dependent_options())
        options.extend([
            '--with-precision=%s' % (

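The restructured block only passes CFLAGS/FFLAGS/CXXFLAGS to PETSc's configure when the user actually set them, and in that case it also blanks the matching *OPTFLAGS for non-debug builds so configure does not append its own '-g -O' defaults on top of the user's flags. A condensed, runnable sketch of that decision table in plain Python; 'compiler_flags' and 'debug' are stand-ins for the concretized spec:

# Illustrative sketch of the flag-selection logic above.
compiler_flags = {'cflags': ['-O2'], 'fflags': [], 'cxxflags': []}
debug = False

options = []
for lang, flags_var, opt_var in [('cflags', 'CFLAGS', 'COPTFLAGS'),
                                 ('fflags', 'FFLAGS', 'FOPTFLAGS'),
                                 ('cxxflags', 'CXXFLAGS', 'CXXOPTFLAGS')]:
    flags = compiler_flags[lang]
    if not flags:
        continue                         # let configure pick its own defaults
    options.append('%s=%s' % (flags_var, ' '.join(flags)))
    if not debug:
        options.append('%s=' % opt_var)  # user flags win; drop '-g -O'

print(options)  # ['CFLAGS=-O2', 'COPTFLAGS=']
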
@@ -398,10 +412,6 @@ def configure_options(self):
            '--with-openmp=%s' % ('1' if '+openmp' in spec else '0'),
            '--with-64-bit-indices=%s' % ('1' if '+int64' in spec else '0')
        ])
        if '+debug' not in spec:
            options.extend(['COPTFLAGS=',
                            'FOPTFLAGS=',
                            'CXXOPTFLAGS='])

        # Make sure we use exactly the same Blas/Lapack libraries
        # across the DAG. To that end list them explicitly

@@ -616,8 +626,9 @@ def headers(self):
    def setup_build_tests(self):
        """Copy the build test files after the package is installed to an
        install test subdirectory for use during `spack test run`."""
        self.cache_extra_test_sources('src/ksp/ksp/tutorials')
        self.cache_extra_test_sources('src/snes/tutorials')
        if self.spec.satisfies('@3.13:'):
            self.cache_extra_test_sources('src/ksp/ksp/tutorials')
            self.cache_extra_test_sources('src/snes/tutorials')

    def test(self):
        # solve Poisson equation in 2D to make sure nothing is broken:

@@ -86,7 +86,7 @@ class Strumpack(CMakePackage, CudaPackage, ROCmPackage):
    depends_on('slate+rocm', when='+rocm+slate')
    for val in ROCmPackage.amdgpu_targets:
        depends_on('slate amdgpu_target={0}'.format(val),
                   when='amdgpu_target={0}'.format(val))
                   when='+slate amdgpu_target={0}'.format(val))

    conflicts('+parmetis', when='~mpi')
    conflicts('+butterflypack', when='~mpi')

@@ -340,6 +340,9 @@ def cmake_args(self):
                define('HYPRE_INCLUDE_DIR', spec['hypre'].prefix.include),
                define('HYPRE_LIBRARY_DIR', spec['hypre'].prefix.lib)
            ])
            if not spec['hypre'].variants['shared'].value:
                hypre_libs = spec['blas'].libs + spec['lapack'].libs
                args.extend([define('HYPRE_LIBRARIES', hypre_libs.joined(';'))])

        # Building with KLU
        if '+klu' in spec:
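The sundials hunk deals with linking against a static hypre: a static libHYPRE does not carry its BLAS/LAPACK dependencies with it, so they are spelled out through HYPRE_LIBRARIES. A hedged sketch of the idea, assuming Spack's spec lookups, the CMakePackage define helper and the LibraryList '.joined()' method; it restates the pattern rather than the exact sundials recipe:

# Sketch only: when the hypre dependency is static, hand its transitive
# BLAS/LAPACK libraries to CMake explicitly.
define = self.define  # CMakePackage helper that renders '-DVAR=value'
args = [
    define('HYPRE_INCLUDE_DIR', spec['hypre'].prefix.include),
    define('HYPRE_LIBRARY_DIR', spec['hypre'].prefix.lib),
]
if not spec['hypre'].variants['shared'].value:
    hypre_libs = spec['blas'].libs + spec['lapack'].libs
    args.append(define('HYPRE_LIBRARIES', hypre_libs.joined(';')))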