Merge branch 'develop' into features/shared

Conflicts:
	share/spack/spack-completion.bash
This commit is contained in:
Carson Woods
2020-06-24 10:59:49 -06:00
21 changed files with 317 additions and 66 deletions

View File

@@ -4167,16 +4167,23 @@ want to clean up the temporary directory, or if the package isn't
downloading properly, you might want to run *only* the ``fetch`` stage
of the build.
Spack performs best-effort installation of package dependencies by default,
which means it will continue to install as many dependencies as possible
after detecting failures. If you are trying to install a package with a
lot of dependencies where one or more may fail to build, you might want to
try the ``--fail-fast`` option to stop the installation process on the first
failure.
A typical package workflow might look like this:
.. code-block:: console
$ spack edit mypackage
$ spack install mypackage
$ spack install --fail-fast mypackage
... build breaks! ...
$ spack clean mypackage
$ spack edit mypackage
$ spack install mypackage
$ spack install --fail-fast mypackage
... repeat clean/install until install works ...
Below are some commands that will allow you some finer-grained

View File

@@ -162,6 +162,18 @@ def clean_environment():
if 'PKGCONF' in varname:
env.unset(varname)
# Unset the following variables because they can affect installation of
# Autotools and CMake packages.
build_system_vars = [
'CC', 'CFLAGS', 'CPP', 'CPPFLAGS', # C variables
'CXX', 'CCC', 'CXXFLAGS', 'CXXCPP', # C++ variables
'F77', 'FFLAGS', 'FLIBS', # Fortran77 variables
'FC', 'FCFLAGS', 'FCLIBS', # Fortran variables
'LDFLAGS', 'LIBS' # linker variables
]
for v in build_system_vars:
env.unset(v)
build_lang = spack.config.get('config:build_language')
if build_lang:
# Override language-related variables. This can be used to force

View File

@@ -32,6 +32,7 @@ def update_kwargs_from_args(args, kwargs):
that will be passed to Package.do_install API"""
kwargs.update({
'fail_fast': args.fail_fast,
'keep_prefix': args.keep_prefix,
'keep_stage': args.keep_stage,
'restage': not args.dont_restage,
@@ -80,6 +81,9 @@ def setup_parser(subparser):
subparser.add_argument(
'--overwrite', action='store_true',
help="reinstall an existing spec, even if it has dependents")
subparser.add_argument(
'--fail-fast', action='store_true',
help="stop all builds if any build fails (default is best effort)")
subparser.add_argument(
'--keep-prefix', action='store_true',
help="don't remove the install prefix if installation fails")

View File

@@ -289,15 +289,18 @@ def refresh(module_type, specs, args):
msg = 'Nothing to be done for {0} module files.'
tty.msg(msg.format(module_type))
return
# If we arrived here we have at least one writer
module_type_root = writers[0].layout.dirname()
spack.modules.common.generate_module_index(module_type_root, writers)
# Proceed regenerating module files
tty.msg('Regenerating {name} module files'.format(name=module_type))
if os.path.isdir(module_type_root) and args.delete_tree:
shutil.rmtree(module_type_root, ignore_errors=False)
filesystem.mkdirp(module_type_root)
# Dump module index after potentially removing module tree
spack.modules.common.generate_module_index(
module_type_root, writers, overwrite=args.delete_tree)
for x in writers:
try:
x.write(overwrite=True)

View File

@@ -355,11 +355,13 @@ def _get_compiler_link_paths(self, paths):
for flag_type in flags:
for flag in self.flags.get(flag_type, []):
compiler_exe.add_default_arg(flag)
output = ''
with self._compiler_environment():
output = str(compiler_exe(
self.verbose_flag, fin, '-o', fout,
output=str, error=str)) # str for py2
return _parse_non_system_link_dirs(output)
return _parse_non_system_link_dirs(output)
except spack.util.executable.ProcessError as pe:
tty.debug('ProcessError: Command exited with non-zero status: ' +
pe.long_message)
@@ -549,24 +551,27 @@ def _compiler_environment(self):
# store environment to replace later
backup_env = os.environ.copy()
# load modules and set env variables
for module in self.modules:
# On cray, mic-knl module cannot be loaded without cce module
# See: https://github.com/spack/spack/issues/3153
if os.environ.get("CRAY_CPU_TARGET") == 'mic-knl':
spack.util.module_cmd.load_module('cce')
spack.util.module_cmd.load_module(module)
try:
# load modules and set env variables
for module in self.modules:
# On cray, mic-knl module cannot be loaded without cce module
# See: https://github.com/spack/spack/issues/3153
if os.environ.get("CRAY_CPU_TARGET") == 'mic-knl':
spack.util.module_cmd.load_module('cce')
spack.util.module_cmd.load_module(module)
# apply other compiler environment changes
env = spack.util.environment.EnvironmentModifications()
env.extend(spack.schema.environment.parse(self.environment))
env.apply_modifications()
# apply other compiler environment changes
env = spack.util.environment.EnvironmentModifications()
env.extend(spack.schema.environment.parse(self.environment))
env.apply_modifications()
yield
# Restore environment
os.environ.clear()
os.environ.update(backup_env)
yield
except BaseException:
raise
finally:
# Restore environment regardless of whether inner code succeeded
os.environ.clear()
os.environ.update(backup_env)
class CompilerAccessError(spack.error.SpackError):

View File

@@ -549,6 +549,9 @@ def package_id(pkg):
dirty (bool): Don't clean the build environment before installing.
explicit (bool): True if package was explicitly installed, False
if package was implicitly installed (as a dependency).
fail_fast (bool): Fail if any dependency fails to install;
otherwise, the default is to install as many dependencies as
possible (i.e., best effort installation).
fake (bool): Don't really build; install fake stub files instead.
force (bool): Install again, even if already installed.
install_deps (bool): Install dependencies before installing this
@@ -1385,11 +1388,14 @@ def install(self, **kwargs):
Args:"""
fail_fast = kwargs.get('fail_fast', False)
install_deps = kwargs.get('install_deps', True)
keep_prefix = kwargs.get('keep_prefix', False)
keep_stage = kwargs.get('keep_stage', False)
restage = kwargs.get('restage', False)
fail_fast_err = 'Terminating after first install failure'
# install_package defaults True and is popped so that dependencies are
# always installed regardless of whether the root was installed
install_package = kwargs.pop('install_package', True)
@@ -1449,6 +1455,10 @@ def install(self, **kwargs):
if pkg_id in self.failed or spack.store.db.prefix_failed(spec):
tty.warn('{0} failed to install'.format(pkg_id))
self._update_failed(task)
if fail_fast:
raise InstallError(fail_fast_err)
continue
# Attempt to get a write lock. If we can't get the lock then
@@ -1530,14 +1540,28 @@ def install(self, **kwargs):
self._update_installed(task)
raise
except (Exception, KeyboardInterrupt, SystemExit) as exc:
# Assuming best effort installs so suppress the exception and
# mark as a failure UNLESS this is the explicit package.
except KeyboardInterrupt as exc:
# The build has been terminated with a Ctrl-C so terminate.
err = 'Failed to install {0} due to {1}: {2}'
tty.error(err.format(pkg.name, exc.__class__.__name__,
str(exc)))
raise
except (Exception, SystemExit) as exc:
# Best effort installs suppress the exception and mark the
# package as a failure UNLESS this is the explicit package.
err = 'Failed to install {0} due to {1}: {2}'
tty.error(err.format(pkg.name, exc.__class__.__name__,
str(exc)))
self._update_failed(task, True, exc)
if fail_fast:
# The user requested the installation to terminate on
# failure.
raise InstallError('{0}: {1}'
.format(fail_fast_err, str(exc)))
if pkg_id == self.pkg_id:
raise

View File

@@ -222,8 +222,15 @@ def root_path(name):
return spack.util.path.canonicalize_path(path)
def generate_module_index(root, modules):
entries = syaml.syaml_dict()
def generate_module_index(root, modules, overwrite=False):
index_path = os.path.join(root, 'module-index.yaml')
if overwrite or not os.path.exists(index_path):
entries = syaml.syaml_dict()
else:
with open(index_path) as index_file:
yaml_content = syaml.load(index_file)
entries = yaml_content['module_index']
for m in modules:
entry = {
'path': m.layout.filename,
@@ -231,7 +238,6 @@ def generate_module_index(root, modules):
}
entries[m.spec.dag_hash()] = entry
index = {'module_index': entries}
index_path = os.path.join(root, 'module-index.yaml')
llnl.util.filesystem.mkdirp(root)
with open(index_path, 'w') as index_file:
syaml.dump(index, default_flow_style=False, stream=index_file)

View File

@@ -14,6 +14,7 @@
module = spack.main.SpackCommand('module')
#: make sure module files are generated for all the tests here
@pytest.fixture(scope='module', autouse=True)
def ensure_module_files_are_there(
@@ -168,10 +169,10 @@ def test_loads_recursive_blacklisted(database, module_configuration):
output = module('lmod', 'loads', '-r', 'mpileaks ^mpich')
lines = output.split('\n')
assert any(re.match(r'[^#]*module load.*mpileaks', l) for l in lines)
assert not any(re.match(r'[^#]module load.*callpath', l) for l in lines)
assert any(re.match(r'## blacklisted or missing.*callpath', l)
for l in lines)
assert any(re.match(r'[^#]*module load.*mpileaks', ln) for ln in lines)
assert not any(re.match(r'[^#]module load.*callpath', ln) for ln in lines)
assert any(re.match(r'## blacklisted or missing.*callpath', ln)
for ln in lines)
# TODO: currently there is no way to separate stdout and stderr when
# invoking a SpackCommand. Supporting this requires refactoring

View File

@@ -18,6 +18,7 @@
import spack.compilers as compilers
from spack.compiler import Compiler
from spack.util.executable import ProcessError
@pytest.fixture()
@@ -653,3 +654,62 @@ def module(*args):
compiler = compilers[0]
version = compiler.get_real_version()
assert version == test_version
def test_compiler_get_real_version_fails(working_env, monkeypatch, tmpdir):
    """A failing compiler invocation must not leak environment changes.

    ``get_real_version`` loads the compiler's modules and applies its
    environment before invoking the compiler; if the invocation raises,
    the original environment must be restored.
    """
    test_version = '2.2.2'

    # Fake compiler script: only reports a version once "turned on".
    gcc = str(tmpdir.join('gcc'))
    with open(gcc, 'w') as f:
        f.write("""#!/bin/bash
if [[ $CMP_ON == "1" ]]; then
echo "$CMP_VER"
fi
""")
    fs.set_executable(gcc)

    # Register the fake compiler via a configuration-style entry.
    compiler_dict = {'compiler': {
        'spec': 'gcc@foo',
        'paths': {
            'cc': gcc,
            'cxx': None,
            'f77': None,
            'fc': None,
        },
        'flags': {},
        'operating_system': 'fake',
        'target': 'fake',
        'modules': ['turn_on'],
        'environment': {
            'set': {'CMP_VER': test_version},
        },
        'extra_rpaths': [],
    }}

    def module(*args):
        # 'load' plants a marker variable in the environment; the final
        # assertion verifies it was cleaned up after the failure.
        if args[0] == 'show':
            return ''
        elif args[0] == 'load':
            os.environ['SPACK_TEST_CMP_ON'] = "1"

    monkeypatch.setattr(spack.util.module_cmd, 'module', module)

    # Force the compiler invocation itself to fail.
    def _call(*args, **kwargs):
        raise ProcessError("Failed intentionally")

    monkeypatch.setattr(spack.util.executable.Executable, '__call__', _call)

    compilers = spack.compilers.get_compilers([compiler_dict])
    assert len(compilers) == 1

    try:
        _ = compilers[0].get_real_version()
        assert False
    except ProcessError:
        # Confirm the environment does not change after the failed call.
        assert 'SPACK_TEST_CMP_ON' not in os.environ

View File

@@ -718,6 +718,61 @@ def test_install_failed(install_mockery, monkeypatch, capsys):
assert 'Warning: b failed to install' in out
def test_install_fail_on_interrupt(install_mockery, monkeypatch):
    """A Ctrl-C during a build must terminate the whole installation."""
    err_msg = 'mock keyboard interrupt'

    spec, installer = create_installer('a')

    def _mock_interrupt(installer, task, **kwargs):
        # Simulate the user hitting Ctrl-C inside an install task.
        raise KeyboardInterrupt(err_msg)

    # Installing any task now triggers early termination.
    monkeypatch.setattr(inst.PackageInstaller, '_install_task', _mock_interrupt)

    with pytest.raises(KeyboardInterrupt, match=err_msg):
        installer.install()
def test_install_fail_fast_on_detect(install_mockery, monkeypatch, capsys):
    """fail_fast must abort as soon as a prior failure is detected."""
    spec, installer = create_installer('a')

    # Flag every prefix as failed.  This prevents dependency b from
    # installing, which in turn causes the build of a to be skipped.
    monkeypatch.setattr(spack.database.Database, 'prefix_failed', _true)

    with pytest.raises(spack.installer.InstallError):
        installer.install(fail_fast=True)

    assert 'Skipping build of a' in str(capsys.readouterr())
def test_install_fail_fast_on_except(install_mockery, monkeypatch, capsys):
    """Test fail_fast install when an install failure results from an error."""
    err_msg = 'mock patch failure'

    def _patch(installer, task, **kwargs):
        # Simulate a build-time error during the patch phase.
        raise RuntimeError(err_msg)

    spec, installer = create_installer('a')

    # Raise a non-KeyboardInterrupt exception to trigger fast failure.
    #
    # This will prevent b from installing, which will cause the build of a
    # to be skipped.
    monkeypatch.setattr(spack.package.PackageBase, 'do_patch', _patch)

    # BUG FIX: pytest.raises takes ``match=``, not ``matches=``.  The bogus
    # keyword meant the raised message was never actually verified (and
    # newer pytest rejects unknown keywords outright).
    with pytest.raises(spack.installer.InstallError, match=err_msg):
        installer.install(fail_fast=True)

    out = str(capsys.readouterr())
    assert 'Skipping build of a' in out
def test_install_lock_failures(install_mockery, monkeypatch, capfd):
"""Cover basic install lock failure handling in a single pass."""
def _requeued(installer, task):

View File

@@ -236,6 +236,7 @@ def test_module_index(
w1, s1 = factory('mpileaks')
w2, s2 = factory('callpath')
w3, s3 = factory('openblas')
test_root = str(tmpdir_factory.mktemp('module-root'))
@@ -246,6 +247,22 @@ def test_module_index(
assert index[s1.dag_hash()].use_name == w1.layout.use_name
assert index[s2.dag_hash()].path == w2.layout.filename
spack.modules.common.generate_module_index(test_root, [w3])
index = spack.modules.common.read_module_index(test_root)
assert len(index) == 3
assert index[s1.dag_hash()].use_name == w1.layout.use_name
assert index[s2.dag_hash()].path == w2.layout.filename
spack.modules.common.generate_module_index(
test_root, [w3], overwrite=True)
index = spack.modules.common.read_module_index(test_root)
assert len(index) == 1
assert index[s3.dag_hash()].use_name == w3.layout.use_name
def test_suffixes(self, module_configuration, factory):
"""Tests adding suffixes to module file name."""
module_configuration('suffix')

View File

@@ -27,9 +27,29 @@ def test_chmod_real_entries_ignores_suid_sgid(tmpdir):
def test_chmod_rejects_group_writable_suid(tmpdir):
path = str(tmpdir.join('file').ensure())
mode = stat.S_ISUID | stat.S_ISGID | stat.S_ISVTX
mode = stat.S_ISUID
fs.chmod_x(path, mode)
perms = stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO
perms = stat.S_IWGRP
with pytest.raises(InvalidPermissionsError):
set_permissions(path, perms)
def test_chmod_rejects_world_writable_suid(tmpdir):
    """Setting world-write permissions on an suid file must be refused."""
    target = str(tmpdir.join('file').ensure())

    # Mark the file suid, then attempt to make it world writable.
    fs.chmod_x(target, stat.S_ISUID)
    with pytest.raises(InvalidPermissionsError):
        set_permissions(target, stat.S_IWOTH)
def test_chmod_rejects_world_writable_sgid(tmpdir):
    """Setting world-write permissions on an sgid file must be refused."""
    target = str(tmpdir.join('file').ensure())

    # Mark the file sgid, then attempt to make it world writable.
    fs.chmod_x(target, stat.S_ISGID)
    with pytest.raises(InvalidPermissionsError):
        set_permissions(target, stat.S_IWOTH)

View File

@@ -337,7 +337,8 @@ def test_make_elf_binaries_relative(hello_world, copy_binary, tmpdir):
[str(new_binary)], [str(orig_binary)], str(orig_binary.dirpath())
)
assert rpaths_for(new_binary) == '$ORIGIN/lib:$ORIGIN/lib64:/opt/local/lib'
# Some compilers add rpaths so ensure changes included in final result
assert '$ORIGIN/lib:$ORIGIN/lib64:/opt/local/lib' in rpaths_for(new_binary)
def test_raise_if_not_relocatable(monkeypatch):

View File

@@ -376,7 +376,7 @@ def unset(self, name, **kwargs):
"""Stores a request to unset an environment variable.
Args:
name: name of the environment variable to be set
name: name of the environment variable to be unset
"""
kwargs.update(self._get_outside_caller_attributes())
item = UnsetEnv(name, **kwargs)

View File

@@ -25,10 +25,19 @@ def set_permissions(path, perms, group=None):
# Preserve higher-order bits of file permissions
perms |= os.stat(path).st_mode & (st.S_ISUID | st.S_ISGID | st.S_ISVTX)
# Do not let users create world writable suid binaries
if perms & st.S_ISUID and perms & st.S_IWGRP:
raise InvalidPermissionsError(
"Attepting to set suid with world writable")
# Do not let users create world/group writable suid binaries
if perms & st.S_ISUID:
if perms & st.S_IWOTH:
raise InvalidPermissionsError(
"Attempting to set suid with world writable")
if perms & st.S_IWGRP:
raise InvalidPermissionsError(
"Attempting to set suid with group writable")
# Or world writable sgid binaries
if perms & st.S_ISGID:
if perms & st.S_IWOTH:
raise InvalidPermissionsError(
"Attempting to set sgid with world writable")
fs.chmod_x(path, perms)

View File

@@ -962,7 +962,7 @@ _spack_info() {
_spack_install() {
if $list_options
then
SPACK_COMPREPLY="-h --help --only -u --until -j --jobs --overwrite --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --no-check-signature --show-log-on-error --source -n --no-checksum -v --verbose --fake --only-concrete -f --file --upstream -g --global --clean --dirty --test --run-tests --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all"
SPACK_COMPREPLY="-h --help --only -u --until -j --jobs --overwrite --fail-fast --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --no-check-signature --show-log-on-error --source -n --no-checksum -v --verbose --fake --only-concrete -f --file --clean --dirty --test --run-tests --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all"
else
_all_packages
fi

View File

@@ -73,9 +73,12 @@ class Gromacs(CMakePackage):
depends_on('mpi', when='+mpi')
depends_on('plumed+mpi', when='+plumed+mpi')
depends_on('plumed~mpi', when='+plumed~mpi')
depends_on('fftw')
depends_on('fftw+mpi', when='+mpi')
depends_on('fftw~mpi', when='~mpi')
depends_on('cmake@2.8.8:3.99.99', type='build')
depends_on('cmake@3.4.3:3.99.99', type='build', when='@2018:')
depends_on('cmake@3.13.0:3.99.99', type='build', when='@master')
depends_on('cmake@3.13.0:3.99.99', type='build', when='%fj')
depends_on('cuda', when='+cuda')
# TODO: openmpi constraint; remove when concretizer is fixed

View File

@@ -112,7 +112,8 @@ class Mysql(CMakePackage):
depends_on('rpcsvc-proto')
depends_on('ncurses')
depends_on('openssl')
depends_on('libtirpc', when='@5.7.0:')
depends_on('libtirpc', when='@5.7.0: platform=linux')
depends_on('libedit', type=['build', 'run'])
depends_on('perl', type=['build', 'test'], when='@:7.99.99')
depends_on('bison@2.1:', type='build')
depends_on('m4', type='build', when='@develop platform=solaris')
@@ -130,6 +131,11 @@ def cmake_args(self):
options.append('-DBOOST_ROOT={0}'.format(spec['boost'].prefix))
if '+client_only' in self.spec:
options.append('-DWITHOUT_SERVER:BOOL=ON')
options.append('-DWITH_EDITLINE=system')
options.append('-Dlibedit_INCLUDE_DIR={0}'.format(
spec['libedit'].prefix.include))
options.append('-Dlibedit_LIBRARY={0}'.format(
spec['libedit'].libs.directories[0]))
return options
def _fix_dtrace_shebang(self, env):

View File

@@ -30,12 +30,16 @@ class Perl(Package): # Perl doesn't use Autotools, it should subclass Package
# see http://www.cpan.org/src/README.html for
# explanation of version numbering scheme
# Maintenance releases (even numbers, recommended)
version('5.32.0', sha256='efeb1ce1f10824190ad1cadbcccf6fdb8a5d37007d0100d2d9ae5f2b5900c0b4')
# Development releases (odd numbers)
version('5.31.7', sha256='d05c4e72128f95ef6ffad42728ecbbd0d9437290bf0f88268b51af011f26b57d')
version('5.31.4', sha256='418a7e6fe6485cc713a86d1227ef112f0bb3f80322e3b715ffe42851d97804a5')
# Maintenance releases (even numbers, recommended)
version('5.30.2', sha256='66db7df8a91979eb576fac91743644da878244cf8ee152f02cd6f5cd7a731689', preferred=True)
version('5.30.3', sha256='32e04c8bb7b1aecb2742a7f7ac0eabac100f38247352a73ad7fa104e39e7406f', preferred=True)
version('5.30.2', sha256='66db7df8a91979eb576fac91743644da878244cf8ee152f02cd6f5cd7a731689')
version('5.30.1', sha256='bf3d25571ff1ee94186177c2cdef87867fd6a14aa5a84f0b1fb7bf798f42f964')
version('5.30.0', sha256='851213c754d98ccff042caa40ba7a796b2cee88c5325f121be5cbb61bbf975f2')

View File

@@ -224,6 +224,19 @@ def install(self, spec, prefix):
prefix_path = prefix.bin if '@:5.4.0' in spec else prefix
options = ['-prefix={0}'.format(prefix_path)]
# This additional flag is needed anytime the target architecture
# does not match the host architecture, which results in a binary that
# configure cannot execute on the login node. This is how we detect
# cross compilation: If the platform is NOT either Linux or Darwin
and the target=backend, then we are in the cross-compile
scenario. This should cover Cray, BG/Q, and other custom platforms.
# The other option is to list out all the platform where you would be
# cross compiling explicitly.
if not (spec.satisfies('platform=linux') or
spec.satisfies('platform=darwin')):
if spec.satisfies('target=backend'):
options.append('--host')
# QE autoconf compiler variables has some limitations:
# 1. There is no explicit MPICC variable so we must re-purpose
# CC for the case of MPI.
@@ -294,7 +307,13 @@ def install(self, spec, prefix):
options.append('BLAS_LIBS={0}'.format(lapack_blas.ld_flags))
if '+scalapack' in spec:
scalapack_option = 'intel' if '^mkl' in spec else 'yes'
if '^mkl' in spec:
if '^openmpi' in spec:
scalapack_option = 'yes'
else: # mpich, intel-mpi
scalapack_option = 'intel'
else:
scalapack_option = 'yes'
options.append('--with-scalapack={0}'.format(scalapack_option))
if '+elpa' in spec:

View File

@@ -8,17 +8,15 @@
class RevocapRefiner(MakefilePackage):
"""The University of Tokyo, CISS Project:
Geometric processing, mesh processing, mesh generation"""
Library for refining of model meshes"""
homepage = "https://github.com/FrontISTR/REVOCAP_Refiner"
git = "https://github.com/FrontISTR/REVOCAP_Refiner.git"
homepage = "https://www.frontistr.com"
url = "https://www.frontistr.com/download/link.php?REVOCAP_Refiner-1.1.04.tar.gz"
# git = "https://gitlab.com/FrontISTR-Commons/REVOCAP_Refiner.git"
version('master', branch='master')
maintainers = ['k-tokunaga', 'kgoto', 'tuna' 'inagaki.kazuhisa']
depends_on('ruby', type='build')
depends_on('mpi')
depends_on('doxygen', type='build')
depends_on('swig', type='build')
version('1.1.04', sha256='bf3d959f4c1ab08a7e99cd7e02e710c758af28d71500f4814eed8b4eb3fb2d13')
parallel = False
@@ -28,35 +26,32 @@ class RevocapRefiner(MakefilePackage):
patch('delete_getIndices.patch')
def edit(self, spec, prefix):
cflags = ['-O']
cxxflags = ['-O', self.compiler.cxx_pic_flag]
fflags = ['']
cflags = ['-O3']
cxxflags = ['-O3', self.compiler.cxx_pic_flag]
ldflags = ['']
ldshare = ['']
libs = ['-lstdc++']
if spec.satisfies('%gcc'):
ldshare.append('g++ -shared -s')
libs = ['']
m = FileFilter('MakefileConfig.in')
m.filter(r'CC\s=.*$', 'CC={0}'.format(spec['mpi'].mpicc))
m.filter(r'CFLAGS\s=.*$', 'CFLAGS={0}'.format(' '.join(cflags)))
m.filter(r'CXX\s*=.*$', 'CXX={0}'.format(spec['mpi'].mpicxx))
m.filter(r'ARCH\s*=.*$', 'ARCH=')
m.filter(r'CC\s*=.*$', 'CC={0}'.format(spack_cc))
m.filter(r'CFLAGS\s*=.*$', 'CFLAGS={0}'.format(' '.join(cflags)))
m.filter(r'CXX\s*=.*$', 'CXX={0}'.format(spack_cxx))
m.filter(r'CXXFLAGS\s*=.*$',
'CXXFLAGS={0}'.format(' '.join(cxxflags)))
m.filter(r'AR\s*=.*$', 'AR=ar')
m.filter(r'ARFLAGS\s*=.*$', 'ARFLAGS=rsv')
m.filter(r'LD\s*=.*$', 'LD={0}'.format(spack_fc))
m.filter(r'LDFLAGS\s*=.*$',
'LDFLAGS={0}'.format(' '.join(fflags)))
'LDFLAGS={0}'.format(' '.join(ldflags)))
m.filter(r'LDSHARE\s*=.*$',
'LDSHARE={0}'.format(' '.join(ldshare)))
m.filter(r'LIBS\s*=.*$', 'LIBS={0}'.format(' '.join(libs)))
m.filter(r'LIBPATH\s*=.*$', 'LIBPATH= ')
m.filter(r'RM\s*=.*$', 'RM=rm -f')
m.filter(r'DOXYGEN\s*=.*$', 'DOXYGEN=doxygen')
m.filter(r'TAR\s*=.*$', 'TAR=tar')
m.filter(r'SWIG\s*=.*$', 'SWIG=swig')
def install(self, spec, prefix):
make()
install_tree('bin', prefix.bin)
install_tree('lib', prefix.lib)
install_tree('Refiner', prefix.include.refine)
install_tree('Refiner', prefix.include)