Refactor IntelInstaller into IntelPackage base class (#4300)
* Refactor IntelInstaller into IntelPackage base class
* Move license attributes from __init__ to class-level
* Flake8 fixes: remove unused imports
* Fix logic that writes the silent.cfg file
* More specific version numbers for Intel MPI
* Rework logic that selects components to install
* Final changes necessary to get intel package working
* Various updates to intel-parallel-studio
* Add latest version of every Intel package
* Add environment variables for Intel packages
* Update env vars for intel package
* Finalize components for intel-parallel-studio package

  Adds a +tbb variant to intel-parallel-studio. The tbb package was renamed to
  intel-tbb. Now both intel-tbb and intel-parallel-studio+tbb provide tbb.

* Overhaul environment variables set by intel-parallel-studio
* Point dependent packages to the correct MPI wrappers
* Never default to intel-parallel-studio
* Gather env vars by sourcing setup scripts
* Use mpiicc instead of mpicc when using Intel compiler
* Undo change to ARCH
* Add changes from intel-mpi to intel-parallel-studio
* Add comment explaining mpicc vs mpiicc
* Prepend env vars containing 'PATH' or separators
* Flake8 fix
* Fix bugs in from_sourcing_file
* Indentation fix
* Prepend, not set if contains separator
* Fix license symlinking broken by changes to intel-parallel-studio
* Use comments instead of docstrings to document attributes
* Flake8 fixes
* Use a set instead of a list to prevent duplicate components
* Fix MKL and MPI library linking directories
* Remove +all variant from intel-parallel-studio
* It is not possible to build with MKL, GCC, and OpenMP at this time
* Found a workaround for locating GCC libraries
* Typos and variable names
* Fix initialization of empty LibraryList
parent ad8c60239f
commit db657d938d
@@ -19,13 +19,13 @@ packages:
  providers:
    awk: [gawk]
    blas: [openblas]
    daal: [intel-parallel-studio+daal]
    daal: [intel-daal]
    elf: [elfutils]
    golang: [gcc]
    ipp: [intel-parallel-studio+ipp]
    ipp: [intel-ipp]
    java: [jdk]
    lapack: [openblas]
    mkl: [intel-parallel-studio+mkl]
    mkl: [intel-mkl]
    mpe: [mpe2]
    mpi: [openmpi, mpich]
    opencl: [pocl]
@@ -33,3 +33,4 @@ packages:
    pil: [py-pillow]
    scalapack: [netlib-scalapack]
    szip: [libszip, libaec]
    tbb: [intel-tbb]
@@ -2136,6 +2136,9 @@ The classes that are currently provided by Spack are:
| :py:class:`.PerlPackage`      | Specialized class for            |
|                               | :py:class:`.Perl` extensions     |
+-------------------------------+----------------------------------+
| :py:class:`.IntelPackage`     | Specialized class for licensed   |
|                               | Intel software                   |
+-------------------------------+----------------------------------+


.. note::
@@ -178,6 +178,7 @@
from spack.build_systems.python import PythonPackage
from spack.build_systems.r import RPackage
from spack.build_systems.perl import PerlPackage
from spack.build_systems.intel import IntelPackage

__all__ += [
    'run_before',
@@ -193,6 +194,7 @@
    'PythonPackage',
    'RPackage',
    'PerlPackage',
    'IntelPackage',
]

from spack.version import Version, ver
@@ -229,7 +229,7 @@ def set_build_environment_variables(pkg, env, dirty=False):
    # Install root prefix
    env.set(SPACK_INSTALL, spack.store.root)

    # Stuff in here sanitizes the build environemnt to eliminate
    # Stuff in here sanitizes the build environment to eliminate
    # anything the user has set that may interfere.
    if not dirty:
        # Remove these vars from the environment during build because they
@@ -518,7 +518,7 @@ def fork(pkg, function, dirty=False):

    Args:

        pkg (PackageBase): package whose environemnt we should set up the
        pkg (PackageBase): package whose environment we should set up the
            forked process for.
        function (callable): argless function to run in the child
            process.
lib/spack/spack/build_systems/intel.py (new file, 192 lines)
@@ -0,0 +1,192 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################

import os
import xml.etree.ElementTree as ET

from llnl.util.filesystem import install, join_path
from spack.package import PackageBase, run_after
from spack.util.executable import Executable


def _valid_components():
    """A generator that yields valid components."""

    tree = ET.parse('pset/mediaconfig.xml')
    root = tree.getroot()

    components = root.findall('.//Abbr')
    for component in components:
        yield component.text


class IntelPackage(PackageBase):
    """Specialized class for licensed Intel software.

    This class provides two phases that can be overridden:

    1. :py:meth:`~.IntelPackage.configure`
    2. :py:meth:`~.IntelPackage.install`

    They both have sensible defaults and for many packages the
    only thing necessary will be to override ``setup_environment``
    to set the appropriate environment variables.
    """
    #: Phases of an Intel package
    phases = ['configure', 'install']

    #: This attribute is used in UI queries that need to know the build
    #: system base class
    build_system_class = 'IntelPackage'

    #: By default, we assume that all Intel software requires a license.
    #: This can be overridden for packages that do not require a license.
    license_required = True

    #: Comment symbol used in the ``license.lic`` file
    license_comment = '#'

    #: Location where Intel searches for a license file
    license_files = ['Licenses/license.lic']

    #: Environment variables that Intel searches for a license file
    license_vars = ['INTEL_LICENSE_FILE']

    #: URL providing information on how to acquire a license key
    license_url = 'https://software.intel.com/en-us/articles/intel-license-manager-faq'

    #: Components of the package to install.
    #: By default, install 'ALL' components.
    components = ['ALL']

    @property
    def _filtered_components(self):
        """Returns a list or set of valid components that match
        the requested components from ``components``."""

        # Don't filter 'ALL'
        if self.components == ['ALL']:
            return self.components

        # mediaconfig.xml is known to contain duplicate components.
        # If more than one copy of the same component is used, you
        # will get an error message about invalid components.
        # Use a set to store components to prevent duplicates.
        matches = set()

        for valid in _valid_components():
            for requested in self.components:
                if valid.startswith(requested):
                    matches.add(valid)

        return matches

    @property
    def global_license_file(self):
        """Returns the path where a global license file should be stored.

        All Intel software shares the same license, so we store it in a
        common 'intel' directory."""
        return join_path(self.global_license_dir, 'intel',
                         os.path.basename(self.license_files[0]))

    def configure(self, spec, prefix):
        """Writes the ``silent.cfg`` file used to configure the installation.

        See https://software.intel.com/en-us/articles/configuration-file-format
        """
        # Patterns used to check silent configuration file
        #
        # anythingpat - any string
        # filepat - the file location pattern (/path/to/license.lic)
        # lspat - the license server address pattern (0123@hostname)
        # snpat - the serial number pattern (ABCD-01234567)
        config = {
            # Accept EULA, valid values are: {accept, decline}
            'ACCEPT_EULA': 'accept',

            # Optional error behavior, valid values are: {yes, no}
            'CONTINUE_WITH_OPTIONAL_ERROR': 'yes',

            # Install location, valid values are: {/opt/intel, filepat}
            'PSET_INSTALL_DIR': prefix,

            # Continue with overwrite of existing installation directory,
            # valid values are: {yes, no}
            'CONTINUE_WITH_INSTALLDIR_OVERWRITE': 'yes',

            # List of components to install,
            # valid values are: {ALL, DEFAULTS, anythingpat}
            'COMPONENTS': ';'.join(self._filtered_components),

            # Installation mode, valid values are: {install, repair, uninstall}
            'PSET_MODE': 'install',

            # Directory for non-RPM database, valid values are: {filepat}
            'NONRPM_DB_DIR': prefix,

            # Perform validation of digital signatures of RPM files,
            # valid values are: {yes, no}
            'SIGNING_ENABLED': 'no',

            # Select target architecture of your applications,
            # valid values are: {IA32, INTEL64, ALL}
            'ARCH_SELECTED': 'ALL',
        }

        # Not all Intel software requires a license. Trying to specify
        # one anyway will cause the installation to fail.
        if self.license_required:
            config.update({
                # License file or license server,
                # valid values are: {lspat, filepat}
                'ACTIVATION_LICENSE_FILE': self.global_license_file,

                # Activation type, valid values are: {exist_lic,
                # license_server, license_file, trial_lic, serial_number}
                'ACTIVATION_TYPE': 'license_file',

                # Intel(R) Software Improvement Program opt-in,
                # valid values are: {yes, no}
                'PHONEHOME_SEND_USAGE_DATA': 'no',
            })

        with open('silent.cfg', 'w') as cfg:
            for key in config:
                cfg.write('{0}={1}\n'.format(key, config[key]))

    def install(self, spec, prefix):
        """Runs the ``install.sh`` installation script."""

        install_script = Executable('./install.sh')
        install_script('--silent', 'silent.cfg')

    @run_after('install')
    def save_silent_cfg(self):
        """Copies the silent.cfg configuration file to ``<prefix>/.spack``."""
        install('silent.cfg', join_path(self.prefix, '.spack'))

    # Check that self.prefix is there after installation
    run_after('install')(PackageBase.sanity_check_prefix)
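The base class above is what the individual Intel packages later in this commit subclass. As an illustration only (the package name, URL, and checksum below are placeholders, not part of this change), a minimal subclass that keeps the default configure/install phases and only overrides ``setup_environment`` might look like this:

from spack import *


class IntelExample(IntelPackage):
    """Hypothetical licensed Intel product built with IntelPackage."""

    homepage = "https://software.intel.com/"
    url = "http://registrationcenter-download.intel.com/example/l_example_2017.tgz"

    version('2017.1.000', '00000000000000000000000000000000')

    def setup_environment(self, spack_env, run_env):
        # Expose the install prefix to users of the generated module file.
        run_env.set('EXAMPLE_ROOT', self.prefix)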
@@ -41,6 +41,7 @@
    QMakePackage: 'qmake',
    WafPackage: 'configure',
    PerlPackage: 'configure',
    IntelPackage: 'configure',
}
@@ -370,6 +370,15 @@ def edit(self, spec, prefix):
    # makefile.filter('CC = .*', 'CC = cc')"""


class IntelPackageTemplate(PackageTemplate):
    """Provides appropriate overrides for licensed Intel software"""

    base_class_name = 'IntelPackage'

    body = """\
    # FIXME: Override `setup_environment` if necessary."""


templates = {
    'autotools': AutotoolsPackageTemplate,
    'autoreconf': AutoreconfPackageTemplate,
@@ -384,6 +393,7 @@ def edit(self, spec, prefix):
    'perlbuild': PerlbuildPackageTemplate,
    'octave': OctavePackageTemplate,
    'makefile': MakefilePackageTemplate,
    'intel': IntelPackageTemplate,
    'generic': PackageTemplate,
}
@ -284,132 +284,157 @@ def apply_modifications(self):
|
||||
x.execute()
|
||||
|
||||
@staticmethod
|
||||
def from_sourcing_files(*args, **kwargs):
|
||||
"""Returns modifications that would be made by sourcing files.
|
||||
def from_sourcing_file(filename, *args, **kwargs):
|
||||
"""Returns modifications that would be made by sourcing a file.
|
||||
|
||||
Args:
|
||||
*args (list of str): list of files to be sourced
|
||||
Parameters:
|
||||
filename (str): The file to source
|
||||
*args (list of str): Arguments to pass on the command line
|
||||
|
||||
Keyword Arguments:
|
||||
shell (str): The shell to use (default: ``bash``)
|
||||
shell_options (str): Options passed to the shell (default: ``-c``)
|
||||
source_command (str): The command to run (default: ``source``)
|
||||
suppress_output (str): Redirect used to suppress output of command
|
||||
(default: ``&> /dev/null``)
|
||||
concatenate_on_success (str): Operator used to execute a command
|
||||
only when the previous command succeeds (default: ``&&``)
|
||||
|
||||
Returns:
|
||||
EnvironmentModifications: an object that, if executed, has
|
||||
the same effect on the environment as sourcing the files
|
||||
passed as parameters
|
||||
the same effect on the environment as sourcing the file
|
||||
"""
|
||||
env = EnvironmentModifications()
|
||||
# Check if the file actually exists
|
||||
if not os.path.isfile(filename):
|
||||
msg = 'Trying to source non-existing file: {0}'.format(filename)
|
||||
raise RuntimeError(msg)
|
||||
|
||||
# Check if the files are actually there
|
||||
files = [line.split(' ')[0] for line in args]
|
||||
non_existing = [file for file in files if not os.path.isfile(file)]
|
||||
if non_existing:
|
||||
message = 'trying to source non-existing files\n'
|
||||
message += '\n'.join(non_existing)
|
||||
raise RuntimeError(message)
|
||||
# Kwargs parsing and default values
|
||||
shell = kwargs.get('shell', '/bin/bash')
|
||||
shell_options = kwargs.get('shell_options', '-c')
|
||||
source_command = kwargs.get('source_command', 'source')
|
||||
suppress_output = kwargs.get('suppress_output', '&> /dev/null')
|
||||
concatenate_on_success = kwargs.get('concatenate_on_success', '&&')
|
||||
|
||||
# Relevant kwd parameters and formats
|
||||
info = dict(kwargs)
|
||||
info.setdefault('shell', '/bin/bash')
|
||||
info.setdefault('shell_options', '-c')
|
||||
info.setdefault('source_command', 'source')
|
||||
info.setdefault('suppress_output', '&> /dev/null')
|
||||
info.setdefault('concatenate_on_success', '&&')
|
||||
source_file = [source_command, filename]
|
||||
source_file.extend(args)
|
||||
source_file = ' '.join(source_file)
|
||||
|
||||
shell = '{shell}'.format(**info)
|
||||
shell_options = '{shell_options}'.format(**info)
|
||||
source_file = '{source_command} {file} {concatenate_on_success}'
|
||||
|
||||
dump_cmd = "import os, json; print(json.dumps(dict(os.environ)))"
|
||||
dump_environment = 'python -c "%s"' % dump_cmd
|
||||
dump_cmd = 'import os, json; print(json.dumps(dict(os.environ)))'
|
||||
dump_environment = 'python -c "{0}"'.format(dump_cmd)
|
||||
|
||||
# Construct the command that will be executed
|
||||
command = [source_file.format(file=file, **info) for file in args]
|
||||
command.append(dump_environment)
|
||||
command = ' '.join(command)
|
||||
command = [
|
||||
shell,
|
||||
shell_options,
|
||||
command
|
||||
' '.join([
|
||||
source_file, suppress_output,
|
||||
concatenate_on_success, dump_environment,
|
||||
]),
|
||||
]
|
||||
|
||||
# Try to source all the files,
|
||||
# Try to source the file
|
||||
proc = subprocess.Popen(
|
||||
command, stdout=subprocess.PIPE, env=os.environ)
|
||||
proc.wait()
|
||||
|
||||
if proc.returncode != 0:
|
||||
raise RuntimeError('sourcing files returned a non-zero exit code')
|
||||
msg = 'Sourcing file {0} returned a non-zero exit code'.format(
|
||||
filename)
|
||||
raise RuntimeError(msg)
|
||||
|
||||
output = ''.join([line.decode('utf-8') for line in proc.stdout])
|
||||
|
||||
# Construct a dictionaries of the environment before and after
|
||||
# sourcing the files, so that we can diff them.
|
||||
this_environment = dict(os.environ)
|
||||
after_source_env = json.loads(output)
|
||||
# Construct dictionaries of the environment before and after
|
||||
# sourcing the file, so that we can diff them.
|
||||
env_before = dict(os.environ)
|
||||
env_after = json.loads(output)
|
||||
|
||||
# If we're in python2, convert to str objects instead of unicode
|
||||
# like json gives us. We can't put unicode in os.environ anyway.
|
||||
if sys.version_info[0] < 3:
|
||||
after_source_env = dict((k.encode('utf-8'), v.encode('utf-8'))
|
||||
for k, v in after_source_env.items())
|
||||
env_after = dict((k.encode('utf-8'), v.encode('utf-8'))
|
||||
for k, v in env_after.items())
|
||||
|
||||
# Filter variables that are not related to sourcing a file
|
||||
to_be_filtered = 'SHLVL', '_', 'PWD', 'OLDPWD'
|
||||
for d in after_source_env, this_environment:
|
||||
to_be_filtered = 'SHLVL', '_', 'PWD', 'OLDPWD', 'PS2'
|
||||
for d in env_after, env_before:
|
||||
for name in to_be_filtered:
|
||||
d.pop(name, None)
|
||||
|
||||
# Fill the EnvironmentModifications instance
|
||||
env = EnvironmentModifications()
|
||||
|
||||
# New variables
|
||||
new_variables = set(after_source_env) - set(this_environment)
|
||||
for x in new_variables:
|
||||
env.set(x, after_source_env[x])
|
||||
new_variables = set(env_after) - set(env_before)
|
||||
# Variables that have been unset
|
||||
unset_variables = set(this_environment) - set(after_source_env)
|
||||
for x in unset_variables:
|
||||
env.unset(x)
|
||||
unset_variables = set(env_before) - set(env_after)
|
||||
# Variables that have been modified
|
||||
common_variables = set(
|
||||
this_environment).intersection(set(after_source_env))
|
||||
env_before).intersection(set(env_after))
|
||||
modified_variables = [x for x in common_variables
|
||||
if this_environment[x] != after_source_env[x]]
|
||||
if env_before[x] != env_after[x]]
|
||||
|
||||
def return_separator_if_any(first_value, second_value):
|
||||
def return_separator_if_any(*args):
|
||||
separators = ':', ';'
|
||||
for separator in separators:
|
||||
if separator in first_value and separator in second_value:
|
||||
for arg in args:
|
||||
if separator in arg:
|
||||
return separator
|
||||
return None
|
||||
|
||||
for x in modified_variables:
|
||||
current = this_environment[x]
|
||||
modified = after_source_env[x]
|
||||
sep = return_separator_if_any(current, modified)
|
||||
if sep is None:
|
||||
# We just need to set the variable to the new value
|
||||
env.set(x, after_source_env[x])
|
||||
# Add variables to env.
|
||||
# Assume that variables with 'PATH' in the name or that contain
|
||||
# separators like ':' or ';' are more likely to be paths
|
||||
for x in new_variables:
|
||||
sep = return_separator_if_any(env_after[x])
|
||||
if sep:
|
||||
env.prepend_path(x, env_after[x], separator=sep)
|
||||
elif 'PATH' in x:
|
||||
env.prepend_path(x, env_after[x])
|
||||
else:
|
||||
current_list = current.split(sep)
|
||||
modified_list = modified.split(sep)
|
||||
# We just need to set the variable to the new value
|
||||
env.set(x, env_after[x])
|
||||
|
||||
for x in unset_variables:
|
||||
env.unset(x)
|
||||
|
||||
for x in modified_variables:
|
||||
before = env_before[x]
|
||||
after = env_after[x]
|
||||
sep = return_separator_if_any(before, after)
|
||||
if sep:
|
||||
before_list = before.split(sep)
|
||||
after_list = after.split(sep)
|
||||
|
||||
# Filter out empty strings
|
||||
before_list = list(filter(None, before_list))
|
||||
after_list = list(filter(None, after_list))
|
||||
|
||||
# Paths that have been removed
|
||||
remove_list = [
|
||||
ii for ii in current_list if ii not in modified_list]
|
||||
# Check that nothing has been added in the middle of vurrent
|
||||
# list
|
||||
ii for ii in before_list if ii not in after_list]
|
||||
# Check that nothing has been added in the middle of
|
||||
# before_list
|
||||
remaining_list = [
|
||||
ii for ii in current_list if ii in modified_list]
|
||||
start = modified_list.index(remaining_list[0])
|
||||
end = modified_list.index(remaining_list[-1])
|
||||
search = sep.join(modified_list[start:end + 1])
|
||||
ii for ii in before_list if ii in after_list]
|
||||
try:
|
||||
start = after_list.index(remaining_list[0])
|
||||
end = after_list.index(remaining_list[-1])
|
||||
search = sep.join(after_list[start:end + 1])
|
||||
except IndexError:
|
||||
env.prepend_path(x, env_after[x])
|
||||
|
||||
if search not in current:
|
||||
if search not in before:
|
||||
# We just need to set the variable to the new value
|
||||
env.set(x, after_source_env[x])
|
||||
break
|
||||
env.prepend_path(x, env_after[x])
|
||||
else:
|
||||
try:
|
||||
prepend_list = modified_list[:start]
|
||||
prepend_list = after_list[:start]
|
||||
except KeyError:
|
||||
prepend_list = []
|
||||
try:
|
||||
append_list = modified_list[end + 1:]
|
||||
append_list = after_list[end + 1:]
|
||||
except KeyError:
|
||||
append_list = []
|
||||
|
||||
@ -419,6 +444,9 @@ def return_separator_if_any(first_value, second_value):
|
||||
env.append_path(x, item)
|
||||
for item in prepend_list:
|
||||
env.prepend_path(x, item)
|
||||
else:
|
||||
# We just need to set the variable to the new value
|
||||
env.set(x, env_after[x])
|
||||
|
||||
return env
|
||||
|
||||
|
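The reworked ``from_sourcing_file`` above is what the Intel packages later in this commit use to populate their module files. A short usage sketch, with a placeholder script path:

from spack.environment import EnvironmentModifications

# Collect the changes that `source /path/to/vars.sh intel64` would make,
# without touching the current process environment.
env = EnvironmentModifications.from_sourcing_file('/path/to/vars.sh', 'intel64')

# Apply them to os.environ only when explicitly requested.
env.apply_modifications()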
@ -484,38 +484,65 @@ class SomePackage(Package):
|
||||
#
|
||||
# These are default values for instance variables.
|
||||
#
|
||||
"""By default we build in parallel. Subclasses can override this."""
|
||||
|
||||
#: By default we build in parallel. Subclasses can override this.
|
||||
parallel = True
|
||||
|
||||
"""# jobs to use for parallel make. If set, overrides default of ncpus."""
|
||||
#: # jobs to use for parallel make. If set, overrides default of ncpus.
|
||||
make_jobs = spack.build_jobs
|
||||
|
||||
"""By default do not run tests within package's install()"""
|
||||
#: By default do not run tests within package's install()
|
||||
run_tests = False
|
||||
|
||||
# FIXME: this is a bad object-oriented design, should be moved to Clang.
|
||||
"""By default do not setup mockup XCode on macOS with Clang"""
|
||||
#: By default do not setup mockup XCode on macOS with Clang
|
||||
use_xcode = False
|
||||
|
||||
"""Most packages are NOT extendable. Set to True if you want extensions."""
|
||||
#: Most packages are NOT extendable. Set to True if you want extensions.
|
||||
extendable = False
|
||||
|
||||
"""When True, add RPATHs for the entire DAG. When False, add RPATHs only
|
||||
for immediate dependencies."""
|
||||
#: When True, add RPATHs for the entire DAG. When False, add RPATHs only
|
||||
#: for immediate dependencies.
|
||||
transitive_rpaths = True
|
||||
|
||||
"""List of prefix-relative file paths (or a single path). If these do
|
||||
not exist after install, or if they exist but are not files,
|
||||
sanity checks fail.
|
||||
"""
|
||||
#: List of prefix-relative file paths (or a single path). If these do
|
||||
#: not exist after install, or if they exist but are not files,
|
||||
#: sanity checks fail.
|
||||
sanity_check_is_file = []
|
||||
|
||||
"""List of prefix-relative directory paths (or a single path). If
|
||||
these do not exist after install, or if they exist but are not
|
||||
directories, sanity checks will fail.
|
||||
"""
|
||||
#: List of prefix-relative directory paths (or a single path). If
|
||||
#: these do not exist after install, or if they exist but are not
|
||||
#: directories, sanity checks will fail.
|
||||
sanity_check_is_dir = []
|
||||
|
||||
#
|
||||
# Set default licensing information
|
||||
#
|
||||
|
||||
#: Boolean. If set to ``True``, this software requires a license.
|
||||
#: If set to ``False``, all of the ``license_*`` attributes will
|
||||
#: be ignored. Defaults to ``False``.
|
||||
license_required = False
|
||||
|
||||
#: String. Contains the symbol used by the license manager to denote
|
||||
#: a comment. Defaults to ``#``.
|
||||
license_comment = '#'
|
||||
|
||||
#: List of strings. These are files that the software searches for when
|
||||
#: looking for a license. All file paths must be relative to the
|
||||
#: installation directory. More complex packages like Intel may require
|
||||
#: multiple licenses for individual components. Defaults to the empty list.
|
||||
license_files = []
|
||||
|
||||
#: List of strings. Environment variables that can be set to tell the
|
||||
#: software where to look for a license if it is not in the usual location.
|
||||
#: Defaults to the empty list.
|
||||
license_vars = []
|
||||
|
||||
#: String. A URL pointing to license setup instructions for the software.
|
||||
#: Defaults to the empty string.
|
||||
license_url = ''
|
||||
|
||||
def __init__(self, spec):
|
||||
# this determines how the package should be built.
|
||||
self.spec = spec
|
||||
@ -569,22 +596,6 @@ def __init__(self, spec):
|
||||
if not hasattr(self, 'list_depth'):
|
||||
self.list_depth = 0
|
||||
|
||||
# Set default licensing information
|
||||
if not hasattr(self, 'license_required'):
|
||||
self.license_required = False
|
||||
|
||||
if not hasattr(self, 'license_comment'):
|
||||
self.license_comment = '#'
|
||||
|
||||
if not hasattr(self, 'license_files'):
|
||||
self.license_files = []
|
||||
|
||||
if not hasattr(self, 'license_vars'):
|
||||
self.license_vars = []
|
||||
|
||||
if not hasattr(self, 'license_url'):
|
||||
self.license_url = None
|
||||
|
||||
# Set up some internal variables for timing.
|
||||
self._fetch_time = 0.0
|
||||
self._total_time = 0.0
|
||||
|
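With the licensing defaults now declared at class level, a package that needs a license overrides the attributes directly instead of relying on the removed ``hasattr`` checks in ``__init__``. A hypothetical example (names, URLs, and checksum are illustrative only):

from spack import *


class LicensedTool(Package):
    """Hypothetical package using the class-level licensing attributes."""

    homepage = "https://example.com/licensed-tool"
    url = "https://example.com/licensed-tool-1.0.tar.gz"

    version('1.0', '00000000000000000000000000000000')

    license_required = True
    license_comment = '#'
    license_files = ['license.lic']
    license_vars = ['LICENSED_TOOL_LICENSE_FILE']
    license_url = 'https://example.com/licensing-howto'

    def install(self, spec, prefix):
        # Installation details omitted in this sketch.
        make('install', 'PREFIX={0}'.format(prefix))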
@@ -89,7 +89,7 @@ def files_to_be_sourced():
    files = [
        os.path.join(datadir, 'sourceme_first.sh'),
        os.path.join(datadir, 'sourceme_second.sh'),
        os.path.join(datadir, 'sourceme_parameters.sh intel64'),
        os.path.join(datadir, 'sourceme_parameters.sh'),
        os.path.join(datadir, 'sourceme_unicode.sh')
    ]

@@ -224,7 +224,14 @@ def test_source_files(files_to_be_sourced):
    """Tests the construction of a list of environment modifications that are
    the result of sourcing a file.
    """
    env = EnvironmentModifications.from_sourcing_files(*files_to_be_sourced)
    env = EnvironmentModifications()
    for filename in files_to_be_sourced:
        if filename.endswith('sourceme_parameters.sh'):
            env.extend(EnvironmentModifications.from_sourcing_file(
                filename, 'intel64'))
        else:
            env.extend(EnvironmentModifications.from_sourcing_file(filename))

    modifications = env.group_by_name()

    # This is sensitive to the user's environment; can include
@ -22,18 +22,14 @@
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from spack import *
|
||||
import os
|
||||
|
||||
from spack.pkg.builtin.intel import IntelInstaller
|
||||
from spack import *
|
||||
from spack.environment import EnvironmentModifications
|
||||
|
||||
|
||||
class IntelDaal(IntelInstaller):
|
||||
"""Intel Data Analytics Acceleration Library.
|
||||
|
||||
Note: You will have to add the download file to a
|
||||
mirror so that Spack can find it. For instructions on how to set up a
|
||||
mirror, see http://spack.readthedocs.io/en/latest/mirrors.html"""
|
||||
class IntelDaal(IntelPackage):
|
||||
"""Intel Data Analytics Acceleration Library."""
|
||||
|
||||
homepage = "https://software.intel.com/en-us/daal"
|
||||
|
||||
@ -52,11 +48,35 @@ class IntelDaal(IntelInstaller):
|
||||
|
||||
provides('daal')
|
||||
|
||||
def install(self, spec, prefix):
|
||||
@property
|
||||
def license_required(self):
|
||||
# The Intel libraries are provided without requiring a license as of
|
||||
# version 2017.2. Trying to specify the license will fail. See:
|
||||
# https://software.intel.com/en-us/articles/free-ipsxe-tools-and-libraries
|
||||
if self.version >= Version('2017.2'):
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
self.intel_prefix = os.path.join(prefix, "pkg")
|
||||
IntelInstaller.install(self, spec, prefix)
|
||||
def setup_environment(self, spack_env, run_env):
|
||||
"""Adds environment variables to the generated module file.
|
||||
|
||||
daal_dir = os.path.join(self.intel_prefix, "daal")
|
||||
for f in os.listdir(daal_dir):
|
||||
os.symlink(os.path.join(daal_dir, f), os.path.join(self.prefix, f))
|
||||
These environment variables come from running:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ source daal/bin/daalvars.sh intel64
|
||||
"""
|
||||
# NOTE: Spack runs setup_environment twice, once pre-build to set up
|
||||
# the build environment, and once post-installation to determine
|
||||
# the environment variables needed at run-time to add to the module
|
||||
# file. The script we need to source is only present post-installation,
|
||||
# so check for its existence before sourcing.
|
||||
# TODO: At some point we should split setup_environment into
|
||||
# setup_build_environment and setup_run_environment to get around
|
||||
# this problem.
|
||||
daalvars = os.path.join(self.prefix.daal.bin, 'daalvars.sh')
|
||||
|
||||
if os.path.isfile(daalvars):
|
||||
run_env.extend(EnvironmentModifications.from_sourcing_file(
|
||||
daalvars, 'intel64'))
|
||||
|
@ -22,18 +22,14 @@
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from spack import *
|
||||
import os
|
||||
|
||||
from spack.pkg.builtin.intel import IntelInstaller
|
||||
from spack import *
|
||||
from spack.environment import EnvironmentModifications
|
||||
|
||||
|
||||
class IntelIpp(IntelInstaller):
|
||||
"""Intel Integrated Performance Primitives.
|
||||
|
||||
Note: You will have to add the download file to a
|
||||
mirror so that Spack can find it. For instructions on how to set up a
|
||||
mirror, see http://spack.readthedocs.io/en/latest/mirrors.html"""
|
||||
class IntelIpp(IntelPackage):
|
||||
"""Intel Integrated Performance Primitives."""
|
||||
|
||||
homepage = "https://software.intel.com/en-us/intel-ipp"
|
||||
|
||||
@ -50,11 +46,35 @@ class IntelIpp(IntelInstaller):
|
||||
|
||||
provides('ipp')
|
||||
|
||||
def install(self, spec, prefix):
|
||||
@property
|
||||
def license_required(self):
|
||||
# The Intel libraries are provided without requiring a license as of
|
||||
# version 2017.2. Trying to specify the license will fail. See:
|
||||
# https://software.intel.com/en-us/articles/free-ipsxe-tools-and-libraries
|
||||
if self.version >= Version('2017.2'):
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
self.intel_prefix = os.path.join(prefix, "pkg")
|
||||
IntelInstaller.install(self, spec, prefix)
|
||||
def setup_environment(self, spack_env, run_env):
|
||||
"""Adds environment variables to the generated module file.
|
||||
|
||||
ipp_dir = os.path.join(self.intel_prefix, "ipp")
|
||||
for f in os.listdir(ipp_dir):
|
||||
os.symlink(os.path.join(ipp_dir, f), os.path.join(self.prefix, f))
|
||||
These environment variables come from running:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ source ipp/bin/ippvars.sh intel64
|
||||
"""
|
||||
# NOTE: Spack runs setup_environment twice, once pre-build to set up
|
||||
# the build environment, and once post-installation to determine
|
||||
# the environment variables needed at run-time to add to the module
|
||||
# file. The script we need to source is only present post-installation,
|
||||
# so check for its existence before sourcing.
|
||||
# TODO: At some point we should split setup_environment into
|
||||
# setup_build_environment and setup_run_environment to get around
|
||||
# this problem.
|
||||
ippvars = os.path.join(self.prefix.ipp.bin, 'ippvars.sh')
|
||||
|
||||
if os.path.isfile(ippvars):
|
||||
run_env.extend(EnvironmentModifications.from_sourcing_file(
|
||||
ippvars, 'intel64'))
|
||||
|
@ -22,13 +22,13 @@
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from spack import *
|
||||
import os
|
||||
|
||||
from spack.pkg.builtin.intel import IntelInstaller
|
||||
from spack import *
|
||||
from spack.environment import EnvironmentModifications
|
||||
|
||||
|
||||
class IntelMkl(IntelInstaller):
|
||||
class IntelMkl(IntelPackage):
|
||||
"""Intel Math Kernel Library."""
|
||||
|
||||
homepage = "https://software.intel.com/en-us/intel-mkl"
|
||||
@ -50,12 +50,21 @@ class IntelMkl(IntelInstaller):
|
||||
variant('ilp64', default=False, description='64 bit integers')
|
||||
variant('openmp', default=False, description='OpenMP multithreading layer')
|
||||
|
||||
# virtual dependency
|
||||
provides('blas')
|
||||
provides('lapack')
|
||||
provides('scalapack')
|
||||
provides('mkl')
|
||||
|
||||
@property
|
||||
def license_required(self):
|
||||
# The Intel libraries are provided without requiring a license as of
|
||||
# version 2017.2. Trying to specify the license will fail. See:
|
||||
# https://software.intel.com/en-us/articles/free-ipsxe-tools-and-libraries
|
||||
if self.version >= Version('2017.2'):
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
@property
|
||||
def blas_libs(self):
|
||||
spec = self.spec
|
||||
@ -69,15 +78,27 @@ def blas_libs(self):
|
||||
|
||||
mkl_threading = ['libmkl_sequential']
|
||||
|
||||
omp_libs = LibraryList([])
|
||||
|
||||
if '+openmp' in spec:
|
||||
if '%intel' in spec:
|
||||
mkl_threading = ['libmkl_intel_thread', 'libiomp5']
|
||||
else:
|
||||
mkl_threading = ['libmkl_intel_thread']
|
||||
omp_threading = ['libiomp5']
|
||||
|
||||
omp_root = prefix.compilers_and_libraries.linux.lib.intel64
|
||||
omp_libs = find_libraries(
|
||||
omp_threading, root=omp_root, shared=shared)
|
||||
elif '%gcc' in spec:
|
||||
mkl_threading = ['libmkl_gnu_thread']
|
||||
|
||||
gcc = Executable(self.compiler.cc)
|
||||
libgomp = gcc('--print-file-name', 'libgomp.{0}'.format(
|
||||
dso_suffix), output=str)
|
||||
omp_libs = LibraryList(libgomp)
|
||||
|
||||
# TODO: TBB threading: ['libmkl_tbb_thread', 'libtbb', 'libstdc++']
|
||||
|
||||
mkl_root = join_path(prefix.lib, 'intel64')
|
||||
mkl_root = prefix.compilers_and_libraries.linux.mkl.lib.intel64
|
||||
|
||||
mkl_libs = find_libraries(
|
||||
mkl_integer + ['libmkl_core'] + mkl_threading,
|
||||
@ -91,7 +112,7 @@ def blas_libs(self):
|
||||
shared=shared
|
||||
)
|
||||
|
||||
return mkl_libs + system_libs
|
||||
return mkl_libs + omp_libs + system_libs
|
||||
|
||||
@property
|
||||
def lapack_libs(self):
|
||||
@ -120,30 +141,46 @@ def scalapack_libs(self):
|
||||
elif '^intel-mpi' in root:
|
||||
libnames.append('libmkl_blacs_intelmpi')
|
||||
else:
|
||||
raise InstallError("No MPI found for scalapack")
|
||||
raise InstallError('No MPI found for scalapack')
|
||||
|
||||
shared = True if '+shared' in self.spec else False
|
||||
integer = 'ilp64' if '+ilp64' in self.spec else 'lp64'
|
||||
mkl_root = self.prefix.compilers_and_libraries.linux.mkl.lib.intel64
|
||||
shared = True if '+shared' in self.spec else False
|
||||
|
||||
libs = find_libraries(
|
||||
['{0}_{1}'.format(l, integer) for l in libnames],
|
||||
root=join_path(self.prefix.lib, 'intel64'),
|
||||
root=mkl_root,
|
||||
shared=shared
|
||||
)
|
||||
|
||||
return libs
|
||||
|
||||
def install(self, spec, prefix):
|
||||
self.intel_prefix = os.path.join(prefix, "pkg")
|
||||
IntelInstaller.install(self, spec, prefix)
|
||||
|
||||
mkl_dir = os.path.join(self.intel_prefix, "mkl")
|
||||
for f in os.listdir(mkl_dir):
|
||||
os.symlink(os.path.join(mkl_dir, f), os.path.join(self.prefix, f))
|
||||
|
||||
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
|
||||
# set up MKLROOT for everyone using MKL package
|
||||
mkl_root = self.prefix.compilers_and_libraries.linux.mkl.lib.intel64
|
||||
|
||||
spack_env.set('MKLROOT', self.prefix)
|
||||
spack_env.append_path('SPACK_COMPILER_EXTRA_RPATHS',
|
||||
join_path(self.prefix.lib, 'intel64'))
|
||||
spack_env.append_path('SPACK_COMPILER_EXTRA_RPATHS', mkl_root)
|
||||
|
||||
def setup_environment(self, spack_env, run_env):
|
||||
run_env.set('MKLROOT', self.prefix)
|
||||
"""Adds environment variables to the generated module file.
|
||||
|
||||
These environment variables come from running:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ source mkl/bin/mklvars.sh intel64
|
||||
"""
|
||||
# NOTE: Spack runs setup_environment twice, once pre-build to set up
|
||||
# the build environment, and once post-installation to determine
|
||||
# the environment variables needed at run-time to add to the module
|
||||
# file. The script we need to source is only present post-installation,
|
||||
# so check for its existence before sourcing.
|
||||
# TODO: At some point we should split setup_environment into
|
||||
# setup_build_environment and setup_run_environment to get around
|
||||
# this problem.
|
||||
mklvars = os.path.join(self.prefix.mkl.bin, 'mklvars.sh')
|
||||
|
||||
if os.path.isfile(mklvars):
|
||||
run_env.extend(EnvironmentModifications.from_sourcing_file(
|
||||
mklvars, 'intel64'))
|
||||
|
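Dependent packages consume these libraries through the virtual providers, so they do not need to know whether intel-mkl or another implementation satisfied blas/lapack. A hypothetical dependent, using the ``blas_libs``/``lapack_libs`` properties defined above (package name, URL, and build option are illustrative only):

from spack import *


class ExampleSolver(Package):
    """Hypothetical package linking against whichever blas/lapack provider."""

    homepage = "https://example.com/solver"
    url = "https://example.com/solver-1.0.tar.gz"

    version('1.0', '00000000000000000000000000000000')

    depends_on('blas')
    depends_on('lapack')

    def install(self, spec, prefix):
        # With intel-mkl these resolve to the libraries found under
        # compilers_and_libraries/linux/mkl/lib/intel64.
        lapack_blas = spec['lapack'].lapack_libs + spec['blas'].blas_libs
        args = ['--with-blas-lapack-libs={0}'.format(lapack_blas.joined())]
        # ... pass `args` on to the package's own build system ...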
@ -22,30 +22,41 @@
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from spack import *
|
||||
import os
|
||||
|
||||
from spack.pkg.builtin.intel import IntelInstaller
|
||||
from spack import *
|
||||
from spack.environment import EnvironmentModifications
|
||||
|
||||
|
||||
class IntelMpi(IntelInstaller):
|
||||
class IntelMpi(IntelPackage):
|
||||
"""Intel MPI"""
|
||||
|
||||
homepage = "https://software.intel.com/en-us/intel-mpi-library"
|
||||
|
||||
version('2017.3', '721ecd5f6afa385e038777e5b5361dfb',
|
||||
version('2017.3.196', '721ecd5f6afa385e038777e5b5361dfb',
|
||||
url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11595/l_mpi_2017.3.196.tgz')
|
||||
version('2017.2', 'b6c2e62c3fb9b1558ede72ccf72cf1d6',
|
||||
version('2017.2.174', 'b6c2e62c3fb9b1558ede72ccf72cf1d6',
|
||||
url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11334/l_mpi_2017.2.174.tgz')
|
||||
version('2017.1', 'd5e941ac2bcf7c5576f85f6bcfee4c18',
|
||||
version('2017.1.132', 'd5e941ac2bcf7c5576f85f6bcfee4c18',
|
||||
url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11014/l_mpi_2017.1.132.tgz')
|
||||
version('5.1.3', '4316e78533a932081b1a86368e890800',
|
||||
version('5.1.3.223', '4316e78533a932081b1a86368e890800',
|
||||
url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/9278/l_mpi_p_5.1.3.223.tgz')
|
||||
|
||||
provides('mpi')
|
||||
|
||||
@property
|
||||
def license_required(self):
|
||||
# The Intel libraries are provided without requiring a license as of
|
||||
# version 2017.2. Trying to specify the license will fail. See:
|
||||
# https://software.intel.com/en-us/articles/free-ipsxe-tools-and-libraries
|
||||
if self.version >= Version('2017.2'):
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
@property
|
||||
def mpi_libs(self):
|
||||
mpi_root = self.prefix.compilers_and_libraries.linux.mpi.lib64
|
||||
query_parameters = self.spec.last_query.extra_parameters
|
||||
libraries = ['libmpifort', 'libmpi']
|
||||
|
||||
@ -53,19 +64,15 @@ def mpi_libs(self):
|
||||
libraries = ['libmpicxx'] + libraries
|
||||
|
||||
return find_libraries(
|
||||
libraries, root=self.prefix.lib64, shared=True, recurse=True
|
||||
libraries, root=mpi_root, shared=True, recurse=True
|
||||
)
|
||||
|
||||
@property
|
||||
def mpi_headers(self):
|
||||
# recurse from self.prefix will find too many things for all the
|
||||
# supported sub-architectures like 'mic'
|
||||
return find_headers(
|
||||
'mpi', root=self.prefix.include64, recurse=False)
|
||||
|
||||
def install(self, spec, prefix):
|
||||
self.intel_prefix = prefix
|
||||
IntelInstaller.install(self, spec, prefix)
|
||||
mpi_root = self.prefix.compilers_and_libraries.linux.mpi.include64
|
||||
return find_headers('mpi', root=mpi_root, recurse=False)
|
||||
|
||||
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
|
||||
spack_env.set('I_MPI_CC', spack_cc)
|
||||
@ -75,15 +82,52 @@ def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
|
||||
spack_env.set('I_MPI_FC', spack_fc)
|
||||
|
||||
def setup_dependent_package(self, module, dep_spec):
|
||||
# Check for presence of bin64 or bin directory
|
||||
if os.path.isdir(self.prefix.bin):
|
||||
bindir = self.prefix.bin
|
||||
elif os.path.isdir(self.prefix.bin64):
|
||||
bindir = self.prefix.bin64
|
||||
else:
|
||||
raise RuntimeError('No suitable bindir found')
|
||||
# Intel comes with 2 different flavors of MPI wrappers:
|
||||
#
|
||||
# * mpiicc, mpiicpc, and mpifort are hardcoded to wrap around
|
||||
# the Intel compilers.
|
||||
# * mpicc, mpicxx, mpif90, and mpif77 allow you to set which
|
||||
# compilers to wrap using I_MPI_CC and friends. By default,
|
||||
# wraps around the GCC compilers.
|
||||
#
|
||||
# In theory, these should be equivalent as long as I_MPI_CC
|
||||
# and friends are set to point to the Intel compilers, but in
|
||||
# practice, mpicc fails to compile some applications while
|
||||
# mpiicc works.
|
||||
bindir = self.prefix.compilers_and_libraries.linux.mpi.intel64.bin
|
||||
|
||||
self.spec.mpicc = join_path(bindir, 'mpicc')
|
||||
self.spec.mpicxx = join_path(bindir, 'mpicxx')
|
||||
self.spec.mpifc = join_path(bindir, 'mpif90')
|
||||
self.spec.mpif77 = join_path(bindir, 'mpif77')
|
||||
if self.compiler.name == 'intel':
|
||||
self.spec.mpicc = bindir.mpiicc
|
||||
self.spec.mpicxx = bindir.mpiicpc
|
||||
self.spec.mpifc = bindir.mpiifort
|
||||
self.spec.mpif77 = bindir.mpiifort
|
||||
else:
|
||||
self.spec.mpicc = bindir.mpicc
|
||||
self.spec.mpicxx = bindir.mpicxx
|
||||
self.spec.mpifc = bindir.mpif90
|
||||
self.spec.mpif77 = bindir.mpif77
|
||||
|
||||
def setup_environment(self, spack_env, run_env):
|
||||
"""Adds environment variables to the generated module file.
|
||||
|
||||
These environment variables come from running:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ source compilers_and_libraries/linux/mpi/intel64/bin/mpivars.sh
|
||||
"""
|
||||
# NOTE: Spack runs setup_environment twice, once pre-build to set up
|
||||
# the build environment, and once post-installation to determine
|
||||
# the environment variables needed at run-time to add to the module
|
||||
# file. The script we need to source is only present post-installation,
|
||||
# so check for its existence before sourcing.
|
||||
# TODO: At some point we should split setup_environment into
|
||||
# setup_build_environment and setup_run_environment to get around
|
||||
# this problem.
|
||||
mpivars = os.path.join(
|
||||
self.prefix.compilers_and_libraries.linux.mpi.intel64.bin,
|
||||
'mpivars.sh')
|
||||
|
||||
if os.path.isfile(mpivars):
|
||||
run_env.extend(EnvironmentModifications.from_sourcing_file(
|
||||
mpivars))
|
||||
|
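A dependent package picks up whichever wrappers ``setup_dependent_package`` assigned above; with ``%intel`` these are mpiicc/mpiicpc/mpiifort, otherwise the mpicc/mpicxx/mpif90 wrappers. A hypothetical consumer (package metadata is illustrative only):

from spack import *


class ExampleMpiApp(Package):
    """Hypothetical MPI application built with the provider's wrappers."""

    homepage = "https://example.com/mpi-app"
    url = "https://example.com/mpi-app-1.0.tar.gz"

    version('1.0', '00000000000000000000000000000000')

    depends_on('mpi')

    def install(self, spec, prefix):
        mpi = spec['mpi']
        # Hand the MPI compiler wrappers to the application's build system.
        make('CC={0}'.format(mpi.mpicc),
             'CXX={0}'.format(mpi.mpicxx),
             'FC={0}'.format(mpi.mpifc))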
@ -22,15 +22,14 @@
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from spack import *
|
||||
import glob
|
||||
import os
|
||||
import re
|
||||
|
||||
from spack.pkg.builtin.intel import IntelInstaller, filter_pick, \
|
||||
get_all_components
|
||||
from spack import *
|
||||
from spack.environment import EnvironmentModifications
|
||||
|
||||
|
||||
class IntelParallelStudio(IntelInstaller):
|
||||
class IntelParallelStudio(IntelPackage):
|
||||
"""Intel Parallel Studio."""
|
||||
|
||||
homepage = "https://software.intel.com/en-us/intel-parallel-studio-xe"
|
||||
@ -90,34 +89,64 @@ class IntelParallelStudio(IntelInstaller):
|
||||
version('composer.2015.6', 'da9f8600c18d43d58fba0488844f79c9',
|
||||
url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/8432/l_compxe_2015.6.233.tgz')
|
||||
|
||||
variant('rpath', default=True, description="Add rpath to .cfg files")
|
||||
# Generic Variants
|
||||
variant('rpath', default=True,
|
||||
description='Add rpath to .cfg files')
|
||||
variant('newdtags', default=False,
|
||||
description="Allow use of --enable-new-dtags in MPI wrappers")
|
||||
variant('all', default=False,
|
||||
description="Install all files with the requested edition")
|
||||
description='Allow use of --enable-new-dtags in MPI wrappers')
|
||||
variant('shared', default=True,
|
||||
description='Builds shared library')
|
||||
variant('ilp64', default=False,
|
||||
description='64 bit integers')
|
||||
variant('openmp', default=False,
|
||||
description='OpenMP multithreading layer')
|
||||
|
||||
# Components available in all editions
|
||||
variant('daal', default=True,
|
||||
description='Install the Intel DAAL libraries')
|
||||
variant('gdb', default=False,
|
||||
description='Install the Intel Debugger for Heterogeneous Compute')
|
||||
variant('ipp', default=True,
|
||||
description='Install the Intel IPP libraries')
|
||||
variant('mkl', default=True,
|
||||
description='Install the Intel MKL library')
|
||||
variant('mpi', default=True,
|
||||
description="Install the Intel MPI library and ITAC tool")
|
||||
variant('mkl', default=True, description="Install the Intel MKL library")
|
||||
variant('daal',
|
||||
default=True, description="Install the Intel DAAL libraries")
|
||||
variant('ipp', default=True, description="Install the Intel IPP libraries")
|
||||
variant('tools', default=True, description="Install the Intel Advisor, "
|
||||
"VTune Amplifier, and Inspector tools")
|
||||
description='Install the Intel MPI library')
|
||||
variant('tbb', default=True,
|
||||
description='Install the Intel TBB libraries')
|
||||
|
||||
variant('shared', default=True, description='Builds shared library')
|
||||
variant('ilp64', default=False, description='64 bit integers')
|
||||
variant('openmp', default=False, description='OpenMP multithreading layer')
|
||||
# Components only available in the Professional and Cluster Editions
|
||||
variant('advisor', default=False,
|
||||
description='Install the Intel Advisor')
|
||||
variant('clck', default=False,
|
||||
description='Install the Intel Cluster Checker')
|
||||
variant('inspector', default=False,
|
||||
description='Install the Intel Inspector')
|
||||
variant('itac', default=False,
|
||||
description='Install the Intel Trace Analyzer and Collector')
|
||||
variant('vtune', default=False,
|
||||
description='Install the Intel VTune Amplifier XE')
|
||||
|
||||
provides('mpi', when='@cluster.0:cluster.9999+mpi')
|
||||
provides('mkl', when='+mkl')
|
||||
provides('daal', when='+daal')
|
||||
|
||||
provides('ipp', when='+ipp')
|
||||
|
||||
# virtual dependency
|
||||
provides('mkl', when='+mkl')
|
||||
provides('blas', when='+mkl')
|
||||
provides('lapack', when='+mkl')
|
||||
provides('scalapack', when='+mkl')
|
||||
|
||||
provides('mpi', when='+mpi')
|
||||
|
||||
provides('tbb', when='+tbb')
|
||||
|
||||
# The following components are not available in the Composer Edition
|
||||
conflicts('+advisor', when='@composer.0:composer.9999')
|
||||
conflicts('+clck', when='@composer.0:composer.9999')
|
||||
conflicts('+inspector', when='@composer.0:composer.9999')
|
||||
conflicts('+itac', when='@composer.0:composer.9999')
|
||||
conflicts('+vtune', when='@composer.0:composer.9999')
|
||||
|
||||
@property
|
||||
def blas_libs(self):
|
||||
spec = self.spec
|
||||
@ -131,15 +160,27 @@ def blas_libs(self):
|
||||
|
||||
mkl_threading = ['libmkl_sequential']
|
||||
|
||||
omp_libs = LibraryList([])
|
||||
|
||||
if '+openmp' in spec:
|
||||
if '%intel' in spec:
|
||||
mkl_threading = ['libmkl_intel_thread', 'libiomp5']
|
||||
else:
|
||||
mkl_threading = ['libmkl_intel_thread']
|
||||
omp_threading = ['libiomp5']
|
||||
|
||||
omp_root = prefix.compilers_and_libraries.linux.lib.intel64
|
||||
omp_libs = find_libraries(
|
||||
omp_threading, root=omp_root, shared=shared)
|
||||
elif '%gcc' in spec:
|
||||
mkl_threading = ['libmkl_gnu_thread']
|
||||
|
||||
gcc = Executable(self.compiler.cc)
|
||||
omp_libs = gcc('--print-file-name', 'libgomp.{0}'.format(
|
||||
dso_suffix), output=str)
|
||||
omp_libs = LibraryList(omp_libs)
|
||||
|
||||
# TODO: TBB threading: ['libmkl_tbb_thread', 'libtbb', 'libstdc++']
|
||||
|
||||
mkl_root = join_path(prefix, 'mkl', 'lib', 'intel64')
|
||||
mkl_root = prefix.compilers_and_libraries.linux.mkl.lib.intel64
|
||||
|
||||
mkl_libs = find_libraries(
|
||||
mkl_integer + ['libmkl_core'] + mkl_threading,
|
||||
@ -153,7 +194,7 @@ def blas_libs(self):
|
||||
shared=shared
|
||||
)
|
||||
|
||||
return mkl_libs + system_libs
|
||||
return mkl_libs + omp_libs + system_libs
|
||||
|
||||
@property
|
||||
def lapack_libs(self):
|
||||
@ -176,109 +217,182 @@ def scalapack_libs(self):
|
||||
# elif self.spec.satisfies('^intel-parallel-studio'):
|
||||
# libnames.append('libmkl_blacs_intelmpi')
|
||||
else:
|
||||
raise InstallError("No MPI found for scalapack")
|
||||
raise InstallError('No MPI found for scalapack')
|
||||
|
||||
shared = True if '+shared' in self.spec else False
|
||||
integer = 'ilp64' if '+ilp64' in self.spec else 'lp64'
|
||||
mkl_root = self.prefix.compilers_and_libraries.linux.mkl.lib.intel64
|
||||
shared = True if '+shared' in self.spec else False
|
||||
|
||||
libs = find_libraries(
|
||||
['{0}_{1}'.format(l, integer) for l in libnames],
|
||||
root=join_path(self.prefix, 'mkl', 'lib', 'intel64'),
|
||||
root=mkl_root,
|
||||
shared=shared
|
||||
)
|
||||
return libs
|
||||
|
||||
def install(self, spec, prefix):
|
||||
base_components = "ALL" # when in doubt, install everything
|
||||
mpi_components = ""
|
||||
mkl_components = ""
|
||||
daal_components = ""
|
||||
ipp_components = ""
|
||||
@property
|
||||
def mpi_libs(self):
|
||||
mpi_root = self.prefix.compilers_and_libraries.linux.mpi.lib64
|
||||
query_parameters = self.spec.last_query.extra_parameters
|
||||
libraries = ['libmpifort', 'libmpi']
|
||||
|
||||
if not spec.satisfies('+all'):
|
||||
all_components = get_all_components()
|
||||
regex = '(comp|openmp|intel-tbb|icc|ifort|psxe)'
|
||||
base_components = \
|
||||
filter_pick(all_components, re.compile(regex).search)
|
||||
regex = '(icsxe|imb|mpi|itac|intel-ta|intel-tc|clck)'
|
||||
mpi_components = \
|
||||
filter_pick(all_components, re.compile(regex).search)
|
||||
mkl_components = \
|
||||
filter_pick(all_components, re.compile('(mkl)').search)
|
||||
daal_components = \
|
||||
filter_pick(all_components, re.compile('(daal)').search)
|
||||
ipp_components = \
|
||||
filter_pick(all_components, re.compile('(ipp)').search)
|
||||
regex = '(gdb|vtune|inspector|advisor)'
|
||||
tool_components = \
|
||||
filter_pick(all_components, re.compile(regex).search)
|
||||
components = base_components
|
||||
if 'cxx' in query_parameters:
|
||||
libraries = ['libmpicxx'] + libraries
|
||||
|
||||
if not spec.satisfies('+all'):
|
||||
if spec.satisfies('+mpi'):
|
||||
components += mpi_components
|
||||
if spec.satisfies('+mkl'):
|
||||
components += mkl_components
|
||||
if spec.satisfies('+daal'):
|
||||
components += daal_components
|
||||
if spec.satisfies('+ipp'):
|
||||
components += ipp_components
|
||||
if spec.satisfies('+tools') and (spec.satisfies('@cluster') or
|
||||
spec.satisfies('@professional')):
|
||||
components += tool_components
|
||||
return find_libraries(
|
||||
libraries, root=mpi_root, shared=True, recurse=True
|
||||
)
|
||||
|
||||
if spec.satisfies('+all'):
|
||||
self.intel_components = 'ALL'
|
||||
else:
|
||||
self.intel_components = ';'.join(components)
|
||||
IntelInstaller.install(self, spec, prefix)
|
||||
@property
|
||||
def mpi_headers(self):
|
||||
# recurse from self.prefix will find too many things for all the
|
||||
# supported sub-architectures like 'mic'
|
||||
mpi_root = self.prefix.compilers_and_libraries.linux.mpi.include64
|
||||
return find_headers('mpi', root=mpi_root, recurse=False)
|
||||
|
||||
absbindir = os.path.dirname(
|
||||
os.path.realpath(os.path.join(self.prefix.bin, "icc")))
|
||||
abslibdir = os.path.dirname(
|
||||
os.path.realpath(os.path.join(
|
||||
self.prefix.lib, "intel64", "libimf.a")))
|
||||
@property
|
||||
def components(self):
|
||||
spec = self.spec
|
||||
edition = self.version[0]
|
||||
|
||||
os.symlink(self.global_license_file, os.path.join(absbindir,
|
||||
"license.lic"))
|
||||
if spec.satisfies('+tools') and (spec.satisfies('@cluster') or
|
||||
spec.satisfies('@professional')):
|
||||
inspector_dir = "inspector_xe/licenses"
|
||||
advisor_dir = "advisor_xe/licenses"
|
||||
vtune_amplifier_dir = "vtune_amplifier_xe/licenses"
|
||||
# Intel(R) Compilers
|
||||
components = [
|
||||
# Common files
|
||||
'intel-comp-',
|
||||
'intel-openmp',
|
||||
|
||||
# C/C++
|
||||
'intel-icc',
|
||||
|
||||
# Fortran
|
||||
'intel-ifort',
|
||||
|
||||
# Parallel Studio Documentation and Licensing Files
|
||||
'intel-psxe',
|
||||
]
|
||||
|
||||
# Intel(R) Parallel Studio XE Suite Files and Documentation
|
||||
if edition == 'cluster':
|
||||
components.append('intel-icsxe')
|
||||
elif edition == 'professional':
|
||||
components.extend(['intel-ips', 'intel-ipsc', 'intel-ipsf'])
|
||||
elif edition == 'composer':
|
||||
components.extend([
|
||||
'intel-compxe', 'intel-ccompxe', 'intel-fcompxe'
|
||||
])
|
||||
|
||||
# Intel(R) Data Analytics Acceleration Library
|
||||
if '+daal' in spec:
|
||||
components.append('intel-daal')
|
||||
|
||||
# Intel(R) Debugger for Heterogeneous Compute
|
||||
if '+gdb' in spec:
|
||||
components.append('intel-gdb')
|
||||
|
||||
# Intel(R) Integrated Performance Primitives
|
||||
if '+ipp' in spec:
|
||||
components.extend(['intel-ipp', 'intel-crypto-ipp'])
|
||||
|
||||
# Intel(R) Math Kernel Library
|
||||
if '+mkl' in spec:
|
||||
components.append('intel-mkl')
|
||||
|
||||
# Intel(R) MPI Library
|
||||
if '+mpi' in spec:
|
||||
components.extend(['intel-mpi', 'intel-mpirt', 'intel-imb'])
|
||||
|
||||
# Intel(R) Threading Building Blocks
|
||||
if '+tbb' in spec:
|
||||
components.append('intel-tbb')
|
||||
|
||||
# Intel(R) Advisor
|
||||
if '+advisor' in spec:
|
||||
components.append('intel-advisor')
|
||||
|
||||
# Intel(R) Cluster Checker
|
||||
if '+clck' in spec:
|
||||
components.append('intel_clck')
|
||||
|
||||
# Intel(R) Inspector
|
||||
if '+inspector' in spec:
|
||||
components.append('intel-inspector')
|
||||
|
||||
# Intel(R) Trace Analyzer and Collector
|
||||
if '+itac' in spec:
|
||||
components.extend(['intel-itac', 'intel-ta', 'intel-tc'])
|
||||
|
||||
# Intel(R) VTune(TM) Amplifier XE
|
||||
if '+vtune' in spec:
|
||||
components.append('intel-vtune-amplifier-xe')
|
||||
|
||||
return components
|
||||
|
||||
    @property
    def bin_dir(self):
        """The relative path to the bin directory with symlinks resolved."""

        bin_path = os.path.join(self.prefix.bin, 'icc')
        absolute_path = os.path.realpath(bin_path)  # resolve symlinks
        relative_path = os.path.relpath(absolute_path, self.prefix)
        return os.path.dirname(relative_path)

    @property
    def lib_dir(self):
        """The relative path to the lib directory with symlinks resolved."""

        lib_path = os.path.join(self.prefix.lib, 'intel64', 'libimf.a')
        absolute_path = os.path.realpath(lib_path)  # resolve symlinks
        relative_path = os.path.relpath(absolute_path, self.prefix)
        return os.path.dirname(relative_path)

    @property
    def license_files(self):
        spec = self.spec
        year = self.version[1]

        directories = [
            'Licenses',
            self.bin_dir
        ]

        if '+advisor' in spec:
            advisor_dir = 'advisor_xe/licenses'

            year = int(str(self.version).split('.')[1])
            if year >= 2017:
                inspector_dir = "inspector/licenses"
                advisor_dir = "advisor/licenses"
                advisor_dir = 'advisor/licenses'

            os.mkdir(os.path.join(self.prefix, inspector_dir))
            os.symlink(self.global_license_file, os.path.join(
                self.prefix, inspector_dir, "license.lic"))
            os.mkdir(os.path.join(self.prefix, advisor_dir))
            os.symlink(self.global_license_file, os.path.join(
                self.prefix, advisor_dir, "license.lic"))
            os.mkdir(os.path.join(self.prefix, vtune_amplifier_dir))
            os.symlink(self.global_license_file, os.path.join(
                self.prefix, vtune_amplifier_dir, "license.lic"))
            directories.append(advisor_dir)

        if (spec.satisfies('+all') or spec.satisfies('+mpi')) and \
                spec.satisfies('@cluster'):
            for ifile in os.listdir(os.path.join(self.prefix, "itac")):
                if os.path.isdir(os.path.join(self.prefix, "itac", ifile)):
                    os.symlink(self.global_license_file,
                               os.path.join(self.prefix, "itac", ifile,
                                            "license.lic"))
                if os.path.isdir(os.path.join(self.prefix, "itac",
                                              ifile, "intel64")):
                    os.symlink(self.global_license_file,
                               os.path.join(self.prefix, "itac",
                                            ifile, "intel64",
                                            "license.lic"))
        if spec.satisfies('~newdtags'):
            wrappers = ["mpif77", "mpif77", "mpif90", "mpif90",
                        "mpigcc", "mpigcc", "mpigxx", "mpigxx",
                        "mpiicc", "mpiicc", "mpiicpc", "mpiicpc",
                        "mpiifort", "mpiifort"]
        if '+inspector' in spec:
            inspector_dir = 'inspector_xe/licenses'

            if year >= 2017:
                inspector_dir = 'inspector/licenses'

            directories.append(inspector_dir)

        if '+itac' in spec:
            itac_dir = 'itac_{0}'.format(year)

            directories.append(itac_dir)

        if '+vtune' in spec:
            vtune_dir = 'vtune_amplifier_xe/licenses'

            directories.append(vtune_dir)

        return [os.path.join(dir, 'license.lic') for dir in directories]
    @run_after('install')
    def filter_compiler_wrappers(self):
        spec = self.spec

        if '+mpi' in spec:
            if '~newdtags' in spec:
                wrappers = [
                    'mpif77', 'mpif90', 'mpigcc', 'mpigxx',
                    'mpiicc', 'mpiicpc', 'mpiifort'
                ]
                wrapper_paths = []
                for root, dirs, files in os.walk(spec.prefix):
                    for name in files:
@ -286,153 +400,95 @@ def install(self, spec, prefix):
                        wrapper_paths.append(os.path.join(spec.prefix,
                                                          root, name))
                for wrapper in wrapper_paths:
                    filter_file(r'-Xlinker --enable-new-dtags', r' ',
                                wrapper)
                    filter_file('-Xlinker --enable-new-dtags', ' ',
                                wrapper, string=True)

        if spec.satisfies('+rpath'):
            for compiler_command in ["icc", "icpc", "ifort"]:
                cfgfilename = os.path.join(absbindir, "%s.cfg" %
                                           compiler_command)
                with open(cfgfilename, "w") as f:
                    f.write('-Xlinker -rpath -Xlinker %s\n' % abslibdir)

    @run_after('install')
    def rpath_configuration(self):
        spec = self.spec

        os.symlink(os.path.join(self.prefix.man, "common", "man1"),
                   os.path.join(self.prefix.man, "man1"))
        if '+rpath' in spec:
            lib_dir = os.path.join(self.prefix, self.lib_dir)
            for compiler in ['icc', 'icpc', 'ifort']:
                cfgfilename = os.path.join(
                    self.prefix, self.bin_dir, '{0}.cfg'.format(compiler))
                with open(cfgfilename, 'w') as f:
                    f.write('-Xlinker -rpath -Xlinker {0}\n'.format(lib_dir))
    def setup_environment(self, spack_env, run_env):
        # TODO: Determine variables needed for the professional edition.

    @run_after('install')
    def fix_psxevars(self):
        """Newer versions of Intel Parallel Studio have a bug in the
        ``psxevars.sh`` script."""

        major_ver = self.version[1]
        bindir = glob.glob(join_path(
            self.prefix, 'parallel_studio*', 'bin'))[0]

        # Remove paths that were guessed but are incorrect for this package.
        run_env.remove_path('LIBRARY_PATH',
                            join_path(self.prefix, 'lib'))
        run_env.remove_path('LD_LIBRARY_PATH',
                            join_path(self.prefix, 'lib'))
        run_env.remove_path('CPATH',
                            join_path(self.prefix, 'include'))

        # Add the default set of variables
        run_env.prepend_path('LIBRARY_PATH',
                             join_path(self.prefix, 'lib', 'intel64'))
        run_env.prepend_path('LD_LIBRARY_PATH',
                             join_path(self.prefix, 'lib', 'intel64'))
        run_env.prepend_path('LIBRARY_PATH',
                             join_path(self.prefix, 'tbb', 'lib',
                                       'intel64', 'gcc4.4'))
        run_env.prepend_path('LD_LIBRARY_PATH',
                             join_path(self.prefix, 'tbb', 'lib',
                                       'intel64', 'gcc4.4'))
        run_env.prepend_path('CPATH',
                             join_path(self.prefix, 'tbb', 'include'))
        run_env.prepend_path('MIC_LIBRARY_PATH',
                             join_path(self.prefix, 'lib', 'mic'))
        run_env.prepend_path('MIC_LD_LIBRARY_PATH',
                             join_path(self.prefix, 'lib', 'mic'))
        run_env.prepend_path('MIC_LIBRARY_PATH',
                             join_path(self.prefix, 'tbb', 'lib', 'mic'))
        run_env.prepend_path('MIC_LD_LIBRARY_PATH',
                             join_path(self.prefix, 'tbb', 'lib', 'mic'))

        if self.spec.satisfies('+all'):
            run_env.prepend_path('LD_LIBRARY_PATH',
                                 join_path(self.prefix,
                                           'debugger_{0}'.format(major_ver),
                                           'libipt', 'intel64', 'lib'))
            run_env.set('GDBSERVER_MIC',
                        join_path(self.prefix,
                                  'debugger_{0}'.format(major_ver), 'gdb',
                                  'targets', 'mic', 'bin', 'gdbserver'))
            run_env.set('GDB_CROSS',
                        join_path(self.prefix,
                                  'debugger_{0}'.format(major_ver),
                                  'gdb', 'intel64_mic', 'bin', 'gdb-mic'))
            run_env.set('MPM_LAUNCHER',
                        join_path(self.prefix,
                                  'debugger_{0}'.format(major_ver), 'mpm',
                                  'mic',
                                  'bin', 'start_mpm.sh'))
            run_env.set('INTEL_PYTHONHOME',
                        join_path(self.prefix,
                                  'debugger_{0}'.format(major_ver), 'python',
                                  'intel64'))

        if (self.spec.satisfies('+all') or self.spec.satisfies('+mpi')):
            # Only I_MPI_ROOT is set here because setting the various PATH
            # variables will potentially be in conflict with other MPI
            # environment modules. The I_MPI_ROOT environment variable can be
            # used as a base to set necessary PATH variables for using Intel
            # MPI. It is also possible to set the variables in the modules.yaml
            # file if Intel MPI is the dominant, or only, MPI on a system.
            run_env.set('I_MPI_ROOT', join_path(self.prefix, 'impi'))

        if self.spec.satisfies('+all') or self.spec.satisfies('+mkl'):
            spack_env.set('MKLROOT', join_path(self.prefix, 'mkl'))

            run_env.prepend_path('LD_LIBRARY_PATH',
                                 join_path(self.prefix, 'mkl', 'lib',
                                           'intel64'))
            run_env.prepend_path('LIBRARY_PATH',
                                 join_path(self.prefix, 'mkl', 'lib',
                                           'intel64'))
            run_env.prepend_path('CPATH',
                                 join_path(self.prefix, 'mkl', 'include'))
            run_env.prepend_path('MIC_LD_LIBRARY_PATH',
                                 join_path(self.prefix, 'mkl', 'lib', 'mic'))
            run_env.set('MKLROOT', join_path(self.prefix, 'mkl'))

        if self.spec.satisfies('+all') or self.spec.satisfies('+daal'):
            run_env.prepend_path('LD_LIBRARY_PATH',
                                 join_path(self.prefix, 'daal', 'lib',
                                           'intel64_lin'))
            run_env.prepend_path('LIBRARY_PATH',
                                 join_path(self.prefix, 'daal', 'lib',
                                           'intel64_lin'))
            run_env.prepend_path('CPATH',
                                 join_path(self.prefix, 'daal', 'include'))
            run_env.prepend_path('CLASSPATH',
                                 join_path(self.prefix, 'daal', 'lib',
                                           'daal.jar'))
            run_env.set('DAALROOT', join_path(self.prefix, 'daal'))

        if self.spec.satisfies('+all') or self.spec.satisfies('+ipp'):
            run_env.prepend_path('LD_LIBRARY_PATH',
                                 join_path(self.prefix, 'ipp', 'lib',
                                           'intel64'))
            run_env.prepend_path('LIBRARY_PATH',
                                 join_path(self.prefix, 'ipp', 'lib',
                                           'intel64'))
            run_env.prepend_path('CPATH',
                                 join_path(self.prefix, 'ipp', 'include'))
            run_env.prepend_path('MIC_LD_LIBRARY_PATH',
                                 join_path(self.prefix, 'ipp', 'lib', 'mic'))
            run_env.set('IPPROOT', join_path(self.prefix, 'ipp'))

        if self.spec.satisfies('+all') or self.spec.satisfies('+tools'):
            run_env.prepend_path('PATH',
                                 join_path(self.prefix, 'vtune_amplifier_xe',
                                           'bin64'))
            run_env.prepend_path('VTUNE_AMPLIFIER_XE_{0}_DIR'.format(
                                 major_ver),
                                 join_path(self.prefix, 'vtune_amplifier_xe'))
        filter_file('^SCRIPTPATH=.*', 'SCRIPTPATH={0}'.format(self.prefix),
                    os.path.join(bindir, 'psxevars.sh'),
                    os.path.join(bindir, 'psxevars.csh'))
    def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
        if '+mpi' in self.spec:
            spack_env.set('I_MPI_CC', spack_cc)
            spack_env.set('I_MPI_CXX', spack_cxx)
            spack_env.set('I_MPI_F77', spack_fc)
            spack_env.set('I_MPI_F90', spack_f77)
            spack_env.set('I_MPI_FC', spack_fc)
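For context, a minimal sketch (an illustration of standard Intel MPI behavior, not part of this diff) of why these variables matter: the generic wrappers consult them to choose the underlying compiler.

import os

# Hypothetical illustration: Intel's generic mpicc reads I_MPI_CC (and
# I_MPI_CXX/F77/F90/FC) to pick the compiler it wraps, so pointing these at
# Spack's compiler wrappers keeps Spack's wrapper flags in the toolchain
# while still linking against Intel MPI.
os.environ['I_MPI_CC'] = '/path/to/spack/compiler-wrapper/icc'    # assumed path
os.environ['I_MPI_FC'] = '/path/to/spack/compiler-wrapper/ifort'  # assumed path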
    def setup_dependent_package(self, module, dep_spec):
        # Check for presence of bin64 or bin directory
        if os.path.isdir(self.prefix.bin):
            bindir = self.prefix.bin
        elif os.path.isdir(self.prefix.bin64):
            bindir = self.prefix.bin64
        else:
            raise RuntimeError('No suitable bindir found')
        # set up MKLROOT for everyone using MKL package
        if '+mkl' in self.spec:
            mkl_root = self.prefix.compilers_and_libraries.linux.mkl.lib.intel64  # noqa

            self.spec.mpicc = join_path(bindir, 'mpicc')
            self.spec.mpicxx = join_path(bindir, 'mpic++')
            self.spec.mpifc = join_path(bindir, 'mpif90')
            self.spec.mpif77 = join_path(bindir, 'mpif77')
            spack_env.set('MKLROOT', self.prefix)
            spack_env.append_path('SPACK_COMPILER_EXTRA_RPATHS', mkl_root)
    def setup_dependent_package(self, module, dep_spec):
        if '+mpi' in self.spec:
            # Intel comes with 2 different flavors of MPI wrappers:
            #
            # * mpiicc, mpiicpc, and mpifort are hardcoded to wrap around
            #   the Intel compilers.
            # * mpicc, mpicxx, mpif90, and mpif77 allow you to set which
            #   compilers to wrap using I_MPI_CC and friends. By default,
            #   wraps around the GCC compilers.
            #
            # In theory, these should be equivalent as long as I_MPI_CC
            # and friends are set to point to the Intel compilers, but in
            # practice, mpicc fails to compile some applications while
            # mpiicc works.
            bindir = self.prefix.compilers_and_libraries.linux.mpi.intel64.bin

            if self.compiler.name == 'intel':
                self.spec.mpicc = bindir.mpiicc
                self.spec.mpicxx = bindir.mpiicpc
                self.spec.mpifc = bindir.mpiifort
                self.spec.mpif77 = bindir.mpiifort
            else:
                self.spec.mpicc = bindir.mpicc
                self.spec.mpicxx = bindir.mpicxx
                self.spec.mpifc = bindir.mpif90
                self.spec.mpif77 = bindir.mpif77
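The attributes set above are what dependent packages consume. A minimal sketch of the usual Spack idiom, with a hypothetical dependent package (not part of this commit):

class MyMpiApp(Package):  # hypothetical dependent package
    """Example consumer of the MPI wrappers selected above."""

    depends_on('mpi')

    def install(self, spec, prefix):
        # With intel-parallel-studio+mpi as the MPI provider and the Intel
        # compiler in use, spec['mpi'].mpicc resolves to mpiicc; otherwise
        # it points at the generic mpicc wrapper.
        configure('--prefix={0}'.format(prefix),
                  'CC={0}'.format(spec['mpi'].mpicc),
                  'FC={0}'.format(spec['mpi'].mpifc))
        make()
        make('install')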
    def setup_environment(self, spack_env, run_env):
        """Adds environment variables to the generated module file.

        These environment variables come from running:

        .. code-block:: console

           $ source parallel_studio_xe_2017/bin/psxevars.sh intel64
        """
        # NOTE: Spack runs setup_environment twice, once pre-build to set up
        # the build environment, and once post-installation to determine
        # the environment variables needed at run-time to add to the module
        # file. The script we need to source is only present post-installation,
        # so check for its existence before sourcing.
        # TODO: At some point we should split setup_environment into
        # setup_build_environment and setup_run_environment to get around
        # this problem.
        psxevars = glob.glob(join_path(
            self.prefix, 'parallel_studio*', 'bin', 'psxevars.sh'))

        if psxevars:
            run_env.extend(EnvironmentModifications.from_sourcing_file(
                psxevars[0], 'intel64'))
@ -26,7 +26,7 @@
import glob


class Tbb(Package):
class IntelTbb(Package):
    """Widely used C++ template library for task parallelism.
    Intel Threading Building Blocks (Intel TBB) lets you easily write parallel
    C++ programs that take full advantage of multicore performance, that are
@ -47,6 +47,8 @@ class Tbb(Package):
    version('4.4.3', '80707e277f69d9b20eeebdd7a5f5331137868ce1',
            url='https://www.threadingbuildingblocks.org/sites/default/files/software_releases/source/tbb44_20160128oss_src_0.tgz')

    provides('tbb')

    def coerce_to_spack(self, tbb_build_subdir):
        for compiler in ["icc", "gcc", "clang"]:
            fs = glob.glob(join_path(tbb_build_subdir,
@ -22,102 +22,13 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import os
import re

from spack import *
from spack.environment import EnvironmentModifications


def filter_pick(input_list, regex_filter):
    """Returns the items in input_list that are found in the regex_filter"""
    return [l for l in input_list for m in (regex_filter(l),) if m]


def unfilter_pick(input_list, regex_filter):
    """Returns the items in input_list that are not found in the
    regex_filter"""
    return [l for l in input_list for m in (regex_filter(l),) if not m]


def get_all_components():
    """Returns a list of all the components associated with the downloaded
    Intel package"""
    all_components = []
    with open("pset/mediaconfig.xml", "r") as f:
        lines = f.readlines()
        for line in lines:
            if line.find('<Abbr>') != -1:
                component = line[line.find('<Abbr>') + 6:line.find('</Abbr>')]
                all_components.append(component)
    return all_components

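A small worked example of the slicing and filtering in the removed helpers above (values are hypothetical, not taken from a real mediaconfig.xml):

import re

line = '  <Abbr>intel-mkl</Abbr>'  # hypothetical mediaconfig.xml entry
component = line[line.find('<Abbr>') + 6:line.find('</Abbr>')]
# component == 'intel-mkl'

all_components = ['intel-icc', 'intel-mkl', 'intel-vtune-amplifier-xe']
picked = filter_pick(all_components, re.compile('icc|mkl').search)
# picked == ['intel-icc', 'intel-mkl']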
class IntelInstaller(Package):
    """Base package containing common methods for installing Intel software"""

    homepage = "https://software.intel.com/en-us"
    intel_components = "ALL"
    license_comment = '#'
    license_files = ['Licenses/license.lic']
    license_vars = ['INTEL_LICENSE_FILE']
    license_url = \
        'https://software.intel.com/en-us/articles/intel-license-manager-faq'

    @property
    def license_required(self):
        # The Intel libraries are provided without requiring a license as of
        # version 2017.2. Trying to specify the license will fail. See
        # https://software.intel.com/en-us/articles/free-mkl
        if (self.spec.satisfies("intel-mkl@2017.2:") or
                self.spec.satisfies("intel-daal@2017.2:") or
                self.spec.satisfies("intel-mpi@2017.2:") or
                self.spec.satisfies("intel-ipp@2017.2:")):
            return False
        return True

    @property
    def global_license_file(self):
        """Returns the path where a global license file should be stored."""
        if not self.license_files:
            return
        return join_path(self.global_license_dir, "intel",
                         os.path.basename(self.license_files[0]))
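Concretely, with the class-level license_files above, global_license_file resolves to a path along these lines (the Spack installation root shown is illustrative, not a fixed location):

# Sketch of the path computed by global_license_file (illustrative values).
global_license_dir = '/path/to/spack/etc/spack/licenses'  # assumed location
license_files = ['Licenses/license.lic']
print(join_path(global_license_dir, 'intel',
                os.path.basename(license_files[0])))
# -> /path/to/spack/etc/spack/licenses/intel/license.lic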
    def install(self, spec, prefix):

        if not hasattr(self, "intel_prefix"):
            self.intel_prefix = self.prefix

        silent_config_filename = 'silent.cfg'
        with open(silent_config_filename, 'w') as f:
            f.write("""
ACCEPT_EULA=accept
PSET_MODE=install
CONTINUE_WITH_INSTALLDIR_OVERWRITE=yes
PSET_INSTALL_DIR=%s
NONRPM_DB_DIR=%s
CONTINUE_WITH_OPTIONAL_ERROR=yes
COMPONENTS=%s
""" % (self.intel_prefix, self.intel_prefix, self.intel_components))

        # The Intel libraries are provided without requiring a license as of
        # version 2017.2. Trying to specify the license will fail. See
        # https://software.intel.com/en-us/articles/free-mkl
        if not (spec.satisfies("intel-mkl@2017.2:") or
                spec.satisfies("intel-daal@2017.2:") or
                spec.satisfies("intel-mpi@2017.2:") or
                spec.satisfies("intel-ipp@2017.2:")):
            with open(silent_config_filename, 'a') as f:
                f.write("""
ACTIVATION_LICENSE_FILE=%s
ACTIVATION_TYPE=license_file
PHONEHOME_SEND_USAGE_DATA=no
""" % (self.global_license_file))

        install_script = Executable("./install.sh")
        install_script('--silent', silent_config_filename)
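For reference, the template above renders a silent.cfg roughly like the following for a licensed, pre-2017.2 install (the prefix, component list, and license path are placeholders, not from a real installation):

ACCEPT_EULA=accept
PSET_MODE=install
CONTINUE_WITH_INSTALLDIR_OVERWRITE=yes
PSET_INSTALL_DIR=/path/to/prefix
NONRPM_DB_DIR=/path/to/prefix
CONTINUE_WITH_OPTIONAL_ERROR=yes
COMPONENTS=intel-comp-;intel-openmp;intel-icc;intel-ifort
ACTIVATION_LICENSE_FILE=/path/to/spack/etc/spack/licenses/intel/license.lic
ACTIVATION_TYPE=license_file
PHONEHOME_SEND_USAGE_DATA=no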
class Intel(IntelInstaller):
class Intel(IntelPackage):
    """Intel Compilers."""

    homepage = "https://software.intel.com/en-us/intel-parallel-studio-xe"
@ -139,64 +50,59 @@ class Intel(IntelInstaller):
    version('16.0.2', '1133fb831312eb519f7da897fec223fa',
            url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/8680/parallel_studio_xe_2016_composer_edition_update2.tgz')

    variant('rpath', default=True, description="Add rpath to .cfg files")
    variant('rpath', default=True, description='Add rpath to .cfg files')

    def install(self, spec, prefix):
        components = []
        all_components = get_all_components()
        regex = '(comp|openmp|intel-tbb|icc|ifort|psxe|icsxe-pset)'
        components = filter_pick(all_components, re.compile(regex).search)
    components = [
        # Common files
        'intel-comp-',
        'intel-openmp',

        self.intel_components = ';'.join(components)
        IntelInstaller.install(self, spec, prefix)
        # C/C++
        'intel-icc',

        absbindir = os.path.split(os.path.realpath(os.path.join(
            self.prefix.bin, "icc")))[0]
        abslibdir = os.path.split(os.path.realpath(os.path.join(
            self.prefix.lib, "intel64", "libimf.a")))[0]
        # Fortran
        'intel-ifort',
    ]

        # symlink or copy?
        os.symlink(self.global_license_file,
                   os.path.join(absbindir, "license.lic"))
    @property
    def license_files(self):
        return [
            'Licenses/license.lic',
            join_path('compilers_and_libraries', 'linux', 'bin',
                      'intel64', 'license.lic')
        ]

        if spec.satisfies('+rpath'):
            for compiler_command in ["icc", "icpc", "ifort"]:
                cfgfilename = os.path.join(absbindir, "%s.cfg" %
                                           compiler_command)
                with open(cfgfilename, "w") as f:
                    f.write('-Xlinker -rpath -Xlinker %s\n' % abslibdir)

        os.symlink(os.path.join(self.prefix.man, "common", "man1"),
                   os.path.join(self.prefix.man, "man1"))
    @run_after('install')
    def rpath_configuration(self):
        if '+rpath' in self.spec:
            bin_dir = join_path(self.prefix, 'compilers_and_libraries',
                                'linux', 'bin', 'intel64')
            lib_dir = join_path(self.prefix, 'compilers_and_libraries',
                                'linux', 'compiler', 'lib', 'intel64_lin')
            for compiler in ['icc', 'icpc', 'ifort']:
                cfgfilename = join_path(bin_dir, '{0}.cfg'.format(compiler))
                with open(cfgfilename, 'w') as f:
                    f.write('-Xlinker -rpath -Xlinker {0}\n'.format(lib_dir))

    def setup_environment(self, spack_env, run_env):
        """Adds environment variables to the generated module file.

        # Remove paths that were guessed but are incorrect for this package.
        run_env.remove_path('LIBRARY_PATH',
                            join_path(self.prefix, 'lib'))
        run_env.remove_path('LD_LIBRARY_PATH',
                            join_path(self.prefix, 'lib'))
        run_env.remove_path('CPATH',
                            join_path(self.prefix, 'include'))
        These environment variables come from running:

        # Add the default set of variables
        run_env.prepend_path('LIBRARY_PATH',
                             join_path(self.prefix, 'lib', 'intel64'))
        run_env.prepend_path('LD_LIBRARY_PATH',
                             join_path(self.prefix, 'lib', 'intel64'))
        run_env.prepend_path('LIBRARY_PATH',
                             join_path(self.prefix, 'tbb', 'lib',
                                       'intel64', 'gcc4.4'))
        run_env.prepend_path('LD_LIBRARY_PATH',
                             join_path(self.prefix, 'tbb', 'lib',
                                       'intel64', 'gcc4.4'))
        run_env.prepend_path('CPATH',
                             join_path(self.prefix, 'tbb', 'include'))
        run_env.prepend_path('MIC_LIBRARY_PATH',
                             join_path(self.prefix, 'lib', 'mic'))
        run_env.prepend_path('MIC_LD_LIBRARY_PATH',
                             join_path(self.prefix, 'lib', 'mic'))
        run_env.prepend_path('MIC_LIBRARY_PATH',
                             join_path(self.prefix, 'tbb', 'lib', 'mic'))
        run_env.prepend_path('MIC_LD_LIBRARY_PATH',
                             join_path(self.prefix, 'tbb', 'lib', 'mic'))
        .. code-block:: console

           $ source bin/compilervars.sh intel64
        """
        # NOTE: Spack runs setup_environment twice, once pre-build to set up
        # the build environment, and once post-installation to determine
        # the environment variables needed at run-time to add to the module
        # file. The script we need to source is only present post-installation,
        # so check for its existence before sourcing.
        # TODO: At some point we should split setup_environment into
        # setup_build_environment and setup_run_environment to get around
        # this problem.
        compilervars = os.path.join(self.prefix.bin, 'compilervars.sh')

        if os.path.isfile(compilervars):
            run_env.extend(EnvironmentModifications.from_sourcing_file(
                compilervars, 'intel64'))