8th day of Python challenges: 111-117
This commit is contained in:
@@ -0,0 +1,37 @@
|
||||
# This file is generated by numpy's setup.py
# It contains system_info results at the time of building this package.
__all__ = ["get_info","show"]


import os
import sys

# Directory holding DLLs bundled with Windows wheels.
extra_dll_dir = os.path.join(os.path.dirname(__file__), '.libs')

# On Windows, extend PATH so the bundled DLLs can be found by the loader.
if sys.platform == 'win32' and os.path.isdir(extra_dll_dir):
    os.environ.setdefault('PATH', '')
    os.environ['PATH'] += os.pathsep + extra_dll_dir

# BLAS/LAPACK configuration captured at build time; an empty dict means the
# corresponding library was not found when this package was built.
blas_mkl_info={}
blis_info={}
openblas_info={'libraries': ['openblas', 'openblas'], 'library_dirs': ['/usr/local/lib'], 'language': 'c', 'define_macros': [('HAVE_CBLAS', None)]}
blas_opt_info={'libraries': ['openblas', 'openblas'], 'library_dirs': ['/usr/local/lib'], 'language': 'c', 'define_macros': [('HAVE_CBLAS', None)]}
lapack_mkl_info={}
openblas_lapack_info={'libraries': ['openblas', 'openblas'], 'library_dirs': ['/usr/local/lib'], 'language': 'c', 'define_macros': [('HAVE_CBLAS', None)]}
lapack_opt_info={'libraries': ['openblas', 'openblas'], 'library_dirs': ['/usr/local/lib'], 'language': 'c', 'define_macros': [('HAVE_CBLAS', None)]}
|
||||
|
||||
def get_info(name):
    """Return the build-time configuration dict registered under *name*.

    Looks up ``name`` among this module's globals, falling back to
    ``name + "_info"``; returns ``{}`` when neither key exists.
    """
    namespace = globals()
    fallback = namespace.get(name + "_info", {})
    return namespace.get(name, fallback)
|
||||
|
||||
def show():
    """Print every public module-level configuration dict to stdout."""
    for key, info in globals().items():
        # Skip private names and anything that is not a plain dict.
        if key.startswith("_") or type(info) is not dict:
            continue
        print(key + ":")
        if not info:
            print(" NOT AVAILABLE")
        for field, value in info.items():
            text = str(value)
            # Long source lists are elided to keep the output readable.
            if field == "sources" and len(text) > 200:
                text = text[:60] + " ...\n... " + text[-60:]
            print(" %s = %s" % (field, text))
|
||||
34
venv/lib/python3.6/site-packages/numpy/distutils/__init__.py
Normal file
34
venv/lib/python3.6/site-packages/numpy/distutils/__init__.py
Normal file
@@ -0,0 +1,34 @@
|
||||
from __future__ import division, absolute_import, print_function

from .__version__ import version as __version__
# Must import local ccompiler ASAP in order to get
# customized CCompiler.spawn effective.
from . import ccompiler
from . import unixccompiler

from .info import __doc__
from .npy_pkg_config import *

# If numpy is installed, add distutils.test()
try:
    from . import __config__
    # Normally numpy is installed if the above import works, but an interrupted
    # in-place build could also have left a __config__.py. In that case the
    # next import may still fail, so keep it inside the try block.
    from numpy._pytesttester import PytestTester
    test = PytestTester(__name__)
    del PytestTester
except ImportError:
    pass
|
||||
|
||||
|
||||
def customized_fcompiler(plat=None, compiler=None):
    """Create and customize a Fortran compiler instance.

    Parameters mirror ``new_fcompiler``: *plat* selects the platform
    (defaults to the current one) and *compiler* a specific compiler name.
    """
    from numpy.distutils.fcompiler import new_fcompiler
    fcomp = new_fcompiler(plat=plat, compiler=compiler)
    fcomp.customize()
    return fcomp
|
||||
|
||||
def customized_ccompiler(plat=None, compiler=None):
    """Create and customize a C compiler instance.

    Uses numpy.distutils' patched ``ccompiler.new_compiler`` and applies
    platform customization before returning the instance.
    """
    instance = ccompiler.new_compiler(plat=plat, compiler=compiler)
    instance.customize('')
    return instance
|
||||
@@ -0,0 +1,6 @@
|
||||
from __future__ import division, absolute_import, print_function

# numpy.distutils version number, assembled from its components.
major = 0
minor = 4
micro = 0
version = '%d.%d.%d' % (major, minor, micro)
|
||||
@@ -0,0 +1,91 @@
|
||||
"""
|
||||
Helper functions for interacting with the shell, and consuming shell-style
|
||||
parameters provided in config files.
|
||||
"""
|
||||
import os
|
||||
import shlex
|
||||
import subprocess
|
||||
try:
|
||||
from shlex import quote
|
||||
except ImportError:
|
||||
from pipes import quote
|
||||
|
||||
__all__ = ['WindowsParser', 'PosixParser', 'NativeParser']
|
||||
|
||||
|
||||
class CommandLineParser:
    """
    An object that knows how to split and join command-line arguments.

    It must be true that ``argv == split(join(argv))`` for all ``argv``.
    The reverse need not be true - `join(split(cmd))` may result in the
    addition or removal of unnecessary escaping.
    """
    @staticmethod
    def join(argv):
        """ Join a list of arguments into a command line string """
        raise NotImplementedError

    @staticmethod
    def split(cmd):
        """ Split a command line string into a list of arguments """
        raise NotImplementedError
|
||||
|
||||
|
||||
class WindowsParser:
    """
    The parsing behavior used by `subprocess.call("string")` on Windows, which
    matches the Microsoft C/C++ runtime.

    Note that this is _not_ the behavior of cmd.
    """
    @staticmethod
    def join(argv):
        # note that list2cmdline is specific to the windows syntax
        return subprocess.list2cmdline(argv)

    @staticmethod
    def split(cmd):
        import ctypes  # guarded import for systems without ctypes
        try:
            # ctypes.windll only exists on Windows; anywhere else this
            # class cannot implement MSVCRT-style splitting.
            ctypes.windll
        except AttributeError:
            raise NotImplementedError

        # Windows has special parsing rules for the executable (no quotes),
        # that we do not care about - insert a dummy element
        if not cmd:
            return []
        cmd = 'dummy ' + cmd

        CommandLineToArgvW = ctypes.windll.shell32.CommandLineToArgvW
        CommandLineToArgvW.restype = ctypes.POINTER(ctypes.c_wchar_p)
        CommandLineToArgvW.argtypes = (ctypes.c_wchar_p, ctypes.POINTER(ctypes.c_int))

        nargs = ctypes.c_int()
        lpargs = CommandLineToArgvW(cmd, ctypes.byref(nargs))
        args = [lpargs[i] for i in range(nargs.value)]
        # The argv buffer returned by CommandLineToArgvW must be released
        # with LocalFree; it returns NULL (falsy) on success.
        assert not ctypes.windll.kernel32.LocalFree(lpargs)

        # strip the element we inserted
        assert args[0] == "dummy"
        return args[1:]
|
||||
|
||||
|
||||
class PosixParser:
    """
    The parsing behavior used by `subprocess.call("string", shell=True)` on Posix.
    """
    @staticmethod
    def join(argv):
        """Quote each argument for a POSIX shell and join with spaces."""
        quoted = [quote(piece) for piece in argv]
        return ' '.join(quoted)

    @staticmethod
    def split(cmd):
        """Split *cmd* into arguments using POSIX shell rules."""
        return shlex.split(cmd, posix=True)
|
||||
|
||||
|
||||
# Select the parser matching the host operating system's shell semantics.
if os.name == 'nt':
    NativeParser = WindowsParser
elif os.name == 'posix':
    NativeParser = PosixParser
|
||||
798
venv/lib/python3.6/site-packages/numpy/distutils/ccompiler.py
Normal file
798
venv/lib/python3.6/site-packages/numpy/distutils/ccompiler.py
Normal file
@@ -0,0 +1,798 @@
|
||||
from __future__ import division, absolute_import, print_function

import os
import re
import sys
import types
import shlex
import time
import subprocess
from copy import copy
from distutils import ccompiler
from distutils.ccompiler import *
from distutils.errors import DistutilsExecError, DistutilsModuleError, \
                             DistutilsPlatformError, CompileError
from distutils.sysconfig import customize_compiler
from distutils.version import LooseVersion

from numpy.distutils import log
from numpy.distutils.compat import get_exception
from numpy.distutils.exec_command import (
    filepath_from_subprocess_output, forward_bytes_to_stdout
)
from numpy.distutils.misc_util import cyg2win32, is_sequence, mingw32, \
                                      get_num_build_jobs, \
                                      _commandline_dep_string

# globals for parallel build management
try:
    import threading
except ImportError:
    import dummy_threading as threading
# Semaphore bounding the number of concurrent compile jobs; created lazily
# in CCompiler_compile once the job count is known.
_job_semaphore = None
# Lock guarding _job_semaphore creation and _processing_files mutation.
_global_lock = threading.Lock()
# Object files currently being compiled, used to avoid duplicate work when
# the same source file appears in several extensions.
_processing_files = set()
|
||||
|
||||
|
||||
def _needs_build(obj, cc_args, extra_postargs, pp_opts):
    """
    Check if an object needs to be rebuilt based on its dependencies

    Parameters
    ----------
    obj : str
        object file

    Returns
    -------
    bool
    """
    # defined in unixcompiler.py
    dep_file = obj + '.d'
    if not os.path.exists(dep_file):
        # No dependency info recorded yet: must build.
        return True

    # dep_file is a makefile containing 'object: dependencies'
    # formatted like posix shell (spaces escaped, \ line continuations)
    # the last line contains the compiler commandline arguments as some
    # projects may compile an extension multiple times with different
    # arguments
    with open(dep_file, "r") as f:
        lines = f.readlines()

    cmdline =_commandline_dep_string(cc_args, extra_postargs, pp_opts)
    last_cmdline = lines[-1]
    if last_cmdline != cmdline:
        # Compile flags changed since the last build.
        return True

    contents = ''.join(lines[:-1])
    deps = [x for x in shlex.split(contents, posix=True)
            if x != "\n" and not x.endswith(":")]

    try:
        t_obj = os.stat(obj).st_mtime

        # check if any of the dependencies is newer than the object
        # the dependencies includes the source used to create the object
        for f in deps:
            if os.stat(f).st_mtime > t_obj:
                return True
    except OSError:
        # no object counts as newer (shouldn't happen if dep_file exists)
        return True

    return False
|
||||
|
||||
|
||||
def replace_method(klass, method_name, func):
    """Install *func* as method *method_name* on class *klass*.

    On Python 2 an unbound method object is created via ``types.MethodType``;
    on Python 3 unbound methods no longer exist, so the function is wrapped
    in a plain forwarding lambda instead.
    """
    if sys.version_info[0] >= 3:
        bound = lambda self, *args, **kw: func(self, *args, **kw)
    else:
        bound = types.MethodType(func, None, klass)
    setattr(klass, method_name, bound)
|
||||
|
||||
|
||||
######################################################################
## Method that subclasses may redefine. But don't call this method,
## it is private to the CCompiler class and may return unexpected
## results if used elsewhere. So, you have been warned..

def CCompiler_find_executables(self):
    """
    Does nothing here, but is called by the get_version method and can be
    overridden by subclasses. In particular it is redefined in the `FCompiler`
    class where more documentation can be found.

    """
    pass


replace_method(CCompiler, 'find_executables', CCompiler_find_executables)
|
||||
|
||||
|
||||
# Using customized CCompiler.spawn.
def CCompiler_spawn(self, cmd, display=None):
    """
    Execute a command in a sub-process.

    Parameters
    ----------
    cmd : str
        The command to execute.
    display : str or sequence of str, optional
        The text to add to the log file kept by `numpy.distutils`.
        If not given, `display` is equal to `cmd`.

    Returns
    -------
    None

    Raises
    ------
    DistutilsExecError
        If the command failed, i.e. the exit status was not 0.

    """
    if display is None:
        display = cmd
    if is_sequence(display):
        display = ' '.join(list(display))
    log.info(display)
    try:
        subprocess.check_output(cmd)
    except subprocess.CalledProcessError as exc:
        # Non-zero exit: keep the captured output and status for the error
        # report below.
        o = exc.output
        s = exc.returncode
    except OSError:
        # OSError doesn't have the same hooks for the exception
        # output, but exec_command() historically would use an
        # empty string for EnvironmentError (base class for
        # OSError)
        o = b''
        # status previously used by exec_command() for parent
        # of OSError
        s = 127
    else:
        # use a convenience return here so that any kind of
        # caught exception will execute the default code after the
        # try / except block, which handles various exceptions
        return None

    if is_sequence(cmd):
        cmd = ' '.join(list(cmd))

    forward_bytes_to_stdout(o)

    # Hint at a well-known transient failure mode on some platforms.
    if re.search(b'Too many open files', o):
        msg = '\nTry rerunning setup command until build succeeds.'
    else:
        msg = ''
    raise DistutilsExecError('Command "%s" failed with exit status %d%s' %
                             (cmd, s, msg))

replace_method(CCompiler, 'spawn', CCompiler_spawn)
|
||||
|
||||
def CCompiler_object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
    """
    Return the name of the object files for the given source files.

    Parameters
    ----------
    source_filenames : list of str
        The list of paths to source files. Paths can be either relative or
        absolute, this is handled transparently.
    strip_dir : bool, optional
        Whether to strip the directory from the returned paths. If True,
        the file name prepended by `output_dir` is returned. Default is False.
    output_dir : str, optional
        If given, this path is prepended to the returned paths to the
        object files.

    Returns
    -------
    obj_names : list of str
        The list of paths to the object files corresponding to the source
        files in `source_filenames`.

    """
    if output_dir is None:
        output_dir = ''
    result = []
    for src in source_filenames:
        root, ext = os.path.splitext(os.path.normpath(src))
        root = os.path.splitdrive(root)[1]  # drop any Windows drive letter
        if os.path.isabs(root):
            root = root[1:]  # drop the leading separator on absolute paths
        if root.startswith('..'):
            # Resolve leading '..' components; os.path.normpath above has
            # already collapsed any interior ones.
            cut = root.rfind('..') + 2
            prefix = os.path.basename(os.path.abspath(root[:cut]))
            root = prefix + root[cut:]
        if ext not in self.src_extensions:
            raise UnknownFileError("unknown file type '%s' (from '%s')" % (ext, src))
        if strip_dir:
            root = os.path.basename(root)
        result.append(os.path.join(output_dir, root + self.obj_extension))
    return result
|
||||
|
||||
replace_method(CCompiler, 'object_filenames', CCompiler_object_filenames)
|
||||
|
||||
def CCompiler_compile(self, sources, output_dir=None, macros=None,
                      include_dirs=None, debug=0, extra_preargs=None,
                      extra_postargs=None, depends=None):
    """
    Compile one or more source files.

    Please refer to the Python distutils API reference for more details.

    Parameters
    ----------
    sources : list of str
        A list of filenames
    output_dir : str, optional
        Path to the output directory.
    macros : list of tuples
        A list of macro definitions.
    include_dirs : list of str, optional
        The directories to add to the default include file search path for
        this compilation only.
    debug : bool, optional
        Whether or not to output debug symbols in or alongside the object
        file(s).
    extra_preargs, extra_postargs : ?
        Extra pre- and post-arguments.
    depends : list of str, optional
        A list of file names that all targets depend on.

    Returns
    -------
    objects : list of str
        A list of object file names, one per source file `sources`.

    Raises
    ------
    CompileError
        If compilation fails.

    """
    # This method is effective only with Python >=2.3 distutils.
    # Any changes here should be applied also to fcompiler.compile
    # method to support pre Python 2.3 distutils.
    global _job_semaphore

    jobs = get_num_build_jobs()

    # setup semaphore to not exceed number of compile jobs when parallelized at
    # extension level (python >= 3.5)
    with _global_lock:
        if _job_semaphore is None:
            _job_semaphore = threading.Semaphore(jobs)

    if not sources:
        return []
    # FIXME:RELATIVE_IMPORT
    if sys.version_info[0] < 3:
        from .fcompiler import FCompiler, is_f_file, has_f90_header
    else:
        from numpy.distutils.fcompiler import (FCompiler, is_f_file,
                                               has_f90_header)
    # Build a human-readable description of the compiler(s) for the log.
    if isinstance(self, FCompiler):
        display = []
        for fc in ['f77', 'f90', 'fix']:
            fcomp = getattr(self, 'compiler_'+fc)
            if fcomp is None:
                continue
            display.append("Fortran %s compiler: %s" % (fc, ' '.join(fcomp)))
        display = '\n'.join(display)
    else:
        ccomp = self.compiler_so
        display = "C compiler: %s\n" % (' '.join(ccomp),)
    log.info(display)
    macros, objects, extra_postargs, pp_opts, build = \
        self._setup_compile(output_dir, macros, include_dirs, sources,
                            depends, extra_postargs)
    cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)
    display = "compile options: '%s'" % (' '.join(cc_args))
    if extra_postargs:
        display += "\nextra options: '%s'" % (' '.join(extra_postargs))
    log.info(display)

    def single_compile(args):
        # Compile one (object, (source, ext)) pair, skipping up-to-date
        # objects and coordinating with other threads via module globals.
        obj, (src, ext) = args
        if not _needs_build(obj, cc_args, extra_postargs, pp_opts):
            return

        # check if we are currently already processing the same object
        # happens when using the same source in multiple extensions
        while True:
            # need explicit lock as there is no atomic check and add with GIL
            with _global_lock:
                # file not being worked on, start working
                if obj not in _processing_files:
                    _processing_files.add(obj)
                    break
            # wait for the processing to end
            time.sleep(0.1)

        try:
            # retrieve slot from our #job semaphore and build
            with _job_semaphore:
                self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
        finally:
            # register being done processing
            with _global_lock:
                _processing_files.remove(obj)

    if isinstance(self, FCompiler):
        objects_to_build = list(build.keys())
        f77_objects, other_objects = [], []
        for obj in objects:
            if obj in objects_to_build:
                src, ext = build[obj]
                if self.compiler_type=='absoft':
                    obj = cyg2win32(obj)
                    src = cyg2win32(src)
                if is_f_file(src) and not has_f90_header(src):
                    f77_objects.append((obj, (src, ext)))
                else:
                    other_objects.append((obj, (src, ext)))

        # f77 objects can be built in parallel
        build_items = f77_objects
        # build f90 modules serial, module files are generated during
        # compilation and may be used by files later in the list so the
        # ordering is important
        for o in other_objects:
            single_compile(o)
    else:
        build_items = build.items()

    if len(build) > 1 and jobs > 1:
        # build parallel
        import multiprocessing.pool
        pool = multiprocessing.pool.ThreadPool(jobs)
        pool.map(single_compile, build_items)
        pool.close()
    else:
        # build serial
        for o in build_items:
            single_compile(o)

    # Return *all* object filenames, not just the ones we just built.
    return objects

replace_method(CCompiler, 'compile', CCompiler_compile)
|
||||
|
||||
def CCompiler_customize_cmd(self, cmd, ignore=()):
    """
    Customize compiler using distutils command.

    Parameters
    ----------
    cmd : class instance
        An instance inheriting from `distutils.cmd.Command`.
    ignore : sequence of str, optional
        List of `CCompiler` commands (without ``'set_'``) that should not be
        altered. Strings that are checked for are:
        ``('include_dirs', 'define', 'undef', 'libraries', 'library_dirs',
        'rpath', 'link_objects')``.

    Returns
    -------
    None

    """
    log.info('customize %s using %s' % (self.__class__.__name__,
                                        cmd.__class__.__name__))
    def allow(attr):
        # Copy an attribute only when the command actually sets it and the
        # caller has not asked for it to be left alone.
        return getattr(cmd, attr, None) is not None and attr not in ignore

    if allow('include_dirs'):
        self.set_include_dirs(cmd.include_dirs)
    if allow('define'):
        for (name, value) in cmd.define:
            self.define_macro(name, value)
    if allow('undef'):
        for macro in cmd.undef:
            self.undefine_macro(macro)
    if allow('libraries'):
        self.set_libraries(self.libraries + cmd.libraries)
    if allow('library_dirs'):
        self.set_library_dirs(self.library_dirs + cmd.library_dirs)
    if allow('rpath'):
        self.set_runtime_library_dirs(cmd.rpath)
    if allow('link_objects'):
        self.set_link_objects(cmd.link_objects)

replace_method(CCompiler, 'customize_cmd', CCompiler_customize_cmd)
|
||||
|
||||
def _compiler_to_string(compiler):
|
||||
props = []
|
||||
mx = 0
|
||||
keys = list(compiler.executables.keys())
|
||||
for key in ['version', 'libraries', 'library_dirs',
|
||||
'object_switch', 'compile_switch',
|
||||
'include_dirs', 'define', 'undef', 'rpath', 'link_objects']:
|
||||
if key not in keys:
|
||||
keys.append(key)
|
||||
for key in keys:
|
||||
if hasattr(compiler, key):
|
||||
v = getattr(compiler, key)
|
||||
mx = max(mx, len(key))
|
||||
props.append((key, repr(v)))
|
||||
fmt = '%-' + repr(mx+1) + 's = %s'
|
||||
lines = [fmt % prop for prop in props]
|
||||
return '\n'.join(lines)
|
||||
|
||||
def CCompiler_show_customization(self):
    """
    Print the compiler customizations to stdout.

    Parameters
    ----------
    None

    Returns
    -------
    None

    Notes
    -----
    Printing is only done if the distutils log threshold is < 2.

    """
    # Deliberately disabled debug output; kept for reference.
    if 0:
        for attrname in ['include_dirs', 'define', 'undef',
                         'libraries', 'library_dirs',
                         'rpath', 'link_objects']:
            attr = getattr(self, attrname, None)
            if not attr:
                continue
            log.info("compiler '%s' is set to %s" % (attrname, attr))
    # Best-effort: populate self.version, ignoring any probe failure.
    try:
        self.get_version()
    except Exception:
        pass
    if log._global_log.threshold<2:
        print('*'*80)
        print(self.__class__)
        print(_compiler_to_string(self))
        print('*'*80)

replace_method(CCompiler, 'show_customization', CCompiler_show_customization)
|
||||
|
||||
def CCompiler_customize(self, dist, need_cxx=0):
    """
    Do any platform-specific customization of a compiler instance.

    This method calls `distutils.sysconfig.customize_compiler` for
    platform-specific customization, as well as optionally remove a flag
    to suppress spurious warnings in case C++ code is being compiled.

    Parameters
    ----------
    dist : object
        This parameter is not used for anything.
    need_cxx : bool, optional
        Whether or not C++ has to be compiled. If so (True), the
        ``"-Wstrict-prototypes"`` option is removed to prevent spurious
        warnings. Default is False.

    Returns
    -------
    None

    Notes
    -----
    All the default options used by distutils can be extracted with::

      from distutils import sysconfig
      sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'BASECFLAGS',
                                'CCSHARED', 'LDSHARED', 'SO')

    """
    # See FCompiler.customize for suggested usage.
    log.info('customize %s' % (self.__class__.__name__))
    customize_compiler(self)
    if need_cxx:
        # In general, distutils uses -Wstrict-prototypes, but this option is
        # not valid for C++ code, only for C.  Remove it if it's there to
        # avoid a spurious warning on every compilation.
        try:
            self.compiler_so.remove('-Wstrict-prototypes')
        except (AttributeError, ValueError):
            pass

        if hasattr(self, 'compiler') and 'cc' in self.compiler[0]:
            if not self.compiler_cxx:
                # Derive a C++ driver name from the C driver name.
                if self.compiler[0].startswith('gcc'):
                    a, b = 'gcc', 'g++'
                else:
                    a, b = 'cc', 'c++'
                self.compiler_cxx = [self.compiler[0].replace(a, b)]\
                                    + self.compiler[1:]
        else:
            if hasattr(self, 'compiler'):
                log.warn("#### %s #######" % (self.compiler,))
            if not hasattr(self, 'compiler_cxx'):
                log.warn('Missing compiler_cxx fix for ' + self.__class__.__name__)

    # check if compiler supports gcc style automatic dependencies
    # run on every extension so skip for known good compilers
    if hasattr(self, 'compiler') and ('gcc' in self.compiler[0] or
                                      'g++' in self.compiler[0] or
                                      'clang' in self.compiler[0]):
        self._auto_depends = True
    elif os.name == 'posix':
        # Probe-compile a tiny file with -MMD/-MF to detect dependency
        # generation support on unknown POSIX compilers.
        import tempfile
        import shutil
        tmpdir = tempfile.mkdtemp()
        try:
            fn = os.path.join(tmpdir, "file.c")
            with open(fn, "w") as f:
                f.write("int a;\n")
            self.compile([fn], output_dir=tmpdir,
                         extra_preargs=['-MMD', '-MF', fn + '.d'])
            self._auto_depends = True
        except CompileError:
            self._auto_depends = False
        finally:
            shutil.rmtree(tmpdir)

    return

replace_method(CCompiler, 'customize', CCompiler_customize)
|
||||
|
||||
def simple_version_match(pat=r'[-.\d]+', ignore='', start=''):
    """
    Simple matching of version numbers, for use in CCompiler and FCompiler.

    Parameters
    ----------
    pat : str, optional
        A regular expression matching version numbers.
        Default is ``r'[-.\\d]+'``.
    ignore : str, optional
        A regular expression matching patterns to skip.
        Default is ``''``, in which case nothing is skipped.
    start : str, optional
        A regular expression matching the start of where to start looking
        for version numbers.
        Default is ``''``, in which case searching is started at the
        beginning of the version string given to `matcher`.

    Returns
    -------
    matcher : callable
        A function that is appropriate to use as the ``.version_match``
        attribute of a `CCompiler` class. `matcher` takes a single parameter,
        a version string.

    """
    def matcher(self, version_string):
        # Version strings may span several lines; flatten before scanning.
        version_string = version_string.replace('\n', ' ')
        offset = 0
        if start:
            head = re.match(start, version_string)
            if not head:
                return None
            offset = head.end()
        while True:
            found = re.search(pat, version_string[offset:])
            if not found:
                return None
            if not (ignore and re.match(ignore, found.group(0))):
                return found.group(0)
            # Skip ignored candidates and keep scanning.
            offset = found.end()
    return matcher
|
||||
|
||||
def CCompiler_get_version(self, force=False, ok_status=[0]):
    """
    Return compiler version, or None if compiler is not available.

    Parameters
    ----------
    force : bool, optional
        If True, force a new determination of the version, even if the
        compiler already has a version attribute. Default is False.
    ok_status : list of int, optional
        The list of status values returned by the version look-up process
        for which a version string is returned. If the status value is not
        in `ok_status`, None is returned. Default is ``[0]``.

    Returns
    -------
    version : str or None
        Version string, in the format of `distutils.version.LooseVersion`.

    """
    # Cached result wins unless the caller forces re-detection.
    if not force and hasattr(self, 'version'):
        return self.version
    self.find_executables()
    try:
        version_cmd = self.version_cmd
    except AttributeError:
        return None
    if not version_cmd or not version_cmd[0]:
        return None
    try:
        matcher = self.version_match
    except AttributeError:
        # Fall back to a simple regex-based matcher built from
        # self.version_pattern, when the subclass defines one.
        try:
            pat = self.version_pattern
        except AttributeError:
            return None
        def matcher(version_string):
            m = re.match(pat, version_string)
            if not m:
                return None
            version = m.group('version')
            return version

    try:
        output = subprocess.check_output(version_cmd, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as exc:
        output = exc.output
        status = exc.returncode
    except OSError:
        # match the historical returns for a parent
        # exception class caught by exec_command()
        status = 127
        output = b''
    else:
        # output isn't actually a filepath but we do this
        # for now to match previous distutils behavior
        output = filepath_from_subprocess_output(output)
        status = 0

    version = None
    if status in ok_status:
        version = matcher(output)
        if version:
            version = LooseVersion(version)
    self.version = version
    return version

replace_method(CCompiler, 'get_version', CCompiler_get_version)
|
||||
|
||||
def CCompiler_cxx_compiler(self):
    """
    Return the C++ compiler.

    Parameters
    ----------
    None

    Returns
    -------
    cxx : class instance
        The C++ compiler, as a `CCompiler` instance.

    """
    # These compiler types handle C++ with the same driver; no copy needed.
    if self.compiler_type in ('msvc', 'intelw', 'intelemw'):
        return self

    cxx = copy(self)
    # Swap the C driver for the C++ driver, keeping all other flags.
    cxx.compiler_so = [cxx.compiler_cxx[0]] + cxx.compiler_so[1:]
    if sys.platform.startswith('aix') and 'ld_so_aix' in cxx.linker_so[0]:
        # AIX needs the ld_so_aix script included with Python
        cxx.linker_so = [cxx.linker_so[0], cxx.compiler_cxx[0]] \
                        + cxx.linker_so[2:]
    else:
        cxx.linker_so = [cxx.compiler_cxx[0]] + cxx.linker_so[1:]
    return cxx

replace_method(CCompiler, 'cxx_compiler', CCompiler_cxx_compiler)
|
||||
|
||||
# Register numpy.distutils' extra C compilers with distutils' registry so
# get_default_compiler()/new_compiler() can find them.
compiler_class['intel'] = ('intelccompiler', 'IntelCCompiler',
                           "Intel C Compiler for 32-bit applications")
compiler_class['intele'] = ('intelccompiler', 'IntelItaniumCCompiler',
                            "Intel C Itanium Compiler for Itanium-based applications")
compiler_class['intelem'] = ('intelccompiler', 'IntelEM64TCCompiler',
                             "Intel C Compiler for 64-bit applications")
compiler_class['intelw'] = ('intelccompiler', 'IntelCCompilerW',
                            "Intel C Compiler for 32-bit applications on Windows")
compiler_class['intelemw'] = ('intelccompiler', 'IntelEM64TCCompilerW',
                              "Intel C Compiler for 64-bit applications on Windows")
compiler_class['pathcc'] = ('pathccompiler', 'PathScaleCCompiler',
                            "PathScale Compiler for SiCortex-based applications")
ccompiler._default_compilers += (('linux.*', 'intel'),
                                 ('linux.*', 'intele'),
                                 ('linux.*', 'intelem'),
                                 ('linux.*', 'pathcc'),
                                 ('nt', 'intelw'),
                                 ('nt', 'intelemw'))

if sys.platform == 'win32':
    compiler_class['mingw32'] = ('mingw32ccompiler', 'Mingw32CCompiler',
                                 "Mingw32 port of GNU C Compiler for Win32"\
                                 "(for MSC built Python)")
    if mingw32():
        # On windows platforms, we want to default to mingw32 (gcc)
        # because msvc can't build blitz stuff.
        log.info('Setting mingw32 as default compiler for nt.')
        ccompiler._default_compilers = (('nt', 'mingw32'),) \
                                       + ccompiler._default_compilers
|
||||
|
||||
|
||||
# Keep a reference to the stock distutils factory before overriding it.
_distutils_new_compiler = new_compiler
def new_compiler (plat=None,
                  compiler=None,
                  verbose=0,
                  dry_run=0,
                  force=0):
    """Replacement for distutils' new_compiler that prefers the compiler
    classes shipped in numpy.distutils, falling back to distutils' own
    modules when a numpy-specific module is not available."""
    # Try first C compilers from numpy.distutils.
    if plat is None:
        plat = os.name
    try:
        if compiler is None:
            compiler = get_default_compiler(plat)
        (module_name, class_name, long_description) = compiler_class[compiler]
    except KeyError:
        msg = "don't know how to compile C/C++ code on platform '%s'" % plat
        if compiler is not None:
            msg = msg + " with '%s' compiler" % compiler
        raise DistutilsPlatformError(msg)
    module_name = "numpy.distutils." + module_name
    try:
        __import__ (module_name)
    except ImportError:
        msg = str(get_exception())
        log.info('%s in numpy.distutils; trying from distutils',
                 str(msg))
        # Strip the "numpy." prefix and retry with distutils' own module.
        module_name = module_name[6:]
        try:
            __import__(module_name)
        except ImportError:
            msg = str(get_exception())
            raise DistutilsModuleError("can't compile C/C++ code: unable to load module '%s'" % \
                  module_name)
    try:
        module = sys.modules[module_name]
        klass = vars(module)[class_name]
    except KeyError:
        raise DistutilsModuleError(("can't compile C/C++ code: unable to find class '%s' " +
                                    "in module '%s'") % (class_name, module_name))
    compiler = klass(None, dry_run, force)
    log.debug('new_compiler returns %s' % (klass))
    return compiler

ccompiler.new_compiler = new_compiler
|
||||
|
||||
# Keep the stock implementation around; our wrapper delegates to it.
_distutils_gen_lib_options = gen_lib_options

def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
    """Generate linker options, flattening any list-valued entries.

    The CPython version of this function unpacks list results only from
    ``compiler.runtime_library_dir_option``; this version extends that
    behavior to ``library_dir_option``, ``library_option`` and
    ``find_library_file``.
    """
    raw_opts = _distutils_gen_lib_options(compiler, library_dirs,
                                          runtime_library_dirs, libraries)
    flattened = []
    for opt in raw_opts:
        if is_sequence(opt):
            flattened.extend(list(opt))
        else:
            flattened.append(opt)
    return flattened

ccompiler.gen_lib_options = gen_lib_options
|
||||
# Also fix up the various compiler modules, which do
#   from distutils.ccompiler import gen_lib_options
# so they pick up our flattening wrapper as well.
# Don't bother with mwerks, as we don't support Classic Mac.
for _cc in ['msvc9', 'msvc', '_msvc', 'bcpp', 'cygwinc', 'emxc', 'unixc']:
    _m = sys.modules.get('distutils.' + _cc + 'compiler')
    if _m is None:
        # Module not imported yet; when it is imported later it binds the
        # (already patched) ccompiler.gen_lib_options.
        continue
    setattr(_m, 'gen_lib_options', gen_lib_options)
||||
@@ -0,0 +1,43 @@
|
||||
"""distutils.command
|
||||
|
||||
Package containing implementation of all the standard Distutils
|
||||
commands.
|
||||
|
||||
"""
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
def test_na_writable_attributes_deletion():
    # NOTE(review): this test looks misplaced inside a distutils package and
    # depends on ``np.NA`` and ``assert_raises`` being present in the module
    # namespace -- neither is imported here; confirm intent.
    a = np.NA(2)
    for attr_name in ('payload', 'dtype'):
        assert_raises(AttributeError, delattr, a, attr_name)
||||
__revision__ = "$Id: __init__.py,v 1.3 2005/05/16 11:08:49 pearu Exp $"
|
||||
|
||||
distutils_all = [ #'build_py',
|
||||
'clean',
|
||||
'install_clib',
|
||||
'install_scripts',
|
||||
'bdist',
|
||||
'bdist_dumb',
|
||||
'bdist_wininst',
|
||||
]
|
||||
|
||||
__import__('distutils.command', globals(), locals(), distutils_all)
|
||||
|
||||
__all__ = ['build',
|
||||
'config_compiler',
|
||||
'config',
|
||||
'build_src',
|
||||
'build_py',
|
||||
'build_ext',
|
||||
'build_clib',
|
||||
'build_scripts',
|
||||
'install',
|
||||
'install_data',
|
||||
'install_headers',
|
||||
'install_lib',
|
||||
'bdist_rpm',
|
||||
'sdist',
|
||||
] + distutils_all
|
||||
@@ -0,0 +1,122 @@
|
||||
"""This module implements additional tests ala autoconf which can be useful.
|
||||
|
||||
"""
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import textwrap
|
||||
|
||||
# We put them here since they could be easily reused outside numpy.distutils
|
||||
|
||||
def check_inline(cmd):
    """Return the first inline keyword the compiler accepts ('' if none)."""
    cmd._check_compiler()
    # Probe body exercises both a static and a non-static inline function;
    # the whole thing is skipped under C++ where 'inline' is always valid.
    body = textwrap.dedent("""
        #ifndef __cplusplus
        static %(inline)s int static_func (void)
        {
            return 0;
        }
        %(inline)s int nostatic_func (void)
        {
            return 0;
        }
        #endif""")

    for candidate in ('inline', '__inline__', '__inline'):
        if cmd.try_compile(body % {'inline': candidate}, None, None):
            return candidate
    return ''
||||
|
||||
|
||||
def check_restrict(cmd):
    """Return the first restrict qualifier the compiler accepts ('' if none)."""
    cmd._check_compiler()
    body = textwrap.dedent("""
        static int static_func (char * %(restrict)s a)
        {
            return 0;
        }
        """)

    for candidate in ('restrict', '__restrict__', '__restrict'):
        if cmd.try_compile(body % {'restrict': candidate}, None, None):
            return candidate
    return ''
||||
|
||||
|
||||
def check_compiler_gcc4(cmd):
    """Return a truthy value if the C compiler defines __GNUC__ >= 4.

    Note the probe only requires ``__GNUC__ >= 4``, so GCC 5 and later also
    pass; the return value is whatever ``try_compile`` reports.
    """
    cmd._check_compiler()
    probe = textwrap.dedent("""
        int
        main()
        {
        #if (! defined __GNUC__) || (__GNUC__ < 4)
        #error gcc >= 4 required
        #endif
            return 0;
        }
        """)
    return cmd.try_compile(probe, None, None)
||||
|
||||
|
||||
def check_gcc_function_attribute(cmd, attribute, name):
    """Return True if ``attribute`` is accepted on a function declaration.

    ``attribute`` is the full attribute text and ``name`` the declared
    function's identifier.
    """
    cmd._check_compiler()
    # Promote attribute warnings to errors so an ignored attribute fails
    # the probe instead of compiling with a warning.
    probe = textwrap.dedent("""
        #pragma GCC diagnostic error "-Wattributes"
        #pragma clang diagnostic error "-Wattributes"

        int %s %s(void*);

        int
        main()
        {
            return 0;
        }
        """) % (attribute, name)
    return cmd.try_compile(probe, None, None) != 0
||||
|
||||
|
||||
def check_gcc_function_attribute_with_intrinsics(cmd, attribute, name, code,
                                                 include):
    """Return True if ``attribute`` works on a function whose body runs
    ``code`` using intrinsics pulled in from header ``include``."""
    cmd._check_compiler()
    probe = textwrap.dedent("""
        #include<%s>
        int %s %s(void)
        {
            %s;
            return 0;
        }

        int
        main()
        {
            return 0;
        }
        """) % (include, attribute, name, code)
    return cmd.try_compile(probe, None, None) != 0
||||
|
||||
|
||||
def check_gcc_variable_attribute(cmd, attribute):
    """Return True if ``attribute`` is accepted on a variable declaration."""
    cmd._check_compiler()
    # Promote attribute warnings to errors (see check_gcc_function_attribute).
    probe = textwrap.dedent("""
        #pragma GCC diagnostic error "-Wattributes"
        #pragma clang diagnostic error "-Wattributes"

        int %s foo;

        int
        main()
        {
            return 0;
        }
        """) % (attribute, )
    return cmd.try_compile(probe, None, None) != 0
||||
@@ -0,0 +1,24 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import os
|
||||
import sys
|
||||
if 'setuptools' in sys.modules:
|
||||
from setuptools.command.bdist_rpm import bdist_rpm as old_bdist_rpm
|
||||
else:
|
||||
from distutils.command.bdist_rpm import bdist_rpm as old_bdist_rpm
|
||||
|
||||
class bdist_rpm(old_bdist_rpm):
    """bdist_rpm that rewrites the generated spec file so it refers to the
    actual setup script name rather than the hardcoded ``setup.py``."""

    def _make_spec_file(self):
        spec_file = old_bdist_rpm._make_spec_file(self)

        # Replace hardcoded setup.py script name with the real setup script
        # name (e.g. 'setupegg.py'), taken from how we were invoked.
        setup_py = os.path.basename(sys.argv[0])
        if setup_py == 'setup.py':
            return spec_file
        return [line.replace('setup.py', setup_py) for line in spec_file]
||||
@@ -0,0 +1,47 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import os
|
||||
import sys
|
||||
from distutils.command.build import build as old_build
|
||||
from distutils.util import get_platform
|
||||
from numpy.distutils.command.config_compiler import show_fortran_compilers
|
||||
|
||||
class build(old_build):
    """numpy.distutils version of the distutils ``build`` command.

    Adds the config_cc/config_fc/build_src sub-commands, a ``--fcompiler``
    option to select the Fortran compiler, and a ``--parallel``/``-j``
    option for parallel builds.
    """

    # Run the numpy-specific configuration and source-generation steps
    # before the standard distutils build steps.
    sub_commands = [('config_cc', lambda *args: True),
                    ('config_fc', lambda *args: True),
                    ('build_src', old_build.has_ext_modules),
                    ] + old_build.sub_commands

    user_options = old_build.user_options + [
        ('fcompiler=', None,
         "specify the Fortran compiler type"),
        ('parallel=', 'j',
         "number of parallel jobs"),
        ]

    help_options = old_build.help_options + [
        ('help-fcompiler', None, "list available Fortran compilers",
         show_fortran_compilers),
        ]

    def initialize_options(self):
        old_build.initialize_options(self)
        self.fcompiler = None
        self.parallel = None

    def finalize_options(self):
        # --parallel arrives as a string from the command line.
        if self.parallel:
            try:
                self.parallel = int(self.parallel)
            except ValueError:
                raise ValueError("--parallel/-j argument must be an integer")
        build_scripts = self.build_scripts
        old_build.finalize_options(self)
        # BUG FIX: the original used sys.version[0:3], which truncates
        # "3.10.x" to "3.1".  Build the specifier from sys.version_info
        # instead; the result is identical for single-digit minor versions.
        plat_specifier = ".%s-%d.%d" % (get_platform(),
                                        sys.version_info[0],
                                        sys.version_info[1])
        if build_scripts is None:
            self.build_scripts = os.path.join(self.build_base,
                                              'scripts' + plat_specifier)

    def run(self):
        old_build.run(self)
||||
@@ -0,0 +1,323 @@
|
||||
""" Modified version of build_clib that handles fortran source files.
|
||||
"""
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import os
|
||||
from glob import glob
|
||||
import shutil
|
||||
from distutils.command.build_clib import build_clib as old_build_clib
|
||||
from distutils.errors import DistutilsSetupError, DistutilsError, \
|
||||
DistutilsFileError
|
||||
|
||||
from numpy.distutils import log
|
||||
from distutils.dep_util import newer_group
|
||||
from numpy.distutils.misc_util import filter_sources, has_f_sources,\
|
||||
has_cxx_sources, all_strings, get_lib_source_files, is_sequence, \
|
||||
get_numpy_include_dirs
|
||||
|
||||
# Fix Python distutils bug sf #1718574: the 'build-clib' and 'build-temp'
# options take an argument, so their names must end with '=' for distutils
# option parsing to accept a value; append it where missing.
_l = old_build_clib.user_options
for _i in range(len(_l)):
    if _l[_i][0] in ['build-clib', 'build-temp']:
        # Rebuild the option tuple with the corrected name, keeping the
        # short flag and help text unchanged.
        _l[_i] = (_l[_i][0] + '=',) + _l[_i][1:]
#
|
||||
|
||||
class build_clib(old_build_clib):
    """Modified version of distutils' build_clib that also handles C++ and
    Fortran sources and the numpy-specific --fcompiler/--inplace/--parallel
    options."""

    description = "build C/C++/F libraries used by Python extensions"

    user_options = old_build_clib.user_options + [
        ('fcompiler=', None,
         "specify the Fortran compiler type"),
        ('inplace', 'i', 'Build in-place'),
        ('parallel=', 'j',
         "number of parallel jobs"),
        ]

    boolean_options = old_build_clib.boolean_options + ['inplace']

    def initialize_options(self):
        old_build_clib.initialize_options(self)
        self.fcompiler = None
        self.inplace = 0
        self.parallel = None

    def finalize_options(self):
        # --parallel arrives as a string from the command line.
        if self.parallel:
            try:
                self.parallel = int(self.parallel)
            except ValueError:
                raise ValueError("--parallel/-j argument must be an integer")
        old_build_clib.finalize_options(self)
        # Inherit --parallel from the 'build' command when not given here.
        self.set_undefined_options('build', ('parallel', 'parallel'))

    def have_f_sources(self):
        """Return True if any configured library has Fortran sources."""
        for (lib_name, build_info) in self.libraries:
            if has_f_sources(build_info.get('sources', [])):
                return True
        return False

    def have_cxx_sources(self):
        """Return True if any configured library has C++ sources."""
        for (lib_name, build_info) in self.libraries:
            if has_cxx_sources(build_info.get('sources', [])):
                return True
        return False

    def run(self):
        """Build all configured libraries (C/C++/Fortran)."""
        if not self.libraries:
            return

        # Make sure that library sources are complete.
        languages = []

        # Make sure that extension sources are complete.
        self.run_command('build_src')

        for (lib_name, build_info) in self.libraries:
            l = build_info.get('language', None)
            if l and l not in languages:
                languages.append(l)

        from distutils.ccompiler import new_compiler
        self.compiler = new_compiler(compiler=self.compiler,
                                     dry_run=self.dry_run,
                                     force=self.force)
        self.compiler.customize(self.distribution,
                                need_cxx=self.have_cxx_sources())

        # customize_cmd would interpret self.libraries as link libraries;
        # hide the build list while it runs.
        libraries = self.libraries
        self.libraries = None
        self.compiler.customize_cmd(self)
        self.libraries = libraries

        self.compiler.show_customization()

        if self.have_f_sources():
            from numpy.distutils.fcompiler import new_fcompiler
            self._f_compiler = new_fcompiler(compiler=self.fcompiler,
                                             verbose=self.verbose,
                                             dry_run=self.dry_run,
                                             force=self.force,
                                             requiref90='f90' in languages,
                                             c_compiler=self.compiler)
            if self._f_compiler is not None:
                self._f_compiler.customize(self.distribution)

                # Same self.libraries shuffle as for the C compiler above.
                libraries = self.libraries
                self.libraries = None
                self._f_compiler.customize_cmd(self)
                self.libraries = libraries

                self._f_compiler.show_customization()
        else:
            self._f_compiler = None

        self.build_libraries(self.libraries)

        if self.inplace:
            # Copy the built libraries next to their source trees.
            for l in self.distribution.installed_libraries:
                libname = self.compiler.library_filename(l.name)
                source = os.path.join(self.build_clib, libname)
                target = os.path.join(l.target_dir, libname)
                self.mkpath(l.target_dir)
                shutil.copy(source, target)

    def get_source_files(self):
        """Return all source files of all configured libraries."""
        self.check_library_list(self.libraries)
        filenames = []
        for lib in self.libraries:
            filenames.extend(get_lib_source_files(lib))
        return filenames

    def build_libraries(self, libraries):
        for (lib_name, build_info) in libraries:
            self.build_a_library(build_info, lib_name, libraries)

    def build_a_library(self, build_info, lib_name, libraries):
        """Compile and archive one library described by ``build_info``."""
        # default compilers
        compiler = self.compiler
        fcompiler = self._f_compiler

        sources = build_info.get('sources')
        if sources is None or not is_sequence(sources):
            raise DistutilsSetupError(("in 'libraries' option (library '%s'), " +
                                       "'sources' must be present and must be " +
                                       "a list of source filenames") % lib_name)
        sources = list(sources)

        c_sources, cxx_sources, f_sources, fmodule_sources \
            = filter_sources(sources)
        # F90 is required when there are module sources, or the library
        # explicitly declares language 'f90'.
        requiref90 = not not fmodule_sources or \
            build_info.get('language', 'c') == 'f90'

        # save source type information so that build_ext can use it.
        source_languages = []
        if c_sources:
            source_languages.append('c')
        if cxx_sources:
            source_languages.append('c++')
        if requiref90:
            source_languages.append('f90')
        elif f_sources:
            source_languages.append('f77')
        build_info['source_languages'] = source_languages

        lib_file = compiler.library_filename(lib_name,
                                             output_dir=self.build_clib)
        depends = sources + build_info.get('depends', [])
        if not (self.force or newer_group(depends, lib_file, 'newer')):
            log.debug("skipping '%s' library (up-to-date)", lib_name)
            return
        else:
            log.info("building '%s' library", lib_name)

        config_fc = build_info.get('config_fc', {})
        if fcompiler is not None and config_fc:
            # Per-library Fortran configuration replaces the shared
            # fcompiler with a freshly customized one.
            log.info('using additional config_fc from setup script '
                     'for fortran compiler: %s'
                     % (config_fc,))
            from numpy.distutils.fcompiler import new_fcompiler
            fcompiler = new_fcompiler(compiler=fcompiler.compiler_type,
                                      verbose=self.verbose,
                                      dry_run=self.dry_run,
                                      force=self.force,
                                      requiref90=requiref90,
                                      c_compiler=self.compiler)
            if fcompiler is not None:
                dist = self.distribution
                base_config_fc = dist.get_option_dict('config_fc').copy()
                base_config_fc.update(config_fc)
                fcompiler.customize(base_config_fc)

        # check availability of Fortran compilers
        if (f_sources or fmodule_sources) and fcompiler is None:
            raise DistutilsError("library %s has Fortran sources"
                                 " but no Fortran compiler found" % (lib_name))

        if fcompiler is not None:
            fcompiler.extra_f77_compile_args = build_info.get(
                'extra_f77_compile_args') or []
            fcompiler.extra_f90_compile_args = build_info.get(
                'extra_f90_compile_args') or []

        macros = build_info.get('macros')
        include_dirs = build_info.get('include_dirs')
        if include_dirs is None:
            include_dirs = []
        extra_postargs = build_info.get('extra_compiler_args') or []

        include_dirs.extend(get_numpy_include_dirs())
        # where compiled F90 module files are:
        module_dirs = build_info.get('module_dirs') or []
        module_build_dir = os.path.dirname(lib_file)
        if requiref90:
            self.mkpath(module_build_dir)

        if compiler.compiler_type == 'msvc':
            # this hack works around the msvc compiler attributes
            # problem, msvc uses its own convention :(
            c_sources += cxx_sources
            cxx_sources = []

        objects = []
        if c_sources:
            log.info("compiling C sources")
            objects = compiler.compile(c_sources,
                                       output_dir=self.build_temp,
                                       macros=macros,
                                       include_dirs=include_dirs,
                                       debug=self.debug,
                                       extra_postargs=extra_postargs)

        if cxx_sources:
            log.info("compiling C++ sources")
            cxx_compiler = compiler.cxx_compiler()
            cxx_objects = cxx_compiler.compile(cxx_sources,
                                               output_dir=self.build_temp,
                                               macros=macros,
                                               include_dirs=include_dirs,
                                               debug=self.debug,
                                               extra_postargs=extra_postargs)
            objects.extend(cxx_objects)

        if f_sources or fmodule_sources:
            extra_postargs = []
            f_objects = []

            if requiref90:
                if fcompiler.module_dir_switch is None:
                    # Snapshot existing *.mod files so newly generated ones
                    # can be identified and relocated after compilation.
                    existing_modules = glob('*.mod')
                extra_postargs += fcompiler.module_options(
                    module_dirs, module_build_dir)

            if fmodule_sources:
                log.info("compiling Fortran 90 module sources")
                f_objects += fcompiler.compile(fmodule_sources,
                                               output_dir=self.build_temp,
                                               macros=macros,
                                               include_dirs=include_dirs,
                                               debug=self.debug,
                                               extra_postargs=extra_postargs)

            # BUG FIX: this guard originally consulted
            # self._f_compiler.module_dir_switch, while the guard that binds
            # `existing_modules` above consults the local `fcompiler` (which
            # may have been replaced via config_fc).  If the two objects
            # disagreed, `existing_modules` could be unbound here.  Use the
            # same local `fcompiler` for both checks.
            if requiref90 and fcompiler.module_dir_switch is None:
                # move new compiled F90 module files to module_build_dir
                for f in glob('*.mod'):
                    if f in existing_modules:
                        continue
                    t = os.path.join(module_build_dir, f)
                    if os.path.abspath(f) == os.path.abspath(t):
                        continue
                    if os.path.isfile(t):
                        os.remove(t)
                    try:
                        self.move_file(f, module_build_dir)
                    except DistutilsFileError:
                        log.warn('failed to move %r to %r'
                                 % (f, module_build_dir))

            if f_sources:
                log.info("compiling Fortran sources")
                f_objects += fcompiler.compile(f_sources,
                                               output_dir=self.build_temp,
                                               macros=macros,
                                               include_dirs=include_dirs,
                                               debug=self.debug,
                                               extra_postargs=extra_postargs)
        else:
            f_objects = []

        if f_objects and not fcompiler.can_ccompiler_link(compiler):
            # Default linker cannot link Fortran object files, and results
            # need to be wrapped later. Instead of creating a real static
            # library, just keep track of the object files.
            listfn = os.path.join(self.build_clib,
                                  lib_name + '.fobjects')
            with open(listfn, 'w') as f:
                f.write("\n".join(os.path.abspath(obj) for obj in f_objects))

            listfn = os.path.join(self.build_clib,
                                  lib_name + '.cobjects')
            with open(listfn, 'w') as f:
                f.write("\n".join(os.path.abspath(obj) for obj in objects))

            # create empty "library" file for dependency tracking
            lib_fname = os.path.join(self.build_clib,
                                     lib_name + compiler.static_lib_extension)
            with open(lib_fname, 'wb') as f:
                pass
        else:
            # assume that default linker is suitable for
            # linking Fortran object files
            objects.extend(f_objects)
            compiler.create_static_lib(objects, lib_name,
                                       output_dir=self.build_clib,
                                       debug=self.debug)

        # fix library dependencies
        clib_libraries = build_info.get('libraries', [])
        for lname, binfo in libraries:
            if lname in clib_libraries:
                clib_libraries.extend(binfo.get('libraries', []))
        if clib_libraries:
            build_info['libraries'] = clib_libraries
||||
@@ -0,0 +1,598 @@
|
||||
""" Modified version of build_ext that handles fortran source files.
|
||||
|
||||
"""
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
from glob import glob
|
||||
|
||||
from distutils.dep_util import newer_group
|
||||
from distutils.command.build_ext import build_ext as old_build_ext
|
||||
from distutils.errors import DistutilsFileError, DistutilsSetupError,\
|
||||
DistutilsError
|
||||
from distutils.file_util import copy_file
|
||||
|
||||
from numpy.distutils import log
|
||||
from numpy.distutils.exec_command import filepath_from_subprocess_output
|
||||
from numpy.distutils.system_info import combine_paths, system_info
|
||||
from numpy.distutils.misc_util import filter_sources, has_f_sources, \
|
||||
has_cxx_sources, get_ext_source_files, \
|
||||
get_numpy_include_dirs, is_sequence, get_build_architecture, \
|
||||
msvc_version
|
||||
from numpy.distutils.command.config_compiler import show_fortran_compilers
|
||||
|
||||
|
||||
|
||||
class build_ext (old_build_ext):
|
||||
|
||||
description = "build C/C++/F extensions (compile/link to build directory)"
|
||||
|
||||
user_options = old_build_ext.user_options + [
|
||||
('fcompiler=', None,
|
||||
"specify the Fortran compiler type"),
|
||||
('parallel=', 'j',
|
||||
"number of parallel jobs"),
|
||||
]
|
||||
|
||||
help_options = old_build_ext.help_options + [
|
||||
('help-fcompiler', None, "list available Fortran compilers",
|
||||
show_fortran_compilers),
|
||||
]
|
||||
|
||||
def initialize_options(self):
|
||||
old_build_ext.initialize_options(self)
|
||||
self.fcompiler = None
|
||||
self.parallel = None
|
||||
|
||||
def finalize_options(self):
|
||||
if self.parallel:
|
||||
try:
|
||||
self.parallel = int(self.parallel)
|
||||
except ValueError:
|
||||
raise ValueError("--parallel/-j argument must be an integer")
|
||||
|
||||
# Ensure that self.include_dirs and self.distribution.include_dirs
|
||||
# refer to the same list object. finalize_options will modify
|
||||
# self.include_dirs, but self.distribution.include_dirs is used
|
||||
# during the actual build.
|
||||
# self.include_dirs is None unless paths are specified with
|
||||
# --include-dirs.
|
||||
# The include paths will be passed to the compiler in the order:
|
||||
# numpy paths, --include-dirs paths, Python include path.
|
||||
if isinstance(self.include_dirs, str):
|
||||
self.include_dirs = self.include_dirs.split(os.pathsep)
|
||||
incl_dirs = self.include_dirs or []
|
||||
if self.distribution.include_dirs is None:
|
||||
self.distribution.include_dirs = []
|
||||
self.include_dirs = self.distribution.include_dirs
|
||||
self.include_dirs.extend(incl_dirs)
|
||||
|
||||
old_build_ext.finalize_options(self)
|
||||
self.set_undefined_options('build', ('parallel', 'parallel'))
|
||||
|
||||
    def run(self):
        """Build all configured extensions.

        Runs build_src first, optionally builds C libraries via build_clib,
        sets up C/C++/Fortran 77/Fortran 90 compilers as required by the
        extensions' sources, builds the extensions, and finally copies any
        generated extra DLLs (Windows) next to the built packages.
        """
        if not self.extensions:
            return

        # Make sure that extension sources are complete.
        self.run_command('build_src')

        if self.distribution.has_c_libraries():
            if self.inplace:
                if self.distribution.have_run.get('build_clib'):
                    # build_clib already executed without inplace=1.
                    log.warn('build_clib already run, it is too late to '
                             'ensure in-place build of build_clib')
                    build_clib = self.distribution.get_command_obj(
                        'build_clib')
                else:
                    # Force an in-place build_clib run before building
                    # extensions.
                    build_clib = self.distribution.get_command_obj(
                        'build_clib')
                    build_clib.inplace = 1
                    build_clib.ensure_finalized()
                    build_clib.run()
                    self.distribution.have_run['build_clib'] = 1

            else:
                self.run_command('build_clib')
                build_clib = self.get_finalized_command('build_clib')
                self.library_dirs.append(build_clib.build_clib)
        else:
            build_clib = None

        # Not including C libraries to the list of
        # extension libraries automatically to prevent
        # bogus linking commands. Extensions must
        # explicitly specify the C libraries that they use.

        from distutils.ccompiler import new_compiler
        from numpy.distutils.fcompiler import new_fcompiler

        compiler_type = self.compiler
        # Initialize C compiler:
        self.compiler = new_compiler(compiler=compiler_type,
                                     verbose=self.verbose,
                                     dry_run=self.dry_run,
                                     force=self.force)
        self.compiler.customize(self.distribution)
        self.compiler.customize_cmd(self)
        self.compiler.show_customization()

        # Setup directory for storing generated extra DLL files on Windows
        self.extra_dll_dir = os.path.join(self.build_temp, '.libs')
        if not os.path.isdir(self.extra_dll_dir):
            os.makedirs(self.extra_dll_dir)

        # Create mapping of libraries built by build_clib:
        clibs = {}
        if build_clib is not None:
            for libname, build_info in build_clib.libraries or []:
                if libname in clibs and clibs[libname] != build_info:
                    log.warn('library %r defined more than once,'
                             ' overwriting build_info\n%s... \nwith\n%s...'
                             % (libname, repr(clibs[libname])[:300], repr(build_info)[:300]))
                clibs[libname] = build_info
        # .. and distribution libraries:
        for libname, build_info in self.distribution.libraries or []:
            if libname in clibs:
                # build_clib libraries have a precedence before distribution ones
                continue
            clibs[libname] = build_info

        # Determine if C++/Fortran 77/Fortran 90 compilers are needed.
        # Update extension libraries, library_dirs, and macros.
        all_languages = set()
        for ext in self.extensions:
            ext_languages = set()
            c_libs = []
            c_lib_dirs = []
            macros = []
            for libname in ext.libraries:
                if libname in clibs:
                    binfo = clibs[libname]
                    c_libs += binfo.get('libraries', [])
                    c_lib_dirs += binfo.get('library_dirs', [])
                    for m in binfo.get('macros', []):
                        if m not in macros:
                            macros.append(m)

                # source_languages was recorded by build_clib's
                # build_a_library for each library it built.
                for l in clibs.get(libname, {}).get('source_languages', []):
                    ext_languages.add(l)
            if c_libs:
                new_c_libs = ext.libraries + c_libs
                log.info('updating extension %r libraries from %r to %r'
                         % (ext.name, ext.libraries, new_c_libs))
                ext.libraries = new_c_libs
                ext.library_dirs = ext.library_dirs + c_lib_dirs
            if macros:
                log.info('extending extension %r defined_macros with %r'
                         % (ext.name, macros))
                ext.define_macros = ext.define_macros + macros

            # determine extension languages
            if has_f_sources(ext.sources):
                ext_languages.add('f77')
            if has_cxx_sources(ext.sources):
                ext_languages.add('c++')
            l = ext.language or self.compiler.detect_language(ext.sources)
            if l:
                ext_languages.add(l)
            # reset language attribute for choosing proper linker
            # (priority: c++ over f90 over f77 over c)
            if 'c++' in ext_languages:
                ext_language = 'c++'
            elif 'f90' in ext_languages:
                ext_language = 'f90'
            elif 'f77' in ext_languages:
                ext_language = 'f77'
            else:
                ext_language = 'c'  # default
            if l and l != ext_language and ext.language:
                log.warn('resetting extension %r language from %r to %r.' %
                         (ext.name, l, ext_language))
            ext.language = ext_language
            # global language
            all_languages.update(ext_languages)

        need_f90_compiler = 'f90' in all_languages
        need_f77_compiler = 'f77' in all_languages
        need_cxx_compiler = 'c++' in all_languages

        # Initialize C++ compiler:
        if need_cxx_compiler:
            self._cxx_compiler = new_compiler(compiler=compiler_type,
                                              verbose=self.verbose,
                                              dry_run=self.dry_run,
                                              force=self.force)
            compiler = self._cxx_compiler
            compiler.customize(self.distribution, need_cxx=need_cxx_compiler)
            compiler.customize_cmd(self)
            compiler.show_customization()
            self._cxx_compiler = compiler.cxx_compiler()
        else:
            self._cxx_compiler = None

        # Initialize Fortran 77 compiler:
        if need_f77_compiler:
            ctype = self.fcompiler
            self._f77_compiler = new_fcompiler(compiler=self.fcompiler,
                                               verbose=self.verbose,
                                               dry_run=self.dry_run,
                                               force=self.force,
                                               requiref90=False,
                                               c_compiler=self.compiler)
            fcompiler = self._f77_compiler
            if fcompiler:
                ctype = fcompiler.compiler_type
                fcompiler.customize(self.distribution)
            if fcompiler and fcompiler.get_version():
                fcompiler.customize_cmd(self)
                fcompiler.show_customization()
            else:
                self.warn('f77_compiler=%s is not available.' %
                          (ctype))
                self._f77_compiler = None
        else:
            self._f77_compiler = None

        # Initialize Fortran 90 compiler:
        if need_f90_compiler:
            ctype = self.fcompiler
            self._f90_compiler = new_fcompiler(compiler=self.fcompiler,
                                               verbose=self.verbose,
                                               dry_run=self.dry_run,
                                               force=self.force,
                                               requiref90=True,
                                               c_compiler=self.compiler)
            fcompiler = self._f90_compiler
            if fcompiler:
                ctype = fcompiler.compiler_type
                fcompiler.customize(self.distribution)
            if fcompiler and fcompiler.get_version():
                fcompiler.customize_cmd(self)
                fcompiler.show_customization()
            else:
                self.warn('f90_compiler=%s is not available.' %
                          (ctype))
                self._f90_compiler = None
        else:
            self._f90_compiler = None

        # Build extensions
        self.build_extensions()

        # Copy over any extra DLL files
        # FIXME: In the case where there are more than two packages,
        # we blindly assume that both packages need all of the libraries,
        # resulting in a larger wheel than is required. This should be fixed,
        # but it's so rare that I won't bother to handle it.
        pkg_roots = {
            self.get_ext_fullname(ext.name).split('.')[0]
            for ext in self.extensions
        }
        for pkg_root in pkg_roots:
            shared_lib_dir = os.path.join(pkg_root, '.libs')
            if not self.inplace:
                shared_lib_dir = os.path.join(self.build_lib, shared_lib_dir)
            for fn in os.listdir(self.extra_dll_dir):
                if not os.path.isdir(shared_lib_dir):
                    os.makedirs(shared_lib_dir)
                if not fn.lower().endswith('.dll'):
                    continue
                runtime_lib = os.path.join(self.extra_dll_dir, fn)
                copy_file(runtime_lib, shared_lib_dir)
|
||||
def swig_sources(self, sources, extensions=None):
|
||||
# Do nothing. Swig sources have been handled in build_src command.
|
||||
return sources
|
||||
|
||||
def build_extension(self, ext):
    """Compile and link one extension module.

    Sorts the extension's sources into C, C++, Fortran-77/90 buckets,
    compiles each with the matching compiler, then links everything into
    the target shared object.  The whole build is skipped when the
    target file is newer than all sources and dependencies (unless
    ``self.force`` is set).
    """
    # 'sources' must be a real sequence of filenames.
    sources = ext.sources
    if sources is None or not is_sequence(sources):
        raise DistutilsSetupError(
            ("in 'ext_modules' option (extension '%s'), " +
             "'sources' must be present and must be " +
             "a list of source filenames") % ext.name)
    sources = list(sources)

    if not sources:
        return

    # Compute the output filename: next to the package sources for an
    # in-place build, otherwise under build_lib.
    fullname = self.get_ext_fullname(ext.name)
    if self.inplace:
        modpath = fullname.split('.')
        package = '.'.join(modpath[0:-1])
        base = modpath[-1]
        build_py = self.get_finalized_command('build_py')
        package_dir = build_py.get_package_dir(package)
        ext_filename = os.path.join(package_dir,
                                    self.get_ext_filename(base))
    else:
        ext_filename = os.path.join(self.build_lib,
                                    self.get_ext_filename(fullname))
    depends = sources + ext.depends

    # Up-to-date check against all sources and declared dependencies.
    if not (self.force or newer_group(depends, ext_filename, 'newer')):
        log.debug("skipping '%s' extension (up-to-date)", ext.name)
        return
    else:
        log.info("building '%s' extension", ext.name)

    extra_args = ext.extra_compile_args or []
    macros = ext.define_macros[:]
    # undef_macros are encoded as 1-tuples per distutils convention.
    for undef in ext.undef_macros:
        macros.append((undef,))

    c_sources, cxx_sources, f_sources, fmodule_sources = \
        filter_sources(ext.sources)

    if self.compiler.compiler_type == 'msvc':
        if cxx_sources:
            # Needed to compile kiva.agg._agg extension.
            extra_args.append('/Zm1000')
        # this hack works around the msvc compiler attributes
        # problem, msvc uses its own convention :(
        c_sources += cxx_sources
        cxx_sources = []

    # Set Fortran/C++ compilers for compilation and linking.
    if ext.language == 'f90':
        fcompiler = self._f90_compiler
    elif ext.language == 'f77':
        fcompiler = self._f77_compiler
    else: # in case ext.language is c++, for instance
        fcompiler = self._f90_compiler or self._f77_compiler
    if fcompiler is not None:
        # Per-extension Fortran flags are optional attributes on ext.
        fcompiler.extra_f77_compile_args = (ext.extra_f77_compile_args or []) if hasattr(
            ext, 'extra_f77_compile_args') else []
        fcompiler.extra_f90_compile_args = (ext.extra_f90_compile_args or []) if hasattr(
            ext, 'extra_f90_compile_args') else []
    cxx_compiler = self._cxx_compiler

    # check for the availability of required compilers
    # NOTE(review): the message below is missing a space between
    # "sources" and "but" — adjacent string literals concatenate.
    if cxx_sources and cxx_compiler is None:
        raise DistutilsError("extension %r has C++ sources"
                             "but no C++ compiler found" % (ext.name))
    if (f_sources or fmodule_sources) and fcompiler is None:
        raise DistutilsError("extension %r has Fortran sources "
                             "but no Fortran compiler found" % (ext.name))
    if ext.language in ['f77', 'f90'] and fcompiler is None:
        self.warn("extension %r has Fortran libraries "
                  "but no Fortran linker found, using default linker" % (ext.name))
    if ext.language == 'c++' and cxx_compiler is None:
        self.warn("extension %r has C++ libraries "
                  "but no C++ linker found, using default linker" % (ext.name))

    kws = {'depends': ext.depends}
    output_dir = self.build_temp

    include_dirs = ext.include_dirs + get_numpy_include_dirs()

    # --- compile each language bucket ---
    c_objects = []
    if c_sources:
        log.info("compiling C sources")
        c_objects = self.compiler.compile(c_sources,
                                          output_dir=output_dir,
                                          macros=macros,
                                          include_dirs=include_dirs,
                                          debug=self.debug,
                                          extra_postargs=extra_args,
                                          **kws)

    if cxx_sources:
        log.info("compiling C++ sources")
        c_objects += cxx_compiler.compile(cxx_sources,
                                          output_dir=output_dir,
                                          macros=macros,
                                          include_dirs=include_dirs,
                                          debug=self.debug,
                                          extra_postargs=extra_args,
                                          **kws)

    extra_postargs = []
    f_objects = []
    if fmodule_sources:
        log.info("compiling Fortran 90 module sources")
        module_dirs = ext.module_dirs[:]
        module_build_dir = os.path.join(
            self.build_temp, os.path.dirname(
                self.get_ext_filename(fullname)))

        self.mkpath(module_build_dir)
        if fcompiler.module_dir_switch is None:
            # Compiler drops .mod files in the CWD; snapshot what is
            # already there so only new ones get moved afterwards.
            existing_modules = glob('*.mod')
        extra_postargs += fcompiler.module_options(
            module_dirs, module_build_dir)
        f_objects += fcompiler.compile(fmodule_sources,
                                       output_dir=self.build_temp,
                                       macros=macros,
                                       include_dirs=include_dirs,
                                       debug=self.debug,
                                       extra_postargs=extra_postargs,
                                       depends=ext.depends)

        if fcompiler.module_dir_switch is None:
            # Relocate newly created .mod files into the build tree.
            for f in glob('*.mod'):
                if f in existing_modules:
                    continue
                t = os.path.join(module_build_dir, f)
                if os.path.abspath(f) == os.path.abspath(t):
                    continue
                if os.path.isfile(t):
                    os.remove(t)
                try:
                    self.move_file(f, module_build_dir)
                except DistutilsFileError:
                    log.warn('failed to move %r to %r' %
                             (f, module_build_dir))
    if f_sources:
        log.info("compiling Fortran sources")
        f_objects += fcompiler.compile(f_sources,
                                       output_dir=self.build_temp,
                                       macros=macros,
                                       include_dirs=include_dirs,
                                       debug=self.debug,
                                       extra_postargs=extra_postargs,
                                       depends=ext.depends)

    # Fortran objects the C compiler cannot link directly are handled
    # later by _process_unlinkable_fobjects.
    if f_objects and not fcompiler.can_ccompiler_link(self.compiler):
        unlinkable_fobjects = f_objects
        objects = c_objects
    else:
        unlinkable_fobjects = []
        objects = c_objects + f_objects

    if ext.extra_objects:
        objects.extend(ext.extra_objects)
    extra_args = ext.extra_link_args or []
    libraries = self.get_libraries(ext)[:]
    library_dirs = ext.library_dirs[:]

    linker = self.compiler.link_shared_object
    # Always use system linker when using MSVC compiler.
    if self.compiler.compiler_type in ('msvc', 'intelw', 'intelemw'):
        # expand libraries with fcompiler libraries as we are
        # not using fcompiler linker
        self._libs_with_msvc_and_fortran(
            fcompiler, libraries, library_dirs)

    elif ext.language in ['f77', 'f90'] and fcompiler is not None:
        linker = fcompiler.link_shared_object
    if ext.language == 'c++' and cxx_compiler is not None:
        linker = cxx_compiler.link_shared_object

    if fcompiler is not None:
        objects, libraries = self._process_unlinkable_fobjects(
            objects, libraries,
            fcompiler, library_dirs,
            unlinkable_fobjects)

    linker(objects, ext_filename,
           libraries=libraries,
           library_dirs=library_dirs,
           runtime_library_dirs=ext.runtime_library_dirs,
           extra_postargs=extra_args,
           export_symbols=self.get_export_symbols(ext),
           debug=self.debug,
           build_temp=self.build_temp,
           target_lang=ext.language)
|
||||
|
||||
def _add_dummy_mingwex_sym(self, c_sources):
    """Build the gfortran/VS2003 workaround object into a static lib.

    Compiles ``gfortran_vs2003_hack.c`` (generated under build_src) and
    archives it as ``_gfortran_workaround`` in build_clib so the linker
    can resolve the dummy mingwex symbol.  *c_sources* is currently
    unused by the body.
    """
    build_src = self.get_finalized_command("build_src").build_src
    build_clib = self.get_finalized_command("build_clib").build_clib
    objects = self.compiler.compile([os.path.join(build_src,
                                                  "gfortran_vs2003_hack.c")],
                                    output_dir=self.build_temp)
    self.compiler.create_static_lib(
        objects, "_gfortran_workaround", output_dir=build_clib, debug=self.debug)
|
||||
|
||||
def _process_unlinkable_fobjects(self, objects, libraries,
|
||||
fcompiler, library_dirs,
|
||||
unlinkable_fobjects):
|
||||
libraries = list(libraries)
|
||||
objects = list(objects)
|
||||
unlinkable_fobjects = list(unlinkable_fobjects)
|
||||
|
||||
# Expand possible fake static libraries to objects
|
||||
for lib in list(libraries):
|
||||
for libdir in library_dirs:
|
||||
fake_lib = os.path.join(libdir, lib + '.fobjects')
|
||||
if os.path.isfile(fake_lib):
|
||||
# Replace fake static library
|
||||
libraries.remove(lib)
|
||||
with open(fake_lib, 'r') as f:
|
||||
unlinkable_fobjects.extend(f.read().splitlines())
|
||||
|
||||
# Expand C objects
|
||||
c_lib = os.path.join(libdir, lib + '.cobjects')
|
||||
with open(c_lib, 'r') as f:
|
||||
objects.extend(f.read().splitlines())
|
||||
|
||||
# Wrap unlinkable objects to a linkable one
|
||||
if unlinkable_fobjects:
|
||||
fobjects = [os.path.relpath(obj) for obj in unlinkable_fobjects]
|
||||
wrapped = fcompiler.wrap_unlinkable_objects(
|
||||
fobjects, output_dir=self.build_temp,
|
||||
extra_dll_dir=self.extra_dll_dir)
|
||||
objects.extend(wrapped)
|
||||
|
||||
return objects, libraries
|
||||
|
||||
def _libs_with_msvc_and_fortran(self, fcompiler, c_libraries,
                                c_library_dirs):
    """Make Fortran-compiler libraries consumable by the MSVC linker.

    Mutates *c_libraries* and *c_library_dirs* in place: GNU-style
    ``lib<name>.a`` archives are copied to ``<name>.lib`` under
    ``self.build_temp`` so MSVC can find them, and the Fortran
    compiler's own library names/dirs are appended.  No-op when
    *fcompiler* is None.
    """
    if fcompiler is None:
        return

    for libname in c_libraries:
        # MSVC runtime libs are resolved by the linker itself.
        if libname.startswith('msvc'):
            continue
        fileexists = False
        for libdir in c_library_dirs or []:
            libfile = os.path.join(libdir, '%s.lib' % (libname))
            if os.path.isfile(libfile):
                fileexists = True
                break
        if fileexists:
            continue
        # make g77-compiled static libs available to MSVC
        fileexists = False
        for libdir in c_library_dirs:
            libfile = os.path.join(libdir, 'lib%s.a' % (libname))
            if os.path.isfile(libfile):
                # copy libname.a file to name.lib so that MSVC linker
                # can find it
                libfile2 = os.path.join(self.build_temp, libname + '.lib')
                copy_file(libfile, libfile2)
                if self.build_temp not in c_library_dirs:
                    c_library_dirs.append(self.build_temp)
                fileexists = True
                break
        if fileexists:
            continue
        log.warn('could not find library %r in directories %s'
                 % (libname, c_library_dirs))

    # Always use system linker when using MSVC compiler.
    f_lib_dirs = []
    for dir in fcompiler.library_dirs:
        # correct path when compiling in Cygwin but with normal Win
        # Python
        if dir.startswith('/usr/lib'):
            try:
                dir = subprocess.check_output(['cygpath', '-w', dir])
            except (OSError, subprocess.CalledProcessError):
                # cygpath unavailable or failed: keep the POSIX path.
                pass
            else:
                dir = filepath_from_subprocess_output(dir)
        f_lib_dirs.append(dir)
    c_library_dirs.extend(f_lib_dirs)

    # make g77-compiled static libs available to MSVC
    for lib in fcompiler.libraries:
        if not lib.startswith('msvc'):
            c_libraries.append(lib)
            p = combine_paths(f_lib_dirs, 'lib' + lib + '.a')
            if p:
                dst_name = os.path.join(self.build_temp, lib + '.lib')
                if not os.path.isfile(dst_name):
                    copy_file(p[0], dst_name)
                if self.build_temp not in c_library_dirs:
                    c_library_dirs.append(self.build_temp)
|
||||
|
||||
def get_source_files(self):
    """List the source files belonging to all configured extensions."""
    self.check_extensions_list(self.extensions)
    collected = []
    for extension in self.extensions:
        collected += get_ext_source_files(extension)
    return collected
|
||||
|
||||
def get_outputs(self):
    """Return the build_lib paths of every extension that has sources."""
    self.check_extensions_list(self.extensions)
    return [
        os.path.join(self.build_lib,
                     self.get_ext_filename(self.get_ext_fullname(ext.name)))
        for ext in self.extensions
        if ext.sources
    ]
|
||||
@@ -0,0 +1,33 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
from distutils.command.build_py import build_py as old_build_py
|
||||
from numpy.distutils.misc_util import is_string
|
||||
|
||||
class build_py(old_build_py):
    """build_py variant aware of build_src-generated Python modules."""

    def run(self):
        """Run build_py, first registering packages that only exist
        because build_src generated modules for them."""
        build_src = self.get_finalized_command('build_src')
        if build_src.py_modules_dict and self.packages is None:
            self.packages = list(build_src.py_modules_dict.keys ())
        old_build_py.run(self)

    def find_package_modules(self, package, package_dir):
        """Extend the standard module list with build_src-generated
        ``.py`` files for *package*."""
        modules = old_build_py.find_package_modules(self, package, package_dir)

        # Find build_src generated *.py files.
        build_src = self.get_finalized_command('build_src')
        modules += build_src.py_modules_dict.get(package, [])

        return modules

    def find_modules(self):
        """Run the base find_modules with non-string entries (callables
        handled by build_src) temporarily filtered out, then restore the
        original py_modules list."""
        old_py_modules = self.py_modules[:]
        new_py_modules = [_m for _m in self.py_modules if is_string(_m)]
        self.py_modules[:] = new_py_modules
        modules = old_build_py.find_modules(self)
        self.py_modules[:] = old_py_modules

        return modules
|
||||
|
||||
# XXX: Fix find_source_files for item in py_modules such that item is 3-tuple
|
||||
# and item[2] is source file.
|
||||
@@ -0,0 +1,51 @@
|
||||
""" Modified version of build_scripts that handles building scripts from functions.
|
||||
|
||||
"""
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
from distutils.command.build_scripts import build_scripts as old_build_scripts
|
||||
from numpy.distutils import log
|
||||
from numpy.distutils.misc_util import is_string
|
||||
|
||||
class build_scripts(old_build_scripts):
    """build_scripts variant that accepts callables in ``scripts``.

    A callable entry is invoked with the build directory and may return
    a script filename, a sequence of filenames, or a false value (which
    is skipped).
    """

    def generate_scripts(self, scripts):
        """Resolve callable entries of *scripts* into real filenames.

        Returns the list of string script names; callables that return
        nothing are dropped.
        """
        new_scripts = []
        func_scripts = []
        for script in scripts:
            if is_string(script):
                new_scripts.append(script)
            else:
                func_scripts.append(script)
        if not func_scripts:
            return new_scripts

        build_dir = self.build_dir
        self.mkpath(build_dir)
        for func in func_scripts:
            script = func(build_dir)
            if not script:
                continue
            if is_string(script):
                log.info(" adding '%s' to scripts" % (script,))
                new_scripts.append(script)
            else:
                # Fixed idiom: plain loop instead of a side-effect-only
                # list comprehension.
                for s in script:
                    log.info(" adding '%s' to scripts" % (s,))
                new_scripts.extend(list(script))
        return new_scripts

    def run(self):
        """Generate scripts, publish them on the distribution, then run
        the standard build_scripts."""
        if not self.scripts:
            return

        self.scripts = self.generate_scripts(self.scripts)
        # Now make sure that the distribution object has this list of scripts.
        # setuptools' develop command requires that this be a list of filenames,
        # not functions.
        self.distribution.scripts = self.scripts

        return old_build_scripts.run(self)

    def get_source_files(self):
        """Return the script files as source files."""
        from numpy.distutils.misc_util import get_script_files
        return get_script_files(self.scripts)
|
||||
@@ -0,0 +1,764 @@
|
||||
""" Build swig and f2py sources.
|
||||
"""
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import shlex
|
||||
import copy
|
||||
|
||||
from distutils.command import build_ext
|
||||
from distutils.dep_util import newer_group, newer
|
||||
from distutils.util import get_platform
|
||||
from distutils.errors import DistutilsError, DistutilsSetupError
|
||||
|
||||
|
||||
# this import can't be done here, as it uses numpy stuff only available
|
||||
# after it's installed
|
||||
#import numpy.f2py
|
||||
from numpy.distutils import log
|
||||
from numpy.distutils.misc_util import (
|
||||
fortran_ext_match, appendpath, is_string, is_sequence, get_cmd
|
||||
)
|
||||
from numpy.distutils.from_template import process_file as process_f_file
|
||||
from numpy.distutils.conv_template import process_file as process_c_file
|
||||
|
||||
def subst_vars(target, source, d):
    """Substitute every occurrence of ``@foo@`` by ``d['foo']`` from the
    *source* file into *target*.

    Raises KeyError if a referenced variable is missing from *d*.

    Bug fix: the previous implementation located only the first variable
    name on each line (``re.search`` + targeted ``str.replace``), so a
    line containing two different ``@var@`` names kept the second one
    unsubstituted.  ``re.sub`` with a replacement callable handles all
    of them in one pass.
    """
    var = re.compile('@([a-zA-Z_]+)@')
    with open(source, 'r') as fs:
        with open(target, 'w') as ft:
            for l in fs:
                ft.write(var.sub(lambda m: d[m.group(1)], l))
|
||||
|
||||
class build_src(build_ext.build_ext):
    """Command that generates sources (SWIG, f2py, templates, callables)
    before the real compilation steps run."""

    description = "build sources from SWIG, F2PY files or a function"

    # Command-line options; entries marked obsolete are kept for
    # backward compatibility and remapped in finalize_options.
    user_options = [
        ('build-src=', 'd', "directory to \"build\" sources to"),
        ('f2py-opts=', None, "list of f2py command line options"),
        ('swig=', None, "path to the SWIG executable"),
        ('swig-opts=', None, "list of SWIG command line options"),
        ('swig-cpp', None, "make SWIG create C++ files (default is autodetected from sources)"),
        ('f2pyflags=', None, "additional flags to f2py (use --f2py-opts= instead)"), # obsolete
        ('swigflags=', None, "additional flags to swig (use --swig-opts= instead)"), # obsolete
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
        ('inplace', 'i',
         "ignore build-lib and put compiled extensions into the source " +
         "directory alongside your pure Python modules"),
        ]

    boolean_options = ['force', 'inplace']

    help_options = []
|
||||
|
||||
def initialize_options(self):
    """Reset every build_src option to its unset (None) default."""
    for attr in ('extensions', 'package', 'py_modules', 'py_modules_dict',
                 'build_src', 'build_lib', 'build_base', 'force', 'inplace',
                 'package_dir',
                 'f2pyflags',      # obsolete spelling of f2py_opts
                 'f2py_opts',
                 'swigflags',      # obsolete spelling of swig_opts
                 'swig_opts', 'swig_cpp', 'swig'):
        setattr(self, attr, None)
|
||||
|
||||
def finalize_options(self):
    """Resolve unset options from the 'build' and 'build_ext' commands
    and the distribution, and normalize the f2py/swig option strings
    into argument lists."""
    self.set_undefined_options('build',
                               ('build_base', 'build_base'),
                               ('build_lib', 'build_lib'),
                               ('force', 'force'))
    if self.package is None:
        self.package = self.distribution.ext_package
    self.extensions = self.distribution.ext_modules
    self.libraries = self.distribution.libraries or []
    self.py_modules = self.distribution.py_modules or []
    self.data_files = self.distribution.data_files or []

    if self.build_src is None:
        # e.g. build/src.linux-x86_64-3.6
        plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3])
        self.build_src = os.path.join(self.build_base, 'src'+plat_specifier)

    # py_modules_dict is used in build_py.find_package_modules
    self.py_modules_dict = {}

    # Remap the obsolete --f2pyflags option onto --f2py-opts.
    if self.f2pyflags:
        if self.f2py_opts:
            log.warn('ignoring --f2pyflags as --f2py-opts already used')
        else:
            self.f2py_opts = self.f2pyflags
        self.f2pyflags = None
    if self.f2py_opts is None:
        self.f2py_opts = []
    else:
        self.f2py_opts = shlex.split(self.f2py_opts)

    # Remap the obsolete --swigflags option onto --swig-opts.
    if self.swigflags:
        if self.swig_opts:
            log.warn('ignoring --swigflags as --swig-opts already used')
        else:
            self.swig_opts = self.swigflags
        self.swigflags = None

    if self.swig_opts is None:
        self.swig_opts = []
    else:
        self.swig_opts = shlex.split(self.swig_opts)

    # use options from build_ext command
    build_ext = self.get_finalized_command('build_ext')
    if self.inplace is None:
        self.inplace = build_ext.inplace
    if self.swig_cpp is None:
        self.swig_cpp = build_ext.swig_cpp
    # NOTE(review): 'swig_opt' does not match the attribute name
    # 'swig_opts' used elsewhere; getattr(build_ext, 'swig_opt', None)
    # is presumably always None, so this inheritance likely never
    # fires for swig options — confirm intended attribute name.
    for c in ['swig', 'swig_opt']:
        o = '--'+c.replace('_', '-')
        v = getattr(build_ext, c, None)
        if v:
            if getattr(self, c):
                log.warn('both build_src and build_ext define %s option' % (o))
            else:
                log.info('using "%s=%s" option from build_ext command' % (o, v))
                setattr(self, c, v)
|
||||
|
||||
def run(self):
    """Command entry point: generate sources when there is anything
    (extensions or libraries) to generate them for."""
    log.info("build_src")
    if self.extensions or self.libraries:
        self.build_sources()
|
||||
|
||||
def build_sources(self):
    """Drive every source-generation step: py_modules, library sources,
    extension sources, data files and npy-pkg config files."""
    if self.inplace:
        # For in-place builds, resolve package dirs via build_py.
        self.get_package_dir = \
            self.get_finalized_command('build_py').get_package_dir

    self.build_py_modules_sources()

    for libname_info in self.libraries:
        self.build_library_sources(*libname_info)

    if self.extensions:
        self.check_extensions_list(self.extensions)

        for ext in self.extensions:
            self.build_extension_sources(ext)

    self.build_data_files_sources()
    self.build_npy_pkg_config()
|
||||
|
||||
def build_data_files_sources(self):
    """Resolve callable entries in ``self.data_files`` into filenames.

    Entries may be plain strings, or ``(dir, files)`` tuples where each
    file may itself be a callable returning a filename or a list of
    filenames.  The list is rewritten in place.
    """
    if not self.data_files:
        return
    log.info('building data_files sources')
    from numpy.distutils.misc_util import get_data_files
    new_data_files = []
    for data in self.data_files:
        if isinstance(data, str):
            new_data_files.append(data)
        elif isinstance(data, tuple):
            d, files = data
            if self.inplace:
                build_dir = self.get_package_dir('.'.join(d.split(os.sep)))
            else:
                build_dir = os.path.join(self.build_src, d)
            # Split callables from literal filenames.
            funcs = [f for f in files if hasattr(f, '__call__')]
            files = [f for f in files if not hasattr(f, '__call__')]
            for f in funcs:
                # One-argument callables receive the build directory.
                if f.__code__.co_argcount==1:
                    s = f(build_dir)
                else:
                    s = f()
                if s is not None:
                    if isinstance(s, list):
                        files.extend(s)
                    elif isinstance(s, str):
                        files.append(s)
                    else:
                        raise TypeError(repr(s))
            filenames = get_data_files((d, files))
            new_data_files.append((d, filenames))
        else:
            raise TypeError(repr(data))
    self.data_files[:] = new_data_files
|
||||
|
||||
|
||||
def _build_npy_pkg_config(self, info, gd):
    """Generate one npy-pkg config file from a template.

    *info* is ``(template, install_dir, subst_dict)``; *gd* supplies
    extra substitution variables merged into *subst_dict*.  Returns
    ``(full_install_dir, generated_path)``.
    """
    template, install_dir, subst_dict = info
    template_dir = os.path.dirname(template)
    for k, v in gd.items():
        subst_dict[k] = v

    # In-place builds write next to the template; otherwise under
    # build_src.
    if self.inplace == 1:
        generated_dir = os.path.join(template_dir, install_dir)
    else:
        generated_dir = os.path.join(self.build_src, template_dir,
                                     install_dir)
    # Output name is the template name without its final extension.
    generated = os.path.basename(os.path.splitext(template)[0])
    generated_path = os.path.join(generated_dir, generated)
    if not os.path.exists(generated_dir):
        os.makedirs(generated_dir)

    subst_vars(generated_path, template, subst_dict)

    # Where to install relatively to install prefix
    full_install_dir = os.path.join(template_dir, install_dir)
    return full_install_dir, generated_path
|
||||
|
||||
def build_npy_pkg_config(self):
    """Generate all npy-pkg config files registered on the distribution
    and append them to its data_files."""
    log.info('build_src: building npy-pkg config files')

    # XXX: another ugly workaround to circumvent distutils brain damage. We
    # need the install prefix here, but finalizing the options of the
    # install command when only building sources cause error. Instead, we
    # copy the install command instance, and finalize the copy so that it
    # does not disrupt how distutils want to do things when with the
    # original install command instance.
    install_cmd = copy.copy(get_cmd('install'))
    if not install_cmd.finalized == 1:
        install_cmd.finalize_options()
    build_npkg = False
    if self.inplace == 1:
        top_prefix = '.'
        build_npkg = True
    elif hasattr(install_cmd, 'install_libbase'):
        top_prefix = install_cmd.install_libbase
        build_npkg = True

    if build_npkg:
        for pkg, infos in self.distribution.installed_pkg_config.items():
            pkg_path = self.distribution.package_dir[pkg]
            prefix = os.path.join(os.path.abspath(top_prefix), pkg_path)
            d = {'prefix': prefix}
            for info in infos:
                install_dir, generated = self._build_npy_pkg_config(info, d)
                self.distribution.data_files.append((install_dir,
                                                     [generated]))
|
||||
|
||||
def build_py_modules_sources(self):
    """Resolve generated py_modules entries.

    3-tuples ``(package, module_base, source)`` — where *source* may be
    a callable producing the file — are moved into
    ``self.py_modules_dict`` (consumed later by build_py); everything
    else stays in ``self.py_modules``.
    """
    if not self.py_modules:
        return
    log.info('building py_modules sources')
    new_py_modules = []
    for source in self.py_modules:
        if is_sequence(source) and len(source)==3:
            package, module_base, source = source
            if self.inplace:
                build_dir = self.get_package_dir(package)
            else:
                build_dir = os.path.join(self.build_src,
                                         os.path.join(*package.split('.')))
            if hasattr(source, '__call__'):
                # The callable receives the target path and returns the
                # actual source filename (or None to skip).
                target = os.path.join(build_dir, module_base + '.py')
                source = source(target)
            if source is None:
                continue
            modules = [(package, module_base, source)]
            if package not in self.py_modules_dict:
                self.py_modules_dict[package] = []
            self.py_modules_dict[package] += modules
        else:
            new_py_modules.append(source)
    self.py_modules[:] = new_py_modules
|
||||
|
||||
def build_library_sources(self, lib_name, build_info):
    """Generate the sources of one configured C/Fortran library.

    Runs callable-source generation and template processing, strips
    header files out of the list, and writes the result back into
    ``build_info['sources']``.
    """
    sources = list(build_info.get('sources', []))

    if not sources:
        return

    log.info('building library "%s" sources' % (lib_name))

    sources = self.generate_sources(sources, (lib_name, build_info))

    sources = self.template_sources(sources, (lib_name, build_info))

    sources, h_files = self.filter_h_files(sources)

    if h_files:
        log.info('%s - nothing done with h_files = %s',
                 self.package, h_files)

    #for f in h_files:
    #    self.distribution.headers.append((lib_name,f))

    build_info['sources'] = sources
    return
|
||||
|
||||
def build_extension_sources(self, ext):
    """Run every source-generation stage for one extension.

    Pipeline: callable generators -> templates -> SWIG -> f2py ->
    pyrex/cython.  Generated ``.py`` files are recorded in
    ``self.py_modules_dict`` and header files are dropped from the
    list.  ``ext.sources`` is updated in place at the end.
    """
    sources = list(ext.sources)

    log.info('building extension "%s" sources' % (ext.name))

    fullname = self.get_ext_fullname(ext.name)

    modpath = fullname.split('.')
    package = '.'.join(modpath[0:-1])

    if self.inplace:
        # generate_sources uses this as the output dir for in-place
        # builds.
        self.ext_target_dir = self.get_package_dir(package)

    sources = self.generate_sources(sources, ext)
    sources = self.template_sources(sources, ext)
    sources = self.swig_sources(sources, ext)
    sources = self.f2py_sources(sources, ext)
    sources = self.pyrex_sources(sources, ext)

    sources, py_files = self.filter_py_files(sources)

    if package not in self.py_modules_dict:
        self.py_modules_dict[package] = []
    modules = []
    for f in py_files:
        module = os.path.splitext(os.path.basename(f))[0]
        modules.append((package, module, f))
    self.py_modules_dict[package] += modules

    sources, h_files = self.filter_h_files(sources)

    if h_files:
        log.info('%s - nothing done with h_files = %s',
                 package, h_files)
    #for f in h_files:
    #    self.distribution.headers.append((package,f))

    ext.sources = sources
|
||||
|
||||
def generate_sources(self, sources, extension):
    """Resolve callable entries of *sources* into real filenames.

    Each callable is invoked as ``func(extension, build_dir)`` and may
    return a filename, a sequence of filenames, or a false value (which
    is skipped).  *extension* is either an Extension instance or a
    ``(lib_name, build_info)`` tuple for library builds.

    Returns the expanded list of source filenames.
    """
    new_sources = []
    func_sources = []
    for source in sources:
        if is_string(source):
            new_sources.append(source)
        else:
            func_sources.append(source)
    if not func_sources:
        return new_sources
    # Pick the directory generated files are written to.
    if self.inplace and not is_sequence(extension):
        build_dir = self.ext_target_dir
    else:
        if is_sequence(extension):
            name = extension[0]
        else:
            name = extension.name
        build_dir = os.path.join(*([self.build_src]
                                   + name.split('.')[:-1]))
    self.mkpath(build_dir)
    for func in func_sources:
        source = func(extension, build_dir)
        if not source:
            continue
        if is_sequence(source):
            # Fixed idiom: plain loop instead of a side-effect-only
            # list comprehension.
            for s in source:
                log.info(" adding '%s' to sources." % (s,))
            new_sources.extend(source)
        else:
            log.info(" adding '%s' to sources." % (source,))
            new_sources.append(source)

    return new_sources
|
||||
|
||||
def filter_py_files(self, sources):
    """Separate ``.py`` files from the remaining *sources*.

    Returns ``(other_sources, py_files)``.
    """
    remaining, py_files = self.filter_files(sources, ['.py'])
    return remaining, py_files
|
||||
|
||||
def filter_h_files(self, sources):
    """Separate header files (``.h``/``.hpp``/``.inc``) from *sources*.

    Returns ``(other_sources, header_files)``.
    """
    remaining, headers = self.filter_files(sources, ['.h', '.hpp', '.inc'])
    return remaining, headers
|
||||
|
||||
def filter_files(self, sources, exts = []):
    """Partition *sources* by filename extension.

    Returns ``(kept, matched)`` where *matched* holds the entries whose
    extension is listed in *exts* and *kept* holds everything else,
    both in original order.
    """
    kept, matched = [], []
    for candidate in sources:
        bucket = matched if os.path.splitext(candidate)[1] in exts else kept
        bucket.append(candidate)
    return kept, matched
|
||||
|
||||
def template_sources(self, sources, extension):
    """Process ``.src`` template files into real sources.

    Fortran-ish templates go through from_template, everything else
    through conv_template.  Generated headers get their directory added
    to the extension's include_dirs.  Returns the updated source list.
    """
    new_sources = []
    # *extension* is either an Extension or a (lib_name, build_info)
    # tuple; fetch depends/include_dirs accordingly.
    if is_sequence(extension):
        depends = extension[1].get('depends')
        include_dirs = extension[1].get('include_dirs')
    else:
        depends = extension.depends
        include_dirs = extension.include_dirs
    for source in sources:
        (base, ext) = os.path.splitext(source)
        if ext == '.src': # Template file
            if self.inplace:
                target_dir = os.path.dirname(base)
            else:
                target_dir = appendpath(self.build_src, os.path.dirname(base))
            self.mkpath(target_dir)
            target_file = os.path.join(target_dir, os.path.basename(base))
            # Regenerate only when forced or the template/deps changed.
            if (self.force or newer_group([source] + depends, target_file)):
                if _f_pyf_ext_match(base):
                    log.info("from_template:> %s" % (target_file))
                    outstr = process_f_file(source)
                else:
                    log.info("conv_template:> %s" % (target_file))
                    outstr = process_c_file(source)
                with open(target_file, 'w') as fid:
                    fid.write(outstr)
            if _header_ext_match(target_file):
                d = os.path.dirname(target_file)
                if d not in include_dirs:
                    log.info(" adding '%s' to include_dirs." % (d))
                    include_dirs.append(d)
            new_sources.append(target_file)
        else:
            new_sources.append(source)
    return new_sources
|
||||
|
||||
def pyrex_sources(self, sources, extension):
    """Route ``.pyx`` sources through ``generate_a_pyrex_source``.

    Pyrex itself is unsupported; projects may monkeypatch
    ``generate_a_pyrex_source`` to compile Cython code instead.
    Non-``.pyx`` entries pass through unchanged.
    """
    module_name = extension.name.split('.')[-1]
    result = []
    for src in sources:
        stem, suffix = os.path.splitext(src)
        if suffix != '.pyx':
            result.append(src)
            continue
        result.append(
            self.generate_a_pyrex_source(stem, module_name, src, extension))
    return result
|
||||
|
||||
def generate_a_pyrex_source(self, base, ext_name, source, extension):
    """Compatibility hook: Pyrex is unsupported, so no target file is
    produced.

    Projects monkeypatch this method to compile Cython code (see
    gh-6955); it is kept only so such patches keep working.
    """
    return []
|
||||
|
||||
def f2py_sources(self, sources, extension):
|
||||
new_sources = []
|
||||
f2py_sources = []
|
||||
f_sources = []
|
||||
f2py_targets = {}
|
||||
target_dirs = []
|
||||
ext_name = extension.name.split('.')[-1]
|
||||
skip_f2py = 0
|
||||
|
||||
for source in sources:
|
||||
(base, ext) = os.path.splitext(source)
|
||||
if ext == '.pyf': # F2PY interface file
|
||||
if self.inplace:
|
||||
target_dir = os.path.dirname(base)
|
||||
else:
|
||||
target_dir = appendpath(self.build_src, os.path.dirname(base))
|
||||
if os.path.isfile(source):
|
||||
name = get_f2py_modulename(source)
|
||||
if name != ext_name:
|
||||
raise DistutilsSetupError('mismatch of extension names: %s '
|
||||
'provides %r but expected %r' % (
|
||||
source, name, ext_name))
|
||||
target_file = os.path.join(target_dir, name+'module.c')
|
||||
else:
|
||||
log.debug(' source %s does not exist: skipping f2py\'ing.' \
|
||||
% (source))
|
||||
name = ext_name
|
||||
skip_f2py = 1
|
||||
target_file = os.path.join(target_dir, name+'module.c')
|
||||
if not os.path.isfile(target_file):
|
||||
log.warn(' target %s does not exist:\n '\
|
||||
'Assuming %smodule.c was generated with '\
|
||||
'"build_src --inplace" command.' \
|
||||
% (target_file, name))
|
||||
target_dir = os.path.dirname(base)
|
||||
target_file = os.path.join(target_dir, name+'module.c')
|
||||
if not os.path.isfile(target_file):
|
||||
raise DistutilsSetupError("%r missing" % (target_file,))
|
||||
log.info(' Yes! Using %r as up-to-date target.' \
|
||||
% (target_file))
|
||||
target_dirs.append(target_dir)
|
||||
f2py_sources.append(source)
|
||||
f2py_targets[source] = target_file
|
||||
new_sources.append(target_file)
|
||||
elif fortran_ext_match(ext):
|
||||
f_sources.append(source)
|
||||
else:
|
||||
new_sources.append(source)
|
||||
|
||||
if not (f2py_sources or f_sources):
|
||||
return new_sources
|
||||
|
||||
for d in target_dirs:
|
||||
self.mkpath(d)
|
||||
|
||||
f2py_options = extension.f2py_options + self.f2py_opts
|
||||
|
||||
if self.distribution.libraries:
|
||||
for name, build_info in self.distribution.libraries:
|
||||
if name in extension.libraries:
|
||||
f2py_options.extend(build_info.get('f2py_options', []))
|
||||
|
||||
log.info("f2py options: %s" % (f2py_options))
|
||||
|
||||
if f2py_sources:
|
||||
if len(f2py_sources) != 1:
|
||||
raise DistutilsSetupError(
|
||||
'only one .pyf file is allowed per extension module but got'\
|
||||
' more: %r' % (f2py_sources,))
|
||||
source = f2py_sources[0]
|
||||
target_file = f2py_targets[source]
|
||||
target_dir = os.path.dirname(target_file) or '.'
|
||||
depends = [source] + extension.depends
|
||||
if (self.force or newer_group(depends, target_file, 'newer')) \
|
||||
and not skip_f2py:
|
||||
log.info("f2py: %s" % (source))
|
||||
import numpy.f2py
|
||||
numpy.f2py.run_main(f2py_options
|
||||
+ ['--build-dir', target_dir, source])
|
||||
else:
|
||||
log.debug(" skipping '%s' f2py interface (up-to-date)" % (source))
|
||||
else:
|
||||
#XXX TODO: --inplace support for sdist command
|
||||
if is_sequence(extension):
|
||||
name = extension[0]
|
||||
else: name = extension.name
|
||||
target_dir = os.path.join(*([self.build_src]\
|
||||
+name.split('.')[:-1]))
|
||||
target_file = os.path.join(target_dir, ext_name + 'module.c')
|
||||
new_sources.append(target_file)
|
||||
depends = f_sources + extension.depends
|
||||
if (self.force or newer_group(depends, target_file, 'newer')) \
|
||||
and not skip_f2py:
|
||||
log.info("f2py:> %s" % (target_file))
|
||||
self.mkpath(target_dir)
|
||||
import numpy.f2py
|
||||
numpy.f2py.run_main(f2py_options + ['--lower',
|
||||
'--build-dir', target_dir]+\
|
||||
['-m', ext_name]+f_sources)
|
||||
else:
|
||||
log.debug(" skipping f2py fortran files for '%s' (up-to-date)"\
|
||||
% (target_file))
|
||||
|
||||
if not os.path.isfile(target_file):
|
||||
raise DistutilsError("f2py target file %r not generated" % (target_file,))
|
||||
|
||||
build_dir = os.path.join(self.build_src, target_dir)
|
||||
target_c = os.path.join(build_dir, 'fortranobject.c')
|
||||
target_h = os.path.join(build_dir, 'fortranobject.h')
|
||||
log.info(" adding '%s' to sources." % (target_c))
|
||||
new_sources.append(target_c)
|
||||
if build_dir not in extension.include_dirs:
|
||||
log.info(" adding '%s' to include_dirs." % (build_dir))
|
||||
extension.include_dirs.append(build_dir)
|
||||
|
||||
if not skip_f2py:
|
||||
import numpy.f2py
|
||||
d = os.path.dirname(numpy.f2py.__file__)
|
||||
source_c = os.path.join(d, 'src', 'fortranobject.c')
|
||||
source_h = os.path.join(d, 'src', 'fortranobject.h')
|
||||
if newer(source_c, target_c) or newer(source_h, target_h):
|
||||
self.mkpath(os.path.dirname(target_c))
|
||||
self.copy_file(source_c, target_c)
|
||||
self.copy_file(source_h, target_h)
|
||||
else:
|
||||
if not os.path.isfile(target_c):
|
||||
raise DistutilsSetupError("f2py target_c file %r not found" % (target_c,))
|
||||
if not os.path.isfile(target_h):
|
||||
raise DistutilsSetupError("f2py target_h file %r not found" % (target_h,))
|
||||
|
||||
for name_ext in ['-f2pywrappers.f', '-f2pywrappers2.f90']:
|
||||
filename = os.path.join(target_dir, ext_name + name_ext)
|
||||
if os.path.isfile(filename):
|
||||
log.info(" adding '%s' to sources." % (filename))
|
||||
f_sources.append(filename)
|
||||
|
||||
return new_sources + f_sources
|
||||
|
||||
    def swig_sources(self, sources, extension):
        """Process SWIG ``.i`` interface files in *sources*.

        Each ``.i`` file is replaced in the returned source list by its
        generated wrapper (``<name>_wrap.c`` or ``_wrap.cpp``); all other
        sources pass through unchanged.  SWIG-generated ``.py`` files are
        appended to the result.  When the ``.i`` file is missing, a
        previously generated wrapper is searched for instead of re-running
        SWIG.
        """
        # Assuming SWIG 1.3.14 or later. See compatibility note in
        #   http://www.swig.org/Doc1.3/Python.html#Python_nn6

        new_sources = []
        swig_sources = []
        swig_targets = {}       # .i source -> generated wrapper file
        target_dirs = []
        py_files = []           # swig generated .py files
        target_ext = '.c'
        # Decide C vs C++ wrapping: an explicit -c++ in swig_opts wins
        # (and is consumed here so it is not passed twice), then the
        # command-level --swig-cpp flag; otherwise sniff the source later.
        if '-c++' in extension.swig_opts:
            typ = 'c++'
            is_cpp = True
            extension.swig_opts.remove('-c++')
        elif self.swig_cpp:
            typ = 'c++'
            is_cpp = True
        else:
            typ = None
            is_cpp = False
        skip_swig = 0
        ext_name = extension.name.split('.')[-1]

        for source in sources:
            (base, ext) = os.path.splitext(source)
            if ext == '.i': # SWIG interface file
                # the code below assumes that the sources list
                # contains not more than one .i SWIG interface file
                if self.inplace:
                    target_dir = os.path.dirname(base)
                    py_target_dir = self.ext_target_dir
                else:
                    target_dir = appendpath(self.build_src, os.path.dirname(base))
                    py_target_dir = target_dir
                if os.path.isfile(source):
                    name = get_swig_modulename(source)
                    # ext_name[1:] strips the leading underscore of the
                    # extension module name (_foo wraps %module foo).
                    if name != ext_name[1:]:
                        raise DistutilsSetupError(
                            'mismatch of extension names: %s provides %r'
                            ' but expected %r' % (source, name, ext_name[1:]))
                    if typ is None:
                        typ = get_swig_target(source)
                        is_cpp = typ=='c++'
                    else:
                        # A target was already chosen; warn on any
                        # disagreement with the source's own marker.
                        typ2 = get_swig_target(source)
                        if typ2 is None:
                            log.warn('source %r does not define swig target, assuming %s swig target' \
                                     % (source, typ))
                        elif typ!=typ2:
                            log.warn('expected %r but source %r defines %r swig target' \
                                     % (typ, source, typ2))
                            if typ2=='c++':
                                log.warn('resetting swig target to c++ (some targets may have .c extension)')
                                is_cpp = True
                            else:
                                log.warn('assuming that %r has c++ swig target' % (source))
                    if is_cpp:
                        target_ext = '.cpp'
                    target_file = os.path.join(target_dir, '%s_wrap%s' \
                                               % (name, target_ext))
                else:
                    # Source missing: fall back to an already generated
                    # wrapper (e.g. shipped in an sdist built --inplace).
                    log.warn('  source %s does not exist: skipping swig\'ing.' \
                             % (source))
                    name = ext_name[1:]
                    skip_swig = 1
                    target_file = _find_swig_target(target_dir, name)
                    if not os.path.isfile(target_file):
                        log.warn('  target %s does not exist:\n   '\
                                 'Assuming %s_wrap.{c,cpp} was generated with '\
                                 '"build_src --inplace" command.' \
                                 % (target_file, name))
                        target_dir = os.path.dirname(base)
                        target_file = _find_swig_target(target_dir, name)
                        if not os.path.isfile(target_file):
                            raise DistutilsSetupError("%r missing" % (target_file,))
                        log.warn('   Yes! Using %r as up-to-date target.' \
                                 % (target_file))
                target_dirs.append(target_dir)
                new_sources.append(target_file)
                py_files.append(os.path.join(py_target_dir, name+'.py'))
                swig_sources.append(source)
                swig_targets[source] = new_sources[-1]
            else:
                new_sources.append(source)

        if not swig_sources:
            return new_sources

        if skip_swig:
            return new_sources + py_files

        for d in target_dirs:
            self.mkpath(d)

        swig = self.swig or self.find_swig()
        swig_cmd = [swig, "-python"] + extension.swig_opts
        if is_cpp:
            swig_cmd.append('-c++')
        for d in extension.include_dirs:
            swig_cmd.append('-I'+d)
        for source in swig_sources:
            target = swig_targets[source]
            depends = [source] + extension.depends
            if self.force or newer_group(depends, target, 'newer'):
                log.info("%s: %s" % (os.path.basename(swig) \
                                     + (is_cpp and '++' or ''), source))
                # NOTE(review): py_target_dir still holds the value set for
                # the last .i file in the loop above; this is fine only under
                # the stated one-.i-per-extension assumption — confirm.
                self.spawn(swig_cmd + self.swig_opts \
                           + ["-o", target, '-outdir', py_target_dir, source])
            else:
                log.debug("  skipping '%s' swig interface (up-to-date)" \
                          % (source))

        return new_sources + py_files
|
||||
|
||||
# Matchers for Fortran/f2py source file extensions and header-like files.
_f_pyf_ext_match = re.compile(r'.*[.](f90|f95|f77|for|ftn|f|pyf)\Z', re.I).match
_header_ext_match = re.compile(r'.*[.](inc|h|hpp)\Z', re.I).match

#### SWIG related auxiliary functions ####
# Matches a SWIG '%module name' directive, optionally with a
# (package="...") argument; the module name is in group 'name'.
_swig_module_name_match = re.compile(r'\s*%module\s*(.*\(\s*package\s*=\s*"(?P<package>[\w_]+)".*\)|)\s*(?P<name>[\w_]+)',
                                     re.I).match
# Emacs-style mode markers ('-*- c -*-' / '-*- c++ -*-') used on the first
# line of a SWIG interface file to declare its target language.
_has_c_header = re.compile(r'-[*]-\s*c\s*-[*]-', re.I).search
_has_cpp_header = re.compile(r'-[*]-\s*c[+][+]\s*-[*]-', re.I).search
|
||||
|
||||
def get_swig_target(source):
    """Classify a SWIG interface file as ``'c'`` or ``'c++'``.

    Only the first line of *source* is examined, looking for an
    emacs-style mode marker (``-*- c -*-`` / ``-*- c++ -*-``).  Returns
    None when no marker is present.  If a line somehow matched both
    markers, 'c' would win since that check runs last.
    """
    with open(source, 'r') as stream:
        first_line = stream.readline()
    target = None
    if _has_cpp_header(first_line):
        target = 'c++'
    if _has_c_header(first_line):
        target = 'c'
    return target
|
||||
|
||||
def get_swig_modulename(source):
    """Return the name declared by the first ``%module`` directive found in
    the SWIG interface file *source*, or None when no directive exists."""
    module_name = None
    with open(source, 'r') as stream:
        for text_line in stream:
            match = _swig_module_name_match(text_line)
            if match:
                module_name = match.group('name')
                break
    return module_name
|
||||
|
||||
def _find_swig_target(target_dir, name):
|
||||
for ext in ['.cpp', '.c']:
|
||||
target = os.path.join(target_dir, '%s_wrap%s' % (name, ext))
|
||||
if os.path.isfile(target):
|
||||
break
|
||||
return target
|
||||
|
||||
#### F2PY related auxiliary functions ####

# Matches a 'python module <name>' declaration in a .pyf file.
_f2py_module_name_match = re.compile(r'\s*python\s*module\s*(?P<name>[\w_]+)',
                                     re.I).match
# Matches the f2py-internal '*__user__*' module names, which must be
# skipped when looking for the real extension module name.
_f2py_user_module_name_match = re.compile(r'\s*python\s*module\s*(?P<name>[\w_]*?'
                                          r'__user__[\w_]*)', re.I).match
|
||||
|
||||
def get_f2py_modulename(source):
    """Return the first non-``__user__`` ``python module`` name declared in
    the .pyf file *source*, or None when no such declaration is found."""
    with open(source) as stream:
        for text_line in stream:
            match = _f2py_module_name_match(text_line)
            if match is None:
                continue
            # skip *__user__* names — internal f2py callback modules
            if _f2py_user_module_name_match(text_line):
                continue
            return match.group('name')
    return None
|
||||
|
||||
##########################################
|
||||
@@ -0,0 +1,513 @@
|
||||
# Added Fortran compiler support to config. Currently useful only for
|
||||
# try_compile call. try_run works but is untested for most of Fortran
|
||||
# compilers (they must define linker_exe first).
|
||||
# Pearu Peterson
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import os, signal
|
||||
import warnings
|
||||
import sys
|
||||
import subprocess
|
||||
import textwrap
|
||||
|
||||
from distutils.command.config import config as old_config
|
||||
from distutils.command.config import LANG_EXT
|
||||
from distutils import log
|
||||
from distutils.file_util import copy_file
|
||||
from distutils.ccompiler import CompileError, LinkError
|
||||
import distutils
|
||||
from numpy.distutils.exec_command import filepath_from_subprocess_output
|
||||
from numpy.distutils.mingw32ccompiler import generate_manifest
|
||||
from numpy.distutils.command.autodist import (check_gcc_function_attribute,
|
||||
check_gcc_function_attribute_with_intrinsics,
|
||||
check_gcc_variable_attribute,
|
||||
check_inline,
|
||||
check_restrict,
|
||||
check_compiler_gcc4)
|
||||
from numpy.distutils.compat import get_exception
|
||||
|
||||
# Teach distutils' config machinery the file extensions to use when it
# writes out Fortran test programs for try_compile()/try_link().
LANG_EXT['f77'] = '.f'
LANG_EXT['f90'] = '.f90'
|
||||
|
||||
class config(old_config):
    """distutils ``config`` command extended with Fortran support.

    Adds a ``--fcompiler`` option and routes f77/f90 configuration checks
    (try_compile/try_link) through a numpy.distutils FCompiler instance,
    while plain C checks keep using the regular distutils C compiler.
    """
    # Extend the base command's option table in place with --fcompiler.
    old_config.user_options += [
        ('fcompiler=', None, "specify the Fortran compiler type"),
        ]

    def initialize_options(self):
        # Fortran compiler type name (e.g. 'gnu95'); replaced by an
        # FCompiler instance lazily in _check_compiler().
        self.fcompiler = None
        old_config.initialize_options(self)

    def _check_compiler (self):
        # Ensure both the C and the Fortran compiler are initialized before
        # any configuration check runs.
        old_config._check_compiler(self)
        from numpy.distutils.fcompiler import FCompiler, new_fcompiler

        if sys.platform == 'win32' and (self.compiler.compiler_type in
                                        ('msvc', 'intelw', 'intelemw')):
            # XXX: hack to circumvent a python 2.6 bug with msvc9compiler:
            # initialize call query_vcvarsall, which throws an IOError, and
            # causes an error along the way without much information. We try to
            # catch it here, hoping it is early enough, and print an helpful
            # message instead of Error: None.
            if not self.compiler.initialized:
                try:
                    self.compiler.initialize()
                except IOError:
                    e = get_exception()
                    msg = textwrap.dedent("""\
                        Could not initialize compiler instance: do you have Visual Studio
                        installed? If you are trying to build with MinGW, please use "python setup.py
                        build -c mingw32" instead. If you have Visual Studio installed, check it is
                        correctly installed, and the right version (VS 2008 for python 2.6, 2.7 and 3.2,
                        VS 2010 for >= 3.3).

                        Original exception was: %s, and the Compiler class was %s
                        ============================================================================""") \
                        % (e, self.compiler.__class__.__name__)
                    print(textwrap.dedent("""\
                        ============================================================================"""))
                    raise distutils.errors.DistutilsPlatformError(msg)

            # After MSVC is initialized, add an explicit /MANIFEST to linker
            # flags. See issues gh-4245 and gh-4101 for details. Also
            # relevant are issues 4431 and 16296 on the Python bug tracker.
            from distutils import msvc9compiler
            if msvc9compiler.get_build_version() >= 10:
                for ldflags in [self.compiler.ldflags_shared,
                                self.compiler.ldflags_shared_debug]:
                    if '/MANIFEST' not in ldflags:
                        ldflags.append('/MANIFEST')

        if not isinstance(self.fcompiler, FCompiler):
            # force=1: always build a fresh FCompiler instance.
            self.fcompiler = new_fcompiler(compiler=self.fcompiler,
                                           dry_run=self.dry_run, force=1,
                                           c_compiler=self.compiler)
            if self.fcompiler is not None:
                self.fcompiler.customize(self.distribution)
                if self.fcompiler.get_version():
                    self.fcompiler.customize_cmd(self)
                    self.fcompiler.show_customization()

    def _wrap_method(self, mth, lang, args):
        # Call the unbound base-class method *mth* with self.compiler
        # temporarily swapped for the Fortran compiler when *lang* is
        # f77/f90; the original compiler is restored on both paths and any
        # failure is normalized to CompileError.
        from distutils.ccompiler import CompileError
        from distutils.errors import DistutilsExecError
        save_compiler = self.compiler
        if lang in ['f77', 'f90']:
            self.compiler = self.fcompiler
        try:
            ret = mth(*((self,)+args))
        except (DistutilsExecError, CompileError):
            str(get_exception())
            self.compiler = save_compiler
            raise CompileError
        self.compiler = save_compiler
        return ret

    def _compile (self, body, headers, include_dirs, lang):
        src, obj = self._wrap_method(old_config._compile, lang,
                                     (body, headers, include_dirs, lang))
        # _compile in unixccompiler.py sometimes creates .d dependency
        # files. Register them as temp files so _clean() removes them.
        self.temp_files.append(obj + '.d')
        return src, obj

    def _link (self, body,
               headers, include_dirs,
               libraries, library_dirs, lang):
        if self.compiler.compiler_type=='msvc':
            # Work on copies: we may append Fortran runtime libs/dirs below.
            libraries = (libraries or [])[:]
            library_dirs = (library_dirs or [])[:]
            if lang in ['f77', 'f90']:
                lang = 'c' # always use system linker when using MSVC compiler
                if self.fcompiler:
                    for d in self.fcompiler.library_dirs or []:
                        # correct path when compiling in Cygwin but with
                        # normal Win Python
                        if d.startswith('/usr/lib'):
                            try:
                                d = subprocess.check_output(['cygpath',
                                                             '-w', d])
                            except (OSError, subprocess.CalledProcessError):
                                pass
                            else:
                                d = filepath_from_subprocess_output(d)
                        library_dirs.append(d)
                    for libname in self.fcompiler.libraries or []:
                        if libname not in libraries:
                            libraries.append(libname)
            for libname in libraries:
                if libname.startswith('msvc'): continue
                fileexists = False
                for libdir in library_dirs or []:
                    libfile = os.path.join(libdir, '%s.lib' % (libname))
                    if os.path.isfile(libfile):
                        fileexists = True
                        break
                if fileexists: continue
                # make g77-compiled static libs available to MSVC
                fileexists = False
                for libdir in library_dirs:
                    libfile = os.path.join(libdir, 'lib%s.a' % (libname))
                    if os.path.isfile(libfile):
                        # copy libname.a file to name.lib so that MSVC linker
                        # can find it
                        libfile2 = os.path.join(libdir, '%s.lib' % (libname))
                        copy_file(libfile, libfile2)
                        self.temp_files.append(libfile2)
                        fileexists = True
                        break
                if fileexists: continue
                log.warn('could not find library %r in directories %s' \
                         % (libname, library_dirs))
        elif self.compiler.compiler_type == 'mingw32':
            generate_manifest(self)
        return self._wrap_method(old_config._link, lang,
                                 (body, headers, include_dirs,
                                  libraries, library_dirs, lang))

    def check_header(self, header, include_dirs=None, library_dirs=None, lang='c'):
        # NOTE: library_dirs and lang are accepted for interface
        # compatibility but are not forwarded to try_compile here.
        self._check_compiler()
        return self.try_compile(
                "/* we need a dummy line to make distutils happy */",
                [header], include_dirs)

    def check_decl(self, symbol,
                   headers=None, include_dirs=None):
        # True when *symbol* is declared (as macro or identifier) once
        # *headers* are included.
        self._check_compiler()
        body = textwrap.dedent("""
            int main(void)
            {
            #ifndef %s
                (void) %s;
            #endif
                ;
                return 0;
            }""") % (symbol, symbol)

        return self.try_compile(body, headers, include_dirs)

    def check_macro_true(self, symbol,
                         headers=None, include_dirs=None):
        # True when the preprocessor evaluates *symbol* as a true value.
        self._check_compiler()
        body = textwrap.dedent("""
            int main(void)
            {
            #if %s
            #else
            #error false or undefined macro
            #endif
                ;
                return 0;
            }""") % (symbol,)

        return self.try_compile(body, headers, include_dirs)

    def check_type(self, type_name, headers=None, include_dirs=None,
                   library_dirs=None):
        """Check type availability. Return True if the type can be compiled,
        False otherwise"""
        self._check_compiler()

        # First check the type can be compiled
        body = textwrap.dedent(r"""
            int main(void) {
              if ((%(name)s *) 0)
                return 0;
              if (sizeof (%(name)s))
                return 0;
            }
            """) % {'name': type_name}

        st = False
        try:
            try:
                # NOTE(review): body was already %-formatted above with the
                # 'name' key, so this second % with 'type' is a no-op on a
                # string with no remaining format specifiers.
                self._compile(body % {'type': type_name},
                              headers, include_dirs, 'c')
                st = True
            except distutils.errors.CompileError:
                st = False
        finally:
            self._clean()

        return st

    def check_type_size(self, type_name, headers=None, include_dirs=None, library_dirs=None, expected=None):
        """Check size of a given type."""
        self._check_compiler()

        # First check the type can be compiled
        body = textwrap.dedent(r"""
            typedef %(type)s npy_check_sizeof_type;
            int main (void)
            {
                static int test_array [1 - 2 * !(((long) (sizeof (npy_check_sizeof_type))) >= 0)];
                test_array [0] = 0

                ;
                return 0;
            }
            """)
        self._compile(body % {'type': type_name},
                      headers, include_dirs, 'c')
        self._clean()

        if expected:
            # Try each expected size: compilation succeeds only when the
            # sizeof equality holds (negative array size trick).
            body = textwrap.dedent(r"""
                typedef %(type)s npy_check_sizeof_type;
                int main (void)
                {
                    static int test_array [1 - 2 * !(((long) (sizeof (npy_check_sizeof_type))) == %(size)s)];
                    test_array [0] = 0

                    ;
                    return 0;
                }
                """)
            for size in expected:
                try:
                    self._compile(body % {'type': type_name, 'size': size},
                                  headers, include_dirs, 'c')
                    self._clean()
                    return size
                except CompileError:
                    pass

        # this fails to *compile* if size > sizeof(type)
        body = textwrap.dedent(r"""
            typedef %(type)s npy_check_sizeof_type;
            int main (void)
            {
                static int test_array [1 - 2 * !(((long) (sizeof (npy_check_sizeof_type))) <= %(size)s)];
                test_array [0] = 0

                ;
                return 0;
            }
            """)

        # The principle is simple: we first find low and high bounds of size
        # for the type, where low/high are looked up on a log scale. Then, we
        # do a binary search to find the exact size between low and high
        low = 0
        mid = 0
        while True:
            try:
                self._compile(body % {'type': type_name, 'size': mid},
                              headers, include_dirs, 'c')
                self._clean()
                break
            except CompileError:
                #log.info("failure to test for bound %d" % mid)
                low = mid + 1
                mid = 2 * mid + 1

        high = mid
        # Binary search:
        while low != high:
            mid = (high - low) // 2 + low
            try:
                self._compile(body % {'type': type_name, 'size': mid},
                              headers, include_dirs, 'c')
                self._clean()
                high = mid
            except CompileError:
                low = mid + 1
        return low

    def check_func(self, func,
                   headers=None, include_dirs=None,
                   libraries=None, library_dirs=None,
                   decl=False, call=False, call_args=None):
        # clean up distutils's config a bit: add void to main(), and
        # return a value.
        self._check_compiler()
        body = []
        if decl:
            # decl may be an explicit declaration string, or truthy to use
            # the generic 'int func(void);' prototype.
            if type(decl) == str:
                body.append(decl)
            else:
                body.append("int %s (void);" % func)
        # Handle MSVC intrinsics: force MS compiler to make a function call.
        # Useful to test for some functions when built with optimization on, to
        # avoid build error because the intrinsic and our 'fake' test
        # declaration do not match.
        body.append("#ifdef _MSC_VER")
        body.append("#pragma function(%s)" % func)
        body.append("#endif")
        body.append("int main (void) {")
        if call:
            if call_args is None:
                call_args = ''
            body.append("  %s(%s);" % (func, call_args))
        else:
            # Referencing the symbol is enough to force the link check.
            body.append("  %s;" % func)
        body.append("  return 0;")
        body.append("}")
        body = '\n'.join(body) + "\n"

        return self.try_link(body, headers, include_dirs,
                             libraries, library_dirs)

    def check_funcs_once(self, funcs,
                         headers=None, include_dirs=None,
                         libraries=None, library_dirs=None,
                         decl=False, call=False, call_args=None):
        """Check a list of functions at once.

        This is useful to speed up things, since all the functions in the funcs
        list will be put in one compilation unit.

        Arguments
        ---------
        funcs : seq
            list of functions to test
        include_dirs : seq
            list of header paths
        libraries : seq
            list of libraries to link the code snippet to
        library_dirs : seq
            list of library paths
        decl : dict
            for every (key, value), the declaration in the value will be
            used for function in key. If a function is not in the
            dictionary, no declaration will be used.
        call : dict
            for every item (f, value), if the value is True, a call will be
            done to the function f.
        """
        self._check_compiler()
        body = []
        if decl:
            for f, v in decl.items():
                if v:
                    body.append("int %s (void);" % f)

        # Handle MS intrinsics. See check_func for more info.
        body.append("#ifdef _MSC_VER")
        for func in funcs:
            body.append("#pragma function(%s)" % func)
        body.append("#endif")

        body.append("int main (void) {")
        if call:
            for f in funcs:
                if f in call and call[f]:
                    if not (call_args and f in call_args and call_args[f]):
                        args = ''
                    else:
                        args = call_args[f]
                    body.append("  %s(%s);" % (f, args))
                else:
                    body.append("  %s;" % f)
        else:
            for f in funcs:
                body.append("  %s;" % f)
        body.append("  return 0;")
        body.append("}")
        body = '\n'.join(body) + "\n"

        return self.try_link(body, headers, include_dirs,
                             libraries, library_dirs)

    def check_inline(self):
        """Return the inline keyword recognized by the compiler, empty string
        otherwise."""
        return check_inline(self)

    def check_restrict(self):
        """Return the restrict keyword recognized by the compiler, empty string
        otherwise."""
        return check_restrict(self)

    def check_compiler_gcc4(self):
        """Return True if the C compiler is gcc >= 4."""
        return check_compiler_gcc4(self)

    def check_gcc_function_attribute(self, attribute, name):
        # Delegates to numpy.distutils.command.autodist.
        return check_gcc_function_attribute(self, attribute, name)

    def check_gcc_function_attribute_with_intrinsics(self, attribute, name,
                                                     code, include):
        return check_gcc_function_attribute_with_intrinsics(self, attribute,
                                                            name, code, include)

    def check_gcc_variable_attribute(self, attribute):
        return check_gcc_variable_attribute(self, attribute)

    def get_output(self, body, headers=None, include_dirs=None,
                   libraries=None, library_dirs=None,
                   lang="c", use_tee=None):
        """Try to compile, link to an executable, and run a program
        built from 'body' and 'headers'. Returns the exit status code
        of the program and its output.
        """
        # 2008-11-16, RemoveMe
        warnings.warn("\n+++++++++++++++++++++++++++++++++++++++++++++++++\n" \
                      "Usage of get_output is deprecated: please do not \n" \
                      "use it anymore, and avoid configuration checks \n" \
                      "involving running executable on the target machine.\n" \
                      "+++++++++++++++++++++++++++++++++++++++++++++++++\n",
                      DeprecationWarning, stacklevel=2)
        self._check_compiler()
        exitcode, output = 255, ''
        try:
            # Capture anything the link step prints so it can be returned
            # as the "output" on failure.
            grabber = GrabStdout()
            try:
                src, obj, exe = self._link(body, headers, include_dirs,
                                           libraries, library_dirs, lang)
                grabber.restore()
            except Exception:
                output = grabber.data
                grabber.restore()
                raise
            exe = os.path.join('.', exe)
            try:
                # specify cwd arg for consistency with
                # historic usage pattern of exec_command()
                # also, note that exe appears to be a string,
                # which exec_command() handled, but we now
                # use a list for check_output() -- this assumes
                # that exe is always a single command
                output = subprocess.check_output([exe], cwd='.')
            except subprocess.CalledProcessError as exc:
                exitstatus = exc.returncode
                output = ''
            except OSError:
                # preserve the EnvironmentError exit status
                # used historically in exec_command()
                exitstatus = 127
                output = ''
            else:
                output = filepath_from_subprocess_output(output)
            # NOTE(review): on the success path neither except branch runs,
            # so 'exitstatus' is unbound here and os.WEXITSTATUS(exitstatus)
            # raises NameError (swallowed only if it were CompileError/
            # LinkError, which it is not) — confirm and fix upstream.
            if hasattr(os, 'WEXITSTATUS'):
                exitcode = os.WEXITSTATUS(exitstatus)
                if os.WIFSIGNALED(exitstatus):
                    sig = os.WTERMSIG(exitstatus)
                    log.error('subprocess exited with signal %d' % (sig,))
                    if sig == signal.SIGINT:
                        # control-C
                        raise KeyboardInterrupt
            else:
                exitcode = exitstatus
            log.info("success!")
        except (CompileError, LinkError):
            log.info("failure.")
        self._clean()
        return exitcode, output
|
||||
|
||||
class GrabStdout(object):
    """Tee for sys.stdout.

    From construction until restore() is called, everything written to
    sys.stdout is forwarded to the real stream *and* accumulated in the
    ``data`` attribute.
    """

    def __init__(self):
        self.sys_stdout = sys.stdout   # original stream, restored later
        self.data = ''                 # everything written while grabbing
        sys.stdout = self

    def write(self, data):
        # Forward first, then keep a copy.
        self.sys_stdout.write(data)
        self.data = self.data + data

    def flush(self):
        self.sys_stdout.flush()

    def restore(self):
        """Reinstall the original sys.stdout."""
        sys.stdout = self.sys_stdout
|
||||
@@ -0,0 +1,128 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
from distutils.core import Command
|
||||
from numpy.distutils import log
|
||||
|
||||
#XXX: Linker flags
|
||||
|
||||
def show_fortran_compilers(_cache=None):
    """Print the list of available Fortran compilers.

    Used as the callback of the ``--help-fcompiler`` option.  The
    ``_cache`` list is a guard preventing infinite recursion when
    show_fcompilers() itself re-triggers option handling.
    """
    if _cache:
        return
    if _cache is None:
        _cache = []
    _cache.append(1)
    import distutils.core
    from numpy.distutils.fcompiler import show_fcompilers
    show_fcompilers(distutils.core._setup_distribution)
|
||||
|
||||
class config_fc(Command):
    """ Distutils command to hold user specified options
    to Fortran compilers.

    config_fc command is used by the FCompiler.customize() method.
    """

    description = "specify Fortran 77/Fortran 90 compiler information"

    user_options = [
        ('fcompiler=', None, "specify Fortran compiler type"),
        ('f77exec=', None, "specify F77 compiler command"),
        ('f90exec=', None, "specify F90 compiler command"),
        ('f77flags=', None, "specify F77 compiler flags"),
        ('f90flags=', None, "specify F90 compiler flags"),
        ('opt=', None, "specify optimization flags"),
        ('arch=', None, "specify architecture specific optimization flags"),
        ('debug', 'g', "compile with debugging information"),
        ('noopt', None, "compile without optimization"),
        ('noarch', None, "compile without arch-dependent optimization"),
        ]

    help_options = [
        ('help-fcompiler', None, "list available Fortran compilers",
         show_fortran_compilers),
        ]

    boolean_options = ['debug', 'noopt', 'noarch']

    def initialize_options(self):
        # All options default to None so finalize_options() can tell
        # "unset" apart from an explicit value.
        self.fcompiler = None
        self.f77exec = None
        self.f90exec = None
        self.f77flags = None
        self.f90flags = None
        self.opt = None
        self.arch = None
        self.debug = None
        self.noopt = None
        self.noarch = None

    def finalize_options(self):
        # Reconcile the --fcompiler option across all build-related
        # commands: collect distinct values, take the first as the default,
        # and push it into any command where it is still unset.
        # (The 'unifing' typo below is in the emitted log string; left
        # untouched here since it is runtime output.)
        log.info('unifing config_fc, config, build_clib, build_ext, build commands --fcompiler options')
        build_clib = self.get_finalized_command('build_clib')
        build_ext = self.get_finalized_command('build_ext')
        config = self.get_finalized_command('config')
        build = self.get_finalized_command('build')
        cmd_list = [self, config, build_clib, build_ext, build]
        for a in ['fcompiler']:
            l = []
            for c in cmd_list:
                v = getattr(c, a)
                if v is not None:
                    # Commands may hold either a type string or an
                    # FCompiler instance; normalize to the type string.
                    if not isinstance(v, str): v = v.compiler_type
                    if v not in l: l.append(v)
            if not l: v1 = None
            else: v1 = l[0]
            if len(l)>1:
                log.warn('  commands have different --%s options: %s'\
                         ', using first in list as default' % (a, l))
            if v1:
                for c in cmd_list:
                    if getattr(c, a) is None: setattr(c, a, v1)

    def run(self):
        # Do nothing: this command only carries options for others.
        return
|
||||
|
||||
class config_cc(Command):
    """ Distutils command to hold user specified options
    to C/C++ compilers.
    """

    description = "specify C/C++ compiler information"

    user_options = [
        ('compiler=', None, "specify C/C++ compiler type"),
        ]

    def initialize_options(self):
        # None means "unset" so finalize_options() can fill in a default.
        self.compiler = None

    def finalize_options(self):
        # Reconcile --compiler across all build-related commands, mirroring
        # config_fc.finalize_options for the C/C++ compiler option.
        log.info('unifing config_cc, config, build_clib, build_ext, build commands --compiler options')
        build_clib = self.get_finalized_command('build_clib')
        build_ext = self.get_finalized_command('build_ext')
        config = self.get_finalized_command('config')
        build = self.get_finalized_command('build')
        cmd_list = [self, config, build_clib, build_ext, build]
        for a in ['compiler']:
            l = []
            for c in cmd_list:
                v = getattr(c, a)
                if v is not None:
                    # Normalize compiler instances to their type string.
                    if not isinstance(v, str): v = v.compiler_type
                    if v not in l: l.append(v)
            if not l: v1 = None
            else: v1 = l[0]
            if len(l)>1:
                log.warn('  commands have different --%s options: %s'\
                         ', using first in list as default' % (a, l))
            if v1:
                for c in cmd_list:
                    if getattr(c, a) is None: setattr(c, a, v1)
        return

    def run(self):
        # Do nothing: this command only carries options for others.
        return
|
||||
@@ -0,0 +1,17 @@
|
||||
""" Override the develop command from setuptools so we can ensure that our
|
||||
generated files (from build_src or build_scripts) are properly converted to real
|
||||
files with filenames.
|
||||
|
||||
"""
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
from setuptools.command.develop import develop as old_develop
|
||||
|
||||
class develop(old_develop):
    """setuptools develop command that first materializes generated files.

    Ensures build_src runs in-place and scripts are built before the
    standard develop installation, so generated sources exist as real
    files with filenames.
    """
    __doc__ = old_develop.__doc__
    def install_for_development(self):
        # Build sources in-place, too.
        self.reinitialize_command('build_src', inplace=1)
        # Make sure scripts are built.
        self.run_command('build_scripts')
        old_develop.install_for_development(self)
|
||||
@@ -0,0 +1,27 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import sys
|
||||
|
||||
from setuptools.command.egg_info import egg_info as _egg_info
|
||||
|
||||
class egg_info(_egg_info):
    """setuptools egg_info command that first runs build_src.

    Running build_src ensures that generated sources exist as real files
    (with filenames) before setuptools collects metadata.  When invoked as
    part of an sdist, a warning is emitted because setuptools' sdist can
    miss generated files.
    """
    def run(self):
        if 'sdist' in sys.argv:
            import warnings
            import textwrap
            # Fix: removed a stray double-quote that previously trailed
            # the cmdclass example line in this user-facing warning.
            msg = textwrap.dedent("""
                `build_src` is being run, this may lead to missing
                files in your sdist! You want to use distutils.sdist
                instead of the setuptools version:

                    from distutils.command.sdist import sdist
                    cmdclass={'sdist': sdist}

                See numpy's setup.py or gh-7131 for details.""")
            warnings.warn(msg, UserWarning, stacklevel=2)

        # We need to ensure that build_src has been executed in order to give
        # setuptools' egg_info command real filenames instead of functions which
        # generate files.
        self.run_command("build_src")
        _egg_info.run(self)
|
||||
@@ -0,0 +1,81 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import sys
|
||||
if 'setuptools' in sys.modules:
|
||||
import setuptools.command.install as old_install_mod
|
||||
have_setuptools = True
|
||||
else:
|
||||
import distutils.command.install as old_install_mod
|
||||
have_setuptools = False
|
||||
from distutils.file_util import write_file
|
||||
|
||||
old_install = old_install_mod.install
|
||||
|
||||
class install(old_install):
    """Install command that cooperates with both setuptools and distutils."""

    # Always run install_clib - the command is cheap, so no need to bypass it;
    # but it's not run by setuptools -- so it's run again in install_data
    sub_commands = old_install.sub_commands + [
        ('install_clib', lambda x: True)
    ]

    def finalize_options (self):
        # NOTE(review): install_lib is forced to install_libbase; the
        # exact effect depends on the base class's option resolution --
        # confirm against the distutils/setuptools install command
        # before changing this.
        old_install.finalize_options(self)
        self.install_lib = self.install_libbase

    def setuptools_run(self):
        """ The setuptools version of the .run() method.

        We must pull in the entire code so we can override the level used in the
        _getframe() call since we wrap this call by one more level.
        """
        from distutils.command.install import install as distutils_install

        # Explicit request for old-style install?  Just do it
        if self.old_and_unmanageable or self.single_version_externally_managed:
            return distutils_install.run(self)

        # Attempt to detect whether we were called from setup() or by another
        # command.  If we were called by setup(), our caller will be the
        # 'run_command' method in 'distutils.dist', and *its* caller will be
        # the 'run_commands' method.  If we were called any other way, our
        # immediate caller *might* be 'run_command', but it won't have been
        # called by 'run_commands'.  This is slightly kludgy, but seems to
        # work.
        #
        caller = sys._getframe(3)
        caller_module = caller.f_globals.get('__name__', '')
        caller_name = caller.f_code.co_name

        if caller_module != 'distutils.dist' or caller_name!='run_commands':
            # We weren't called from the command line or setup(), so we
            # should run in backward-compatibility mode to support bdist_*
            # commands.
            distutils_install.run(self)
        else:
            self.do_egg_install()

    def run(self):
        """Dispatch to the right base run(), then quote recorded paths.

        After installing, any path in the --record file containing a space
        is wrapped in double quotes and the file is rewritten, because
        bdist_rpm fails on unquoted spaces in INSTALLED_FILES.
        """
        if not have_setuptools:
            r = old_install.run(self)
        else:
            r = self.setuptools_run()
        if self.record:
            # bdist_rpm fails when INSTALLED_FILES contains
            # paths with spaces.  Such paths must be enclosed
            # with double-quotes.
            with open(self.record, 'r') as f:
                lines = []
                need_rewrite = False
                for l in f:
                    l = l.rstrip()
                    if ' ' in l:
                        need_rewrite = True
                        l = '"%s"' % (l)
                    lines.append(l)
            if need_rewrite:
                # self.execute honors --dry-run and logs the message.
                self.execute(write_file,
                             (self.record, lines),
                             "re-writing list of installed files to '%s'" %
                             self.record)
        return r
||||
@@ -0,0 +1,42 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import os
|
||||
from distutils.core import Command
|
||||
from distutils.ccompiler import new_compiler
|
||||
from numpy.distutils.misc_util import get_cmd
|
||||
|
||||
class install_clib(Command):
    description = "Command to install installable C libraries"

    user_options = []

    def initialize_options(self):
        # Destination directory; resolved from the 'install' command in
        # finalize_options.
        self.install_dir = None
        # Files copied by run(), reported back via get_outputs().
        self.outfiles = []

    def finalize_options(self):
        self.set_undefined_options('install', ('install_lib', 'install_dir'))

    def run (self):
        """Copy each installed C library from the build tree to install_dir."""
        build_clib_cmd = get_cmd("build_clib")
        if not build_clib_cmd.build_clib:
            # can happen if the user specified `--skip-build`
            build_clib_cmd.finalize_options()
        build_dir = build_clib_cmd.build_clib

        # We need the compiler to get the library name -> filename association
        if not build_clib_cmd.compiler:
            compiler = new_compiler(compiler=None)
            compiler.customize(self.distribution)
        else:
            compiler = build_clib_cmd.compiler

        for l in self.distribution.installed_libraries:
            target_dir = os.path.join(self.install_dir, l.target_dir)
            name = compiler.library_filename(l.name)
            source = os.path.join(build_dir, name)
            self.mkpath(target_dir)
            self.outfiles.append(self.copy_file(source, target_dir)[0])

    def get_outputs(self):
        # Used by 'install' to record what was written.
        return self.outfiles
|
||||
@@ -0,0 +1,26 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import sys
|
||||
have_setuptools = ('setuptools' in sys.modules)
|
||||
|
||||
from distutils.command.install_data import install_data as old_install_data
|
||||
|
||||
#data installer with improved intelligence over distutils
|
||||
#data files are copied into the project directory instead
|
||||
#of willy-nilly
|
||||
class install_data(old_install_data):
    """install_data that defaults its target to the install library dir
    and makes sure install_clib also runs under setuptools."""

    def run(self):
        old_install_data.run(self)

        if have_setuptools:
            # setuptools does not run install's sub-commands automatically,
            # so trigger install_clib here a second time.
            self.run_command('install_clib')

    def finalize_options(self):
        option_map = (('install_lib', 'install_dir'),
                      ('root', 'root'),
                      ('force', 'force'))
        self.set_undefined_options('install', *option_map)
|
||||
@@ -0,0 +1,27 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import os
|
||||
from distutils.command.install_headers import install_headers as old_install_headers
|
||||
|
||||
class install_headers(old_install_headers):
    """install_headers that understands (dotted.package, path) tuples."""

    def run(self):
        """Copy each declared header into its package-specific include dir."""
        all_headers = self.distribution.headers
        if not all_headers:
            return

        base = os.path.dirname(self.install_dir)
        for item in all_headers:
            if not isinstance(item, tuple):
                # Plain path: install directly into install_dir.
                dest = self.install_dir
                src = item
            else:
                pkg, src = item
                # Kind of a hack, but I don't know where else to change this...
                if pkg == 'numpy.core':
                    pkg = 'numpy'
                # Private .inc fragments are not installed.
                if os.path.splitext(src)[1] == '.inc':
                    continue
                dest = os.path.join(base, *pkg.split('.'))
            self.mkpath(dest)
            copied, _ = self.copy_file(src, dest)
            self.outfiles.append(copied)
|
||||
@@ -0,0 +1,29 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import sys
|
||||
if 'setuptools' in sys.modules:
|
||||
from setuptools.command.sdist import sdist as old_sdist
|
||||
else:
|
||||
from distutils.command.sdist import sdist as old_sdist
|
||||
|
||||
from numpy.distutils.misc_util import get_data_files
|
||||
|
||||
class sdist(old_sdist):
    """sdist that also ships numpy-style data_files and headers."""

    def add_defaults(self):
        """Extend the default file list with data files and header paths."""
        old_sdist.add_defaults(self)

        dist = self.distribution

        if dist.has_data_files():
            for entry in dist.data_files:
                self.filelist.extend(get_data_files(entry))

        if dist.has_headers():
            # Headers may be plain paths or (package, path) tuples.
            self.filelist.extend(
                h if isinstance(h, str) else h[1] for h in dist.headers)
|
||||
10
venv/lib/python3.6/site-packages/numpy/distutils/compat.py
Normal file
10
venv/lib/python3.6/site-packages/numpy/distutils/compat.py
Normal file
@@ -0,0 +1,10 @@
|
||||
"""Small modules to cope with python 2 vs 3 incompatibilities inside
|
||||
numpy.distutils
|
||||
|
||||
"""
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import sys
|
||||
|
||||
def get_exception():
    """Return the exception currently being handled (py2/py3 compatible)."""
    _, exc, _ = sys.exc_info()
    return exc
|
||||
@@ -0,0 +1,337 @@
|
||||
#!/usr/bin/env python
|
||||
"""
|
||||
takes templated file .xxx.src and produces .xxx file where .xxx is
|
||||
.i or .c or .h, using the following template rules
|
||||
|
||||
/**begin repeat -- on a line by itself marks the start of a repeated code
|
||||
segment
|
||||
/**end repeat**/ -- on a line by itself marks it's end
|
||||
|
||||
After the /**begin repeat and before the */, all the named templates are placed
|
||||
these should all have the same number of replacements
|
||||
|
||||
Repeat blocks can be nested, with each nested block labeled with its depth,
|
||||
i.e.
|
||||
/**begin repeat1
|
||||
*....
|
||||
*/
|
||||
/**end repeat1**/
|
||||
|
||||
When using nested loops, you can optionally exclude particular
|
||||
combinations of the variables using (inside the comment portion of the inner loop):
|
||||
|
||||
:exclude: var1=value1, var2=value2, ...
|
||||
|
||||
This will exclude the pattern where var1 is value1 and var2 is value2 when
|
||||
the result is being generated.
|
||||
|
||||
|
||||
In the main body each replace will use one entry from the list of named replacements
|
||||
|
||||
Note that all #..# forms in a block must have the same number of
|
||||
comma-separated entries.
|
||||
|
||||
Example:
|
||||
|
||||
An input file containing
|
||||
|
||||
/**begin repeat
|
||||
* #a = 1,2,3#
|
||||
* #b = 1,2,3#
|
||||
*/
|
||||
|
||||
/**begin repeat1
|
||||
* #c = ted, jim#
|
||||
*/
|
||||
@a@, @b@, @c@
|
||||
/**end repeat1**/
|
||||
|
||||
/**end repeat**/
|
||||
|
||||
produces
|
||||
|
||||
line 1 "template.c.src"
|
||||
|
||||
/*
|
||||
*********************************************************************
|
||||
** This file was autogenerated from a template DO NOT EDIT!!**
|
||||
** Changes should be made to the original source (.src) file **
|
||||
*********************************************************************
|
||||
*/
|
||||
|
||||
#line 9
|
||||
1, 1, ted
|
||||
|
||||
#line 9
|
||||
1, 1, jim
|
||||
|
||||
#line 9
|
||||
2, 2, ted
|
||||
|
||||
#line 9
|
||||
2, 2, jim
|
||||
|
||||
#line 9
|
||||
3, 3, ted
|
||||
|
||||
#line 9
|
||||
3, 3, jim
|
||||
|
||||
"""
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
|
||||
__all__ = ['process_str', 'process_file']
|
||||
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
|
||||
from numpy.distutils.compat import get_exception
|
||||
|
||||
# names for replacement that are already global.
|
||||
global_names = {}
|
||||
|
||||
# header placed at the front of head processed file
|
||||
header =\
|
||||
"""
|
||||
/*
|
||||
*****************************************************************************
|
||||
** This file was autogenerated from a template DO NOT EDIT!!!! **
|
||||
** Changes should be made to the original source (.src) file **
|
||||
*****************************************************************************
|
||||
*/
|
||||
|
||||
"""
|
||||
# Parse string for repeat loops
|
||||
def parse_structure(astr, level):
    """Locate repeat blocks at nesting *level* in *astr*.

    Returns a sorted list of tuples
    (block_start, body_start, body_end, block_end, header_line), where the
    line number counts newlines from the beginning of the string (zero
    based).  Empty list when no loops are found.
    """
    if level == 0:
        begin_mark = "/**begin repeat"
        end_mark = "/**end repeat**/"
    else:
        begin_mark = "/**begin repeat%d" % level
        end_mark = "/**end repeat%d**/" % level

    spans = []
    pos = 0
    lineno = 0
    while True:
        begin = astr.find(begin_mark, pos)
        if begin < 0:
            break
        # The body starts on the line after the header's closing '*/'.
        body = astr.find("\n", astr.find("*/", begin)) + 1
        tail = astr.find(end_mark, body - 1)
        block_end = astr.find("\n", tail) + 1
        lineno += astr.count("\n", pos, body)
        spans.append((begin, body, tail, block_end, lineno))
        lineno += astr.count("\n", body, block_end - 1)
        pos = block_end - 1
    spans.sort()
    return spans
|
||||
|
||||
|
||||
def paren_repl(obj):
    """re.sub callback: expand '(body)*N' into body repeated N times."""
    body = obj.group(1)
    count = int(obj.group(2))
    return ','.join([body] * count)


parenrep = re.compile(r"[(]([^)]*)[)]\*(\d+)")
plainrep = re.compile(r"([^*]+)\*(\d+)")

def parse_values(astr):
    """Expand repeat shorthands in *astr* and split it into a value list.

    '(a,b,c)*4' becomes 'a,b,c' repeated four times; a bare 'xxx*3'
    becomes three copies of 'xxx'.  Empty parens generate empty values.
    The expanded string is split on ','.
    """
    expanded = parenrep.sub(paren_repl, astr)
    # Bare 'xxx*3' shorthand, handled per comma-separated chunk.
    expanded = ','.join(plainrep.sub(paren_repl, chunk.strip())
                        for chunk in expanded.split(','))
    return expanded.split(',')


stripast = re.compile(r"\n\s*\*?")
named_re = re.compile(r"#\s*(\w*)\s*=([^#]*)#")
exclude_vars_re = re.compile(r"(\w*)=(\w*)")
exclude_re = re.compile(":exclude:")

def parse_loop_header(loophead):
    """Parse the named substitutions in a repeat-block header.

    Returns one dict per loop iteration, mapping each name to its
    replacement string.  All names must supply the same number of values,
    otherwise ValueError is raised.  ':exclude:' clauses are parsed but
    currently not returned to the caller.
    """
    # Strip '\n' and leading '*' so continuation lines are allowed.
    loophead = stripast.sub("", loophead)
    names = []
    nsub = None
    for name, raw_values in named_re.findall(loophead):
        values = parse_values(raw_values)
        if nsub is None:
            nsub = len(values)
        elif nsub != len(values):
            raise ValueError(
                "Mismatch in number of values, %d != %d\n%s = %s"
                % (nsub, len(values), name, values))
        names.append((name, values))

    # Collect any ':exclude: var=value, ...' clauses (currently unused).
    excludes = []
    for obj in exclude_re.finditer(loophead):
        clause_start = obj.span()[1]
        end_of_line = loophead.find('\n', clause_start)
        clause = loophead[clause_start:end_of_line]
        excludes.append(dict(exclude_vars_re.findall(clause)))

    if nsub is None:
        raise ValueError("No substitution variables found")
    return [{name: values[i] for name, values in names}
            for i in range(nsub)]
|
||||
|
||||
replace_re = re.compile(r"@([\w]+)@")
def parse_string(astr, env, level, line) :
    """Expand @name@ substitutions and nested repeat blocks in *astr*.

    *env* maps substitution names to replacement strings, *level* is the
    current repeat nesting depth, and *line* is the line number of *astr*
    within the original file (used for '#line' markers and error
    messages).  Returns the expanded text as one string.
    """
    # '#line' marker so generated code points back at the template.
    lineno = "#line %d\n" % line

    # local function for string replacement, uses env
    def replace(match):
        name = match.group(1)
        try :
            val = env[name]
        except KeyError:
            msg = 'line %d: no definition of key "%s"'%(line, name)
            raise ValueError(msg)
        return val

    code = [lineno]
    struct = parse_structure(astr, level)
    if struct :
        # recurse over inner loops
        oldend = 0
        newlevel = level + 1
        for sub in struct:
            # sub is (block_start, body_start, body_end, block_end,
            # newline count before the body) from parse_structure.
            pref = astr[oldend:sub[0]]
            head = astr[sub[0]:sub[1]]
            text = astr[sub[1]:sub[2]]
            oldend = sub[3]
            newline = line + sub[4]
            code.append(replace_re.sub(replace, pref))
            try :
                envlist = parse_loop_header(head)
            except ValueError:
                e = get_exception()
                msg = "line %d: %s" % (newline, e)
                raise ValueError(msg)
            for newenv in envlist :
                # Outer-scope definitions overwrite this loop's values.
                newenv.update(env)
                newcode = parse_string(text, newenv, newlevel, newline)
                # NOTE: newcode is a string, so extend() appends its
                # characters one by one; harmless, as the list is joined.
                code.extend(newcode)
        suff = astr[oldend:]
        code.append(replace_re.sub(replace, suff))
    else :
        # replace keys
        code.append(replace_re.sub(replace, astr))
    code.append('\n')
    return ''.join(code)
|
||||
|
||||
def process_str(astr):
    """Expand template string *astr*, prepending the autogeneration banner."""
    # parse_string returns an already-joined string, so plain concatenation
    # is equivalent to the original list-extend-then-join dance.
    return header + parse_string(astr, global_names, 0, 1)
|
||||
|
||||
|
||||
include_src_re = re.compile(r"(\n|\A)#include\s*['\"]"
                            r"(?P<name>[\w\d./\\]+[.]src)['\"]", re.I)

def resolve_includes(source):
    """Return the lines of *source* with any '#include "x.src"' inlined.

    Relative include paths are resolved against the directory of *source*;
    includes are expanded recursively.  A directive whose target does not
    exist is kept verbatim.
    """
    base_dir = os.path.dirname(source)
    result = []
    with open(source) as stream:
        for raw_line in stream:
            match = include_src_re.match(raw_line)
            if not match:
                result.append(raw_line)
                continue
            target = match.group('name')
            if not os.path.isabs(target):
                target = os.path.join(base_dir, target)
            if os.path.isfile(target):
                print('Including file', target)
                result.extend(resolve_includes(target))
            else:
                result.append(raw_line)
    return result
|
||||
|
||||
def process_file(source):
    """Expand template file *source* and return the generated text.

    The output is prefixed with a '#line 1 "<source>"' marker so compiler
    diagnostics point back at the template; ValueError from the expansion
    is re-raised with the file name attached.
    """
    all_lines = resolve_includes(source)
    # Backslashes are doubled so the path survives inside a C string literal.
    escaped_name = os.path.normcase(source).replace("\\", "\\\\")
    try:
        body = process_str(''.join(all_lines))
    except ValueError:
        err = get_exception()
        raise ValueError('In "%s" loop at %s' % (escaped_name, err))
    return '#line 1 "%s"\n%s' % (escaped_name, body)
|
||||
|
||||
|
||||
def unique_key(adict):
    """Return a key string not already present in *adict*.

    Built by concatenating the first *n* characters of every existing key,
    growing *n* until the candidate collides with no existing key.  Not
    particularly quick.
    """
    existing = list(adict.keys())
    size = 1
    while True:
        candidate = "".join(key[:size] for key in existing)
        if candidate not in existing:
            return candidate
        size += 1
|
||||
|
||||
|
||||
def main():
    """Command-line entry point.

    With an argument, process that template file and write the result to
    the same path with its trailing extension (normally '.src') removed.
    Without arguments, filter stdin to stdout.

    Fixes over the previous version: no longer shadows the builtin
    ``file``, closes the opened files, and reports template errors on the
    stdin path as ValueError instead of crashing with NameError (the old
    message referenced a name that was unbound in that branch).
    """
    try:
        filename = sys.argv[1]
    except IndexError:
        fid = sys.stdin
        outfile = sys.stdout
        filename = '<stdin>'  # only used in error messages
    else:
        fid = open(filename, 'r')
        base, _ext = os.path.splitext(filename)
        outfile = open(base, 'w')

    try:
        allstr = fid.read()
        try:
            writestr = process_str(allstr)
        except ValueError:
            e = get_exception()
            raise ValueError("In %s loop at %s" % (filename, e))
        outfile.write(writestr)
    finally:
        # Close real files, but never the std streams.
        if fid is not sys.stdin:
            fid.close()
        if outfile is not sys.stdout:
            outfile.close()
|
||||
217
venv/lib/python3.6/site-packages/numpy/distutils/core.py
Normal file
217
venv/lib/python3.6/site-packages/numpy/distutils/core.py
Normal file
@@ -0,0 +1,217 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import sys
|
||||
from distutils.core import *
|
||||
|
||||
if 'setuptools' in sys.modules:
|
||||
have_setuptools = True
|
||||
from setuptools import setup as old_setup
|
||||
# easy_install imports math, it may be picked up from cwd
|
||||
from setuptools.command import easy_install
|
||||
try:
|
||||
# very old versions of setuptools don't have this
|
||||
from setuptools.command import bdist_egg
|
||||
except ImportError:
|
||||
have_setuptools = False
|
||||
else:
|
||||
from distutils.core import setup as old_setup
|
||||
have_setuptools = False
|
||||
|
||||
import warnings
|
||||
import distutils.core
|
||||
import distutils.dist
|
||||
|
||||
from numpy.distutils.extension import Extension
|
||||
from numpy.distutils.numpy_distribution import NumpyDistribution
|
||||
from numpy.distutils.command import config, config_compiler, \
|
||||
build, build_py, build_ext, build_clib, build_src, build_scripts, \
|
||||
sdist, install_data, install_headers, install, bdist_rpm, \
|
||||
install_clib
|
||||
from numpy.distutils.misc_util import get_data_files, is_sequence, is_string
|
||||
|
||||
numpy_cmdclass = {'build': build.build,
|
||||
'build_src': build_src.build_src,
|
||||
'build_scripts': build_scripts.build_scripts,
|
||||
'config_cc': config_compiler.config_cc,
|
||||
'config_fc': config_compiler.config_fc,
|
||||
'config': config.config,
|
||||
'build_ext': build_ext.build_ext,
|
||||
'build_py': build_py.build_py,
|
||||
'build_clib': build_clib.build_clib,
|
||||
'sdist': sdist.sdist,
|
||||
'install_data': install_data.install_data,
|
||||
'install_headers': install_headers.install_headers,
|
||||
'install_clib': install_clib.install_clib,
|
||||
'install': install.install,
|
||||
'bdist_rpm': bdist_rpm.bdist_rpm,
|
||||
}
|
||||
if have_setuptools:
|
||||
# Use our own versions of develop and egg_info to ensure that build_src is
|
||||
# handled appropriately.
|
||||
from numpy.distutils.command import develop, egg_info
|
||||
numpy_cmdclass['bdist_egg'] = bdist_egg.bdist_egg
|
||||
numpy_cmdclass['develop'] = develop.develop
|
||||
numpy_cmdclass['easy_install'] = easy_install.easy_install
|
||||
numpy_cmdclass['egg_info'] = egg_info.egg_info
|
||||
|
||||
def _dict_append(d, **kws):
|
||||
for k, v in kws.items():
|
||||
if k not in d:
|
||||
d[k] = v
|
||||
continue
|
||||
dv = d[k]
|
||||
if isinstance(dv, tuple):
|
||||
d[k] = dv + tuple(v)
|
||||
elif isinstance(dv, list):
|
||||
d[k] = dv + list(v)
|
||||
elif isinstance(dv, dict):
|
||||
_dict_append(dv, **v)
|
||||
elif is_string(dv):
|
||||
d[k] = dv + v
|
||||
else:
|
||||
raise TypeError(repr(type(dv)))
|
||||
|
||||
def _command_line_ok(_cache=None):
    """Return True if the command line contains no help or display requests.

    A caller-supplied *_cache* list memoizes the answer; with the default
    of None a fresh list is used each call.
    """
    if _cache:
        return _cache[0]
    elif _cache is None:
        _cache = []
    display_opts = ['--' + name for name in Distribution.display_option_names]
    display_opts.extend('-' + opt[1]
                        for opt in Distribution.display_options if opt[1])
    ok = True
    for arg in sys.argv:
        if arg.startswith('--help') or arg == '-h' or arg in display_opts:
            ok = False
            break
    _cache.append(ok)
    return ok
|
||||
|
||||
def get_distribution(always=False):
    """Return the Distribution distutils is currently setting up.

    Instances of setuptools' private DistributionWithoutHelpCommands
    (created by easy_install's internal setup() call) are treated as
    absent.  When *always* is true, a fresh NumpyDistribution is returned
    instead of None.
    """
    dist = distutils.core._setup_distribution
    # XXX Hack to keep numpy installable with easy_install: easy_install
    # runs its own setup(), which also populates _setup_distribution.
    # isinstance() cannot be used because the class is local to a function
    # in setuptools.command.easy_install, so match on repr() instead.
    if dist is not None and 'DistributionWithoutHelpCommands' in repr(dist):
        dist = None
    if always and dist is None:
        dist = NumpyDistribution()
    return dist
|
||||
|
||||
def setup(**attr):
    """numpy's drop-in replacement for distutils/setuptools setup().

    Installs the numpy command classes, runs an optional 'configuration'
    callable to build the setup dictionary, hoists extension source
    libraries into the top-level 'libraries' list, and finally delegates
    to the underlying setup() with NumpyDistribution as the distclass.
    """

    cmdclass = numpy_cmdclass.copy()

    new_attr = attr.copy()
    if 'cmdclass' in new_attr:
        # User-supplied commands override the numpy defaults.
        cmdclass.update(new_attr['cmdclass'])
    new_attr['cmdclass'] = cmdclass

    if 'configuration' in new_attr:
        # To avoid calling configuration if there are any errors
        # or help request in command in the line.
        configuration = new_attr.pop('configuration')

        # Recurse once without 'configuration', with distutils stopped
        # after command-line parsing, purely to inspect the command line;
        # the distutils module-level state is saved and restored around
        # the call.
        old_dist = distutils.core._setup_distribution
        old_stop = distutils.core._setup_stop_after
        distutils.core._setup_distribution = None
        distutils.core._setup_stop_after = "commandline"
        try:
            dist = setup(**new_attr)
        finally:
            distutils.core._setup_distribution = old_dist
            distutils.core._setup_stop_after = old_stop
        if dist.help or not _command_line_ok():
            # probably displayed help, skip running any commands
            return dist

        # create setup dictionary and append to new_attr
        config = configuration()
        if hasattr(config, 'todict'):
            config = config.todict()
        _dict_append(new_attr, **config)

    # Move extension source libraries to libraries
    libraries = []
    for ext in new_attr.get('ext_modules', []):
        new_libraries = []
        for item in ext.libraries:
            if is_sequence(item):
                lib_name, build_info = item
                _check_append_ext_library(libraries, lib_name, build_info)
                new_libraries.append(lib_name)
            elif is_string(item):
                new_libraries.append(item)
            else:
                raise TypeError("invalid description of extension module "
                                "library %r" % (item,))
        # Extensions end up referring to libraries by name only.
        ext.libraries = new_libraries
    if libraries:
        if 'libraries' not in new_attr:
            new_attr['libraries'] = []
        for item in libraries:
            _check_append_library(new_attr['libraries'], item)

    # sources in ext_modules or libraries may contain header files
    if ('ext_modules' in new_attr or 'libraries' in new_attr) \
       and 'headers' not in new_attr:
        new_attr['headers'] = []

    # Use our custom NumpyDistribution class instead of distutils' one
    new_attr['distclass'] = NumpyDistribution

    return old_setup(**new_attr)
|
||||
|
||||
def _check_append_library(libraries, item):
|
||||
for libitem in libraries:
|
||||
if is_sequence(libitem):
|
||||
if is_sequence(item):
|
||||
if item[0]==libitem[0]:
|
||||
if item[1] is libitem[1]:
|
||||
return
|
||||
warnings.warn("[0] libraries list contains %r with"
|
||||
" different build_info" % (item[0],),
|
||||
stacklevel=2)
|
||||
break
|
||||
else:
|
||||
if item==libitem[0]:
|
||||
warnings.warn("[1] libraries list contains %r with"
|
||||
" no build_info" % (item[0],),
|
||||
stacklevel=2)
|
||||
break
|
||||
else:
|
||||
if is_sequence(item):
|
||||
if item[0]==libitem:
|
||||
warnings.warn("[2] libraries list contains %r with"
|
||||
" no build_info" % (item[0],),
|
||||
stacklevel=2)
|
||||
break
|
||||
else:
|
||||
if item==libitem:
|
||||
return
|
||||
libraries.append(item)
|
||||
|
||||
def _check_append_ext_library(libraries, lib_name, build_info):
|
||||
for item in libraries:
|
||||
if is_sequence(item):
|
||||
if item[0]==lib_name:
|
||||
if item[1] is build_info:
|
||||
return
|
||||
warnings.warn("[3] libraries list contains %r with"
|
||||
" different build_info" % (lib_name,),
|
||||
stacklevel=2)
|
||||
break
|
||||
elif item==lib_name:
|
||||
warnings.warn("[4] libraries list contains %r with"
|
||||
" no build_info" % (lib_name,),
|
||||
stacklevel=2)
|
||||
break
|
||||
libraries.append((lib_name, build_info))
|
||||
693
venv/lib/python3.6/site-packages/numpy/distutils/cpuinfo.py
Normal file
693
venv/lib/python3.6/site-packages/numpy/distutils/cpuinfo.py
Normal file
@@ -0,0 +1,693 @@
|
||||
#!/usr/bin/env python
|
||||
"""
|
||||
cpuinfo
|
||||
|
||||
Copyright 2002 Pearu Peterson all rights reserved,
|
||||
Pearu Peterson <pearu@cens.ioc.ee>
|
||||
Permission to use, modify, and distribute this software is given under the
|
||||
terms of the NumPy (BSD style) license. See LICENSE.txt that came with
|
||||
this distribution for specifics.
|
||||
|
||||
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
|
||||
Pearu Peterson
|
||||
|
||||
"""
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
__all__ = ['cpu']
|
||||
|
||||
import sys, re, types
|
||||
import os
|
||||
|
||||
if sys.version_info[0] >= 3:
|
||||
from subprocess import getstatusoutput
|
||||
else:
|
||||
from commands import getstatusoutput
|
||||
|
||||
import warnings
|
||||
import platform
|
||||
|
||||
from numpy.distutils.compat import get_exception
|
||||
|
||||
def getoutput(cmd, successful_status=(0,), stacklevel=1):
    """Run shell command *cmd*; return (succeeded, output).

    A failure to launch the command is reported as a UserWarning and
    yields (False, "").
    """
    try:
        status, output = getstatusoutput(cmd)
    except EnvironmentError:
        e = get_exception()
        warnings.warn(str(e), UserWarning, stacklevel=stacklevel)
        return False, ""
    # NOTE(review): os.WIFEXITED/WEXITSTATUS expect a raw wait() status,
    # but modern subprocess.getstatusoutput returns the exit code itself;
    # this happens to work for status 0 -- confirm on the target Python.
    if os.WIFEXITED(status) and os.WEXITSTATUS(status) in successful_status:
        return True, output
    return False, output
|
||||
|
||||
def command_info(successful_status=(0,), stacklevel=1, **kw):
    """Run each command given as a keyword value.

    Returns {key: stripped output} for the commands whose exit status is
    accepted; failing commands are simply omitted.
    """
    results = {}
    for key, cmd in kw.items():
        ok, output = getoutput(cmd, successful_status=successful_status,
                               stacklevel=stacklevel + 1)
        if ok:
            results[key] = output.strip()
    return results
|
||||
|
||||
def command_by_line(cmd, successful_status=(0,), stacklevel=1):
    """Yield the stripped lines of *cmd*'s output; yield nothing on failure."""
    ok, output = getoutput(cmd, successful_status=successful_status,
                           stacklevel=stacklevel + 1)
    if not ok:
        return
    for raw in output.splitlines():
        yield raw.strip()
|
||||
|
||||
def key_value_from_command(cmd, sep, successful_status=(0,),
                           stacklevel=1):
    """Parse *cmd*'s output into a dict, splitting each line once on *sep*.

    Lines that do not contain *sep* are ignored.
    """
    parsed = {}
    for line in command_by_line(cmd, successful_status=successful_status,
                                stacklevel=stacklevel + 1):
        parts = [s.strip() for s in line.split(sep, 1)]
        if len(parts) == 2:
            parsed[parts[0]] = parts[1]
    return parsed
|
||||
|
||||
class CPUInfoBase(object):
    """Holds CPU information and provides methods for requiring
    the availability of various CPU features.
    """

    def _try_call(self, func):
        # Call func(), turning any exception into a silent None result.
        try:
            return func()
        except Exception:
            pass

    def __getattr__(self, name):
        # Public attribute 'foo' resolves to a zero-argument callable
        # wrapping the private '_foo' method (if any) in _try_call, so
        # feature probes never raise; a probe with no matching private
        # method returns a callable that yields None.
        if not name.startswith('_'):
            if hasattr(self, '_'+name):
                attr = getattr(self, '_'+name)
                if isinstance(attr, types.MethodType):
                    # Default-argument binding captures the current values.
                    return lambda func=self._try_call,attr=attr : func(attr)
            else:
                return lambda : None
        raise AttributeError(name)

    def _getNCPUs(self):
        # Conservative default; platform subclasses override this.
        return 1

    def __get_nbits(self):
        # Pointer width ('32'/'64') as reported by platform.architecture().
        abits = platform.architecture()[0]
        nbits = re.compile(r'(\d+)bit').search(abits).group(1)
        return nbits

    def _is_32bit(self):
        return self.__get_nbits() == '32'

    def _is_64bit(self):
        return self.__get_nbits() == '64'
|
||||
|
||||
class LinuxCPUInfo(CPUInfoBase):
|
||||
|
||||
info = None
|
||||
|
||||
def __init__(self):
|
||||
if self.info is not None:
|
||||
return
|
||||
info = [ {} ]
|
||||
ok, output = getoutput('uname -m')
|
||||
if ok:
|
||||
info[0]['uname_m'] = output.strip()
|
||||
try:
|
||||
fo = open('/proc/cpuinfo')
|
||||
except EnvironmentError:
|
||||
e = get_exception()
|
||||
warnings.warn(str(e), UserWarning, stacklevel=2)
|
||||
else:
|
||||
for line in fo:
|
||||
name_value = [s.strip() for s in line.split(':', 1)]
|
||||
if len(name_value) != 2:
|
||||
continue
|
||||
name, value = name_value
|
||||
if not info or name in info[-1]: # next processor
|
||||
info.append({})
|
||||
info[-1][name] = value
|
||||
fo.close()
|
||||
self.__class__.info = info
|
||||
|
||||
def _not_impl(self): pass
|
||||
|
||||
# Athlon
|
||||
|
||||
def _is_AMD(self):
|
||||
return self.info[0]['vendor_id']=='AuthenticAMD'
|
||||
|
||||
def _is_AthlonK6_2(self):
|
||||
return self._is_AMD() and self.info[0]['model'] == '2'
|
||||
|
||||
def _is_AthlonK6_3(self):
|
||||
return self._is_AMD() and self.info[0]['model'] == '3'
|
||||
|
||||
def _is_AthlonK6(self):
|
||||
return re.match(r'.*?AMD-K6', self.info[0]['model name']) is not None
|
||||
|
||||
def _is_AthlonK7(self):
|
||||
return re.match(r'.*?AMD-K7', self.info[0]['model name']) is not None
|
||||
|
||||
def _is_AthlonMP(self):
|
||||
return re.match(r'.*?Athlon\(tm\) MP\b',
|
||||
self.info[0]['model name']) is not None
|
||||
|
||||
def _is_AMD64(self):
|
||||
return self.is_AMD() and self.info[0]['family'] == '15'
|
||||
|
||||
def _is_Athlon64(self):
|
||||
return re.match(r'.*?Athlon\(tm\) 64\b',
|
||||
self.info[0]['model name']) is not None
|
||||
|
||||
def _is_AthlonHX(self):
|
||||
return re.match(r'.*?Athlon HX\b',
|
||||
self.info[0]['model name']) is not None
|
||||
|
||||
def _is_Opteron(self):
|
||||
return re.match(r'.*?Opteron\b',
|
||||
self.info[0]['model name']) is not None
|
||||
|
||||
def _is_Hammer(self):
|
||||
return re.match(r'.*?Hammer\b',
|
||||
self.info[0]['model name']) is not None
|
||||
|
||||
# Alpha
|
||||
|
||||
def _is_Alpha(self):
|
||||
return self.info[0]['cpu']=='Alpha'
|
||||
|
||||
def _is_EV4(self):
|
||||
return self.is_Alpha() and self.info[0]['cpu model'] == 'EV4'
|
||||
|
||||
def _is_EV5(self):
|
||||
return self.is_Alpha() and self.info[0]['cpu model'] == 'EV5'
|
||||
|
||||
def _is_EV56(self):
|
||||
return self.is_Alpha() and self.info[0]['cpu model'] == 'EV56'
|
||||
|
||||
def _is_PCA56(self):
|
||||
return self.is_Alpha() and self.info[0]['cpu model'] == 'PCA56'
|
||||
|
||||
# Intel
|
||||
|
||||
#XXX
|
||||
_is_i386 = _not_impl
|
||||
|
||||
def _is_Intel(self):
|
||||
return self.info[0]['vendor_id']=='GenuineIntel'
|
||||
|
||||
def _is_i486(self):
|
||||
return self.info[0]['cpu']=='i486'
|
||||
|
||||
def _is_i586(self):
|
||||
return self.is_Intel() and self.info[0]['cpu family'] == '5'
|
||||
|
||||
def _is_i686(self):
|
||||
return self.is_Intel() and self.info[0]['cpu family'] == '6'
|
||||
|
||||
def _is_Celeron(self):
|
||||
return re.match(r'.*?Celeron',
|
||||
self.info[0]['model name']) is not None
|
||||
|
||||
def _is_Pentium(self):
|
||||
return re.match(r'.*?Pentium',
|
||||
self.info[0]['model name']) is not None
|
||||
|
||||
def _is_PentiumII(self):
|
||||
return re.match(r'.*?Pentium.*?II\b',
|
||||
self.info[0]['model name']) is not None
|
||||
|
||||
def _is_PentiumPro(self):
|
||||
return re.match(r'.*?PentiumPro\b',
|
||||
self.info[0]['model name']) is not None
|
||||
|
||||
def _is_PentiumMMX(self):
|
||||
return re.match(r'.*?Pentium.*?MMX\b',
|
||||
self.info[0]['model name']) is not None
|
||||
|
||||
def _is_PentiumIII(self):
|
||||
return re.match(r'.*?Pentium.*?III\b',
|
||||
self.info[0]['model name']) is not None
|
||||
|
||||
def _is_PentiumIV(self):
|
||||
return re.match(r'.*?Pentium.*?(IV|4)\b',
|
||||
self.info[0]['model name']) is not None
|
||||
|
||||
def _is_PentiumM(self):
|
||||
return re.match(r'.*?Pentium.*?M\b',
|
||||
self.info[0]['model name']) is not None
|
||||
|
||||
def _is_Prescott(self):
|
||||
return self.is_PentiumIV() and self.has_sse3()
|
||||
|
||||
def _is_Nocona(self):
|
||||
return self.is_Intel() \
|
||||
and (self.info[0]['cpu family'] == '6' \
|
||||
or self.info[0]['cpu family'] == '15' ) \
|
||||
and (self.has_sse3() and not self.has_ssse3())\
|
||||
and re.match(r'.*?\blm\b', self.info[0]['flags']) is not None
|
||||
|
||||
def _is_Core2(self):
|
||||
return self.is_64bit() and self.is_Intel() and \
|
||||
re.match(r'.*?Core\(TM\)2\b', \
|
||||
self.info[0]['model name']) is not None
|
||||
|
||||
def _is_Itanium(self):
|
||||
return re.match(r'.*?Itanium\b',
|
||||
self.info[0]['family']) is not None
|
||||
|
||||
def _is_XEON(self):
|
||||
return re.match(r'.*?XEON\b',
|
||||
self.info[0]['model name'], re.IGNORECASE) is not None
|
||||
|
||||
_is_Xeon = _is_XEON
|
||||
|
||||
# Varia
|
||||
|
||||
def _is_singleCPU(self):
|
||||
return len(self.info) == 1
|
||||
|
||||
def _getNCPUs(self):
|
||||
return len(self.info)
|
||||
|
||||
def _has_fdiv_bug(self):
|
||||
return self.info[0]['fdiv_bug']=='yes'
|
||||
|
||||
def _has_f00f_bug(self):
|
||||
return self.info[0]['f00f_bug']=='yes'
|
||||
|
||||
def _has_mmx(self):
|
||||
return re.match(r'.*?\bmmx\b', self.info[0]['flags']) is not None
|
||||
|
||||
def _has_sse(self):
|
||||
return re.match(r'.*?\bsse\b', self.info[0]['flags']) is not None
|
||||
|
||||
def _has_sse2(self):
|
||||
return re.match(r'.*?\bsse2\b', self.info[0]['flags']) is not None
|
||||
|
||||
def _has_sse3(self):
|
||||
return re.match(r'.*?\bpni\b', self.info[0]['flags']) is not None
|
||||
|
||||
def _has_ssse3(self):
|
||||
return re.match(r'.*?\bssse3\b', self.info[0]['flags']) is not None
|
||||
|
||||
def _has_3dnow(self):
|
||||
return re.match(r'.*?\b3dnow\b', self.info[0]['flags']) is not None
|
||||
|
||||
def _has_3dnowext(self):
|
||||
return re.match(r'.*?\b3dnowext\b', self.info[0]['flags']) is not None
|
||||
|
||||
class IRIXCPUInfo(CPUInfoBase):
    """CPU information on IRIX, parsed from the output of ``sysconf``."""

    # Cached on the class so the external command runs at most once per process.
    info = None

    def __init__(self):
        if self.info is not None:
            return
        # sysconf may exit with status 0 or 1 and still produce usable output.
        info = key_value_from_command('sysconf', sep=' ',
                                      successful_status=(0, 1))
        self.__class__.info = info

    def _not_impl(self): pass
    # Placeholder for predicates with no IRIX implementation.

    def _is_singleCPU(self):
        return self.info.get('NUM_PROCESSORS') == '1'

    def _getNCPUs(self):
        # Default to 1 if sysconf did not report NUM_PROCESSORS.
        return int(self.info.get('NUM_PROCESSORS', 1))

    def __cputype(self, n):
        # PROCESSORS looks like e.g. "R10000 ..."; compare the first token
        # (lower-cased) against 'r<n>'.
        return self.info.get('PROCESSORS').split()[0].lower() == 'r%s' % (n)
    def _is_r2000(self): return self.__cputype(2000)
    def _is_r3000(self): return self.__cputype(3000)
    def _is_r3900(self): return self.__cputype(3900)
    def _is_r4000(self): return self.__cputype(4000)
    def _is_r4100(self): return self.__cputype(4100)
    def _is_r4300(self): return self.__cputype(4300)
    def _is_r4400(self): return self.__cputype(4400)
    def _is_r4600(self): return self.__cputype(4600)
    def _is_r4650(self): return self.__cputype(4650)
    def _is_r5000(self): return self.__cputype(5000)
    def _is_r6000(self): return self.__cputype(6000)
    def _is_r8000(self): return self.__cputype(8000)
    def _is_r10000(self): return self.__cputype(10000)
    def _is_r12000(self): return self.__cputype(12000)
    def _is_rorion(self): return self.__cputype('orion')

    def get_ip(self):
        # Return the IPxx machine designation, or None if unavailable.
        try: return self.info.get('MACHINE')
        except Exception: pass
    def __machine(self, n):
        # MACHINE looks like e.g. "IP27"; compare lower-cased against 'ip<n>'.
        return self.info.get('MACHINE').lower() == 'ip%s' % (n)
    def _is_IP19(self): return self.__machine(19)
    def _is_IP20(self): return self.__machine(20)
    def _is_IP21(self): return self.__machine(21)
    def _is_IP22(self): return self.__machine(22)
    def _is_IP22_4k(self): return self.__machine(22) and self._is_r4000()
    def _is_IP22_5k(self): return self.__machine(22) and self._is_r5000()
    def _is_IP24(self): return self.__machine(24)
    def _is_IP25(self): return self.__machine(25)
    def _is_IP26(self): return self.__machine(26)
    def _is_IP27(self): return self.__machine(27)
    def _is_IP28(self): return self.__machine(28)
    def _is_IP30(self): return self.__machine(30)
    def _is_IP32(self): return self.__machine(32)
    def _is_IP32_5k(self): return self.__machine(32) and self._is_r5000()
    def _is_IP32_10k(self): return self.__machine(32) and self._is_r10000()
|
||||
|
||||
|
||||
class DarwinCPUInfo(CPUInfoBase):
    """CPU information on macOS/Darwin, from ``arch``, ``machine`` and ``sysctl hw``."""

    # Cached on the class so the external commands run at most once per process.
    info = None

    def __init__(self):
        if self.info is not None:
            return
        info = command_info(arch='arch',
                            machine='machine')
        # 'sysctl hw' emits 'key = value' lines; parsed into a sub-dict.
        info['sysctl_hw'] = key_value_from_command('sysctl hw', sep='=')
        self.__class__.info = info

    def _not_impl(self): pass
    # Placeholder for predicates with no Darwin implementation.

    def _getNCPUs(self):
        # hw.ncpu from sysctl; default to 1 if absent.
        return int(self.info['sysctl_hw'].get('hw.ncpu', 1))

    def _is_Power_Macintosh(self):
        return self.info['sysctl_hw']['hw.machine']=='Power Macintosh'

    def _is_i386(self):
        return self.info['arch']=='i386'
    def _is_ppc(self):
        return self.info['arch']=='ppc'

    def __machine(self, n):
        # `machine` output looks like e.g. "ppc7450".
        return self.info['machine'] == 'ppc%s'%n
    def _is_ppc601(self): return self.__machine(601)
    def _is_ppc602(self): return self.__machine(602)
    def _is_ppc603(self): return self.__machine(603)
    def _is_ppc603e(self): return self.__machine('603e')
    def _is_ppc604(self): return self.__machine(604)
    def _is_ppc604e(self): return self.__machine('604e')
    def _is_ppc620(self): return self.__machine(620)
    def _is_ppc630(self): return self.__machine(630)
    def _is_ppc740(self): return self.__machine(740)
    def _is_ppc7400(self): return self.__machine(7400)
    def _is_ppc7450(self): return self.__machine(7450)
    def _is_ppc750(self): return self.__machine(750)
    def _is_ppc403(self): return self.__machine(403)
    def _is_ppc505(self): return self.__machine(505)
    def _is_ppc801(self): return self.__machine(801)
    def _is_ppc821(self): return self.__machine(821)
    def _is_ppc823(self): return self.__machine(823)
    def _is_ppc860(self): return self.__machine(860)
|
||||
|
||||
|
||||
class SunOSCPUInfo(CPUInfoBase):
    """CPU information on SunOS/Solaris.

    Gathered from ``arch``, ``mach``, ``uname -i``, ``isainfo`` and
    ``psrinfo -v 0``; results are cached on the class so the external
    commands run at most once per process.
    """

    info = None

    def __init__(self):
        if self.info is not None:
            return
        # BUG FIX: the uname_i entry previously invoked the literal command
        # 'uname_i' (no such executable), so info['uname_i'] was never
        # populated and every uname_i-based predicate below silently failed.
        # 'uname -i' is the real command that prints the platform identifier.
        info = command_info(arch='arch',
                            mach='mach',
                            uname_i='uname -i',
                            isainfo_b='isainfo -b',
                            isainfo_n='isainfo -n',
                            )
        info['uname_X'] = key_value_from_command('uname -X', sep='=')
        # Extract the processor type from the first matching psrinfo line.
        for line in command_by_line('psrinfo -v 0'):
            m = re.match(r'\s*The (?P<p>[\w\d]+) processor operates at', line)
            if m:
                info['processor'] = m.group('p')
                break
        self.__class__.info = info

    def _not_impl(self): pass
    # Placeholder for predicates with no SunOS implementation.

    # NOTE(review): the underscore-less is_* calls and key lookups below are
    # presumably mediated by CPUInfoBase's attribute magic — confirm in the
    # base class (not visible in this chunk).

    def _is_i386(self):
        return self.info['isainfo_n']=='i386'
    def _is_sparc(self):
        return self.info['isainfo_n']=='sparc'
    def _is_sparcv9(self):
        return self.info['isainfo_n']=='sparcv9'

    def _getNCPUs(self):
        # NumCPU from 'uname -X'; default to 1 if absent.
        return int(self.info['uname_X'].get('NumCPU', 1))

    def _is_sun4(self):
        return self.info['arch']=='sun4'

    # Machine-model predicates matched against the 'uname -i' platform string.
    def _is_SUNW(self):
        return re.match(r'SUNW', self.info['uname_i']) is not None
    def _is_sparcstation5(self):
        return re.match(r'.*SPARCstation-5', self.info['uname_i']) is not None
    def _is_ultra1(self):
        return re.match(r'.*Ultra-1', self.info['uname_i']) is not None
    def _is_ultra250(self):
        return re.match(r'.*Ultra-250', self.info['uname_i']) is not None
    def _is_ultra2(self):
        return re.match(r'.*Ultra-2', self.info['uname_i']) is not None
    def _is_ultra30(self):
        return re.match(r'.*Ultra-30', self.info['uname_i']) is not None
    def _is_ultra4(self):
        return re.match(r'.*Ultra-4', self.info['uname_i']) is not None
    def _is_ultra5_10(self):
        return re.match(r'.*Ultra-5_10', self.info['uname_i']) is not None
    def _is_ultra5(self):
        return re.match(r'.*Ultra-5', self.info['uname_i']) is not None
    def _is_ultra60(self):
        return re.match(r'.*Ultra-60', self.info['uname_i']) is not None
    def _is_ultra80(self):
        return re.match(r'.*Ultra-80', self.info['uname_i']) is not None
    def _is_ultraenterprice(self):
        # (sic) historical misspelling kept for backward compatibility.
        return re.match(r'.*Ultra-Enterprise', self.info['uname_i']) is not None
    def _is_ultraenterprice10k(self):
        return re.match(r'.*Ultra-Enterprise-10000', self.info['uname_i']) is not None
    def _is_sunfire(self):
        return re.match(r'.*Sun-Fire', self.info['uname_i']) is not None
    def _is_ultra(self):
        return re.match(r'.*Ultra', self.info['uname_i']) is not None

    # Processor-architecture predicates from psrinfo.
    def _is_cpusparcv7(self):
        return self.info['processor']=='sparcv7'
    def _is_cpusparcv8(self):
        return self.info['processor']=='sparcv8'
    def _is_cpusparcv9(self):
        return self.info['processor']=='sparcv9'
|
||||
|
||||
class Win32CPUInfo(CPUInfoBase):
    """CPU information on Windows, read from the registry.

    Enumerates HKLM\\HARDWARE\\DESCRIPTION\\System\\CentralProcessor\\<n>
    for each logical processor; the "Identifier" value is parsed into
    integer Family/Model/Stepping entries used by the predicates below.
    """

    info = None
    pkey = r"HARDWARE\DESCRIPTION\System\CentralProcessor"
    # XXX: what does the value of
    #   HKEY_LOCAL_MACHINE\HARDWARE\DESCRIPTION\System\CentralProcessor\0
    # mean?

    def __init__(self):
        # Cached on the class: the registry is read at most once per process.
        if self.info is not None:
            return
        info = []
        try:
            #XXX: Bad style to use so long `try:...except:...`. Fix it!
            if sys.version_info[0] >= 3:
                import winreg
            else:
                import _winreg as winreg

            prgx = re.compile(r"family\s+(?P<FML>\d+)\s+model\s+(?P<MDL>\d+)"
                              r"\s+stepping\s+(?P<STP>\d+)", re.IGNORECASE)
            chnd = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, self.pkey)
            pnum = 0
            while True:
                try:
                    proc = winreg.EnumKey(chnd, pnum)
                except winreg.error:
                    # No more per-processor subkeys.
                    break
                else:
                    pnum += 1
                    info.append({"Processor": proc})
                    phnd = winreg.OpenKey(chnd, proc)
                    pidx = 0
                    while True:
                        try:
                            name, value, vtpe = winreg.EnumValue(phnd, pidx)
                        except winreg.error:
                            break
                        else:
                            pidx = pidx + 1
                            info[-1][name] = value
                            if name == "Identifier":
                                # e.g. "x86 Family 6 Model 15 Stepping 11"
                                srch = prgx.search(value)
                                if srch:
                                    info[-1]["Family"] = int(srch.group("FML"))
                                    info[-1]["Model"] = int(srch.group("MDL"))
                                    info[-1]["Stepping"] = int(srch.group("STP"))
        except Exception:
            # Best-effort: any registry failure leaves info partially filled.
            print(sys.exc_info()[1], '(ignoring)')
        self.__class__.info = info

    def _not_impl(self): pass
    # Placeholder for predicates with no Win32 implementation.

    # NOTE(review): the underscore-less is_* calls below are presumably
    # resolved by CPUInfoBase's attribute magic — confirm in the base class
    # (not visible in this chunk).

    # Athlon

    def _is_AMD(self):
        return self.info[0]['VendorIdentifier']=='AuthenticAMD'

    def _is_Am486(self):
        return self.is_AMD() and self.info[0]['Family']==4

    def _is_Am5x86(self):
        return self.is_AMD() and self.info[0]['Family']==4

    def _is_AMDK5(self):
        return self.is_AMD() and self.info[0]['Family']==5 \
               and self.info[0]['Model'] in [0, 1, 2, 3]

    def _is_AMDK6(self):
        return self.is_AMD() and self.info[0]['Family']==5 \
               and self.info[0]['Model'] in [6, 7]

    def _is_AMDK6_2(self):
        return self.is_AMD() and self.info[0]['Family']==5 \
               and self.info[0]['Model']==8

    def _is_AMDK6_3(self):
        return self.is_AMD() and self.info[0]['Family']==5 \
               and self.info[0]['Model']==9

    def _is_AMDK7(self):
        return self.is_AMD() and self.info[0]['Family'] == 6

    # To reliably distinguish between the different types of AMD64 chips
    # (Athlon64, Operton, Athlon64 X2, Semperon, Turion 64, etc.) would
    # require looking at the 'brand' from cpuid

    def _is_AMD64(self):
        return self.is_AMD() and self.info[0]['Family'] == 15

    # Intel

    def _is_Intel(self):
        return self.info[0]['VendorIdentifier']=='GenuineIntel'

    def _is_i386(self):
        return self.info[0]['Family']==3

    def _is_i486(self):
        return self.info[0]['Family']==4

    def _is_i586(self):
        return self.is_Intel() and self.info[0]['Family']==5

    def _is_i686(self):
        return self.is_Intel() and self.info[0]['Family']==6

    def _is_Pentium(self):
        return self.is_Intel() and self.info[0]['Family']==5

    def _is_PentiumMMX(self):
        return self.is_Intel() and self.info[0]['Family']==5 \
               and self.info[0]['Model']==4

    def _is_PentiumPro(self):
        return self.is_Intel() and self.info[0]['Family']==6 \
               and self.info[0]['Model']==1

    def _is_PentiumII(self):
        return self.is_Intel() and self.info[0]['Family']==6 \
               and self.info[0]['Model'] in [3, 5, 6]

    def _is_PentiumIII(self):
        return self.is_Intel() and self.info[0]['Family']==6 \
               and self.info[0]['Model'] in [7, 8, 9, 10, 11]

    def _is_PentiumIV(self):
        return self.is_Intel() and self.info[0]['Family']==15

    def _is_PentiumM(self):
        return self.is_Intel() and self.info[0]['Family'] == 6 \
               and self.info[0]['Model'] in [9, 13, 14]

    def _is_Core2(self):
        return self.is_Intel() and self.info[0]['Family'] == 6 \
               and self.info[0]['Model'] in [15, 16, 17]

    # Varia

    def _is_singleCPU(self):
        # One dict per enumerated CentralProcessor registry subkey.
        return len(self.info) == 1

    def _getNCPUs(self):
        return len(self.info)

    def _has_mmx(self):
        if self.is_Intel():
            return (self.info[0]['Family']==5 and self.info[0]['Model']==4) \
                   or (self.info[0]['Family'] in [6, 15])
        elif self.is_AMD():
            return self.info[0]['Family'] in [5, 6, 15]
        else:
            return False

    def _has_sse(self):
        if self.is_Intel():
            return (self.info[0]['Family']==6 and \
                    self.info[0]['Model'] in [7, 8, 9, 10, 11]) \
                   or self.info[0]['Family']==15
        elif self.is_AMD():
            return (self.info[0]['Family']==6 and \
                    self.info[0]['Model'] in [6, 7, 8, 10]) \
                   or self.info[0]['Family']==15
        else:
            return False

    def _has_sse2(self):
        if self.is_Intel():
            # BUG FIX: this used to call self.is_Pentium4(), a method that
            # does not exist (the detector above is named _is_PentiumIV),
            # so SSE2 was never reported for Pentium 4 class Intel CPUs.
            return self.is_PentiumIV() or self.is_PentiumM() \
                   or self.is_Core2()
        elif self.is_AMD():
            return self.is_AMD64()
        else:
            return False

    def _has_3dnow(self):
        return self.is_AMD() and self.info[0]['Family'] in [5, 6, 15]

    def _has_3dnowext(self):
        return self.is_AMD() and self.info[0]['Family'] in [6, 15]
|
||||
|
||||
# Select the concrete CPUInfo implementation for the running platform.
if sys.platform.startswith('linux'): # variations: linux2,linux-i386 (any others?)
    cpuinfo = LinuxCPUInfo
elif sys.platform.startswith('irix'):
    cpuinfo = IRIXCPUInfo
elif sys.platform == 'darwin':
    cpuinfo = DarwinCPUInfo
elif sys.platform.startswith('sunos'):
    cpuinfo = SunOSCPUInfo
elif sys.platform.startswith('win32'):
    cpuinfo = Win32CPUInfo
elif sys.platform.startswith('cygwin'):
    # Cygwin exposes a Linux-style /proc/cpuinfo, so the Linux parser is reused.
    cpuinfo = LinuxCPUInfo
#XXX: other OS's. Eg. use _winreg on Win32. Or os.uname on unices.
else:
    cpuinfo = CPUInfoBase

# Module-level singleton: instantiating here runs the platform probes once
# at import time; results are cached on the chosen class.
cpu = cpuinfo()
|
||||
|
||||
#if __name__ == "__main__":
|
||||
#
|
||||
# cpu.is_blaa()
|
||||
# cpu.is_Intel()
|
||||
# cpu.is_Alpha()
|
||||
#
|
||||
# print('CPU information:'),
|
||||
# for name in dir(cpuinfo):
|
||||
# if name[0]=='_' and name[1]!='_':
|
||||
# r = getattr(cpu,name[1:])()
|
||||
# if r:
|
||||
# if r!=1:
|
||||
# print('%s=%s' %(name[1:],r))
|
||||
# else:
|
||||
# print(name[1:]),
|
||||
# print()
|
||||
330
venv/lib/python3.6/site-packages/numpy/distutils/exec_command.py
Normal file
330
venv/lib/python3.6/site-packages/numpy/distutils/exec_command.py
Normal file
@@ -0,0 +1,330 @@
|
||||
"""
|
||||
exec_command
|
||||
|
||||
Implements exec_command function that is (almost) equivalent to
|
||||
commands.getstatusoutput function but on NT, DOS systems the
|
||||
returned status is actually correct (though, the returned status
|
||||
values may be different by a factor). In addition, exec_command
|
||||
takes keyword arguments for (re-)defining environment variables.
|
||||
|
||||
Provides functions:
|
||||
|
||||
exec_command --- execute command in a specified directory and
|
||||
in the modified environment.
|
||||
find_executable --- locate a command using info from environment
|
||||
variable PATH. Equivalent to posix `which`
|
||||
command.
|
||||
|
||||
Author: Pearu Peterson <pearu@cens.ioc.ee>
|
||||
Created: 11 January 2003
|
||||
|
||||
Requires: Python 2.x
|
||||
|
||||
Successfully tested on:
|
||||
|
||||
======== ============ =================================================
|
||||
os.name sys.platform comments
|
||||
======== ============ =================================================
|
||||
posix linux2 Debian (sid) Linux, Python 2.1.3+, 2.2.3+, 2.3.3
|
||||
PyCrust 0.9.3, Idle 1.0.2
|
||||
posix linux2 Red Hat 9 Linux, Python 2.1.3, 2.2.2, 2.3.2
|
||||
posix sunos5 SunOS 5.9, Python 2.2, 2.3.2
|
||||
posix darwin Darwin 7.2.0, Python 2.3
|
||||
nt win32 Windows Me
|
||||
Python 2.3(EE), Idle 1.0, PyCrust 0.7.2
|
||||
Python 2.1.1 Idle 0.8
|
||||
nt win32 Windows 98, Python 2.1.1. Idle 0.8
|
||||
nt win32 Cygwin 98-4.10, Python 2.1.1(MSC) - echo tests
|
||||
fail i.e. redefining environment variables may
|
||||
not work. FIXED: don't use cygwin echo!
|
||||
Comment: also `cmd /c echo` will not work
|
||||
but redefining environment variables do work.
|
||||
posix cygwin Cygwin 98-4.10, Python 2.3.3(cygming special)
|
||||
nt win32 Windows XP, Python 2.3.3
|
||||
======== ============ =================================================
|
||||
|
||||
Known bugs:
|
||||
|
||||
* Tests, that send messages to stderr, fail when executed from MSYS prompt
|
||||
because the messages are lost at some point.
|
||||
|
||||
"""
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
__all__ = ['exec_command', 'find_executable']
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import locale
|
||||
import warnings
|
||||
|
||||
from numpy.distutils.misc_util import is_sequence, make_temp_file
|
||||
from numpy.distutils import log
|
||||
|
||||
def filepath_from_subprocess_output(output):
    """
    Convert `bytes` in the encoding used by a subprocess into a filesystem-appropriate `str`.

    Inherited from `exec_command`, and possibly incorrect.
    """
    encoding = locale.getpreferredencoding(False)
    if encoding is None:
        encoding = 'ascii'
    text = output.decode(encoding, errors='replace')
    text = text.replace('\r\n', '\n')
    # Historical oddity: exactly one trailing newline is stripped.
    if text.endswith('\n'):
        text = text[:-1]
    if sys.version_info < (3, 0):
        # Python 2 stdio works in bytes, so drop non-ascii characters to be safe.
        text = text.encode('ascii', errors='replace')
    return text
|
||||
|
||||
|
||||
def forward_bytes_to_stdout(val):
    """
    Forward bytes from a subprocess call to the console, without attempting to
    decode them.

    The assumption is that the subprocess call already returned bytes in
    a suitable encoding.
    """
    out = sys.stdout
    if sys.version_info.major < 3:
        # Python 2 stdout is already a byte stream.
        out.write(val)
        return
    if hasattr(out, 'buffer'):
        # Preferred: write through the binary layer under the text wrapper.
        out.buffer.write(val)
    elif hasattr(out, 'encoding'):
        # Fall back to round-tripping through the stream's own encoding.
        out.write(val.decode(out.encoding))
    else:
        # Last resort: best-guess UTF-8, replacing undecodable bytes.
        out.write(val.decode('utf8', errors='replace'))
|
||||
|
||||
|
||||
def temp_file_name():
    """Return the name of a fresh (already closed) temporary file.

    .. deprecated:: 1.17
        Use `tempfile.mkstemp` instead.
    """
    # 2019-01-30, 1.17
    warnings.warn('temp_file_name is deprecated since NumPy v1.17, use '
                  'tempfile.mkstemp instead', DeprecationWarning, stacklevel=1)
    handle, path = make_temp_file()
    handle.close()
    return path
|
||||
|
||||
def get_pythonexe():
    """Return the path of the running Python interpreter.

    On Windows/DOS, a ``pythonw`` executable name is normalized to
    ``python`` (console variant) and the result is checked to exist.
    """
    exe = sys.executable
    if os.name in ('nt', 'dos'):
        directory, basename = os.path.split(exe)
        basename = basename.upper().replace('PYTHONW', 'PYTHON')
        exe = os.path.join(directory, basename)
        assert os.path.isfile(exe), '%r is not a file' % (exe,)
    return exe
|
||||
|
||||
def find_executable(exe, path=None, _cache={}):
    """Return full path of a executable or None.

    Symbolic links are not followed.

    Parameters
    ----------
    exe : str
        Executable name; may be absolute, or quoted with double quotes.
    path : str, optional
        Search path; defaults to ``os.environ['PATH']`` (or ``os.defpath``).
    """
    # NOTE: the mutable default `_cache` is intentional here — it memoizes
    # results for the lifetime of the process (keyed on (exe, path)).
    key = exe, path
    try:
        return _cache[key]
    except KeyError:
        pass
    log.debug('find_executable(%r)' % exe)
    orig_exe = exe

    if path is None:
        path = os.environ.get('PATH', os.defpath)
    if os.name=='posix':
        realpath = os.path.realpath
    else:
        # Non-posix: keep the path as-is (identity).
        realpath = lambda a:a

    if exe.startswith('"'):
        # Strip surrounding double quotes from a pre-quoted name.
        exe = exe[1:-1]

    # On Windows-like systems, try the standard executable extensions
    # unless the name already carries one of them.
    suffixes = ['']
    if os.name in ['nt', 'dos', 'os2']:
        fn, ext = os.path.splitext(exe)
        extra_suffixes = ['.exe', '.com', '.bat']
        if ext.lower() not in extra_suffixes:
            suffixes = extra_suffixes

    if os.path.isabs(exe):
        # Absolute name: search only that exact location.
        paths = ['']
    else:
        paths = [ os.path.abspath(p) for p in path.split(os.pathsep) ]

    for path in paths:
        fn = os.path.join(path, exe)
        for s in suffixes:
            f_ext = fn+s
            # Resolve non-links through realpath; links are kept as given
            # (per the docstring, symbolic links are not followed).
            if not os.path.islink(f_ext):
                f_ext = realpath(f_ext)
            if os.path.isfile(f_ext) and os.access(f_ext, os.X_OK):
                log.info('Found executable %s' % f_ext)
                _cache[key] = f_ext
                return f_ext

    log.warn('Could not locate executable %s' % orig_exe)
    return None
|
||||
|
||||
############################################################
|
||||
|
||||
def _preserve_environment( names ):
    # Snapshot the current value (or None) of each named environment
    # variable so the caller can restore them after mutating os.environ.
    log.debug('_preserve_environment(%r)' % (names))
    return {key: os.environ.get(key) for key in names}
|
||||
|
||||
def _update_environment( **env ):
    # Apply the given name->value pairs to os.environ; None becomes ''.
    log.debug('_update_environment(...)')
    for key in env:
        os.environ[key] = env[key] or ''
|
||||
|
||||
def exec_command(command, execute_in='', use_shell=None, use_tee=None,
                 _with_python = 1, **env ):
    """
    Return (status,output) of executed command.

    .. deprecated:: 1.17
        Use subprocess.Popen instead

    Parameters
    ----------
    command : str
        A concatenated string of executable and arguments.
    execute_in : str
        Before running command ``cd execute_in`` and after ``cd -``.
    use_shell : {bool, None}, optional
        If True, execute ``sh -c command``. Default None (True)
    use_tee : {bool, None}, optional
        If True use tee. Default None (True)


    Returns
    -------
    res : str
        Both stdout and stderr messages.

    Notes
    -----
    On NT, DOS systems the returned status is correct for external commands.
    Wild cards will not work for non-posix systems or when use_shell=0.

    """
    # 2019-01-30, 1.17
    warnings.warn('exec_command is deprecated since NumPy v1.17, use '
                  'subprocess.Popen instead', DeprecationWarning, stacklevel=1)
    log.debug('exec_command(%r,%s)' % (command,\
         ','.join(['%s=%r'%kv for kv in env.items()])))

    # Defaults: shell and tee behaviour are enabled only on posix.
    if use_tee is None:
        use_tee = os.name=='posix'
    if use_shell is None:
        use_shell = os.name=='posix'
    execute_in = os.path.abspath(execute_in)
    oldcwd = os.path.abspath(os.getcwd())

    # NOTE(review): exec_dir is computed but never used below — apparently a
    # leftover from an older implementation of this function; confirm before
    # removing.
    if __name__[-12:] == 'exec_command':
        exec_dir = os.path.dirname(os.path.abspath(__file__))
    elif os.path.isfile('exec_command.py'):
        exec_dir = os.path.abspath('.')
    else:
        exec_dir = os.path.abspath(sys.argv[0])
        if os.path.isfile(exec_dir):
            exec_dir = os.path.dirname(exec_dir)

    # Temporarily change into execute_in (restored in the finally below).
    if oldcwd!=execute_in:
        os.chdir(execute_in)
        log.debug('New cwd: %s' % execute_in)
    else:
        log.debug('Retaining cwd: %s' % oldcwd)

    # Save the current values of any overridden environment variables,
    # then apply the overrides for the duration of the command.
    oldenv = _preserve_environment( list(env.keys()) )
    _update_environment( **env )

    try:
        st = _exec_command(command,
                           use_shell=use_shell,
                           use_tee=use_tee,
                           **env)
    finally:
        # Always restore the original cwd and environment, even on error.
        if oldcwd!=execute_in:
            os.chdir(oldcwd)
            log.debug('Restored cwd to %s' % oldcwd)
        _update_environment(**oldenv)

    return st
|
||||
|
||||
|
||||
def _exec_command(command, use_shell=None, use_tee = None, **env):
    """
    Internal workhorse for exec_command().

    Runs `command` via subprocess with stderr folded into stdout and
    returns ``(returncode, text)`` where `text` is the decoded output.
    """
    if use_shell is None:
        use_shell = os.name=='posix'
    if use_tee is None:
        use_tee = os.name=='posix'

    if os.name == 'posix' and use_shell:
        # On POSIX, subprocess always uses /bin/sh, override
        sh = os.environ.get('SHELL', '/bin/sh')
        if is_sequence(command):
            command = [sh, '-c', ' '.join(command)]
        else:
            command = [sh, '-c', command]
        # The shell is now invoked explicitly, so disable subprocess's own
        # shell handling.
        use_shell = False

    elif os.name == 'nt' and is_sequence(command):
        # On Windows, join the string for CreateProcess() ourselves as
        # subprocess does it a bit differently
        command = ' '.join(_quote_arg(arg) for arg in command)

    # Inherit environment by default
    env = env or None
    try:
        # universal_newlines is set to False so that communicate()
        # will return bytes. We need to decode the output ourselves
        # so that Python will not raise a UnicodeDecodeError when
        # it encounters an invalid character; rather, we simply replace it
        proc = subprocess.Popen(command, shell=use_shell, env=env,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT,
                                universal_newlines=False)
    except EnvironmentError:
        # Return 127, as os.spawn*() and /bin/sh do
        return 127, ''

    text, err = proc.communicate()
    # Decode with the preferred locale encoding, replacing bad bytes.
    mylocale = locale.getpreferredencoding(False)
    if mylocale is None:
        mylocale = 'ascii'
    text = text.decode(mylocale, errors='replace')
    text = text.replace('\r\n', '\n')
    # Another historical oddity
    if text[-1:] == '\n':
        text = text[:-1]

    # stdio uses bytes in python 2, so to avoid issues, we simply
    # remove all non-ascii characters
    if sys.version_info < (3, 0):
        text = text.encode('ascii', errors='replace')

    # Emulate `tee`: echo the captured output to the console as well.
    if use_tee and text:
        print(text)
    return proc.returncode, text
|
||||
|
||||
|
||||
def _quote_arg(arg):
|
||||
"""
|
||||
Quote the argument for safe use in a shell command line.
|
||||
"""
|
||||
# If there is a quote in the string, assume relevants parts of the
|
||||
# string are already quoted (e.g. '-I"C:\\Program Files\\..."')
|
||||
if '"' not in arg and ' ' in arg:
|
||||
return '"%s"' % arg
|
||||
return arg
|
||||
|
||||
############################################################
|
||||
@@ -0,0 +1,93 @@
|
||||
"""distutils.extension
|
||||
|
||||
Provides the Extension class, used to describe C/C++ extension
|
||||
modules in setup scripts.
|
||||
|
||||
Overridden to support f2py.
|
||||
|
||||
"""
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import sys
|
||||
import re
|
||||
from distutils.extension import Extension as old_Extension
|
||||
|
||||
# Python 3 removed `basestring`; alias it so the isinstance check in
# Extension.__init__ works on both major versions.
if sys.version_info[0] >= 3:
    basestring = str


# Case-insensitive matchers classifying source files by extension:
# C++ sources, and Fortran/.pyf (f2py signature) sources respectively.
cxx_ext_re = re.compile(r'.*[.](cpp|cxx|cc)\Z', re.I).match
fortran_pyf_ext_re = re.compile(r'.*[.](f90|f95|f77|for|ftn|f|pyf)\Z', re.I).match
|
||||
|
||||
class Extension(old_Extension):
    """distutils Extension subclass with Fortran/f2py support.

    Differences from ``distutils.extension.Extension``:
      * ``sources`` may contain non-string entries (e.g. source-generator
        callables), so the base class's string checks are bypassed;
      * extra numpy.distutils attributes: ``f2py_options``, ``module_dirs``,
        ``extra_f77_compile_args``, ``extra_f90_compile_args`` and
        ``swig_opts``.
    """
    def __init__ (
            self, name, sources,
            include_dirs=None,
            define_macros=None,
            undef_macros=None,
            library_dirs=None,
            libraries=None,
            runtime_library_dirs=None,
            extra_objects=None,
            extra_compile_args=None,
            extra_link_args=None,
            export_symbols=None,
            swig_opts=None,
            depends=None,
            language=None,
            f2py_options=None,
            module_dirs=None,
            extra_f77_compile_args=None,
            extra_f90_compile_args=None,):

        # Pass an empty sources list so the base constructor's
        # "sources must be strings" assertions never see our entries.
        old_Extension.__init__(
                self, name, [],
                include_dirs=include_dirs,
                define_macros=define_macros,
                undef_macros=undef_macros,
                library_dirs=library_dirs,
                libraries=libraries,
                runtime_library_dirs=runtime_library_dirs,
                extra_objects=extra_objects,
                extra_compile_args=extra_compile_args,
                extra_link_args=extra_link_args,
                export_symbols=export_symbols)

        # Avoid assert statements checking that sources contains strings:
        self.sources = sources

        # Python 2.4 distutils new features
        self.swig_opts = swig_opts or []
        # swig_opts is assumed to be a list. Here we handle the case where it
        # is specified as a string instead.
        if isinstance(self.swig_opts, basestring):
            import warnings
            msg = "swig_opts is specified as a string instead of a list"
            warnings.warn(msg, SyntaxWarning, stacklevel=2)
            self.swig_opts = self.swig_opts.split()

        # Python 2.3 distutils new features
        self.depends = depends or []
        self.language = language

        # numpy_distutils features
        self.f2py_options = f2py_options or []
        self.module_dirs = module_dirs or []
        self.extra_f77_compile_args = extra_f77_compile_args or []
        self.extra_f90_compile_args = extra_f90_compile_args or []

        return

    def has_cxx_sources(self):
        """Return True if any source has a C++ extension (.cpp/.cxx/.cc)."""
        for source in self.sources:
            if cxx_ext_re(str(source)):
                return True
        return False

    def has_f2py_sources(self):
        """Return True if any source is Fortran or a .pyf signature file."""
        for source in self.sources:
            if fortran_pyf_ext_re(source):
                return True
        return False
|
||||
|
||||
# class Extension
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,158 @@
|
||||
|
||||
# http://www.absoft.com/literature/osxuserguide.pdf
|
||||
# http://www.absoft.com/documentation.html
|
||||
|
||||
# Notes:
|
||||
# - when using -g77 then use -DUNDERSCORE_G77 to compile f2py
|
||||
# generated extension modules (works for f2py v2.45.241_1936 and up)
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import os
|
||||
|
||||
from numpy.distutils.cpuinfo import cpu
|
||||
from numpy.distutils.fcompiler import FCompiler, dummy_fortran_file
|
||||
from numpy.distutils.misc_util import cyg2win32
|
||||
|
||||
compilers = ['AbsoftFCompiler']


class AbsoftFCompiler(FCompiler):
    """numpy.distutils compiler description for the Absoft Fortran compiler:
    executable names, version detection, and per-version/per-OS flags."""

    compiler_type = 'absoft'
    description = 'Absoft Corp Fortran Compiler'
    #version_pattern = r'FORTRAN 77 Compiler (?P<version>[^\s*,]*).*?Absoft Corp'
    version_pattern = r'(f90:.*?(Absoft Pro FORTRAN Version|FORTRAN 77 Compiler|Absoft Fortran Compiler Version|Copyright Absoft Corporation.*?Version))'+\
                      r' (?P<version>[^\s*,]*)(.*?Absoft Corp|)'

    # on windows: f90 -V -c dummy.f
    # f90: Copyright Absoft Corporation 1994-1998 mV2; Cray Research, Inc. 1994-1996 CF90 (2.x.x.x f36t87) Version 2.3 Wed Apr 19, 2006 13:05:16

    # samt5735(8)$ f90 -V -c dummy.f
    # f90: Copyright Absoft Corporation 1994-2002; Absoft Pro FORTRAN Version 8.0
    # Note that fink installs g77 as f77, so need to use f90 for detection.

    executables = {
        'version_cmd'  : None,          # set by update_executables
        'compiler_f77' : ["f77"],
        'compiler_fix' : ["f90"],
        'compiler_f90' : ["f90"],
        'linker_so'    : ["<F90>"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"]
        }

    if os.name=='nt':
        library_switch = '/out:'      #No space after /out:!

    module_dir_switch = None
    module_include_switch = '-p'

    def update_executables(self):
        # Version probing requires compiling a real (dummy) Fortran file.
        f = cyg2win32(dummy_fortran_file())
        self.executables['version_cmd'] = ['<F90>', '-V', '-c',
                                           f+'.f', '-o', f+'.o']

    def get_flags_linker_so(self):
        if os.name=='nt':
            opt = ['/dll']
        # The "-K shared" switches are being left in for pre-9.0 versions
        # of Absoft though I don't think versions earlier than 9 can
        # actually be used to build shared libraries. In fact, version
        # 8 of Absoft doesn't recognize "-K shared" and will fail.
        elif self.get_version() >= '9.0':
            # NOTE(review): this is a plain string comparison, and
            # '10.0' >= '9.0' is False lexicographically — confirm that
            # get_version() values compare as intended here.
            opt = ['-shared']
        else:
            opt = ["-K", "shared"]
        return opt

    def library_dir_option(self, dir):
        if os.name=='nt':
            return ['-link', '/PATH:%s' % (dir)]
        return "-L" + dir

    def library_option(self, lib):
        if os.name=='nt':
            return '%s.lib' % (lib)
        return "-l" + lib

    def get_library_dirs(self):
        opt = FCompiler.get_library_dirs(self)
        # $ABSOFT points at the compiler installation root, if set.
        d = os.environ.get('ABSOFT')
        if d:
            if self.get_version() >= '10.0':
                # use shared libraries, the static libraries were not compiled -fPIC
                prefix = 'sh'
            else:
                prefix = ''
            if cpu.is_64bit():
                suffix = '64'
            else:
                suffix = ''
            opt.append(os.path.join(d, '%slib%s' % (prefix, suffix)))
        return opt

    def get_libraries(self):
        # Runtime library set depends on the Absoft major version.
        opt = FCompiler.get_libraries(self)
        if self.get_version() >= '11.0':
            opt.extend(['af90math', 'afio', 'af77math', 'amisc'])
        elif self.get_version() >= '10.0':
            opt.extend(['af90math', 'afio', 'af77math', 'U77'])
        elif self.get_version() >= '8.0':
            opt.extend(['f90math', 'fio', 'f77math', 'U77'])
        else:
            opt.extend(['fio', 'f90math', 'fmath', 'U77'])
        if os.name =='nt':
            opt.append('COMDLG32')
        return opt

    def get_flags(self):
        opt = FCompiler.get_flags(self)
        if os.name != 'nt':
            opt.extend(['-s'])
            if self.get_version():
                if self.get_version()>='8.2':
                    opt.append('-fpic')
        return opt

    def get_flags_f77(self):
        opt = FCompiler.get_flags_f77(self)
        opt.extend(['-N22', '-N90', '-N110'])
        v = self.get_version()
        if os.name == 'nt':
            if v and v>='8.0':
                opt.extend(['-f', '-N15'])
        else:
            opt.append('-f')
            if v:
                if v<='4.6':
                    opt.append('-B108')
                else:
                    # Though -N15 is undocumented, it works with
                    # Absoft 8.0 on Linux
                    opt.append('-N15')
        return opt

    def get_flags_f90(self):
        opt = FCompiler.get_flags_f90(self)
        opt.extend(["-YCFRL=1", "-YCOM_NAMES=LCS", "-YCOM_PFX", "-YEXT_PFX",
                    "-YCOM_SFX=_", "-YEXT_SFX=_", "-YEXT_NAMES=LCS"])
        if self.get_version():
            if self.get_version()>'4.6':
                opt.extend(["-YDEALLOC=ALL"])
        return opt

    def get_flags_fix(self):
        opt = FCompiler.get_flags_fix(self)
        opt.extend(["-YCFRL=1", "-YCOM_NAMES=LCS", "-YCOM_PFX", "-YEXT_PFX",
                    "-YCOM_SFX=_", "-YEXT_SFX=_", "-YEXT_NAMES=LCS"])
        opt.extend(["-f", "fixed"])
        return opt

    def get_flags_opt(self):
        opt = ['-O']
        return opt
||||
if __name__ == '__main__':
    # Manual smoke test: detect the Absoft compiler and print its version.
    from distutils import log
    log.set_verbosity(2)
    from numpy.distutils import customized_fcompiler
    print(customized_fcompiler(compiler='absoft').get_version())
@@ -0,0 +1,126 @@
|
||||
|
||||
#http://www.compaq.com/fortran/docs/
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from numpy.distutils.fcompiler import FCompiler
|
||||
from numpy.distutils.compat import get_exception
|
||||
from distutils.errors import DistutilsPlatformError
|
||||
|
||||
compilers = ['CompaqFCompiler']
if os.name != 'posix' or sys.platform[:6] == 'cygwin' :
    # Otherwise we'd get a false positive on posix systems with
    # case-insensitive filesystems (like darwin), because we'll pick
    # up /bin/df
    compilers.append('CompaqVisualFCompiler')


class CompaqFCompiler(FCompiler):
    """numpy.distutils description of the Compaq Fortran compiler
    (Tru64/Linux command-line variant)."""

    compiler_type = 'compaq'
    description = 'Compaq Fortran Compiler'
    version_pattern = r'Compaq Fortran (?P<version>[^\s]*).*'

    # On Linux the driver is installed as 'fort'; elsewhere it is 'f90'.
    if sys.platform[:5]=='linux':
        fc_exe = 'fort'
    else:
        fc_exe = 'f90'

    executables = {
        'version_cmd'  : ['<F90>', "-version"],
        'compiler_f77' : [fc_exe, "-f77rtl", "-fixed"],
        'compiler_fix' : [fc_exe, "-fixed"],
        'compiler_f90' : [fc_exe],
        'linker_so'    : ['<F90>'],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"]
        }

    module_dir_switch = '-module '  # not tested
    module_include_switch = '-I'

    def get_flags(self):
        return ['-assume no2underscore', '-nomixed_str_len_arg']
    def get_flags_debug(self):
        return ['-g', '-check bounds']
    def get_flags_opt(self):
        return ['-O4', '-align dcommons', '-assume bigarrays',
                '-assume nozsize', '-math_library fast']
    def get_flags_arch(self):
        return ['-arch host', '-tune host']
    def get_flags_linker_so(self):
        if sys.platform[:5]=='linux':
            return ['-shared']
        # Tru64 linker: don't fail on symbols resolved at load time.
        return ['-shared', '-Wl,-expect_unresolved,*']
|
||||
class CompaqVisualFCompiler(FCompiler):
    """numpy.distutils description of DIGITAL/Compaq Visual Fortran on
    Windows, driven through the MSVC toolchain for archiving."""

    compiler_type = 'compaqv'
    description = 'DIGITAL or Compaq Visual Fortran Compiler'
    version_pattern = (r'(DIGITAL|Compaq) Visual Fortran Optimizing Compiler'
                       r' Version (?P<version>[^\s]*).*')

    compile_switch = '/compile_only'
    object_switch = '/object:'
    library_switch = '/OUT:'      #No space after /OUT:!

    static_lib_extension = ".lib"
    static_lib_format = "%s%s"
    module_dir_switch = '/module:'
    module_include_switch = '/I'

    ar_exe = 'lib.exe'
    fc_exe = 'DF'

    # At class-definition time, try to locate MSVC's lib.exe so the archiver
    # matches the installed Visual Studio; known-benign failures are ignored,
    # anything unexpected is re-raised.
    if sys.platform=='win32':
        from numpy.distutils.msvccompiler import MSVCCompiler

        try:
            m = MSVCCompiler()
            m.initialize()
            ar_exe = m.lib
        except DistutilsPlatformError:
            pass
        except AttributeError:
            msg = get_exception()
            if '_MSVCCompiler__root' in str(msg):
                print('Ignoring "%s" (I think it is msvccompiler.py bug)' % (msg))
            else:
                raise
        except IOError:
            e = get_exception()
            if not "vcvarsall.bat" in str(e):
                print("Unexpected IOError in", __file__)
                raise e
        except ValueError:
            e = get_exception()
            if not "path']" in str(e):
                print("Unexpected ValueError in", __file__)
                raise e

    executables = {
        'version_cmd'  : ['<F90>', "/what"],
        'compiler_f77' : [fc_exe, "/f77rtl", "/fixed"],
        'compiler_fix' : [fc_exe, "/fixed"],
        'compiler_f90' : [fc_exe],
        'linker_so'    : ['<F90>'],
        'archiver'     : [ar_exe, "/OUT:"],
        'ranlib'       : None
        }

    def get_flags(self):
        return ['/nologo', '/MD', '/WX', '/iface=(cref,nomixed_str_len_arg)',
                '/names:lowercase', '/assume:underscore']
    def get_flags_opt(self):
        return ['/Ox', '/fast', '/optimize:5', '/unroll:0', '/math_library:fast']
    def get_flags_arch(self):
        return ['/threads']
    def get_flags_debug(self):
        return ['/debug']
||||
|
||||
if __name__ == '__main__':
    # Manual smoke test: detect the Compaq compiler and print its version.
    from distutils import log
    log.set_verbosity(2)
    from numpy.distutils import customized_fcompiler
    print(customized_fcompiler(compiler='compaq').get_version())
||||
@@ -0,0 +1,92 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import os
|
||||
import warnings
|
||||
from distutils.dist import Distribution
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
class EnvironmentConfig(object):
    """Resolve configuration variables from three layered sources: a hook
    (compiler default), an environment variable, and a distutils config
    section — later sources override earlier ones in _get_var."""

    def __init__(self, distutils_section='ALL', **kw):
        # Each keyword maps a variable name to a 5-tuple:
        # (hook, envvar, confvar, convert, append) — see _get_var.
        self._distutils_section = distutils_section
        self._conf_keys = kw
        self._conf = None           # distutils option dict, set by use_distribution
        self._hook_handler = None   # callable(name, hook), set by clone

    def dump_variable(self, name):
        """Print the hook/environ/config values of one variable (debugging)."""
        conf_desc = self._conf_keys[name]
        hook, envvar, confvar, convert, append = conf_desc
        if not convert:
            convert = lambda x : x
        print('%s.%s:' % (self._distutils_section, name))
        v = self._hook_handler(name, hook)
        print('  hook   : %s' % (convert(v),))
        if envvar:
            v = os.environ.get(envvar, None)
            print('  environ: %s' % (convert(v),))
        if confvar and self._conf:
            v = self._conf.get(confvar, (None, None))[1]
            print('  config : %s' % (convert(v),))

    def dump_variables(self):
        """Dump every configured variable."""
        for name in self._conf_keys:
            self.dump_variable(name)

    def __getattr__(self, name):
        # Attribute access resolves through the layered lookup; unknown
        # names surface as AttributeError as usual.
        try:
            conf_desc = self._conf_keys[name]
        except KeyError:
            raise AttributeError(name)
        return self._get_var(name, conf_desc)

    def get(self, name, default=None):
        """Dict-style lookup with a default for unknown or unset names."""
        try:
            conf_desc = self._conf_keys[name]
        except KeyError:
            return default
        var = self._get_var(name, conf_desc)
        if var is None:
            var = default
        return var

    def _get_var(self, name, conf_desc):
        # Resolution order: hook value, then environment variable (appended
        # to the hook value only when `append` is set AND the user opted in
        # via NPY_DISTUTILS_APPEND_FLAGS=1), then distutils config (wins).
        hook, envvar, confvar, convert, append = conf_desc
        if convert is None:
            convert = lambda x: x
        var = self._hook_handler(name, hook)
        if envvar is not None:
            envvar_contents = os.environ.get(envvar)
            if envvar_contents is not None:
                envvar_contents = convert(envvar_contents)
                if var and append:
                    if os.environ.get('NPY_DISTUTILS_APPEND_FLAGS', '0') == '1':
                        var.extend(envvar_contents)
                    else:
                        # Historical behavior: env var replaces existing
                        # flags; warn once about the future default change.
                        var = envvar_contents
                        if 'NPY_DISTUTILS_APPEND_FLAGS' not in os.environ.keys():
                            msg = "{} is used as is, not appended ".format(envvar) + \
                                  "to flags already defined " + \
                                  "by numpy.distutils! Use NPY_DISTUTILS_APPEND_FLAGS=1 " + \
                                  "to obtain appending behavior instead (this " + \
                                  "behavior will become default in a future release)."
                            warnings.warn(msg, UserWarning, stacklevel=3)
                else:
                    var = envvar_contents
        if confvar is not None and self._conf:
            if confvar in self._conf:
                source, confvar_contents = self._conf[confvar]
                var = convert(confvar_contents)
        return var


    def clone(self, hook_handler):
        """Return a copy of this config bound to a new hook handler."""
        ec = self.__class__(distutils_section=self._distutils_section,
                            **self._conf_keys)
        ec._hook_handler = hook_handler
        return ec

    def use_distribution(self, dist):
        """Attach a distutils Distribution (or a pre-built option dict)."""
        if isinstance(dist, Distribution):
            self._conf = dist.get_option_dict(self._distutils_section)
        else:
            self._conf = dist
||||
@@ -0,0 +1,44 @@
|
||||
# http://g95.sourceforge.net/
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
from numpy.distutils.fcompiler import FCompiler
|
||||
|
||||
compilers = ['G95FCompiler']


class G95FCompiler(FCompiler):
    # numpy.distutils description of the G95 Fortran compiler.
    compiler_type = 'g95'
    description = 'G95 Fortran Compiler'

    # version_pattern = r'G95 \((GCC (?P<gccversion>[\d.]+)|.*?) \(g95!\) (?P<version>.*)\).*'
    # $ g95 --version
    # G95 (GCC 4.0.3 (g95!) May 22 2006)

    version_pattern = r'G95 \((GCC (?P<gccversion>[\d.]+)|.*?) \(g95 (?P<version>.*)!\) (?P<date>.*)\).*'
    # $ g95 --version
    # G95 (GCC 4.0.3 (g95 0.90!) Aug 22 2006)

    executables = {
        'version_cmd'  : ["<F90>", "--version"],
        'compiler_f77' : ["g95", "-ffixed-form"],
        'compiler_fix' : ["g95", "-ffixed-form"],
        'compiler_f90' : ["g95"],
        'linker_so'    : ["<F90>", "-shared"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"]
        }
    pic_flags = ['-fpic']
    module_dir_switch = '-fmod='
    module_include_switch = '-I'

    def get_flags(self):
        return ['-fno-second-underscore']
    def get_flags_opt(self):
        return ['-O']
    def get_flags_debug(self):
        return ['-g']
||||
if __name__ == '__main__':
    # Manual smoke test: detect g95 and print its version.
    from distutils import log
    from numpy.distutils import customized_fcompiler
    log.set_verbosity(2)
    print(customized_fcompiler('g95').get_version())
||||
@@ -0,0 +1,564 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
import platform
|
||||
import tempfile
|
||||
import hashlib
|
||||
import base64
|
||||
import subprocess
|
||||
from subprocess import Popen, PIPE, STDOUT
|
||||
from numpy.distutils.exec_command import filepath_from_subprocess_output
|
||||
from numpy.distutils.fcompiler import FCompiler
|
||||
from numpy.distutils.compat import get_exception
|
||||
from numpy.distutils.system_info import system_info
|
||||
|
||||
compilers = ['GnuFCompiler', 'Gnu95FCompiler']

# Extracts the target triple from gcc/gfortran `-v` output
# (used by Gnu95FCompiler.get_target).
TARGET_R = re.compile(r"Target: ([a-zA-Z0-9_\-]*)")

# XXX: handle cross compilation
||||
def is_win64():
    """Return True when running a 64-bit Python interpreter on Windows."""
    if sys.platform != "win32":
        return False
    return platform.architecture()[0] == "64bit"
||||
|
||||
|
||||
# Both branches are currently empty; the win64 branch once carried
# "-fno-leading-underscore" (kept commented for reference).
if is_win64():
    #_EXTRAFLAGS = ["-fno-leading-underscore"]
    _EXTRAFLAGS = []
else:
    _EXTRAFLAGS = []
|
||||
|
||||
class GnuFCompiler(FCompiler):
    """numpy.distutils description of g77 (GNU Fortran 77); also hosts the
    shared GNU version-string parsing used by Gnu95FCompiler."""

    compiler_type = 'gnu'
    compiler_aliases = ('g77', )
    description = 'GNU Fortran 77 compiler'

    def gnu_version_match(self, version_string):
        """Handle the different versions of GNU fortran compilers

        Returns ('g77'|'gfortran', version) or raises ValueError when no
        version can be extracted from *version_string*.
        """
        # Strip warning(s) that may be emitted by gfortran
        while version_string.startswith('gfortran: warning'):
            version_string = version_string[version_string.find('\n') + 1:]

        # Gfortran versions from after 2010 will output a simple string
        # (usually "x.y", "x.y.z" or "x.y.z-q") for ``-dumpversion``; older
        # gfortrans may still return long version strings (``-dumpversion`` was
        # an alias for ``--version``)
        if len(version_string) <= 20:
            # Try to find a valid version string
            m = re.search(r'([0-9.]+)', version_string)
            if m:
                # g77 provides a longer version string that starts with GNU
                # Fortran
                if version_string.startswith('GNU Fortran'):
                    return ('g77', m.group(1))

                # gfortran only outputs a version string such as #.#.#, so check
                # if the match is at the start of the string
                elif m.start() == 0:
                    return ('gfortran', m.group(1))
        else:
            # Output probably from --version, try harder:
            m = re.search(r'GNU Fortran\s+95.*?([0-9-.]+)', version_string)
            if m:
                return ('gfortran', m.group(1))
            m = re.search(
                r'GNU Fortran.*?\-?([0-9-.]+\.[0-9-.]+)', version_string)
            if m:
                v = m.group(1)
                if v.startswith('0') or v.startswith('2') or v.startswith('3'):
                    # the '0' is for early g77's
                    return ('g77', v)
                else:
                    # at some point in the 4.x series, the ' 95' was dropped
                    # from the version string
                    return ('gfortran', v)

        # If still nothing, raise an error to make the problem easy to find.
        err = 'A valid Fortran version was not found in this string:\n'
        raise ValueError(err + version_string)

    def version_match(self, version_string):
        # This class only claims g77; gfortran is matched by Gnu95FCompiler.
        v = self.gnu_version_match(version_string)
        if not v or v[0] != 'g77':
            return None
        return v[1]

    possible_executables = ['g77', 'f77']
    executables = {
        'version_cmd'  : [None, "-dumpversion"],
        'compiler_f77' : [None, "-g", "-Wall", "-fno-second-underscore"],
        'compiler_f90' : None,  # Use --fcompiler=gnu95 for f90 codes
        'compiler_fix' : None,
        'linker_so'    : [None, "-g", "-Wall"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"],
        'linker_exe'   : [None, "-g", "-Wall"]
    }
    module_dir_switch = None
    module_include_switch = None

    # Cygwin: f771: warning: -fPIC ignored for target (all code is
    # position independent)
    if os.name != 'nt' and sys.platform != 'cygwin':
        pic_flags = ['-fPIC']

    # use -mno-cygwin for g77 when Python is not Cygwin-Python
    if sys.platform == 'win32':
        for key in ['version_cmd', 'compiler_f77', 'linker_so', 'linker_exe']:
            executables[key].append('-mno-cygwin')

    g2c = 'g2c'
    suggested_f90_compiler = 'gnu95'

    def get_flags_linker_so(self):
        opt = self.linker_so[1:]
        if sys.platform == 'darwin':
            target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', None)
            # If MACOSX_DEPLOYMENT_TARGET is set, we simply trust the value
            # and leave it alone. But, distutils will complain if the
            # environment's value is different from the one in the Python
            # Makefile used to build Python. We let disutils handle this
            # error checking.
            if not target:
                # If MACOSX_DEPLOYMENT_TARGET is not set in the environment,
                # we try to get it first from the Python Makefile and then we
                # fall back to setting it to 10.3 to maximize the set of
                # versions we can work with. This is a reasonable default
                # even when using the official Python dist and those derived
                # from it.
                import distutils.sysconfig as sc
                g = {}
                try:
                    get_makefile_filename = sc.get_makefile_filename
                except AttributeError:
                    pass  # i.e. PyPy
                else:
                    filename = get_makefile_filename()
                    sc.parse_makefile(filename, g)
                target = g.get('MACOSX_DEPLOYMENT_TARGET', '10.3')
                os.environ['MACOSX_DEPLOYMENT_TARGET'] = target
                if target == '10.3':
                    s = 'Env. variable MACOSX_DEPLOYMENT_TARGET set to 10.3'
                    warnings.warn(s, stacklevel=2)

            opt.extend(['-undefined', 'dynamic_lookup', '-bundle'])
        else:
            opt.append("-shared")
        if sys.platform.startswith('sunos'):
            # SunOS often has dynamically loaded symbols defined in the
            # static library libg2c.a The linker doesn't like this. To
            # ignore the problem, use the -mimpure-text flag. It isn't
            # the safest thing, but seems to work. 'man gcc' says:
            # ".. Instead of using -mimpure-text, you should compile all
            # source code with -fpic or -fPIC."
            opt.append('-mimpure-text')
        return opt

    def get_libgcc_dir(self):
        """Return the directory containing libgcc, or None if unavailable."""
        try:
            output = subprocess.check_output(self.compiler_f77 +
                                             ['-print-libgcc-file-name'])
        except (OSError, subprocess.CalledProcessError):
            pass
        else:
            output = filepath_from_subprocess_output(output)
            return os.path.dirname(output)
        return None

    def get_libgfortran_dir(self):
        """Return the directory containing libgfortran, or None."""
        if sys.platform[:5] == 'linux':
            libgfortran_name = 'libgfortran.so'
        elif sys.platform == 'darwin':
            libgfortran_name = 'libgfortran.dylib'
        else:
            libgfortran_name = None

        libgfortran_dir = None
        if libgfortran_name:
            find_lib_arg = ['-print-file-name={0}'.format(libgfortran_name)]
            try:
                output = subprocess.check_output(
                    self.compiler_f77 + find_lib_arg)
            except (OSError, subprocess.CalledProcessError):
                pass
            else:
                output = filepath_from_subprocess_output(output)
                libgfortran_dir = os.path.dirname(output)
        return libgfortran_dir

    def get_library_dirs(self):
        opt = []
        if sys.platform[:5] != 'linux':
            d = self.get_libgcc_dir()
            if d:
                # if windows and not cygwin, libg2c lies in a different folder
                if sys.platform == 'win32' and not d.startswith('/usr/lib'):
                    d = os.path.normpath(d)
                    path = os.path.join(d, "lib%s.a" % self.g2c)
                    if not os.path.exists(path):
                        root = os.path.join(d, *((os.pardir, ) * 4))
                        d2 = os.path.abspath(os.path.join(root, 'lib'))
                        path = os.path.join(d2, "lib%s.a" % self.g2c)
                        if os.path.exists(path):
                            opt.append(d2)
                opt.append(d)
        # For Macports / Linux, libgfortran and libgcc are not co-located
        lib_gfortran_dir = self.get_libgfortran_dir()
        if lib_gfortran_dir:
            opt.append(lib_gfortran_dir)
        return opt

    def get_libraries(self):
        opt = []
        d = self.get_libgcc_dir()
        if d is not None:
            # Prefer the PIC variant of libg2c when present.
            g2c = self.g2c + '-pic'
            f = self.static_lib_format % (g2c, self.static_lib_extension)
            if not os.path.isfile(os.path.join(d, f)):
                g2c = self.g2c
        else:
            g2c = self.g2c

        if g2c is not None:
            opt.append(g2c)
        c_compiler = self.c_compiler
        if sys.platform == 'win32' and c_compiler and \
           c_compiler.compiler_type == 'msvc':
            opt.append('gcc')
        if sys.platform == 'darwin':
            opt.append('cc_dynamic')
        return opt

    def get_flags_debug(self):
        return ['-g']

    def get_flags_opt(self):
        v = self.get_version()
        if v and v <= '3.3.3':
            # With this compiler version building Fortran BLAS/LAPACK
            # with -O3 caused failures in lib.lapack heevr,syevr tests.
            opt = ['-O2']
        else:
            opt = ['-O3']
        opt.append('-funroll-loops')
        return opt

    def _c_arch_flags(self):
        """ Return detected arch flags from CFLAGS """
        from distutils import sysconfig
        try:
            cflags = sysconfig.get_config_vars()['CFLAGS']
        except KeyError:
            return []
        arch_re = re.compile(r"-arch\s+(\w+)")
        arch_flags = []
        for arch in arch_re.findall(cflags):
            arch_flags += ['-arch', arch]
        return arch_flags

    def get_flags_arch(self):
        return []

    def runtime_library_dir_option(self, dir):
        if sys.platform[:3] == 'aix' or sys.platform == 'win32':
            # Linux/Solaris/Unix support RPATH, Windows and AIX do not
            raise NotImplementedError

        # TODO: could use -Xlinker here, if it's supported
        assert "," not in dir

        sep = ',' if sys.platform == 'darwin' else '='
        return '-Wl,-rpath%s%s' % (sep, dir)
||||
|
||||
class Gnu95FCompiler(GnuFCompiler):
|
||||
compiler_type = 'gnu95'
|
||||
compiler_aliases = ('gfortran', )
|
||||
description = 'GNU Fortran 95 compiler'
|
||||
|
||||
def version_match(self, version_string):
|
||||
v = self.gnu_version_match(version_string)
|
||||
if not v or v[0] != 'gfortran':
|
||||
return None
|
||||
v = v[1]
|
||||
if v >= '4.':
|
||||
# gcc-4 series releases do not support -mno-cygwin option
|
||||
pass
|
||||
else:
|
||||
# use -mno-cygwin flag for gfortran when Python is not
|
||||
# Cygwin-Python
|
||||
if sys.platform == 'win32':
|
||||
for key in [
|
||||
'version_cmd', 'compiler_f77', 'compiler_f90',
|
||||
'compiler_fix', 'linker_so', 'linker_exe'
|
||||
]:
|
||||
self.executables[key].append('-mno-cygwin')
|
||||
return v
|
||||
|
||||
possible_executables = ['gfortran', 'f95']
|
||||
executables = {
|
||||
'version_cmd' : ["<F90>", "-dumpversion"],
|
||||
'compiler_f77' : [None, "-Wall", "-g", "-ffixed-form",
|
||||
"-fno-second-underscore"] + _EXTRAFLAGS,
|
||||
'compiler_f90' : [None, "-Wall", "-g",
|
||||
"-fno-second-underscore"] + _EXTRAFLAGS,
|
||||
'compiler_fix' : [None, "-Wall", "-g","-ffixed-form",
|
||||
"-fno-second-underscore"] + _EXTRAFLAGS,
|
||||
'linker_so' : ["<F90>", "-Wall", "-g"],
|
||||
'archiver' : ["ar", "-cr"],
|
||||
'ranlib' : ["ranlib"],
|
||||
'linker_exe' : [None, "-Wall"]
|
||||
}
|
||||
|
||||
module_dir_switch = '-J'
|
||||
module_include_switch = '-I'
|
||||
|
||||
if sys.platform[:3] == 'aix':
|
||||
executables['linker_so'].append('-lpthread')
|
||||
if platform.architecture()[0][:2] == '64':
|
||||
for key in ['compiler_f77', 'compiler_f90','compiler_fix','linker_so', 'linker_exe']:
|
||||
executables[key].append('-maix64')
|
||||
|
||||
g2c = 'gfortran'
|
||||
|
||||
def _universal_flags(self, cmd):
|
||||
"""Return a list of -arch flags for every supported architecture."""
|
||||
if not sys.platform == 'darwin':
|
||||
return []
|
||||
arch_flags = []
|
||||
# get arches the C compiler gets.
|
||||
c_archs = self._c_arch_flags()
|
||||
if "i386" in c_archs:
|
||||
c_archs[c_archs.index("i386")] = "i686"
|
||||
# check the arches the Fortran compiler supports, and compare with
|
||||
# arch flags from C compiler
|
||||
for arch in ["ppc", "i686", "x86_64", "ppc64"]:
|
||||
if _can_target(cmd, arch) and arch in c_archs:
|
||||
arch_flags.extend(["-arch", arch])
|
||||
return arch_flags
|
||||
|
||||
def get_flags(self):
|
||||
flags = GnuFCompiler.get_flags(self)
|
||||
arch_flags = self._universal_flags(self.compiler_f90)
|
||||
if arch_flags:
|
||||
flags[:0] = arch_flags
|
||||
return flags
|
||||
|
||||
def get_flags_linker_so(self):
|
||||
flags = GnuFCompiler.get_flags_linker_so(self)
|
||||
arch_flags = self._universal_flags(self.linker_so)
|
||||
if arch_flags:
|
||||
flags[:0] = arch_flags
|
||||
return flags
|
||||
|
||||
def get_library_dirs(self):
|
||||
opt = GnuFCompiler.get_library_dirs(self)
|
||||
if sys.platform == 'win32':
|
||||
c_compiler = self.c_compiler
|
||||
if c_compiler and c_compiler.compiler_type == "msvc":
|
||||
target = self.get_target()
|
||||
if target:
|
||||
d = os.path.normpath(self.get_libgcc_dir())
|
||||
root = os.path.join(d, *((os.pardir, ) * 4))
|
||||
path = os.path.join(root, "lib")
|
||||
mingwdir = os.path.normpath(path)
|
||||
if os.path.exists(os.path.join(mingwdir, "libmingwex.a")):
|
||||
opt.append(mingwdir)
|
||||
# For Macports / Linux, libgfortran and libgcc are not co-located
|
||||
lib_gfortran_dir = self.get_libgfortran_dir()
|
||||
if lib_gfortran_dir:
|
||||
opt.append(lib_gfortran_dir)
|
||||
return opt
|
||||
|
||||
def get_libraries(self):
|
||||
opt = GnuFCompiler.get_libraries(self)
|
||||
if sys.platform == 'darwin':
|
||||
opt.remove('cc_dynamic')
|
||||
if sys.platform == 'win32':
|
||||
c_compiler = self.c_compiler
|
||||
if c_compiler and c_compiler.compiler_type == "msvc":
|
||||
if "gcc" in opt:
|
||||
i = opt.index("gcc")
|
||||
opt.insert(i + 1, "mingwex")
|
||||
opt.insert(i + 1, "mingw32")
|
||||
c_compiler = self.c_compiler
|
||||
if c_compiler and c_compiler.compiler_type == "msvc":
|
||||
return []
|
||||
else:
|
||||
pass
|
||||
return opt
|
||||
|
||||
def get_target(self):
|
||||
try:
|
||||
output = subprocess.check_output(self.compiler_f77 + ['-v'])
|
||||
except (OSError, subprocess.CalledProcessError):
|
||||
pass
|
||||
else:
|
||||
output = filepath_from_subprocess_output(output)
|
||||
m = TARGET_R.search(output)
|
||||
if m:
|
||||
return m.group(1)
|
||||
return ""
|
||||
|
||||
def _hash_files(self, filenames):
|
||||
h = hashlib.sha1()
|
||||
for fn in filenames:
|
||||
with open(fn, 'rb') as f:
|
||||
while True:
|
||||
block = f.read(131072)
|
||||
if not block:
|
||||
break
|
||||
h.update(block)
|
||||
text = base64.b32encode(h.digest())
|
||||
if sys.version_info[0] >= 3:
|
||||
text = text.decode('ascii')
|
||||
return text.rstrip('=')
|
||||
|
||||
def _link_wrapper_lib(self, objects, output_dir, extra_dll_dir,
|
||||
chained_dlls, is_archive):
|
||||
"""Create a wrapper shared library for the given objects
|
||||
|
||||
Return an MSVC-compatible lib
|
||||
"""
|
||||
|
||||
c_compiler = self.c_compiler
|
||||
if c_compiler.compiler_type != "msvc":
|
||||
raise ValueError("This method only supports MSVC")
|
||||
|
||||
object_hash = self._hash_files(list(objects) + list(chained_dlls))
|
||||
|
||||
if is_win64():
|
||||
tag = 'win_amd64'
|
||||
else:
|
||||
tag = 'win32'
|
||||
|
||||
basename = 'lib' + os.path.splitext(
|
||||
os.path.basename(objects[0]))[0][:8]
|
||||
root_name = basename + '.' + object_hash + '.gfortran-' + tag
|
||||
dll_name = root_name + '.dll'
|
||||
def_name = root_name + '.def'
|
||||
lib_name = root_name + '.lib'
|
||||
dll_path = os.path.join(extra_dll_dir, dll_name)
|
||||
def_path = os.path.join(output_dir, def_name)
|
||||
lib_path = os.path.join(output_dir, lib_name)
|
||||
|
||||
if os.path.isfile(lib_path):
|
||||
# Nothing to do
|
||||
return lib_path, dll_path
|
||||
|
||||
if is_archive:
|
||||
objects = (["-Wl,--whole-archive"] + list(objects) +
|
||||
["-Wl,--no-whole-archive"])
|
||||
self.link_shared_object(
|
||||
objects,
|
||||
dll_name,
|
||||
output_dir=extra_dll_dir,
|
||||
extra_postargs=list(chained_dlls) + [
|
||||
'-Wl,--allow-multiple-definition',
|
||||
'-Wl,--output-def,' + def_path,
|
||||
'-Wl,--export-all-symbols',
|
||||
'-Wl,--enable-auto-import',
|
||||
'-static',
|
||||
'-mlong-double-64',
|
||||
])
|
||||
|
||||
# No PowerPC!
|
||||
if is_win64():
|
||||
specifier = '/MACHINE:X64'
|
||||
else:
|
||||
specifier = '/MACHINE:X86'
|
||||
|
||||
# MSVC specific code
|
||||
lib_args = ['/def:' + def_path, '/OUT:' + lib_path, specifier]
|
||||
if not c_compiler.initialized:
|
||||
c_compiler.initialize()
|
||||
c_compiler.spawn([c_compiler.lib] + lib_args)
|
||||
|
||||
return lib_path, dll_path
|
||||
|
||||
def can_ccompiler_link(self, compiler):
|
||||
# MSVC cannot link objects compiled by GNU fortran
|
||||
return compiler.compiler_type not in ("msvc", )
|
||||
|
||||
    def wrap_unlinkable_objects(self, objects, output_dir, extra_dll_dir):
        """
        Convert a set of object files that are not compatible with the default
        linker, to a file that is compatible.

        Parameters
        ----------
        objects : list of str
            Paths of object files / ``.a`` archives produced by gfortran.
        output_dir : str
            Directory receiving the generated ``.def``/``.lib`` files.
        extra_dll_dir : str
            Directory receiving the generated wrapper DLLs.

        Returns
        -------
        list of str
            MSVC-compatible ``.lib`` paths to pass to the linker instead of
            the original objects.

        Raises
        ------
        ValueError
            If the configured C compiler is not MSVC.
        """
        if self.c_compiler.compiler_type == "msvc":
            # Compile a DLL and return the lib for the DLL as
            # the object. Also keep track of previous DLLs that
            # we have compiled so that we can link against them.

            # If there are .a archives, assume they are self-contained
            # static libraries, and build separate DLLs for each
            archives = []
            plain_objects = []
            for obj in objects:
                if obj.lower().endswith('.a'):
                    archives.append(obj)
                else:
                    plain_objects.append(obj)

            chained_libs = []
            chained_dlls = []
            # Walk archives back-to-front and insert results at the front so
            # that each archive's wrapper DLL is linked against the DLLs built
            # for the archives that follow it in the original ordering.
            for archive in archives[::-1]:
                lib, dll = self._link_wrapper_lib(
                    [archive],
                    output_dir,
                    extra_dll_dir,
                    chained_dlls=chained_dlls,
                    is_archive=True)
                chained_libs.insert(0, lib)
                chained_dlls.insert(0, dll)

            if not plain_objects:
                return chained_libs

            # The loose objects get one wrapper DLL of their own, linked
            # against all of the archive DLLs built above.
            lib, dll = self._link_wrapper_lib(
                plain_objects,
                output_dir,
                extra_dll_dir,
                chained_dlls=chained_dlls,
                is_archive=False)
            return [lib] + chained_libs
        else:
            raise ValueError("Unsupported C compiler")
|
||||
|
||||
|
||||
def _can_target(cmd, arch):
|
||||
"""Return true if the architecture supports the -arch flag"""
|
||||
newcmd = cmd[:]
|
||||
fid, filename = tempfile.mkstemp(suffix=".f")
|
||||
os.close(fid)
|
||||
try:
|
||||
d = os.path.dirname(filename)
|
||||
output = os.path.splitext(filename)[0] + ".o"
|
||||
try:
|
||||
newcmd.extend(["-arch", arch, "-c", filename])
|
||||
p = Popen(newcmd, stderr=STDOUT, stdout=PIPE, cwd=d)
|
||||
p.communicate()
|
||||
return p.returncode == 0
|
||||
finally:
|
||||
if os.path.exists(output):
|
||||
os.remove(output)
|
||||
finally:
|
||||
os.remove(filename)
|
||||
return False
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Manual smoke test: print the detected gnu (and, if present, g95)
    # compiler versions with verbose logging.
    from distutils import log
    from numpy.distutils import customized_fcompiler
    log.set_verbosity(2)

    print(customized_fcompiler('gnu').get_version())
    try:
        print(customized_fcompiler('g95').get_version())
    except Exception:
        # NOTE(review): get_exception is presumably imported at module top
        # (not visible in this chunk) -- confirm before relying on it.
        print(get_exception())
|
||||
@@ -0,0 +1,43 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
from numpy.distutils.fcompiler import FCompiler
|
||||
|
||||
compilers = ['HPUXFCompiler']
|
||||
|
||||
class HPUXFCompiler(FCompiler):
    """FCompiler subclass describing the HP-UX ``f90`` compiler."""

    compiler_type = 'hpux'
    description = 'HP Fortran 90 Compiler'
    version_pattern = r'HP F90 (?P<version>[^\s*,]*)'

    executables = {
        'version_cmd'  : ["f90", "+version"],
        'compiler_f77' : ["f90"],
        'compiler_fix' : ["f90"],
        'compiler_f90' : ["f90"],
        'linker_so'    : ["ld", "-b"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"]
        }
    module_dir_switch = None #XXX: fix me
    module_include_switch = None #XXX: fix me
    pic_flags = ['+Z']

    def get_flags(self):
        # PIC plus portable unsigned semantics (+ppu) and 64-bit data (+DD64).
        return self.pic_flags + ['+ppu', '+DD64']

    def get_flags_opt(self):
        return ['-O3']

    def get_libraries(self):
        return ['m']

    def get_library_dirs(self):
        opt = ['/usr/lib/hpux64']
        return opt

    def get_version(self, force=0, ok_status=None):
        """Probe the compiler version.

        ``ok_status`` defaults to [256, 0, 1]; a ``None`` sentinel is used
        instead of a mutable default argument.
        """
        # XXX status==256 may indicate 'unrecognized option' or
        # 'no input file'. So, version_cmd needs more work.
        if ok_status is None:
            ok_status = [256, 0, 1]
        return FCompiler.get_version(self, force, ok_status)
|
||||
|
||||
if __name__ == '__main__':
    # Manual smoke test: print the detected HP-UX compiler version.
    from distutils import log
    log.set_verbosity(10)
    from numpy.distutils import customized_fcompiler
    print(customized_fcompiler(compiler='hpux').get_version())
|
||||
@@ -0,0 +1,99 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import subprocess
|
||||
|
||||
from numpy.distutils.fcompiler import FCompiler
|
||||
from numpy.distutils.exec_command import find_executable
|
||||
from numpy.distutils.misc_util import make_temp_file
|
||||
from distutils import log
|
||||
|
||||
compilers = ['IBMFCompiler']
|
||||
|
||||
class IBMFCompiler(FCompiler):
    """FCompiler subclass for the IBM XL Fortran compiler (AIX/Linux/macOS)."""

    compiler_type = 'ibm'
    description = 'IBM XL Fortran Compiler'
    version_pattern = r'(xlf\(1\)\s*|)IBM XL Fortran ((Advanced Edition |)Version |Enterprise Edition V|for AIX, V)(?P<version>[^\s*]*)'
    #IBM XL Fortran Enterprise Edition V10.1 for AIX \nVersion: 10.01.0000.0004

    executables = {
        'version_cmd'  : ["<F77>", "-qversion"],
        'compiler_f77' : ["xlf"],
        'compiler_fix' : ["xlf90", "-qfixed"],
        'compiler_f90' : ["xlf90"],
        'linker_so'    : ["xlf95"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"]
        }

    def get_version(self, *args, **kwds):
        """Return the compiler version, with AIX/Linux fallbacks.

        When the generic probe fails, tries ``lslpp`` on AIX, then the
        ``/etc/opt/ibmcmp/xlf`` install directory layout on Linux.
        """
        version = FCompiler.get_version(self, *args, **kwds)

        if version is None and sys.platform.startswith('aix'):
            # use lslpp to find out xlf version
            lslpp = find_executable('lslpp')
            xlf = find_executable('xlf')
            if os.path.exists(xlf) and os.path.exists(lslpp):
                try:
                    o = subprocess.check_output([lslpp, '-Lc', 'xlfcmp'])
                except (OSError, subprocess.CalledProcessError):
                    pass
                else:
                    # check_output returns bytes on Python 3; decode before
                    # matching with a str pattern (was a TypeError otherwise).
                    m = re.search(r'xlfcmp:(?P<version>\d+([.]\d+)+)',
                                  o.decode('ascii', 'replace'))
                    if m: version = m.group('version')

        xlf_dir = '/etc/opt/ibmcmp/xlf'
        if version is None and os.path.isdir(xlf_dir):
            # linux:
            # If the output of xlf does not contain version info
            # (that's the case with xlf 8.1, for instance) then
            # let's try another method:
            l = sorted(os.listdir(xlf_dir))
            l.reverse()
            # keep only version directories that actually carry a config file
            l = [d for d in l if os.path.isfile(os.path.join(xlf_dir, d, 'xlf.cfg'))]
            if l:
                from distutils.version import LooseVersion
                self.version = version = LooseVersion(l[0])
        return version

    def get_flags(self):
        return ['-qextname']

    def get_flags_debug(self):
        return ['-g']

    def get_flags_linker_so(self):
        """Linker flags; on macOS/Linux also rewrites xlf.cfg so the link
        uses bundle1.o instead of crt1.o, via a temporary config file."""
        opt = []
        if sys.platform=='darwin':
            opt.append('-Wl,-bundle,-flat_namespace,-undefined,suppress')
        else:
            opt.append('-bshared')
        version = self.get_version(ok_status=[0, 40])
        if version is not None:
            if sys.platform.startswith('aix'):
                xlf_cfg = '/etc/xlf.cfg'
            else:
                xlf_cfg = '/etc/opt/ibmcmp/xlf/%s/xlf.cfg' % version
            fo, new_cfg = make_temp_file(suffix='_xlf.cfg')
            log.info('Creating '+new_cfg)
            with open(xlf_cfg, 'r') as fi:
                crt1_match = re.compile(r'\s*crt\s*[=]\s*(?P<path>.*)/crt1.o').match
                for line in fi:
                    m = crt1_match(line)
                    if m:
                        fo.write('crt = %s/bundle1.o\n' % (m.group('path')))
                    else:
                        fo.write(line)
            fo.close()
            opt.append('-F'+new_cfg)
        return opt

    def get_flags_opt(self):
        return ['-O3']
|
||||
|
||||
if __name__ == '__main__':
    # Manual smoke test: print the detected IBM XL Fortran version.
    from numpy.distutils import customized_fcompiler
    log.set_verbosity(2)
    print(customized_fcompiler(compiler='ibm').get_version())
|
||||
@@ -0,0 +1,222 @@
|
||||
# http://developer.intel.com/software/products/compilers/flin/
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import sys
|
||||
|
||||
from numpy.distutils.ccompiler import simple_version_match
|
||||
from numpy.distutils.fcompiler import FCompiler, dummy_fortran_file
|
||||
|
||||
compilers = ['IntelFCompiler', 'IntelVisualFCompiler',
|
||||
'IntelItaniumFCompiler', 'IntelItaniumVisualFCompiler',
|
||||
'IntelEM64VisualFCompiler', 'IntelEM64TFCompiler']
|
||||
|
||||
|
||||
def intel_version_match(type):
    """Build a version matcher anchored on the Intel Fortran banner.

    `type` is a regex alternation distinguishing the compiler flavour
    (e.g. '32-bit|IA-32').
    """
    # Match against the important stuff in the version string
    pattern = r'Intel.*?Fortran.*?(?:%s).*?Version' % (type,)
    return simple_version_match(start=pattern)
|
||||
|
||||
|
||||
class BaseIntelFCompiler(FCompiler):
    """Behaviour shared by all Intel Fortran compiler flavours."""

    def update_executables(self):
        # The version is probed by compiling a throwaway dummy Fortran file
        # with -V; a fresh dummy file name is generated on each call.
        f = dummy_fortran_file()
        self.executables['version_cmd'] = ['<F77>', '-FI', '-V', '-c',
                                           f + '.f', '-o', f + '.o']

    def runtime_library_dir_option(self, dir):
        # TODO: could use -Xlinker here, if it's supported
        assert "," not in dir

        return '-Wl,-rpath=%s' % dir
|
||||
|
||||
|
||||
class IntelFCompiler(BaseIntelFCompiler):
    """Intel Fortran (ifort/ifc) for 32-bit targets."""

    compiler_type = 'intel'
    compiler_aliases = ('ifort',)
    description = 'Intel Fortran Compiler for 32-bit apps'
    version_match = intel_version_match('32-bit|IA-32')

    possible_executables = ['ifort', 'ifc']

    executables = {
        'version_cmd'  : None,          # set by update_executables
        'compiler_f77' : [None, "-72", "-w90", "-w95"],
        'compiler_f90' : [None],
        'compiler_fix' : [None, "-FI"],
        'linker_so'    : ["<F90>", "-shared"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"]
        }

    pic_flags = ['-fPIC']
    module_dir_switch = '-module '  # Don't remove ending space!
    module_include_switch = '-I'

    def get_flags_free(self):
        return ['-FR']

    def get_flags(self):
        return ['-fPIC']

    def get_flags_opt(self):  # Scipy test failures with -O2
        # Pre-15 ifort spells the OpenMP flag '-openmp'; later '-qopenmp'.
        # NOTE(review): the version comparison is lexicographic on strings --
        # confirm it behaves for multi-digit major versions.
        v = self.get_version()
        mpopt = 'openmp' if v and v < '15' else 'qopenmp'
        return ['-fp-model strict -O1 -{}'.format(mpopt)]

    def get_flags_arch(self):
        return []

    def get_flags_linker_so(self):
        opt = FCompiler.get_flags_linker_so(self)
        v = self.get_version()
        if v and v >= '8.0':
            opt.append('-nofor_main')
        if sys.platform == 'darwin':
            # Here, it's -dynamiclib: replace '-shared' in place (same index)
            # so relative flag ordering is preserved.
            try:
                idx = opt.index('-shared')
                opt.remove('-shared')
            except ValueError:
                idx = 0
            opt[idx:idx] = ['-dynamiclib', '-Wl,-undefined,dynamic_lookup']
        return opt
|
||||
|
||||
|
||||
class IntelItaniumFCompiler(IntelFCompiler):
    """Intel Fortran for Itanium (IA-64) targets; differs from the 32-bit
    flavour only in banner matching, candidate executables and f77 flags."""
    compiler_type = 'intele'
    compiler_aliases = ()
    description = 'Intel Fortran Compiler for Itanium apps'

    version_match = intel_version_match('Itanium|IA-64')

    possible_executables = ['ifort', 'efort', 'efc']

    executables = {
        'version_cmd'  : None,
        'compiler_f77' : [None, "-FI", "-w90", "-w95"],
        'compiler_fix' : [None, "-FI"],
        'compiler_f90' : [None],
        'linker_so'    : ['<F90>', "-shared"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"]
        }
|
||||
|
||||
|
||||
class IntelEM64TFCompiler(IntelFCompiler):
    """Intel Fortran for 64-bit (EM64T / Intel 64) targets."""
    compiler_type = 'intelem'
    compiler_aliases = ()
    description = 'Intel Fortran Compiler for 64-bit apps'

    version_match = intel_version_match('EM64T-based|Intel\\(R\\) 64|64|IA-64|64-bit')

    possible_executables = ['ifort', 'efort', 'efc']

    executables = {
        'version_cmd'  : None,
        'compiler_f77' : [None, "-FI"],
        'compiler_fix' : [None, "-FI"],
        'compiler_f90' : [None],
        'linker_so'    : ['<F90>', "-shared"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"]
        }

    def get_flags(self):
        return ['-fPIC']

    def get_flags_opt(self):  # Scipy test failures with -O2
        # Mirrors IntelFCompiler.get_flags_opt: flag spelling changed at v15.
        v = self.get_version()
        mpopt = 'openmp' if v and v < '15' else 'qopenmp'
        return ['-fp-model strict -O1 -{}'.format(mpopt)]

    def get_flags_arch(self):
        # NOTE(review): returns a list holding one empty string, not an empty
        # list -- presumably intentional, but verify downstream flag handling.
        return ['']
|
||||
|
||||
# Is there no difference in the version string between the above compilers
|
||||
# and the Visual compilers?
|
||||
|
||||
|
||||
class IntelVisualFCompiler(BaseIntelFCompiler):
    """Intel Visual Fortran (Windows, MSVC-style switches) for 32-bit apps."""
    compiler_type = 'intelv'
    description = 'Intel Visual Fortran Compiler for 32-bit apps'
    version_match = intel_version_match('32-bit|IA-32')

    def update_executables(self):
        # Windows flavour of the version probe: slash-style switches.
        f = dummy_fortran_file()
        self.executables['version_cmd'] = ['<F77>', '/FI', '/c',
                                           f + '.f', '/o', f + '.o']

    ar_exe = 'lib.exe'
    possible_executables = ['ifort', 'ifl']

    executables = {
        'version_cmd'  : None,
        'compiler_f77' : [None],
        'compiler_fix' : [None],
        'compiler_f90' : [None],
        'linker_so'    : [None],
        'archiver'     : [ar_exe, "/verbose", "/OUT:"],
        'ranlib'       : None
        }

    compile_switch = '/c '
    object_switch = '/Fo'  # No space after /Fo!
    library_switch = '/OUT:'  # No space after /OUT:!
    module_dir_switch = '/module:'  # No space after /module:
    module_include_switch = '/I'

    def get_flags(self):
        opt = ['/nologo', '/MD', '/nbs', '/names:lowercase', '/assume:underscore']
        return opt

    def get_flags_free(self):
        return []

    def get_flags_debug(self):
        return ['/4Yb', '/d2']

    def get_flags_opt(self):
        return ['/O1']  # Scipy test failures with /O2

    def get_flags_arch(self):
        return ["/arch:IA32", "/QaxSSE3"]

    def runtime_library_dir_option(self, dir):
        # No rpath equivalent on Windows.
        raise NotImplementedError
|
||||
|
||||
|
||||
class IntelItaniumVisualFCompiler(IntelVisualFCompiler):
    """Intel Visual Fortran for Itanium targets (Windows)."""
    compiler_type = 'intelev'
    description = 'Intel Visual Fortran Compiler for Itanium apps'

    version_match = intel_version_match('Itanium')

    possible_executables = ['efl']  # XXX this is a wild guess
    ar_exe = IntelVisualFCompiler.ar_exe

    executables = {
        'version_cmd'  : None,
        'compiler_f77' : [None, "-FI", "-w90", "-w95"],
        'compiler_fix' : [None, "-FI", "-4L72", "-w"],
        'compiler_f90' : [None],
        'linker_so'    : ['<F90>', "-shared"],
        'archiver'     : [ar_exe, "/verbose", "/OUT:"],
        'ranlib'       : None
        }
|
||||
|
||||
|
||||
class IntelEM64VisualFCompiler(IntelVisualFCompiler):
    """Intel Visual Fortran for 64-bit targets (Windows)."""
    compiler_type = 'intelvem'
    description = 'Intel Visual Fortran Compiler for 64-bit apps'

    version_match = simple_version_match(start=r'Intel\(R\).*?64,')

    def get_flags_arch(self):
        # NOTE(review): list with one empty string, as in IntelEM64TFCompiler.
        return ['']
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Manual smoke test: print the detected Intel compiler version.
    from distutils import log
    log.set_verbosity(2)
    from numpy.distutils import customized_fcompiler
    print(customized_fcompiler(compiler='intel').get_version())
|
||||
@@ -0,0 +1,47 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import os
|
||||
|
||||
from numpy.distutils.fcompiler import FCompiler
|
||||
|
||||
compilers = ['LaheyFCompiler']
|
||||
|
||||
class LaheyFCompiler(FCompiler):
    """FCompiler subclass for the Lahey/Fujitsu lf95 compiler."""

    compiler_type = 'lahey'
    description = 'Lahey/Fujitsu Fortran 95 Compiler'
    version_pattern = r'Lahey/Fujitsu Fortran 95 Compiler Release (?P<version>[^\s*]*)'

    executables = {
        'version_cmd'  : ["<F90>", "--version"],
        'compiler_f77' : ["lf95", "--fix"],
        'compiler_fix' : ["lf95", "--fix"],
        'compiler_f90' : ["lf95"],
        'linker_so'    : ["lf95", "-shared"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"]
        }

    module_dir_switch = None  #XXX Fix me
    module_include_switch = None  #XXX Fix me

    def get_flags_opt(self):
        return ['-O']
    def get_flags_debug(self):
        # --chk/--chkglobal enable runtime checking.
        return ['-g', '--chk', '--chkglobal']
    def get_library_dirs(self):
        # Library directory is taken from the LAHEY environment variable.
        opt = []
        d = os.environ.get('LAHEY')
        if d:
            opt.append(os.path.join(d, 'lib'))
        return opt
    def get_libraries(self):
        opt = []
        opt.extend(['fj9f6', 'fj9i6', 'fj9ipp', 'fj9e6'])
        return opt
|
||||
|
||||
if __name__ == '__main__':
    # Manual smoke test: print the detected Lahey compiler version.
    from distutils import log
    log.set_verbosity(2)
    from numpy.distutils import customized_fcompiler
    print(customized_fcompiler(compiler='lahey').get_version())
|
||||
@@ -0,0 +1,56 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
from numpy.distutils.cpuinfo import cpu
|
||||
from numpy.distutils.fcompiler import FCompiler
|
||||
|
||||
compilers = ['MIPSFCompiler']
|
||||
|
||||
class MIPSFCompiler(FCompiler):
    """FCompiler subclass for the SGI MIPSpro Fortran compiler."""

    compiler_type = 'mips'
    description = 'MIPSpro Fortran Compiler'
    version_pattern = r'MIPSpro Compilers: Version (?P<version>[^\s*,]*)'

    executables = {
        'version_cmd'  : ["<F90>", "-version"],
        'compiler_f77' : ["f77", "-f77"],
        'compiler_fix' : ["f90", "-fixedform"],
        'compiler_f90' : ["f90"],
        'linker_so'    : ["f90", "-shared"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : None
        }
    module_dir_switch = None #XXX: fix me
    module_include_switch = None #XXX: fix me
    pic_flags = ['-KPIC']

    def get_flags(self):
        return self.pic_flags + ['-n32']
    def get_flags_opt(self):
        return ['-O3']
    def get_flags_arch(self):
        # Pick the -TARG platform flag matching the first IP board the CPU
        # identifies as (cpuinfo exposes is_IP<NN> predicates).
        opt = []
        for a in '19 20 21 22_4k 22_5k 24 25 26 27 28 30 32_5k 32_10k'.split():
            if getattr(cpu, 'is_IP%s'%a)():
                opt.append('-TARG:platform=IP%s' % a)
                break
        return opt
    def get_flags_arch_f77(self):
        # Map the detected R-series CPU to an 'r<NNNN>' flag (no leading dash
        # for f77; the f90 variant below prepends it).
        r = None
        if cpu.is_r10000(): r = 10000
        elif cpu.is_r12000(): r = 12000
        elif cpu.is_r8000(): r = 8000
        elif cpu.is_r5000(): r = 5000
        elif cpu.is_r4000(): r = 4000
        if r is not None:
            return ['r%s' % (r)]
        return []
    def get_flags_arch_f90(self):
        r = self.get_flags_arch_f77()
        if r:
            r[0] = '-' + r[0]
        return r
|
||||
|
||||
if __name__ == '__main__':
    # Manual smoke test: print the detected MIPSpro compiler version.
    from numpy.distutils import customized_fcompiler
    print(customized_fcompiler(compiler='mips').get_version())
|
||||
@@ -0,0 +1,84 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import sys
|
||||
import re
|
||||
from numpy.distutils.fcompiler import FCompiler
|
||||
|
||||
compilers = ['NAGFCompiler', 'NAGFORCompiler']
|
||||
|
||||
class BaseNAGFCompiler(FCompiler):
    """Behaviour shared by the NAGWare f95 and modern nagfor compilers."""
    version_pattern = r'NAG.* Release (?P<version>[^(\s]*)'

    def version_match(self, version_string):
        # Extract the release number from the banner; None when absent.
        m = re.search(self.version_pattern, version_string)
        if m:
            return m.group('version')
        else:
            return None

    def get_flags_linker_so(self):
        return ["-Wl,-shared"]
    def get_flags_opt(self):
        return ['-O4']
    def get_flags_arch(self):
        # NOTE(review): list with one empty string rather than an empty list.
        return ['']
|
||||
|
||||
class NAGFCompiler(BaseNAGFCompiler):
    """Legacy NAGWare Fortran 95 compiler (f95)."""

    compiler_type = 'nag'
    description = 'NAGWare Fortran 95 Compiler'

    executables = {
        'version_cmd'  : ["<F90>", "-V"],
        'compiler_f77' : ["f95", "-fixed"],
        'compiler_fix' : ["f95", "-fixed"],
        'compiler_f90' : ["f95"],
        'linker_so'    : ["<F90>"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"]
        }

    def get_flags_linker_so(self):
        if sys.platform == 'darwin':
            return ['-unsharedf95', '-Wl,-bundle,-flat_namespace,-undefined,suppress']
        return BaseNAGFCompiler.get_flags_linker_so(self)
    def get_flags_arch(self):
        # -target=native only exists before release 5.1.
        version = self.get_version()
        if version and version < '5.1':
            return ['-target=native']
        else:
            return BaseNAGFCompiler.get_flags_arch(self)
    def get_flags_debug(self):
        return ['-g', '-gline', '-g90', '-nan', '-C']
|
||||
|
||||
class NAGFORCompiler(BaseNAGFCompiler):
    """Modern NAG Fortran compiler (nagfor)."""

    compiler_type = 'nagfor'
    description = 'NAG Fortran Compiler'

    executables = {
        'version_cmd'  : ["nagfor", "-V"],
        'compiler_f77' : ["nagfor", "-fixed"],
        'compiler_fix' : ["nagfor", "-fixed"],
        'compiler_f90' : ["nagfor"],
        'linker_so'    : ["nagfor"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"]
        }

    def get_flags_debug(self):
        # Releases after 6.1 understand extra diagnostic switches.
        # NOTE(review): lexicographic string comparison of versions -- confirm
        # it holds once major versions reach two digits.
        version = self.get_version()
        if version and version > '6.1':
            return ['-g', '-u', '-nan', '-C=all', '-thread_safe',
                    '-kind=unique', '-Warn=allocation', '-Warn=subnormal']
        else:
            return ['-g', '-nan', '-C=all', '-u', '-thread_safe']
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Manual smoke test: print the nagfor version and its debug flags.
    from distutils import log
    log.set_verbosity(2)
    from numpy.distutils import customized_fcompiler
    compiler = customized_fcompiler(compiler='nagfor')
    print(compiler.get_version())
    print(compiler.get_flags_debug())
|
||||
@@ -0,0 +1,30 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
from numpy.distutils.fcompiler import FCompiler
|
||||
from numpy.distutils import customized_fcompiler
|
||||
|
||||
compilers = ['NoneFCompiler']
|
||||
|
||||
class NoneFCompiler(FCompiler):
    """Placeholder compiler used when no Fortran compiler is wanted; every
    executable slot is None and executable discovery is a no-op."""

    compiler_type = 'none'
    description = 'Fake Fortran compiler'

    executables = {'compiler_f77': None,
                   'compiler_f90': None,
                   'compiler_fix': None,
                   'linker_so': None,
                   'linker_exe': None,
                   'archiver': None,
                   'ranlib': None,
                   'version_cmd': None,
                   }

    def find_executables(self):
        # Intentionally empty: there is nothing to discover.
        pass
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Manual smoke test: the fake compiler reports no version.
    from distutils import log
    log.set_verbosity(2)
    print(customized_fcompiler(compiler='none').get_version())
|
||||
@@ -0,0 +1,35 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
from numpy.distutils.fcompiler import FCompiler
|
||||
|
||||
compilers = ['PathScaleFCompiler']
|
||||
|
||||
class PathScaleFCompiler(FCompiler):
    """FCompiler subclass for the PathScale pathf95 compiler."""

    compiler_type = 'pathf95'
    description = 'PathScale Fortran Compiler'
    version_pattern = r'PathScale\(TM\) Compiler Suite: Version (?P<version>[\d.]+)'

    executables = {
        'version_cmd'  : ["pathf95", "-version"],
        'compiler_f77' : ["pathf95", "-fixedform"],
        'compiler_fix' : ["pathf95", "-fixedform"],
        'compiler_f90' : ["pathf95"],
        'linker_so'    : ["pathf95", "-shared"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"]
        }
    pic_flags = ['-fPIC']
    module_dir_switch = '-module '  # Don't remove ending space!
    module_include_switch = '-I'

    def get_flags_opt(self):
        return ['-O3']
    def get_flags_debug(self):
        return ['-g']
|
||||
|
||||
if __name__ == '__main__':
    # Manual smoke test: print the detected PathScale compiler version.
    from distutils import log
    log.set_verbosity(2)
    from numpy.distutils import customized_fcompiler
    print(customized_fcompiler(compiler='pathf95').get_version())
|
||||
142
venv/lib/python3.6/site-packages/numpy/distutils/fcompiler/pg.py
Normal file
142
venv/lib/python3.6/site-packages/numpy/distutils/fcompiler/pg.py
Normal file
@@ -0,0 +1,142 @@
|
||||
# http://www.pgroup.com
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import sys
|
||||
|
||||
from numpy.distutils.fcompiler import FCompiler, dummy_fortran_file
|
||||
from sys import platform
|
||||
from os.path import join, dirname, normpath
|
||||
|
||||
compilers = ['PGroupFCompiler', 'PGroupFlangCompiler']
|
||||
|
||||
|
||||
class PGroupFCompiler(FCompiler):
    """FCompiler subclass for the Portland Group (PGI) pgfortran compiler.

    The executable/flag tables are selected at class-definition time based
    on the host platform (macOS links via libtool, elsewhere via pgfortran).
    """

    compiler_type = 'pg'
    description = 'Portland Group Fortran Compiler'
    version_pattern = r'\s*pg(f77|f90|hpf|fortran) (?P<version>[\d.-]+).*'

    if platform == 'darwin':
        executables = {
            'version_cmd': ["<F77>", "-V"],
            'compiler_f77': ["pgfortran", "-dynamiclib"],
            'compiler_fix': ["pgfortran", "-Mfixed", "-dynamiclib"],
            'compiler_f90': ["pgfortran", "-dynamiclib"],
            'linker_so': ["libtool"],
            'archiver': ["ar", "-cr"],
            'ranlib': ["ranlib"]
        }
        pic_flags = ['']
    else:
        executables = {
            'version_cmd': ["<F77>", "-V"],
            'compiler_f77': ["pgfortran"],
            'compiler_fix': ["pgfortran", "-Mfixed"],
            'compiler_f90': ["pgfortran"],
            'linker_so': ["pgfortran"],
            'archiver': ["ar", "-cr"],
            'ranlib': ["ranlib"]
        }
        pic_flags = ['-fpic']

    module_dir_switch = '-module '
    module_include_switch = '-I'

    def get_flags(self):
        opt = ['-Minform=inform', '-Mnosecond_underscore']
        return self.pic_flags + opt

    def get_flags_opt(self):
        return ['-fast']

    def get_flags_debug(self):
        return ['-g']

    # Linker flags are also platform-dependent, chosen at class creation.
    if platform == 'darwin':
        def get_flags_linker_so(self):
            return ["-dynamic", '-undefined', 'dynamic_lookup']

    else:
        def get_flags_linker_so(self):
            return ["-shared", '-fpic']

    def runtime_library_dir_option(self, dir):
        return '-R%s' % dir
|
||||
|
||||
|
||||
# flang is only exposed as a working class on Python >= 3.5; on older
# interpreters a stub that raises from get_version is installed instead.
if sys.version_info >= (3, 5):
    import functools

    class PGroupFlangCompiler(FCompiler):
        """Portland Group / LLVM 'flang' compiler (MSVC-style archiving)."""
        compiler_type = 'flang'
        description = 'Portland Group Fortran LLVM Compiler'
        version_pattern = r'\s*(flang|clang) version (?P<version>[\d.-]+).*'

        ar_exe = 'lib.exe'
        possible_executables = ['flang']

        executables = {
            'version_cmd': ["<F77>", "--version"],
            'compiler_f77': ["flang"],
            'compiler_fix': ["flang"],
            'compiler_f90': ["flang"],
            'linker_so': [None],
            'archiver': [ar_exe, "/verbose", "/OUT:"],
            'ranlib': None
        }

        library_switch = '/OUT:'  # No space after /OUT:!
        module_dir_switch = '-module '  # Don't remove ending space!

        def get_libraries(self):
            # Add the flang runtime libraries to whatever the base requires.
            opt = FCompiler.get_libraries(self)
            opt.extend(['flang', 'flangrti', 'ompstub'])
            return opt

        # NOTE(review): lru_cache on an instance method keys the cache on
        # `self` and keeps every instance alive for the cache's lifetime
        # (ruff B019) -- consider a per-instance cache instead.
        @functools.lru_cache(maxsize=128)
        def get_library_dirs(self):
            """List of compiler library directories."""
            opt = FCompiler.get_library_dirs(self)
            flang_dir = dirname(self.executables['compiler_f77'][0])
            opt.append(normpath(join(flang_dir, '..', 'lib')))

            return opt

        def get_flags(self):
            return []

        def get_flags_free(self):
            return []

        def get_flags_debug(self):
            return ['-g']

        def get_flags_opt(self):
            return ['-O3']

        def get_flags_arch(self):
            return []

        def runtime_library_dir_option(self, dir):
            raise NotImplementedError

else:
    from numpy.distutils.fcompiler import CompilerNotFound

    # No point in supporting on older Pythons because not ABI compatible
    class PGroupFlangCompiler(FCompiler):
        """Stub installed on Python < 3.5; version probing always raises."""
        compiler_type = 'flang'
        description = 'Portland Group Fortran LLVM Compiler'

        def get_version(self):
            raise CompilerNotFound('Flang unsupported on Python < 3.5')
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Manual smoke test: pass 'flang' on the command line to probe flang,
    # otherwise probe the classic pg compiler.
    from distutils import log
    log.set_verbosity(2)
    from numpy.distutils import customized_fcompiler
    if 'flang' in sys.argv:
        print(customized_fcompiler(compiler='flang').get_version())
    else:
        print(customized_fcompiler(compiler='pg').get_version())
|
||||
@@ -0,0 +1,53 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
from numpy.distutils.ccompiler import simple_version_match
|
||||
from numpy.distutils.fcompiler import FCompiler
|
||||
|
||||
compilers = ['SunFCompiler']
|
||||
|
||||
class SunFCompiler(FCompiler):
    """FCompiler subclass for the Sun / Forte / WorkShop f90 compiler."""

    compiler_type = 'sun'
    description = 'Sun or Forte Fortran 95 Compiler'
    # ex:
    # f90: Sun WorkShop 6 update 2 Fortran 95 6.2 Patch 111690-10 2003/08/28
    version_match = simple_version_match(
                      start=r'f9[05]: (Sun|Forte|WorkShop).*Fortran 95')

    executables = {
        'version_cmd'  : ["<F90>", "-V"],
        'compiler_f77' : ["f90"],
        'compiler_fix' : ["f90", "-fixed"],
        'compiler_f90' : ["f90"],
        'linker_so'    : ["<F90>", "-Bdynamic", "-G"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"]
        }
    module_dir_switch = '-moddir='
    module_include_switch = '-M'
    pic_flags = ['-xcode=pic32']

    def get_flags_f77(self):
        # -f77 replaced -fixed from release 7 on.
        # NOTE(review): lexicographic string comparison of versions.
        ret = ["-ftrap=%none"]
        if (self.get_version() or '') >= '7':
            ret.append("-f77")
        else:
            ret.append("-fixed")
        return ret
    def get_opt(self):
        return ['-fast', '-dalign']
    def get_arch(self):
        return ['-xtarget=generic']
    def get_libraries(self):
        opt = []
        opt.extend(['fsu', 'sunmath', 'mvec'])
        return opt

    def runtime_library_dir_option(self, dir):
        return '-R%s' % dir
|
||||
|
||||
if __name__ == '__main__':
    # Manual smoke test: print the detected Sun compiler version.
    from distutils import log
    log.set_verbosity(2)
    from numpy.distutils import customized_fcompiler
    print(customized_fcompiler(compiler='sun').get_version())
|
||||
@@ -0,0 +1,54 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import os
|
||||
|
||||
from numpy.distutils.fcompiler.gnu import GnuFCompiler
|
||||
|
||||
compilers = ['VastFCompiler']
|
||||
|
||||
class VastFCompiler(GnuFCompiler):
    """Pacific-Sierra Research VAST f90 preprocessor driving g77/gcc."""
    compiler_type = 'vast'
    compiler_aliases = ()
    description = 'Pacific-Sierra Research Fortran 90 Compiler'
    version_pattern = (r'\s*Pacific-Sierra Research vf90 '
                       r'(Personal|Professional)\s+(?P<version>[^\s]*)')

    # VAST f90 does not support -o with -c. So, object files are created
    # to the current directory and then moved to build directory
    object_switch = ' && function _mvfile { mv -v `basename $1` $1 ; } && _mvfile '

    executables = {
        'version_cmd'  : ["vf90", "-v"],
        'compiler_f77' : ["g77"],
        'compiler_fix' : ["f90", "-Wv,-ya"],
        'compiler_f90' : ["f90"],
        'linker_so'    : ["<F90>"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"]
        }
    module_dir_switch = None  #XXX Fix me
    module_include_switch = None  #XXX Fix me

    def find_executables(self):
        # Intentionally empty: executables above are used as-is.
        pass

    def get_version_cmd(self):
        # The version tool is the f90 driver name prefixed with 'v'
        # (f90 -> vf90) in the same directory.
        f90 = self.compiler_f90[0]
        d, b = os.path.split(f90)
        vf90 = os.path.join(d, 'v'+b)
        return vf90

    def get_flags_arch(self):
        # Arch flags come from the underlying GNU compiler, which keys off
        # self.version -- temporarily swap in gcc's version, query, restore.
        vast_version = self.get_version()
        gnu = GnuFCompiler()
        gnu.customize(None)
        self.version = gnu.get_version()
        opt = GnuFCompiler.get_flags_arch(self)
        self.version = vast_version
        return opt
|
||||
|
||||
if __name__ == '__main__':
    # Manual smoke test: print the detected VAST compiler version.
    from distutils import log
    log.set_verbosity(2)
    from numpy.distutils import customized_fcompiler
    print(customized_fcompiler(compiler='vast').get_version())
|
||||
@@ -0,0 +1,264 @@
|
||||
#!/usr/bin/env python
|
||||
"""
|
||||
|
||||
process_file(filename)
|
||||
|
||||
takes templated file .xxx.src and produces .xxx file where .xxx
|
||||
is .pyf .f90 or .f using the following template rules:
|
||||
|
||||
'<..>' denotes a template.
|
||||
|
||||
All function and subroutine blocks in a source file with names that
|
||||
contain '<..>' will be replicated according to the rules in '<..>'.
|
||||
|
||||
The number of comma-separated words in '<..>' will determine the number of
|
||||
replicates.
|
||||
|
||||
'<..>' may have two different forms, named and short. For example,
|
||||
|
||||
named:
|
||||
<p=d,s,z,c> where anywhere inside a block '<p>' will be replaced with
|
||||
'd', 's', 'z', and 'c' for each replicate of the block.
|
||||
|
||||
<_c> is already defined: <_c=s,d,c,z>
|
||||
<_t> is already defined: <_t=real,double precision,complex,double complex>
|
||||
|
||||
short:
|
||||
<s,d,c,z>, a short form of the named, useful when no <p> appears inside
|
||||
a block.
|
||||
|
||||
In general, '<..>' contains a comma separated list of arbitrary
|
||||
expressions. If these expression must contain a comma|leftarrow|rightarrow,
|
||||
then prepend the comma|leftarrow|rightarrow with a backslash.
|
||||
|
||||
If an expression matches '\\<index>' then it will be replaced
|
||||
by <index>-th expression.
|
||||
|
||||
Note that all '<..>' forms in a block must have the same number of
|
||||
comma-separated entries.
|
||||
|
||||
Predefined named template rules:
|
||||
<prefix=s,d,c,z>
|
||||
<ftype=real,double precision,complex,double complex>
|
||||
<ftypereal=real,double precision,\\0,\\1>
|
||||
<ctype=float,double,complex_float,complex_double>
|
||||
<ctypereal=float,double,\\0,\\1>
|
||||
|
||||
"""
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
__all__ = ['process_str', 'process_file']
|
||||
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
|
||||
# Regexes locating Fortran routine boundaries.  Fixed-form continuation
# lines put '$' or '*' in column 6 (five leading spaces); the slice check
# below compares 7 characters, which fixes the literal as '\n' + 5 spaces + '$'.
routine_start_re = re.compile(r'(\n|\A)((     (\$|\*))|)\s*(subroutine|function)\b', re.I)
routine_end_re = re.compile(r'\n\s*end\s*(subroutine|function)\b.*(\n|\Z)', re.I)
function_start_re = re.compile(r'\n     (\$|\*)\s*function\b', re.I)

def parse_structure(astr):
    """ Return a list of tuples for each function or subroutine each
    tuple is the start and end of a subroutine or function to be
    expanded.
    """

    spanlist = []
    ind = 0
    while True:
        m = routine_start_re.search(astr, ind)
        if m is None:
            break
        start = m.start()
        if function_start_re.match(astr, start, m.end()):
            # Match began on a continuation line; walk back over any
            # preceding continuation lines to the statement's real start.
            while True:
                i = astr.rfind('\n', ind, start)
                if i==-1:
                    break
                start = i
                if astr[i:i+7]!='\n     $':
                    break
            start += 1
        m = routine_end_re.search(astr, m.end())
        # No matching 'end subroutine/function': routine runs to the end.
        ind = end = m and m.end()-1 or len(astr)
        spanlist.append((start, end))
    return spanlist
|
||||
|
||||
# Template forms: '<name>' (use), '<name=a,b,...>' (definition), '<a,b,...>' (short).
template_re = re.compile(r"<\s*(\w[\w\d]*)\s*>")
named_re = re.compile(r"<\s*(\w[\w\d]*)\s*=\s*(.*?)\s*>")
list_re = re.compile(r"<\s*((.*?))\s*>")

def find_repl_patterns(astr):
    """Collect all named template definitions '<name=a,b,...>' in *astr*.

    Returns a dict mapping each template name to its normalized,
    comma-joined replacement list (escaped commas become '@comma@').
    """
    names = {}
    for raw_name, raw_list in named_re.findall(astr):
        key = raw_name.strip() or unique_key(names)
        names[key] = conv(raw_list.replace(r'\,', '@comma@'))
    return names
|
||||
|
||||
def find_and_remove_repl_patterns(astr):
    """Return *astr* with named template definitions stripped, plus the
    dict of definitions that were found."""
    defs = find_repl_patterns(astr)
    stripped = named_re.sub('', astr)
    return stripped, defs
|
||||
|
||||
# An item that is a back-reference: a backslash followed by a decimal index.
item_re = re.compile(r"\A\\(?P<index>\d+)\Z")
def conv(astr):
    """Normalize a comma-separated list: strip whitespace around items and
    resolve back-references of the form '\\<i>' to the i-th item."""
    items = [piece.strip() for piece in astr.split(',')]
    for pos, item in enumerate(items):
        ref = item_re.match(item)
        if ref:
            items[pos] = items[int(ref.group('index'))]
    return ','.join(items)
|
||||
|
||||
def unique_key(adict):
    """ Obtain a unique key given a dictionary."""
    # Probe '__l1', '__l2', ... until a name not already in the dict is found.
    n = 1
    while '__l%s' % n in adict:
        n += 1
    return '__l%s' % n
|
||||
|
||||
|
||||
# A bare template body that is just a single identifier (a named reference).
template_name_re = re.compile(r'\A\s*(\w[\w\d]*)\s*\Z')
def expand_sub(substr, names):
    """Expand one routine block *substr* according to its template rules.

    *names* is the dict of named rules accumulated so far; rules defined
    in this block are added to it (mutated in place) unless their name
    starts with '_'.  Returns the replicated text, one copy per rule entry.
    """
    # Protect escaped angle brackets during processing.
    substr = substr.replace(r'\>', '@rightarrow@')
    substr = substr.replace(r'\<', '@leftarrow@')
    lnames = find_repl_patterns(substr)
    substr = named_re.sub(r"<\1>", substr)  # get rid of definition templates

    def listrepl(mobj):
        # Turn a short-form '<a,b,...>' into a named template, reusing an
        # existing name if the same list was seen before in this block.
        thelist = conv(mobj.group(1).replace(r'\,', '@comma@'))
        if template_name_re.match(thelist):
            return "<%s>" % (thelist)
        name = None
        for key in lnames.keys():    # see if list is already in dictionary
            if lnames[key] == thelist:
                name = key
        if name is None:      # this list is not in the dictionary yet
            name = unique_key(lnames)
            lnames[name] = thelist
        return "<%s>" % name

    substr = list_re.sub(listrepl, substr)  # convert all lists to named templates
                                            # newnames are constructed as needed

    numsubs = None          # number of replicates, fixed by the first rule
    base_rule = None        # name of the rule that fixed numsubs
    rules = {}              # rule name -> list of replacement strings
    for r in template_re.findall(substr):
        if r not in rules:
            # Local definitions shadow the inherited ones.
            thelist = lnames.get(r, names.get(r, None))
            if thelist is None:
                raise ValueError('No replicates found for <%s>' % (r))
            if r not in names and not thelist.startswith('_'):
                names[r] = thelist
            rule = [i.replace('@comma@', ',') for i in thelist.split(',')]
            num = len(rule)

            if numsubs is None:
                numsubs = num
                rules[r] = rule
                base_rule = r
            elif num == numsubs:
                rules[r] = rule
            else:
                # Rules with a different entry count are dropped with a warning.
                print("Mismatch in number of replacements (base <%s=%s>)"
                      " for <%s=%s>. Ignoring." %
                      (base_rule, ','.join(rules[base_rule]), r, thelist))
    if not rules:
        return substr

    def namerepl(mobj):
        # k is the current replicate index from the enclosing loop; names
        # without a rule are left unchanged.
        name = mobj.group(1)
        return rules.get(name, (k+1)*[name])[k]

    newstr = ''
    for k in range(numsubs):
        newstr += template_re.sub(namerepl, substr) + '\n\n'

    # Restore the escaped angle brackets.
    newstr = newstr.replace('@rightarrow@', '>')
    newstr = newstr.replace('@leftarrow@', '<')
    return newstr
|
||||
|
||||
def process_str(allstr):
    """Expand all templated routine blocks in *allstr* and return the result.

    Text outside routine blocks is passed through with template
    definitions stripped (but recorded); each routine block found by
    parse_structure() is expanded with expand_sub() using the rules
    accumulated so far.
    """
    newstr = allstr
    writestr = ''

    struct = parse_structure(newstr)

    oldend = 0
    names = {}
    # Seed with the predefined rules (<_c>, <_t>, <prefix>, ...).
    names.update(_special_names)
    for sub in struct:
        # Text preceding the routine: strip definitions, remember them.
        cleanedstr, defs = find_and_remove_repl_patterns(newstr[oldend:sub[0]])
        writestr += cleanedstr
        names.update(defs)
        writestr += expand_sub(newstr[sub[0]:sub[1]], names)
        oldend = sub[1]
    # Trailing text after the last routine.
    writestr += newstr[oldend:]

    return writestr
|
||||
|
||||
# Matches a Fortran "include 'name.src'" statement (only '.src' includes
# are inlined; other includes are left for the compiler).
include_src_re = re.compile(r"(\n|\A)\s*include\s*['\"](?P<name>[\w\d./\\]+[.]src)['\"]", re.I)

def resolve_includes(source):
    """Return the lines of *source* with any include 'x.src' statements
    recursively replaced by the referenced file's contents.

    Relative include paths are resolved against the directory of
    *source*; includes naming a non-existent file are kept verbatim.
    """
    d = os.path.dirname(source)
    with open(source) as fid:
        lines = []
        for line in fid:
            m = include_src_re.match(line)
            if m:
                fn = m.group('name')
                if not os.path.isabs(fn):
                    fn = os.path.join(d, fn)
                if os.path.isfile(fn):
                    print('Including file', fn)
                    lines.extend(resolve_includes(fn))
                else:
                    lines.append(line)
            else:
                lines.append(line)
    return lines
|
||||
|
||||
def process_file(source):
    """Expand all templates in *source* after inlining its '.src' includes."""
    return process_str(''.join(resolve_includes(source)))
|
||||
|
||||
_special_names = find_repl_patterns('''
|
||||
<_c=s,d,c,z>
|
||||
<_t=real,double precision,complex,double complex>
|
||||
<prefix=s,d,c,z>
|
||||
<ftype=real,double precision,complex,double complex>
|
||||
<ctype=float,double,complex_float,complex_double>
|
||||
<ftypereal=real,double precision,\\0,\\1>
|
||||
<ctypereal=float,double,\\0,\\1>
|
||||
''')
|
||||
|
||||
def main():
    """Command-line entry point.

    Reads templated source from the file named by the first argument (or
    stdin when no argument is given) and writes the expanded text to the
    same path with its final extension stripped (or stdout).
    """
    try:
        file = sys.argv[1]
    except IndexError:
        fid = sys.stdin
        outfile = sys.stdout
    else:
        fid = open(file, 'r')
        (base, ext) = os.path.splitext(file)
        newname = base
        outfile = open(newname, 'w')

    allstr = fid.read()
    writestr = process_str(allstr)
    outfile.write(writestr)
    # Bug fix: the streams opened above were never closed, leaking the
    # handles.  Close only what we opened; leave stdin/stdout alone.
    if fid is not sys.stdin:
        fid.close()
    if outfile is not sys.stdout:
        outfile.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
6
venv/lib/python3.6/site-packages/numpy/distutils/info.py
Normal file
6
venv/lib/python3.6/site-packages/numpy/distutils/info.py
Normal file
@@ -0,0 +1,6 @@
|
||||
"""
|
||||
Enhanced distutils with Fortran compilers support and more.
|
||||
"""
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
postpone_import = True
|
||||
@@ -0,0 +1,113 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import platform
|
||||
|
||||
from distutils.unixccompiler import UnixCCompiler
|
||||
from numpy.distutils.exec_command import find_executable
|
||||
from numpy.distutils.ccompiler import simple_version_match
|
||||
if platform.system() == 'Windows':
|
||||
from numpy.distutils.msvc9compiler import MSVCCompiler
|
||||
|
||||
|
||||
class IntelCCompiler(UnixCCompiler):
    """A modified Intel compiler compatible with a GCC-built Python."""
    compiler_type = 'intel'
    cc_exe = 'icc'
    cc_args = 'fPIC'

    def __init__(self, verbose=0, dry_run=0, force=0):
        UnixCCompiler.__init__(self, verbose, dry_run, force)

        v = self.get_version()
        # icc renamed '-openmp' to '-qopenmp' starting with version 15.
        # NOTE(review): this is a lexicographic string comparison, so e.g.
        # '9.1' compares greater than '15' — confirm intended behavior for
        # pre-10 compiler versions.
        mpopt = 'openmp' if v and v < '15' else 'qopenmp'
        self.cc_exe = ('icc -fPIC -fp-model strict -O3 '
                       '-fomit-frame-pointer -{}').format(mpopt)
        compiler = self.cc_exe

        # The macOS linker rejects '-shared'; defer undefined symbols to
        # dynamic lookup instead.
        if platform.system() == 'Darwin':
            shared_flag = '-Wl,-undefined,dynamic_lookup'
        else:
            shared_flag = '-shared'
        self.set_executables(compiler=compiler,
                             compiler_so=compiler,
                             compiler_cxx=compiler,
                             archiver='xiar' + ' cru',
                             linker_exe=compiler + ' -shared-intel',
                             linker_so=compiler + ' ' + shared_flag +
                             ' -shared-intel')
|
||||
|
||||
|
||||
class IntelItaniumCCompiler(IntelCCompiler):
    # Itanium variant: identical behavior, different distutils type name.
    compiler_type = 'intele'

    # On Itanium, the Intel Compiler used to be called ecc, let's search for
    # it (now it's also icc, so ecc is last in the search).
    # This loop runs at class-definition time and leaves the first found
    # executable path (or None) bound as the class attribute `cc_exe`.
    for cc_exe in map(find_executable, ['icc', 'ecc']):
        if cc_exe:
            break
|
||||
|
||||
|
||||
class IntelEM64TCCompiler(UnixCCompiler):
    """
    A modified Intel x86_64 compiler compatible with a 64bit GCC-built Python.
    """
    compiler_type = 'intelem'
    cc_exe = 'icc -m64'
    cc_args = '-fPIC'

    def __init__(self, verbose=0, dry_run=0, force=0):
        UnixCCompiler.__init__(self, verbose, dry_run, force)

        v = self.get_version()
        # icc renamed '-openmp' to '-qopenmp' starting with version 15.
        # NOTE(review): lexicographic version comparison — see IntelCCompiler.
        mpopt = 'openmp' if v and v < '15' else 'qopenmp'
        self.cc_exe = ('icc -m64 -fPIC -fp-model strict -O3 '
                       '-fomit-frame-pointer -{}').format(mpopt)
        compiler = self.cc_exe

        # The macOS linker rejects '-shared'; defer undefined symbols instead.
        if platform.system() == 'Darwin':
            shared_flag = '-Wl,-undefined,dynamic_lookup'
        else:
            shared_flag = '-shared'
        self.set_executables(compiler=compiler,
                             compiler_so=compiler,
                             compiler_cxx=compiler,
                             archiver='xiar' + ' cru',
                             linker_exe=compiler + ' -shared-intel',
                             linker_so=compiler + ' ' + shared_flag +
                             ' -shared-intel')
|
||||
|
||||
|
||||
if platform.system() == 'Windows':
    class IntelCCompilerW(MSVCCompiler):
        """
        A modified Intel compiler compatible with an MSVC-built Python.
        """
        compiler_type = 'intelw'
        compiler_cxx = 'icl'

        def __init__(self, verbose=0, dry_run=0, force=0):
            MSVCCompiler.__init__(self, verbose, dry_run, force)
            # Matcher for the 32-bit Intel compiler's version banner; stored
            # (name-mangled) for later version queries.
            version_match = simple_version_match(start=r'Intel\(R\).*?32,')
            self.__version = version_match

        def initialize(self, plat_name=None):
            # Let MSVC locate the toolchain, then swap in Intel's tools.
            MSVCCompiler.initialize(self, plat_name)
            self.cc = self.find_exe('icl.exe')
            self.lib = self.find_exe('xilib')
            self.linker = self.find_exe('xilink')
            self.compile_options = ['/nologo', '/O3', '/MD', '/W3',
                                    '/Qstd=c99']
            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3',
                                          '/Qstd=c99', '/Z7', '/D_DEBUG']

    class IntelEM64TCCompilerW(IntelCCompilerW):
        """
        A modified Intel x86_64 compiler compatible with
        a 64bit MSVC-built Python.
        """
        compiler_type = 'intelemw'

        def __init__(self, verbose=0, dry_run=0, force=0):
            MSVCCompiler.__init__(self, verbose, dry_run, force)
            # Matcher for the 64-bit Intel compiler's version banner.
            version_match = simple_version_match(start=r'Intel\(R\).*?64,')
            self.__version = version_match
|
||||
115
venv/lib/python3.6/site-packages/numpy/distutils/lib2def.py
Normal file
115
venv/lib/python3.6/site-packages/numpy/distutils/lib2def.py
Normal file
@@ -0,0 +1,115 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import re
|
||||
import sys
|
||||
import subprocess
|
||||
|
||||
__doc__ = """This module generates a DEF file from the symbols in
|
||||
an MSVC-compiled DLL import library. It correctly discriminates between
|
||||
data and functions. The data is collected from the output of the program
|
||||
nm(1).
|
||||
|
||||
Usage:
|
||||
python lib2def.py [libname.lib] [output.def]
|
||||
or
|
||||
python lib2def.py [libname.lib] > output.def
|
||||
|
||||
libname.lib defaults to python<py_ver>.lib and output.def defaults to stdout
|
||||
|
||||
Author: Robert Kern <kernr@mail.ncifcrf.gov>
|
||||
Last Update: April 30, 1999
|
||||
"""
|
||||
|
||||
__version__ = '0.1a'

# Two-digit Python version string, e.g. '36'; used to build the
# python<ver>.lib / python<ver>.dll names below.
py_ver = "%d%d" % tuple(sys.version_info[:2])

# Default symbol-listing command: GNU nm, demangled (-C), static symbols (-s).
DEFAULT_NM = 'nm -Cs'

DEF_HEADER = """LIBRARY python%s.dll
;CODE PRELOAD MOVEABLE DISCARDABLE
;DATA PRELOAD SINGLE

EXPORTS
""" % py_ver
# the header of the DEF file

# Classify nm output lines: every exported symbol is a candidate function;
# '_imp__'-prefixed entries are data imports.
FUNC_RE = re.compile(r"^(.*) in python%s\.dll" % py_ver, re.MULTILINE)
DATA_RE = re.compile(r"^_imp__(.*) in python%s\.dll" % py_ver, re.MULTILINE)
||||
|
||||
def parse_cmd():
    """Parses the command-line arguments.

libfile, deffile = parse_cmd()"""
    if len(sys.argv) == 3:
        if sys.argv[1][-4:] == '.lib' and sys.argv[2][-4:] == '.def':
            libfile, deffile = sys.argv[1:]
        elif sys.argv[1][-4:] == '.def' and sys.argv[2][-4:] == '.lib':
            deffile, libfile = sys.argv[1:]
        else:
            print("I'm assuming that your first argument is the library")
            print("and the second is the DEF file.")
            # Bug fix: this branch previously left libfile/deffile
            # unassigned, raising UnboundLocalError at the return below.
            # Assign them per the printed assumption.
            libfile, deffile = sys.argv[1:]
    elif len(sys.argv) == 2:
        if sys.argv[1][-4:] == '.def':
            deffile = sys.argv[1]
            libfile = 'python%s.lib' % py_ver
        elif sys.argv[1][-4:] == '.lib':
            deffile = None
            libfile = sys.argv[1]
    else:
        # No arguments: default library, DEF text to stdout.
        libfile = 'python%s.lib' % py_ver
        deffile = None
    return libfile, deffile
|
||||
|
||||
def getnm(nm_cmd = ['nm', '-Cs', 'python%s.lib' % py_ver]):
    """Returns the output of nm_cmd via a pipe.

nm_output = getnam(nm_cmd = 'nm -Cs py_lib')"""
    # NOTE(review): shell=True combined with a list argument is fragile —
    # on POSIX only the first element is treated as the command.  The
    # __main__ block below also passes ['nm -Cs', libfile]; confirm this
    # works on the intended platform.
    f = subprocess.Popen(nm_cmd, shell=True, stdout=subprocess.PIPE, universal_newlines=True)
    nm_output = f.stdout.read()
    f.stdout.close()
    return nm_output
|
||||
|
||||
def parse_nm(nm_output):
    """Returns a tuple of lists: dlist for the list of data
    symbols and flist for the list of function symbols.

dlist, flist = parse_nm(nm_output)"""
    data = DATA_RE.findall(nm_output)
    func = FUNC_RE.findall(nm_output)

    # Python API symbols that also appear as functions are exported as code...
    flist = [sym for sym in data
             if sym in func and (sym[:2] == 'Py' or sym[:3] == '_Py' or
                                 sym[:4] == 'init')]
    # ...the remaining Python API symbols are exported as data.
    dlist = [sym for sym in data
             if sym not in flist and (sym[:2] == 'Py' or sym[:3] == '_Py')]

    return sorted(dlist), sorted(flist)
|
||||
|
||||
def output_def(dlist, flist, header, file = sys.stdout):
    """Outputs the final DEF file to a file defaulting to stdout.

output_def(dlist, flist, header, file = sys.stdout)"""
    chunks = [header]
    chunks.extend('\t%s DATA\n' % sym for sym in dlist)
    chunks.append('\n')  # blank line between data and function sections
    chunks.extend('\t%s\n' % sym for sym in flist)
    file.write(''.join(chunks))
|
||||
|
||||
if __name__ == '__main__':
    libfile, deffile = parse_cmd()
    if deffile is None:
        deffile = sys.stdout
    else:
        deffile = open(deffile, 'w')
    # NOTE(review): DEFAULT_NM is the single string 'nm -Cs', so this list
    # is ['nm -Cs', libfile]; it only works because getnm() runs the
    # command through the shell (shell=True).  Confirm on target platform.
    nm_cmd = [str(DEFAULT_NM), str(libfile)]
    nm_output = getnm(nm_cmd)
    dlist, flist = parse_nm(nm_output)
    output_def(dlist, flist, DEF_HEADER, deffile)
|
||||
@@ -0,0 +1,76 @@
|
||||
""" Functions for converting from DOS to UNIX line endings
|
||||
|
||||
"""
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import sys, re, os
|
||||
|
||||
def dos2unix(file):
    "Replace CRLF with LF in argument files. Print names of changed files."
    if os.path.isdir(file):
        print(file, "Directory!")
        return

    with open(file, "rb") as fp:
        data = fp.read()
    # Bug fix: the file is read in binary mode, so the sentinel and the
    # substitution must use bytes — "'\0' in data" and a str-pattern
    # re.sub() raise TypeError on Python 3.
    if b'\0' in data:
        print(file, "Binary!")
        return

    newdata = data.replace(b"\r\n", b"\n")
    if newdata != data:
        print('dos2unix:', file)
        with open(file, "wb") as f:
            f.write(newdata)
        return file
    else:
        print(file, 'ok')
|
||||
|
||||
def dos2unix_one_dir(modified_files, dir_name, file_names):
    """Convert every file in one directory, appending the paths that were
    actually changed to *modified_files* (directory-walk callback)."""
    for name in file_names:
        changed = dos2unix(os.path.join(dir_name, name))
        if changed is not None:
            modified_files.append(changed)
|
||||
|
||||
def dos2unix_dir(dir_name):
    """Recursively convert a directory tree to LF endings; return the list
    of changed file paths.

    Bug fix: os.path.walk() was removed in Python 3; use os.walk().
    """
    modified_files = []
    for dir_path, _subdirs, file_names in os.walk(dir_name):
        dos2unix_one_dir(modified_files, dir_path, file_names)
    return modified_files
|
||||
#----------------------------------
|
||||
|
||||
def unix2dos(file):
    "Replace LF with CRLF in argument files. Print names of changed files."
    if os.path.isdir(file):
        print(file, "Directory!")
        return

    with open(file, "rb") as fp:
        data = fp.read()
    # Bug fix: binary-mode data requires bytes comparisons/substitutions —
    # "'\0' in data" and str-pattern re.sub() raise TypeError on Python 3.
    if b'\0' in data:
        print(file, "Binary!")
        return
    # Normalize to LF first so existing CRLF pairs are not doubled.
    newdata = data.replace(b"\r\n", b"\n").replace(b"\n", b"\r\n")
    if newdata != data:
        print('unix2dos:', file)
        with open(file, "wb") as f:
            f.write(newdata)
        return file
    else:
        print(file, 'ok')
|
||||
|
||||
def unix2dos_one_dir(modified_files, dir_name, file_names):
    """Convert every file in one directory, appending the paths that were
    actually changed to *modified_files* (directory-walk callback)."""
    for file in file_names:
        full_path = os.path.join(dir_name, file)
        # Bug fix: the conversion result was previously discarded, so every
        # file name (changed or not) was appended.  Record only files
        # unix2dos() actually modified, mirroring dos2unix_one_dir().
        file = unix2dos(full_path)
        if file is not None:
            modified_files.append(file)
|
||||
|
||||
def unix2dos_dir(dir_name):
    """Recursively convert a directory tree to CRLF endings; return the
    list of changed file paths.

    Bug fix: os.path.walk() was removed in Python 3; use os.walk().
    """
    modified_files = []
    for dir_path, _subdirs, file_names in os.walk(dir_name):
        unix2dos_one_dir(modified_files, dir_path, file_names)
    return modified_files
|
||||
|
||||
if __name__ == "__main__":
|
||||
dos2unix_dir(sys.argv[1])
|
||||
93
venv/lib/python3.6/site-packages/numpy/distutils/log.py
Normal file
93
venv/lib/python3.6/site-packages/numpy/distutils/log.py
Normal file
@@ -0,0 +1,93 @@
|
||||
# Colored log, requires Python 2.3 or up.
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import sys
|
||||
from distutils.log import *
|
||||
from distutils.log import Log as old_Log
|
||||
from distutils.log import _global_log
|
||||
|
||||
if sys.version_info[0] < 3:
|
||||
from .misc_util import (red_text, default_text, cyan_text, green_text,
|
||||
is_sequence, is_string)
|
||||
else:
|
||||
from numpy.distutils.misc_util import (red_text, default_text, cyan_text,
|
||||
green_text, is_sequence, is_string)
|
||||
|
||||
|
||||
def _fix_args(args, flag=1):
    """Escape literal '%' characters in log arguments so a later
    '%'-formatting pass cannot misinterpret them; recurses one level
    into sequences (flag guards against deeper recursion)."""
    if is_string(args):
        return args.replace('%', '%%')
    if flag and is_sequence(args):
        return tuple(_fix_args(item, flag=0) for item in args)
    return args
|
||||
|
||||
|
||||
class Log(old_Log):
    """distutils Log subclass that colorizes messages by level and
    flushes stdout after every record."""

    def _log(self, level, msg, args):
        # Mirrors distutils.log.Log._log, adding per-level coloring.
        if level >= self.threshold:
            if args:
                msg = msg % _fix_args(args)
            # Removed an unreachable `if 0:` debug block that filtered
            # 'copying'/'byte-compiling' messages — it was dead code.
            print(_global_color_map[level](msg))
            sys.stdout.flush()

    def good(self, msg, *args):
        """
        If we log WARN messages, log this message as a 'nice' anti-warn
        message.

        """
        if WARN >= self.threshold:
            if args:
                print(green_text(msg % _fix_args(args)))
            else:
                print(green_text(msg))
            sys.stdout.flush()
|
||||
|
||||
|
||||
_global_log.__class__ = Log
|
||||
|
||||
good = _global_log.good
|
||||
|
||||
def set_threshold(level, force=False):
    """Set the global log threshold and return the previous one.

    When the log is already at (or below) DEBUG, the threshold is left
    unchanged unless *force* is true.
    """
    prev_level = _global_log.threshold
    if prev_level > DEBUG or force:
        # If we're running at DEBUG, don't change the threshold, as there's
        # likely a good reason why we're running at this level.
        _global_log.threshold = level
        if level <= DEBUG:
            info('set_threshold: setting threshold to DEBUG level,'
                 ' it can be changed only with force argument')
    else:
        info('set_threshold: not changing threshold from DEBUG level'
             ' %s to %s' % (prev_level, level))
    return prev_level
|
||||
|
||||
|
||||
def set_verbosity(v, force=False):
    """Translate a verbosity integer (<0, 0, 1, >=2) into a log threshold
    via set_threshold(); return the verbosity matching the previous
    threshold (defaulting to 1 for unknown levels)."""
    prev_level = _global_log.threshold
    if v < 0:
        set_threshold(ERROR, force)
    elif v == 0:
        set_threshold(WARN, force)
    elif v == 1:
        set_threshold(INFO, force)
    elif v >= 2:
        set_threshold(DEBUG, force)
    level_to_verbosity = {FATAL: -2, ERROR: -1, WARN: 0, INFO: 1, DEBUG: 2}
    return level_to_verbosity.get(prev_level, 1)
|
||||
|
||||
|
||||
_global_color_map = {
|
||||
DEBUG:cyan_text,
|
||||
INFO:default_text,
|
||||
WARN:red_text,
|
||||
ERROR:red_text,
|
||||
FATAL:red_text
|
||||
}
|
||||
|
||||
# don't use INFO,.. flags in set_verbosity, these flags are for set_threshold.
|
||||
set_verbosity(0, force=True)
|
||||
@@ -0,0 +1,6 @@
|
||||
/* Stub definitions of MSVC runtime symbols that a MinGW link against the
   MSVC import library expects but MinGW's runtime does not provide. */
int _get_output_format(void)
{
    return 0;
}

int _imp____lc_codepage = 0;
|
||||
@@ -0,0 +1,656 @@
|
||||
"""
|
||||
Support code for building Python extensions on Windows.
|
||||
|
||||
# NT stuff
|
||||
# 1. Make sure libpython<version>.a exists for gcc. If not, build it.
|
||||
# 2. Force windows to use gcc (we're struggling with MSVC and g77 support)
|
||||
# 3. Force windows to use g77
|
||||
|
||||
"""
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import re
|
||||
import textwrap
|
||||
|
||||
# Overwrite certain distutils.ccompiler functions:
|
||||
import numpy.distutils.ccompiler
|
||||
|
||||
if sys.version_info[0] < 3:
|
||||
from . import log
|
||||
else:
|
||||
from numpy.distutils import log
|
||||
# NT stuff
|
||||
# 1. Make sure libpython<version>.a exists for gcc. If not, build it.
|
||||
# 2. Force windows to use gcc (we're struggling with MSVC and g77 support)
|
||||
# --> this is done in numpy/distutils/ccompiler.py
|
||||
# 3. Force windows to use g77
|
||||
|
||||
import distutils.cygwinccompiler
|
||||
from distutils.version import StrictVersion
|
||||
from distutils.unixccompiler import UnixCCompiler
|
||||
from distutils.msvccompiler import get_build_version as get_build_msvc_version
|
||||
from distutils.errors import (DistutilsExecError, CompileError,
|
||||
UnknownFileError)
|
||||
from numpy.distutils.misc_util import (msvc_runtime_library,
|
||||
msvc_runtime_version,
|
||||
msvc_runtime_major,
|
||||
get_build_architecture)
|
||||
|
||||
def get_msvcr_replacement():
    """Replacement for outdated version of get_msvcr from cygwinccompiler"""
    runtime = msvc_runtime_library()
    if runtime is None:
        return []
    return [runtime]

# monkey-patch cygwinccompiler with our updated version from misc_util
# to avoid getting an exception raised on Python 3.5
distutils.cygwinccompiler.get_msvcr = get_msvcr_replacement
|
||||
|
||||
# Useful to generate table of symbols from a dll
|
||||
_START = re.compile(r'\[Ordinal/Name Pointer\] Table')
|
||||
_TABLE = re.compile(r'^\s+\[([\s*[0-9]*)\] ([a-zA-Z0-9_]*)')
|
||||
|
||||
# the same as cygwin plus some additional parameters
|
||||
class Mingw32CCompiler(distutils.cygwinccompiler.CygwinCCompiler):
    """ A modified MingW32 compiler compatible with an MSVC built Python.

    """

    compiler_type = 'mingw32'

    def __init__ (self,
                  verbose=0,
                  dry_run=0,
                  force=0):

        distutils.cygwinccompiler.CygwinCCompiler.__init__ (self, verbose,
                                                            dry_run, force)

        # we need to support 3.2 which doesn't match the standard
        # get_versions methods regex
        if self.gcc_version is None:
            # NOTE(review): p.stdout.read() yields bytes on Python 3, but
            # re.search is given a str pattern — this branch would raise
            # TypeError there.  Confirm which Pythons exercise this path.
            p = subprocess.Popen(['gcc', '-dumpversion'], shell=True,
                                 stdout=subprocess.PIPE)
            out_string = p.stdout.read()
            p.stdout.close()
            result = re.search(r'(\d+\.\d+)', out_string)
            if result:
                self.gcc_version = StrictVersion(result.group(1))

        # A real mingw32 doesn't need to specify a different entry point,
        # but cygwin 2.91.57 in no-cygwin-mode needs it.
        if self.gcc_version <= "2.91.57":
            entry_point = '--entry _DllMain@12'
        else:
            entry_point = ''

        if self.linker_dll == 'dllwrap':
            # Commented out '--driver-name g++' part that fixes weird
            # g++.exe: g++: No such file or directory
            # error (mingw 1.0 in Enthon24 tree, gcc-3.4.5).
            # If the --driver-name part is required for some environment
            # then make the inclusion of this part specific to that
            # environment.
            self.linker = 'dllwrap'  # --driver-name g++'
        elif self.linker_dll == 'gcc':
            self.linker = 'g++'

        # **changes: eric jones 4/11/01
        # 1. Check for import library on Windows.  Build if it doesn't exist.

        build_import_library()

        # Check for custom msvc runtime library on Windows. Build if it doesn't exist.
        msvcr_success = build_msvcr_library()
        msvcr_dbg_success = build_msvcr_library(debug=True)
        if msvcr_success or msvcr_dbg_success:
            # add preprocessor statement for using customized msvcr lib
            self.define_macro('NPY_MINGW_USE_CUSTOM_MSVCR')

        # Define the MSVC version as hint for MinGW
        msvcr_version = msvc_runtime_version()
        if msvcr_version:
            self.define_macro('__MSVCRT_VERSION__', '0x%04i' % msvcr_version)

        # MS_WIN64 should be defined when building for amd64 on windows,
        # but python headers define it only for MS compilers, which has all
        # kind of bad consequences, like using Py_ModuleInit4 instead of
        # Py_ModuleInit4_64, etc... So we add it here
        if get_build_architecture() == 'AMD64':
            if self.gcc_version < "4.0":
                self.set_executables(
                    compiler='gcc -g -DDEBUG -DMS_WIN64 -mno-cygwin -O0 -Wall',
                    compiler_so='gcc -g -DDEBUG -DMS_WIN64 -mno-cygwin -O0'
                                ' -Wall -Wstrict-prototypes',
                    linker_exe='gcc -g -mno-cygwin',
                    linker_so='gcc -g -mno-cygwin -shared')
            else:
                # gcc-4 series releases do not support -mno-cygwin option
                self.set_executables(
                    compiler='gcc -g -DDEBUG -DMS_WIN64 -O0 -Wall',
                    compiler_so='gcc -g -DDEBUG -DMS_WIN64 -O0 -Wall -Wstrict-prototypes',
                    linker_exe='gcc -g',
                    linker_so='gcc -g -shared')
        else:
            if self.gcc_version <= "3.0.0":
                self.set_executables(
                    compiler='gcc -mno-cygwin -O2 -w',
                    compiler_so='gcc -mno-cygwin -mdll -O2 -w'
                                ' -Wstrict-prototypes',
                    linker_exe='g++ -mno-cygwin',
                    linker_so='%s -mno-cygwin -mdll -static %s' %
                              (self.linker, entry_point))
            elif self.gcc_version < "4.0":
                self.set_executables(
                    compiler='gcc -mno-cygwin -O2 -Wall',
                    compiler_so='gcc -mno-cygwin -O2 -Wall'
                                ' -Wstrict-prototypes',
                    linker_exe='g++ -mno-cygwin',
                    linker_so='g++ -mno-cygwin -shared')
            else:
                # gcc-4 series releases do not support -mno-cygwin option
                self.set_executables(compiler='gcc -O2 -Wall',
                                     compiler_so='gcc -O2 -Wall -Wstrict-prototypes',
                                     linker_exe='g++ ',
                                     linker_so='g++ -shared')
        # added for python2.3 support
        # we can't pass it through set_executables because pre 2.2 would fail
        self.compiler_cxx = ['g++']

        # Maybe we should also append -mthreads, but then the finished dlls
        # need another dll (mingwm10.dll see Mingw32 docs) (-mthreads: Support
        # thread-safe exception handling on `Mingw32')

        # no additional libraries needed
        #self.dll_libraries=[]
        return

    # __init__ ()

    def link(self,
             target_desc,
             objects,
             output_filename,
             output_dir,
             libraries,
             library_dirs,
             runtime_library_dirs,
             export_symbols = None,
             debug=0,
             extra_preargs=None,
             extra_postargs=None,
             build_temp=None,
             target_lang=None):
        """Link, forcing in the MSVC runtime library that matches this
        Python and dispatching to the Cygwin (old gcc) or Unix linker."""
        # Include the appropriate MSVC runtime library if Python was built
        # with MSVC >= 7.0 (MinGW standard is msvcrt)
        runtime_library = msvc_runtime_library()
        if runtime_library:
            if not libraries:
                libraries = []
            libraries.append(runtime_library)
        args = (self,
                target_desc,
                objects,
                output_filename,
                output_dir,
                libraries,
                library_dirs,
                runtime_library_dirs,
                None, #export_symbols, we do this in our def-file
                debug,
                extra_preargs,
                extra_postargs,
                build_temp,
                target_lang)
        if self.gcc_version < "3.0.0":
            func = distutils.cygwinccompiler.CygwinCCompiler.link
        else:
            func = UnixCCompiler.link
        # Pass only as many positional args as the chosen link() accepts.
        func(*args[:func.__code__.co_argcount])
        return

    def object_filenames (self,
                          source_filenames,
                          strip_dir=0,
                          output_dir=''):
        """Map source paths to object-file paths, keeping the extension for
        '.rc'/'.res' resources and stripping Windows drive letters."""
        if output_dir is None: output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
            (base, ext) = os.path.splitext (os.path.normcase(src_name))

            # added these lines to strip off windows drive letters
            # without it, .o files are placed next to .c files
            # instead of the build directory
            drv, base = os.path.splitdrive(base)
            if drv:
                base = base[1:]

            if ext not in (self.src_extensions + ['.rc', '.res']):
                raise UnknownFileError(
                      "unknown file type '%s' (from '%s')" % \
                      (ext, src_name))
            if strip_dir:
                base = os.path.basename (base)
            if ext == '.res' or ext == '.rc':
                # these need to be compiled to object files
                obj_names.append (os.path.join (output_dir,
                                                base + ext + self.obj_extension))
            else:
                obj_names.append (os.path.join (output_dir,
                                                base + self.obj_extension))
        return obj_names

    # object_filenames ()
|
||||
|
||||
# object_filenames ()
|
||||
|
||||
|
||||
def find_python_dll():
    """Locate pythonXY.dll for the running interpreter and return its full
    path; raise ValueError when it cannot be found."""
    # We can't do much here:
    # - find it in the virtualenv (sys.prefix)
    # - find it in python main dir (sys.base_prefix, if in a virtualenv)
    # - sys.real_prefix is main dir for virtualenvs in Python 2.7
    # - in system32,
    # - ortherwise (Sxs), I don't know how to get it.
    stems = [sys.prefix]
    if hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix:
        stems.append(sys.base_prefix)
    elif hasattr(sys, 'real_prefix') and sys.real_prefix != sys.prefix:
        stems.append(sys.real_prefix)

    sub_dirs = ['', 'lib', 'bin']
    # generate possible combinations of directory trees and sub-directories
    lib_dirs = []
    for stem in stems:
        for folder in sub_dirs:
            lib_dirs.append(os.path.join(stem, folder))

    # add system directory as well
    if 'SYSTEMROOT' in os.environ:
        lib_dirs.append(os.path.join(os.environ['SYSTEMROOT'], 'System32'))

    # search in the file system for possible candidates
    major_version, minor_version = tuple(sys.version_info[:2])
    patterns = ['python%d%d.dll']

    for pat in patterns:
        dllname = pat % (major_version, minor_version)
        print("Looking for %s" % dllname)
        for folder in lib_dirs:
            dll = os.path.join(folder, dllname)
            if os.path.exists(dll):
                return dll

    # 'dllname' here is the last candidate tried (patterns is non-empty).
    raise ValueError("%s not found in %s" % (dllname, lib_dirs))
|
||||
|
||||
def dump_table(dll):
    """Return the raw output lines (bytes) of 'objdump.exe -p <dll>'."""
    st = subprocess.Popen(["objdump.exe", "-p", dll], stdout=subprocess.PIPE)
    return st.stdout.readlines()
|
||||
|
||||
def generate_def(dll, dfile):
    """Given a dll file location, get all its exported symbols and dump them
    into the given def file.

    The .def file will be overwritten"""
    dump = dump_table(dll)
    # Find the start of the export table; for-else raises if it is absent.
    for i in range(len(dump)):
        if _START.match(dump[i].decode()):
            break
    else:
        raise ValueError("Symbol table not found")

    # Collect (ordinal, name) pairs until the first non-matching line.
    syms = []
    for j in range(i + 1, len(dump)):
        m = _TABLE.match(dump[j].decode())
        if m:
            syms.append((int(m.group(1).strip()), m.group(2)))
        else:
            break

    if len(syms) == 0:
        log.warn('No symbols found in %s' % dll)

    # Use a context manager so the handle is closed even if a write fails
    # (the original leaked the file object on error).
    with open(dfile, 'w') as d:
        d.write('LIBRARY %s\n' % os.path.basename(dll))
        d.write(';CODE PRELOAD MOVEABLE DISCARDABLE\n')
        d.write(';DATA PRELOAD SINGLE\n')
        d.write('\nEXPORTS\n')
        for s in syms:
            # Ordinals (s[0]) are intentionally not emitted; name-only export.
            d.write('%s\n' % s[1])
|
||||
|
||||
def find_dll(dll_name):
    """Locate `dll_name` on a Windows system.

    Searches the WinSxS store first (restricted to directories matching the
    build architecture), then sys.prefix and every entry of PATH.
    Returns the full path of the dll, or None if not found.
    """

    # Map the compiler's architecture to the substring used in WinSxS paths.
    arch = {'AMD64' : 'amd64',
            'Intel' : 'x86'}[get_build_architecture()]

    def _find_dll_in_winsxs(dll_name):
        # Walk through the WinSxS directory to find the dll.
        winsxs_path = os.path.join(os.environ.get('WINDIR', r'C:\WINDOWS'),
                                   'winsxs')
        if not os.path.exists(winsxs_path):
            return None
        for root, dirs, files in os.walk(winsxs_path):
            # require the arch tag in the path so we don't pick the wrong ABI
            if dll_name in files and arch in root:
                return os.path.join(root, dll_name)
        return None

    def _find_dll_in_path(dll_name):
        # First, look in the Python directory, then scan PATH for
        # the given dll name.
        # NOTE(review): PATH is split on ';' (Windows separator); this helper
        # implicitly returns None when nothing matches.
        for path in [sys.prefix] + os.environ['PATH'].split(';'):
            filepath = os.path.join(path, dll_name)
            if os.path.exists(filepath):
                return os.path.abspath(filepath)

    return _find_dll_in_winsxs(dll_name) or _find_dll_in_path(dll_name)
|
||||
|
||||
def build_msvcr_library(debug=False):
    """Build a MinGW import library (libmsvcrXX.a) for the MSVC runtime.

    Returns True when the library exists or was built successfully, False
    when building is skipped or fails. No-op (False) on non-Windows.

    Parameters
    ----------
    debug : bool
        When True, target the debug runtime (name suffixed with 'd').
    """
    if os.name != 'nt':
        return False

    # If the version number is None, then we couldn't find the MSVC runtime at
    # all, because we are running on a Python distribution which is customed
    # compiled; trust that the compiler is the same as the one available to us
    # now, and that it is capable of linking with the correct runtime without
    # any extra options.
    msvcr_ver = msvc_runtime_major()
    if msvcr_ver is None:
        # NOTE(review): message says "import library" but this function builds
        # the msvcr library — the wording looks like a copy/paste slip.
        log.debug('Skip building import library: '
                  'Runtime is not compiled with MSVC')
        return False

    # Skip using a custom library for versions < MSVC 8.0
    if msvcr_ver < 80:
        log.debug('Skip building msvcr library:'
                  ' custom functionality not present')
        return False

    msvcr_name = msvc_runtime_library()
    if debug:
        msvcr_name += 'd'

    # Skip if custom library already exists
    out_name = "lib%s.a" % msvcr_name
    out_file = os.path.join(sys.prefix, 'libs', out_name)
    if os.path.isfile(out_file):
        log.debug('Skip building msvcr library: "%s" exists' %
                  (out_file,))
        return True

    # Find the msvcr dll
    msvcr_dll_name = msvcr_name + '.dll'
    dll_file = find_dll(msvcr_dll_name)
    if not dll_file:
        log.warn('Cannot build msvcr library: "%s" not found' %
                 msvcr_dll_name)
        return False

    def_name = "lib%s.def" % msvcr_name
    def_file = os.path.join(sys.prefix, 'libs', def_name)

    log.info('Building msvcr library: "%s" (from %s)' \
             % (out_file, dll_file))

    # Generate a symbol definition file from the msvcr dll
    generate_def(dll_file, def_file)

    # Create a custom mingw library for the given symbol definitions
    cmd = ['dlltool', '-d', def_file, '-l', out_file]
    retcode = subprocess.call(cmd)

    # Clean up symbol definitions
    os.remove(def_file)

    # dlltool returns 0 on success, hence the negation
    return (not retcode)
|
||||
|
||||
def build_import_library():
    """Build the MinGW import library for the running Python (Windows only).

    Dispatches to the architecture-specific builder; raises ValueError for
    an unrecognized build architecture.
    """
    if os.name != 'nt':
        return

    arch = get_build_architecture()
    builders = {'AMD64': _build_import_library_amd64,
                'Intel': _build_import_library_x86}
    builder = builders.get(arch)
    if builder is None:
        raise ValueError("Unhandled arch %s" % arch)
    return builder()
|
||||
|
||||
def _check_for_import_lib():
|
||||
"""Check if an import library for the Python runtime already exists."""
|
||||
major_version, minor_version = tuple(sys.version_info[:2])
|
||||
|
||||
# patterns for the file name of the library itself
|
||||
patterns = ['libpython%d%d.a',
|
||||
'libpython%d%d.dll.a',
|
||||
'libpython%d.%d.dll.a']
|
||||
|
||||
# directory trees that may contain the library
|
||||
stems = [sys.prefix]
|
||||
if hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix:
|
||||
stems.append(sys.base_prefix)
|
||||
elif hasattr(sys, 'real_prefix') and sys.real_prefix != sys.prefix:
|
||||
stems.append(sys.real_prefix)
|
||||
|
||||
# possible subdirectories within those trees where it is placed
|
||||
sub_dirs = ['libs', 'lib']
|
||||
|
||||
# generate a list of candidate locations
|
||||
candidates = []
|
||||
for pat in patterns:
|
||||
filename = pat % (major_version, minor_version)
|
||||
for stem_dir in stems:
|
||||
for folder in sub_dirs:
|
||||
candidates.append(os.path.join(stem_dir, folder, filename))
|
||||
|
||||
# test the filesystem to see if we can find any of these
|
||||
for fullname in candidates:
|
||||
if os.path.isfile(fullname):
|
||||
# already exists, in location given
|
||||
return (True, fullname)
|
||||
|
||||
# needs to be built, preferred location given first
|
||||
return (False, candidates[0])
|
||||
|
||||
def _build_import_library_amd64():
    """Build the AMD64 MinGW import library for the Python runtime dll.

    Skips the build when a suitable import library already exists; otherwise
    dumps the dll's exports to a .def file and runs dlltool on it.

    Raises
    ------
    subprocess.CalledProcessError
        If dlltool exits with a non-zero status.
    """
    out_exists, out_file = _check_for_import_lib()
    if out_exists:
        log.debug('Skip building import library: "%s" exists', out_file)
        return

    # get the runtime dll for which we are building import library
    dll_file = find_python_dll()
    log.info('Building import library (arch=AMD64): "%s" (from %s)' %
             (out_file, dll_file))

    # generate symbol list from this library
    def_name = "python%d%d.def" % tuple(sys.version_info[:2])
    def_file = os.path.join(sys.prefix, 'libs', def_name)
    generate_def(dll_file, def_file)

    # generate import library from this symbol list.
    # BUG FIX: the previous fire-and-forget subprocess.Popen(cmd) never
    # waited for dlltool nor checked its exit status, so the build could
    # proceed with a missing or partial import library. check_call blocks
    # and raises on failure.
    cmd = ['dlltool', '-d', def_file, '-l', out_file]
    subprocess.check_call(cmd)
|
||||
|
||||
def _build_import_library_x86():
    """ Build the import libraries for Mingw32-gcc on Windows

    Skips the build when a suitable import library already exists. Uses
    lib2def + dlltool on python<XY>.lib (falling back to the base
    distribution's copy when running inside a virtualenv).
    """
    out_exists, out_file = _check_for_import_lib()
    if out_exists:
        log.debug('Skip building import library: "%s" exists', out_file)
        return

    lib_name = "python%d%d.lib" % tuple(sys.version_info[:2])
    lib_file = os.path.join(sys.prefix, 'libs', lib_name)
    if not os.path.isfile(lib_file):
        # didn't find library file in virtualenv, try base distribution, too,
        # and use that instead if found there. for Python 2.7 venvs, the base
        # directory is in attribute real_prefix instead of base_prefix.
        if hasattr(sys, 'base_prefix'):
            base_lib = os.path.join(sys.base_prefix, 'libs', lib_name)
        elif hasattr(sys, 'real_prefix'):
            base_lib = os.path.join(sys.real_prefix, 'libs', lib_name)
        else:
            base_lib = ''  # os.path.isfile('') == False

        if os.path.isfile(base_lib):
            lib_file = base_lib
        else:
            log.warn('Cannot build import library: "%s" not found', lib_file)
            return
    log.info('Building import library (ARCH=x86): "%s"', out_file)

    from numpy.distutils import lib2def

    def_name = "python%d%d.def" % tuple(sys.version_info[:2])
    def_file = os.path.join(sys.prefix, 'libs', def_name)
    nm_cmd = '%s %s' % (lib2def.DEFAULT_NM, lib_file)
    nm_output = lib2def.getnm(nm_cmd)
    dlist, flist = lib2def.parse_nm(nm_output)
    # BUG FIX: the file handle passed to output_def was never closed; a
    # context manager guarantees it is flushed and closed before dlltool
    # reads the .def file.
    with open(def_file, 'w') as fid:
        lib2def.output_def(dlist, flist, lib2def.DEF_HEADER, fid)

    dll_name = find_python_dll()
    args = (dll_name, def_file, out_file)
    cmd = 'dlltool --dllname "%s" --def "%s" --output-lib "%s"' % args
    status = os.system(cmd)
    # for now, fail silently
    if status:
        log.warn('Failed to build import library for gcc. Linking will fail.')
    return
|
||||
|
||||
#=====================================
|
||||
# Dealing with Visual Studio MANIFESTS
|
||||
#=====================================
|
||||
|
||||
# Functions to deal with visual studio manifests. Manifest are a mechanism to
|
||||
# enforce strong DLL versioning on windows, and has nothing to do with
|
||||
# distutils MANIFEST. manifests are XML files with version info, and used by
|
||||
# the OS loader; they are necessary when linking against a DLL not in the
|
||||
# system path; in particular, official python 2.6 binary is built against the
|
||||
# MS runtime 9 (the one from VS 2008), which is not available on most windows
|
||||
# systems; python 2.6 installer does install it in the Win SxS (Side by side)
|
||||
# directory, but this requires the manifest for this to work. This is a big
|
||||
# mess, thanks MS for a wonderful system.
|
||||
|
||||
# XXX: ideally, we should use exactly the same version as used by python. I
|
||||
# submitted a patch to get this version, but it was only included for python
|
||||
# 2.6.1 and above. So for versions below, we use a "best guess".
|
||||
# Map "majmin" MSVC runtime version strings (e.g. '90') to the full assembly
# version used in manifests; populated only on Windows builds linked with MSVC.
_MSVCRVER_TO_FULLVER = {}
if sys.platform == 'win32':
    try:
        import msvcrt
        # I took one version in my SxS directory: no idea if it is the good
        # one, and we can't retrieve it from python
        _MSVCRVER_TO_FULLVER['80'] = "8.0.50727.42"
        _MSVCRVER_TO_FULLVER['90'] = "9.0.21022.8"
        # Value from msvcrt.CRT_ASSEMBLY_VERSION under Python 3.3.0
        # on Windows XP:
        _MSVCRVER_TO_FULLVER['100'] = "10.0.30319.460"
        if hasattr(msvcrt, "CRT_ASSEMBLY_VERSION"):
            # prefer the exact version the interpreter was built against
            major, minor, rest = msvcrt.CRT_ASSEMBLY_VERSION.split(".", 2)
            _MSVCRVER_TO_FULLVER[major + minor] = msvcrt.CRT_ASSEMBLY_VERSION
            del major, minor, rest
    except ImportError:
        # If we are here, means python was not built with MSVC. Not sure what
        # to do in that case: manifest building will fail, but it should not be
        # used in that case anyway
        log.warn('Cannot import msvcrt: using manifest will not be possible')
|
||||
|
||||
def msvc_manifest_xml(maj, min):
    """Given a major and minor version of the MSVCR, returns the
    corresponding XML file.

    Raises ValueError when the (maj, min) pair is not present in
    _MSVCRVER_TO_FULLVER.
    """
    try:
        # keys of _MSVCRVER_TO_FULLVER are e.g. '90' for (9, 0)
        fullver = _MSVCRVER_TO_FULLVER[str(maj * 10 + min)]
    except KeyError:
        raise ValueError("Version %d,%d of MSVCRT not supported yet" %
                         (maj, min))
    # Don't be fooled, it looks like an XML, but it is not. In particular, it
    # should not have any space before starting, and its size should be
    # divisible by 4, most likely for alignment constraints when the xml is
    # embedded in the binary...
    # This template was copied directly from the python 2.6 binary (using
    # strings.exe from mingw on python.exe).
    template = textwrap.dedent("""\
        <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
          <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
            <security>
              <requestedPrivileges>
                <requestedExecutionLevel level="asInvoker" uiAccess="false"></requestedExecutionLevel>
              </requestedPrivileges>
            </security>
          </trustInfo>
          <dependency>
            <dependentAssembly>
              <assemblyIdentity type="win32" name="Microsoft.VC%(maj)d%(min)d.CRT" version="%(fullver)s" processorArchitecture="*" publicKeyToken="1fc8b3b9a1e18e3b"></assemblyIdentity>
            </dependentAssembly>
          </dependency>
        </assembly>""")

    return template % {'fullver': fullver, 'maj': maj, 'min': min}
|
||||
|
||||
def manifest_rc(name, type='dll'):
    """Return the rc file used to generate the res file which will be embedded
    as manifest for given manifest file name, of given type ('dll' or
    'exe').

    Parameters
    ----------
    name : str
        name of the manifest file to embed
    type : str {'dll', 'exe'}
        type of the binary which will embed the manifest

    """
    # resource type id: 2 for a dll manifest, 1 for an exe manifest
    try:
        rctype = {'dll': 2, 'exe': 1}[type]
    except KeyError:
        raise ValueError("Type %s not supported" % type)

    return """\
#include "winuser.h"
%d RT_MANIFEST %s""" % (rctype, name)
|
||||
|
||||
def check_embedded_msvcr_match_linked(msver):
    """msver is the ms runtime version used for the MANIFEST.

    Raises ValueError when the runtime Python was linked against differs
    from the one about to be embedded in the manifest.
    """
    # check msvcr major version are the same for linking and embedding
    linked_maj = msvc_runtime_major()
    if linked_maj and linked_maj != int(msver):
        raise ValueError(
            "Discrepancy between linked msvcr "
            "(%d) and the one about to be embedded "
            "(%d)" % (int(msver), linked_maj))
|
||||
|
||||
def configtest_name(config):
    """Return the base name (no extension) of the config-test source file."""
    source_path = config._gen_temp_sourcefile("yo", [], "c")
    root, _ext = os.path.splitext(os.path.basename(source_path))
    return root
|
||||
|
||||
def manifest_name(config):
    """Return the manifest file name for the config-test executable,
    e.g. "_configtest.exe.manifest".
    """
    # Get configtest name (including the executable suffix)
    base = configtest_name(config)
    exe_ext = config.compiler.exe_extension
    return base + exe_ext + ".manifest"
|
||||
|
||||
def rc_name(config):
    """Return the .rc file name for the config-test binary."""
    return configtest_name(config) + ".rc"
|
||||
|
||||
def generate_manifest(config):
    """Write the MSVC runtime manifest for the config-test binary.

    Only acts when building with MSVC >= 8; registers the manifest file
    in ``config.temp_files`` so it is cleaned up later.
    """
    msver = get_build_msvc_version()
    if msver is not None:
        if msver >= 8:
            check_embedded_msvcr_match_linked(msver)
            ma = int(msver)
            mi = int((msver - ma) * 10)
            # Write the manifest file
            manxml = msvc_manifest_xml(ma, mi)
            man_path = manifest_name(config)
            # BUG FIX: the file was opened without a context manager, leaking
            # the handle if the write failed; `with` guarantees closure.
            with open(man_path, "w") as man:
                config.temp_files.append(man_path)
                man.write(manxml)
|
||||
2323
venv/lib/python3.6/site-packages/numpy/distutils/misc_util.py
Normal file
2323
venv/lib/python3.6/site-packages/numpy/distutils/misc_util.py
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,65 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import os
|
||||
from distutils.msvc9compiler import MSVCCompiler as _MSVCCompiler
|
||||
|
||||
from .system_info import platform_bits
|
||||
|
||||
|
||||
def _merge(old, new):
|
||||
"""Concatenate two environment paths avoiding repeats.
|
||||
|
||||
Here `old` is the environment string before the base class initialize
|
||||
function is called and `new` is the string after the call. The new string
|
||||
will be a fixed string if it is not obtained from the current environment,
|
||||
or the same as the old string if obtained from the same environment. The aim
|
||||
here is not to append the new string if it is already contained in the old
|
||||
string so as to limit the growth of the environment string.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
old : string
|
||||
Previous environment string.
|
||||
new : string
|
||||
New environment string.
|
||||
|
||||
Returns
|
||||
-------
|
||||
ret : string
|
||||
Updated environment string.
|
||||
|
||||
"""
|
||||
if not old:
|
||||
return new
|
||||
if new in old:
|
||||
return old
|
||||
|
||||
# Neither new nor old is empty. Give old priority.
|
||||
return ';'.join([old, new])
|
||||
|
||||
|
||||
class MSVCCompiler(_MSVCCompiler):
    """MSVC (msvc9) compiler subclass that preserves the pre-existing 'lib'
    and 'include' environment variables across initialize() and always
    requests /MANIFEST generation at link time.
    """
    def __init__(self, verbose=0, dry_run=0, force=0):
        _MSVCCompiler.__init__(self, verbose, dry_run, force)

    def initialize(self, plat_name=None):
        # The 'lib' and 'include' variables may be overwritten
        # by MSVCCompiler.initialize, so save them for later merge.
        # NOTE(review): getenv may return None here; _merge treats a falsy
        # `old` as empty, so this works — confirm if refactoring _merge.
        environ_lib = os.getenv('lib')
        environ_include = os.getenv('include')
        _MSVCCompiler.initialize(self, plat_name)

        # Merge current and previous values of 'lib' and 'include'
        os.environ['lib'] = _merge(environ_lib, os.environ['lib'])
        os.environ['include'] = _merge(environ_include, os.environ['include'])

        # msvc9 building for 32 bits requires SSE2 to work around a
        # compiler bug.
        if platform_bits == 32:
            self.compile_options += ['/arch:SSE2']
            self.compile_options_debug += ['/arch:SSE2']

    def manifest_setup_ldargs(self, output_filename, build_temp, ld_args):
        # force manifest generation before delegating to the base class
        ld_args.append('/MANIFEST')
        _MSVCCompiler.manifest_setup_ldargs(self, output_filename,
                                            build_temp, ld_args)
|
||||
@@ -0,0 +1,60 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import os
|
||||
from distutils.msvccompiler import MSVCCompiler as _MSVCCompiler
|
||||
|
||||
from .system_info import platform_bits
|
||||
|
||||
|
||||
def _merge(old, new):
|
||||
"""Concatenate two environment paths avoiding repeats.
|
||||
|
||||
Here `old` is the environment string before the base class initialize
|
||||
function is called and `new` is the string after the call. The new string
|
||||
will be a fixed string if it is not obtained from the current environment,
|
||||
or the same as the old string if obtained from the same environment. The aim
|
||||
here is not to append the new string if it is already contained in the old
|
||||
string so as to limit the growth of the environment string.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
old : string
|
||||
Previous environment string.
|
||||
new : string
|
||||
New environment string.
|
||||
|
||||
Returns
|
||||
-------
|
||||
ret : string
|
||||
Updated environment string.
|
||||
|
||||
"""
|
||||
if new in old:
|
||||
return old
|
||||
if not old:
|
||||
return new
|
||||
|
||||
# Neither new nor old is empty. Give old priority.
|
||||
return ';'.join([old, new])
|
||||
|
||||
|
||||
class MSVCCompiler(_MSVCCompiler):
    """MSVC compiler subclass that preserves the pre-existing 'lib' and
    'include' environment variables across initialize().
    """
    def __init__(self, verbose=0, dry_run=0, force=0):
        _MSVCCompiler.__init__(self, verbose, dry_run, force)

    def initialize(self):
        # The 'lib' and 'include' variables may be overwritten
        # by MSVCCompiler.initialize, so save them for later merge.
        environ_lib = os.getenv('lib', '')
        environ_include = os.getenv('include', '')
        _MSVCCompiler.initialize(self)

        # Merge current and previous values of 'lib' and 'include'
        os.environ['lib'] = _merge(environ_lib, os.environ['lib'])
        os.environ['include'] = _merge(environ_include, os.environ['include'])

        # msvc9 building for 32 bits requires SSE2 to work around a
        # compiler bug.
        if platform_bits == 32:
            self.compile_options += ['/arch:SSE2']
            self.compile_options_debug += ['/arch:SSE2']
|
||||
@@ -0,0 +1,443 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import sys
|
||||
import re
|
||||
import os
|
||||
|
||||
if sys.version_info[0] < 3:
|
||||
from ConfigParser import RawConfigParser
|
||||
else:
|
||||
from configparser import RawConfigParser
|
||||
|
||||
__all__ = ['FormatError', 'PkgNotFound', 'LibraryInfo', 'VariableSet',
|
||||
'read_config', 'parse_flags']
|
||||
|
||||
_VAR = re.compile(r'\$\{([a-zA-Z0-9_-]+)\}')
|
||||
|
||||
class FormatError(IOError):
    """
    Exception thrown when there is a problem parsing a configuration file.

    """
    def __init__(self, msg):
        # keep the message on the instance; __str__ echoes it verbatim
        self.msg = msg

    def __str__(self):
        return self.msg
|
||||
|
||||
class PkgNotFound(IOError):
    """Exception raised when a package can not be located."""
    def __init__(self, msg):
        # keep the message on the instance; __str__ echoes it verbatim
        self.msg = msg

    def __str__(self):
        return self.msg
|
||||
|
||||
def parse_flags(line):
    """
    Parse a line from a config file containing compile flags.

    Parameters
    ----------
    line : str
        A single line containing one or more compile flags.

    Returns
    -------
    d : dict
        Dictionary of parsed flags, split into relevant categories.
        These categories are the keys of `d`:

        * 'include_dirs'
        * 'library_dirs'
        * 'libraries'
        * 'macros'
        * 'ignored'

    """
    parsed = {'include_dirs': [], 'library_dirs': [], 'libraries': [],
              'macros': [], 'ignored': []}

    # map a flag prefix to the bucket its argument belongs to; checked in
    # this order (case-sensitive, so -l and -L are distinct)
    destinations = {'-I': 'include_dirs',
                    '-L': 'library_dirs',
                    '-l': 'libraries',
                    '-D': 'macros'}

    # prepend a space so every flag (including the first) is split on ' -'
    for token in (' ' + line).split(' -'):
        token = '-' + token
        for prefix, dest in destinations.items():
            if token.startswith(prefix):
                parsed[dest].append(token[2:].strip())
                break
        else:
            parsed['ignored'].append(token)

    return parsed
|
||||
|
||||
def _escape_backslash(val):
|
||||
return val.replace('\\', '\\\\')
|
||||
|
||||
class LibraryInfo(object):
    """
    Object containing build information about a library.

    Parameters
    ----------
    name : str
        The library name.
    description : str
        Description of the library.
    version : str
        Version string.
    sections : dict
        The sections of the configuration file for the library. The keys are
        the section headers, the values the text under each header.
    vars : class instance
        A `VariableSet` instance, which contains ``(name, value)`` pairs for
        variables defined in the configuration file for the library.
    requires : sequence, optional
        The required libraries for the library to be installed.

    Notes
    -----
    All input parameters (except "sections" which is a method) are available as
    attributes of the same name.

    """
    def __init__(self, name, description, version, sections, vars, requires=None):
        self.name = name
        self.description = description
        # default to a fresh empty list rather than sharing a mutable default
        self.requires = requires if requires else []
        self.version = version
        self._sections = sections
        self.vars = vars

    def sections(self):
        """
        Return the section headers of the config file.

        Parameters
        ----------
        None

        Returns
        -------
        keys : list of str
            The list of section headers.

        """
        return list(self._sections.keys())

    def cflags(self, section="default"):
        """Return the interpolated, backslash-escaped cflags of *section*."""
        val = self.vars.interpolate(self._sections[section]['cflags'])
        return _escape_backslash(val)

    def libs(self, section="default"):
        """Return the interpolated, backslash-escaped libs of *section*."""
        val = self.vars.interpolate(self._sections[section]['libs'])
        return _escape_backslash(val)

    def __str__(self):
        m = ['Name: %s' % self.name, 'Description: %s' % self.description]
        # BUG FIX: the branches were inverted — the requirement list was
        # only joined/printed when `requires` was empty.
        if self.requires:
            m.append('Requires: %s' % ",".join(self.requires))
        else:
            m.append('Requires:')
        m.append('Version: %s' % self.version)

        return "\n".join(m)
|
||||
|
||||
class VariableSet(object):
    """
    Container object for the variables defined in a config file.

    `VariableSet` can be used as a plain dictionary, with the variable names
    as keys.

    Parameters
    ----------
    d : dict
        Dict of items in the "variables" section of the configuration file.

    """
    def __init__(self, d):
        # private copy of the raw (name, value) pairs
        self._raw_data = dict([(k, v) for k, v in d.items()])

        # per-variable compiled '${name}' pattern and its replacement string
        self._re = {}
        self._re_sub = {}

        self._init_parse()

    def _init_parse(self):
        # build a substitution regex for every known variable
        for k, v in self._raw_data.items():
            self._init_parse_var(k, v)

    def _init_parse_var(self, name, value):
        # '${name}' occurrences will be replaced by `value`
        self._re[name] = re.compile(r'\$\{%s\}' % name)
        self._re_sub[name] = value

    def interpolate(self, value):
        """Expand every known '${var}' reference in *value* and return it."""
        # Brute force: we keep interpolating until there is no '${var}' anymore
        # or until interpolated string is equal to input string
        def _interpolate(value):
            for k in self._re.keys():
                value = self._re[k].sub(self._re_sub[k], value)
            return value
        # _VAR is the module-level '${...}' pattern; looping handles
        # variables whose values themselves contain '${...}' references
        while _VAR.search(value):
            nvalue = _interpolate(value)
            if nvalue == value:
                # remaining '${...}' refers to an unknown variable: stop
                break
            value = nvalue

        return value

    def variables(self):
        """
        Return the list of variable names.

        Parameters
        ----------
        None

        Returns
        -------
        names : list of str
            The names of all variables in the `VariableSet` instance.

        """
        return list(self._raw_data.keys())

    # Emulate a dict to set/get variables values
    def __getitem__(self, name):
        return self._raw_data[name]

    def __setitem__(self, name, value):
        # keep the substitution tables in sync with the raw data
        self._raw_data[name] = value
        self._init_parse_var(name, value)
|
||||
|
||||
def parse_meta(config):
    """Extract and validate the [meta] section of *config* as a dict.

    Parameters
    ----------
    config : RawConfigParser
        Parsed configuration file.

    Returns
    -------
    d : dict
        The [meta] options; 'requires' defaults to an empty list.

    Raises
    ------
    FormatError
        If the [meta] section or a mandatory option is missing.
    """
    if not config.has_section('meta'):
        raise FormatError("No meta section found !")

    d = dict(config.items('meta'))

    for k in ['name', 'description', 'version']:
        # idiomatic membership test (was `not k in d`)
        if k not in d:
            raise FormatError("Option %s (section [meta]) is mandatory, "
                              "but not found" % k)

    if 'requires' not in d:
        d['requires'] = []

    return d
|
||||
|
||||
def parse_variables(config):
    """Return a VariableSet built from the [variables] section of *config*.

    Raises FormatError when the section is missing.
    """
    if not config.has_section('variables'):
        raise FormatError("No variables section found !")

    return VariableSet(dict(config.items("variables")))
|
||||
|
||||
def parse_sections(config):
    # NOTE(review): `meta_d` and `r` are never defined in this function, so
    # calling it raises NameError. It appears to be dead/vestigial code —
    # left untouched pending confirmation that nothing calls it.
    return meta_d, r
|
||||
|
||||
def pkg_to_filename(pkg_name):
    """Map a package name to its npy-pkg-config file name (``<name>.ini``)."""
    return pkg_name + ".ini"
|
||||
|
||||
def parse_config(filename, dirs=None):
    """Read an npy-pkg-config ini file and return its parsed pieces.

    Parameters
    ----------
    filename : str
        Name of the ini file (searched in each of *dirs* when given).
    dirs : sequence of str, optional
        Directories to prepend to *filename* as candidate locations.

    Returns
    -------
    (meta, vars, sections, requires) : tuple
        [meta] options dict, backslash-escaped [variables] dict, a dict of
        the remaining sections' options, and per-section 'requires' values.

    Raises
    ------
    PkgNotFound
        When none of the candidate files could be read.
    """
    if dirs:
        filenames = [os.path.join(d, filename) for d in dirs]
    else:
        filenames = [filename]

    config = RawConfigParser()

    # config.read returns the list of files successfully parsed
    n = config.read(filenames)
    if not n:  # idiom: was `if not len(n) >= 1`
        raise PkgNotFound("Could not find file(s) %s" % str(filenames))

    # Parse meta and variables sections
    meta = parse_meta(config)

    vars = {}
    if config.has_section('variables'):
        for name, value in config.items("variables"):
            vars[name] = _escape_backslash(value)

    # Parse "normal" sections
    secs = [s for s in config.sections() if s not in ('meta', 'variables')]
    sections = {}

    requires = {}
    for s in secs:
        d = {}
        if config.has_option(s, "requires"):
            requires[s] = config.get(s, 'requires')

        for name, value in config.items(s):
            d[name] = value
        sections[s] = d

    return meta, vars, sections, requires
|
||||
|
||||
def _read_config_imp(filenames, dirs=None):
    """Parse a config file (and, recursively, everything it requires) and
    return the merged result as a LibraryInfo instance.
    """
    def _read_config(f):
        meta, vars, sections, reqs = parse_config(f, dirs)
        # recursively add sections and variables of required libraries
        for rname, rvalue in reqs.items():
            nmeta, nvars, nsections, nreqs = _read_config(pkg_to_filename(rvalue))

            # Update var dict for variables not in 'top' config file
            for k, v in nvars.items():
                if not k in vars:
                    vars[k] = v

            # Update sec dict: append the required library's options for the
            # requiring section to our own
            for oname, ovalue in nsections[rname].items():
                if ovalue:
                    sections[rname][oname] += ' %s' % ovalue

        return meta, vars, sections, reqs

    meta, vars, sections, reqs = _read_config(filenames)

    # FIXME: document this. If pkgname is defined in the variables section, and
    # there is no pkgdir variable defined, pkgdir is automatically defined to
    # the path of pkgname. This requires the package to be imported to work
    if not 'pkgdir' in vars and "pkgname" in vars:
        pkgname = vars["pkgname"]
        if not pkgname in sys.modules:
            raise ValueError("You should import %s to get information on %s" %
                             (pkgname, meta["name"]))

        mod = sys.modules[pkgname]
        vars["pkgdir"] = _escape_backslash(os.path.dirname(mod.__file__))

    return LibraryInfo(name=meta["name"], description=meta["description"],
            version=meta["version"], sections=sections, vars=VariableSet(vars))
|
||||
|
||||
# Trivial cache to cache LibraryInfo instances creation. To be really
|
||||
# efficient, the cache should be handled in read_config, since a same file can
|
||||
# be parsed many time outside LibraryInfo creation, but I doubt this will be a
|
||||
# problem in practice
|
||||
_CACHE = {}
def read_config(pkgname, dirs=None):
    """
    Return library info for a package from its configuration file.

    Results are memoized in the module-level ``_CACHE`` keyed by package
    name, so repeated calls do not re-parse the file.

    Parameters
    ----------
    pkgname : str
        Name of the package (should match the name of the .ini file, without
        the extension, e.g. foo for the file foo.ini).
    dirs : sequence, optional
        If given, should be a sequence of directories - usually including
        the NumPy base directory - where to look for npy-pkg-config files.

    Returns
    -------
    pkginfo : class instance
        The `LibraryInfo` instance containing the build information.

    Raises
    ------
    PkgNotFound
        If the package is not found.

    See Also
    --------
    misc_util.get_info, misc_util.get_pkg_info

    """
    # EAFP: hit the cache first, parse and memoize on a miss
    try:
        return _CACHE[pkgname]
    except KeyError:
        info = _read_config_imp(pkg_to_filename(pkgname), dirs)
        _CACHE[pkgname] = info
        return info
|
||||
|
||||
# TODO:
|
||||
# - implements version comparison (modversion + atleast)
|
||||
|
||||
# pkg-config simple emulator - useful for debugging, and maybe later to query
|
||||
# the system
|
||||
if __name__ == '__main__':
    import sys
    from optparse import OptionParser
    import glob

    # Command-line front end emulating a minimal pkg-config for .ini files.
    parser = OptionParser()
    parser.add_option("--cflags", dest="cflags", action="store_true",
                      help="output all preprocessor and compiler flags")
    parser.add_option("--libs", dest="libs", action="store_true",
                      help="output all linker flags")
    parser.add_option("--use-section", dest="section",
                      help="use this section instead of default for options")
    parser.add_option("--version", dest="version", action="store_true",
                      help="output version")
    parser.add_option("--atleast-version", dest="min_version",
                      help="Minimal version")
    parser.add_option("--list-all", dest="list_all", action="store_true",
                      help="Minimal version")
    parser.add_option("--define-variable", dest="define_variable",
                      help="Replace variable with the given value")

    (options, args) = parser.parse_args(sys.argv)

    # args[0] is the script name, so a package name means len(args) >= 2
    if len(args) < 2:
        raise ValueError("Expect package name on the command line:")

    if options.list_all:
        files = glob.glob("*.ini")
        for f in files:
            info = read_config(f)
            print("%s\t%s - %s" % (info.name, info.name, info.description))

    pkg_name = args[1]
    # optional extra search directory from the environment
    d = os.environ.get('NPY_PKG_CONFIG_PATH')
    if d:
        info = read_config(pkg_name, ['numpy/core/lib/npy-pkg-config', '.', d])
    else:
        info = read_config(pkg_name, ['numpy/core/lib/npy-pkg-config', '.'])

    if options.section:
        section = options.section
    else:
        section = "default"

    if options.define_variable:
        # expected form: --define-variable=name=value
        m = re.search(r'([\S]+)=([\S]+)', options.define_variable)
        if not m:
            raise ValueError("--define-variable option should be of " \
                             "the form --define-variable=foo=bar")
        else:
            name = m.group(1)
            value = m.group(2)
            info.vars[name] = value

    if options.cflags:
        print(info.cflags(section))
    if options.libs:
        print(info.libs(section))
    if options.version:
        print(info.version)
    if options.min_version:
        # plain string comparison — not a true version comparison (see TODO above)
        print(info.version >= options.min_version)
|
||||
@@ -0,0 +1,19 @@
|
||||
# XXX: Handle setuptools ?
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
from distutils.core import Distribution
|
||||
|
||||
# This class is used because we add new files (sconscripts, and so on) with the
|
||||
# scons command
|
||||
class NumpyDistribution(Distribution):
|
||||
def __init__(self, attrs = None):
|
||||
# A list of (sconscripts, pre_hook, post_hook, src, parent_names)
|
||||
self.scons_data = []
|
||||
# A list of installable libraries
|
||||
self.installed_libraries = []
|
||||
# A dict of pkg_config files to generate/install
|
||||
self.installed_pkg_config = {}
|
||||
Distribution.__init__(self, attrs)
|
||||
|
||||
def has_scons_scripts(self):
|
||||
return bool(self.scons_data)
|
||||
@@ -0,0 +1,23 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
from distutils.unixccompiler import UnixCCompiler
|
||||
|
||||
class PathScaleCCompiler(UnixCCompiler):
|
||||
|
||||
"""
|
||||
PathScale compiler compatible with an gcc built Python.
|
||||
"""
|
||||
|
||||
compiler_type = 'pathcc'
|
||||
cc_exe = 'pathcc'
|
||||
cxx_exe = 'pathCC'
|
||||
|
||||
def __init__ (self, verbose=0, dry_run=0, force=0):
|
||||
UnixCCompiler.__init__ (self, verbose, dry_run, force)
|
||||
cc_compiler = self.cc_exe
|
||||
cxx_compiler = self.cxx_exe
|
||||
self.set_executables(compiler=cc_compiler,
|
||||
compiler_so=cc_compiler,
|
||||
compiler_cxx=cxx_compiler,
|
||||
linker_exe=cc_compiler,
|
||||
linker_so=cc_compiler + ' -shared')
|
||||
17
venv/lib/python3.6/site-packages/numpy/distutils/setup.py
Normal file
17
venv/lib/python3.6/site-packages/numpy/distutils/setup.py
Normal file
@@ -0,0 +1,17 @@
|
||||
#!/usr/bin/env python
|
||||
from __future__ import division, print_function
|
||||
|
||||
def configuration(parent_package='',top_path=None):
|
||||
from numpy.distutils.misc_util import Configuration
|
||||
config = Configuration('distutils', parent_package, top_path)
|
||||
config.add_subpackage('command')
|
||||
config.add_subpackage('fcompiler')
|
||||
config.add_data_dir('tests')
|
||||
config.add_data_files('site.cfg')
|
||||
config.add_data_files('mingw/gfortran_vs2003_hack.c')
|
||||
config.make_config_py()
|
||||
return config
|
||||
|
||||
if __name__ == '__main__':
|
||||
from numpy.distutils.core import setup
|
||||
setup(configuration=configuration)
|
||||
2802
venv/lib/python3.6/site-packages/numpy/distutils/system_info.py
Normal file
2802
venv/lib/python3.6/site-packages/numpy/distutils/system_info.py
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,220 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import os
|
||||
import sys
|
||||
from tempfile import TemporaryFile
|
||||
|
||||
from numpy.distutils import exec_command
|
||||
from numpy.distutils.exec_command import get_pythonexe
|
||||
from numpy.testing import tempdir, assert_, assert_warns
|
||||
|
||||
# In python 3 stdout, stderr are text (unicode compliant) devices, so to
|
||||
# emulate them import StringIO from the io module.
|
||||
if sys.version_info[0] >= 3:
|
||||
from io import StringIO
|
||||
else:
|
||||
from StringIO import StringIO
|
||||
|
||||
class redirect_stdout(object):
|
||||
"""Context manager to redirect stdout for exec_command test."""
|
||||
def __init__(self, stdout=None):
|
||||
self._stdout = stdout or sys.stdout
|
||||
|
||||
def __enter__(self):
|
||||
self.old_stdout = sys.stdout
|
||||
sys.stdout = self._stdout
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
self._stdout.flush()
|
||||
sys.stdout = self.old_stdout
|
||||
# note: closing sys.stdout won't close it.
|
||||
self._stdout.close()
|
||||
|
||||
class redirect_stderr(object):
|
||||
"""Context manager to redirect stderr for exec_command test."""
|
||||
def __init__(self, stderr=None):
|
||||
self._stderr = stderr or sys.stderr
|
||||
|
||||
def __enter__(self):
|
||||
self.old_stderr = sys.stderr
|
||||
sys.stderr = self._stderr
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
self._stderr.flush()
|
||||
sys.stderr = self.old_stderr
|
||||
# note: closing sys.stderr won't close it.
|
||||
self._stderr.close()
|
||||
|
||||
class emulate_nonposix(object):
|
||||
"""Context manager to emulate os.name != 'posix' """
|
||||
def __init__(self, osname='non-posix'):
|
||||
self._new_name = osname
|
||||
|
||||
def __enter__(self):
|
||||
self._old_name = os.name
|
||||
os.name = self._new_name
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
os.name = self._old_name
|
||||
|
||||
|
||||
def test_exec_command_stdout():
|
||||
# Regression test for gh-2999 and gh-2915.
|
||||
# There are several packages (nose, scipy.weave.inline, Sage inline
|
||||
# Fortran) that replace stdout, in which case it doesn't have a fileno
|
||||
# method. This is tested here, with a do-nothing command that fails if the
|
||||
# presence of fileno() is assumed in exec_command.
|
||||
|
||||
# The code has a special case for posix systems, so if we are on posix test
|
||||
# both that the special case works and that the generic code works.
|
||||
|
||||
# Test posix version:
|
||||
with redirect_stdout(StringIO()):
|
||||
with redirect_stderr(TemporaryFile()):
|
||||
with assert_warns(DeprecationWarning):
|
||||
exec_command.exec_command("cd '.'")
|
||||
|
||||
if os.name == 'posix':
|
||||
# Test general (non-posix) version:
|
||||
with emulate_nonposix():
|
||||
with redirect_stdout(StringIO()):
|
||||
with redirect_stderr(TemporaryFile()):
|
||||
with assert_warns(DeprecationWarning):
|
||||
exec_command.exec_command("cd '.'")
|
||||
|
||||
def test_exec_command_stderr():
|
||||
# Test posix version:
|
||||
with redirect_stdout(TemporaryFile(mode='w+')):
|
||||
with redirect_stderr(StringIO()):
|
||||
with assert_warns(DeprecationWarning):
|
||||
exec_command.exec_command("cd '.'")
|
||||
|
||||
if os.name == 'posix':
|
||||
# Test general (non-posix) version:
|
||||
with emulate_nonposix():
|
||||
with redirect_stdout(TemporaryFile()):
|
||||
with redirect_stderr(StringIO()):
|
||||
with assert_warns(DeprecationWarning):
|
||||
exec_command.exec_command("cd '.'")
|
||||
|
||||
|
||||
class TestExecCommand(object):
|
||||
def setup(self):
|
||||
self.pyexe = get_pythonexe()
|
||||
|
||||
def check_nt(self, **kws):
|
||||
s, o = exec_command.exec_command('cmd /C echo path=%path%')
|
||||
assert_(s == 0)
|
||||
assert_(o != '')
|
||||
|
||||
s, o = exec_command.exec_command(
|
||||
'"%s" -c "import sys;sys.stderr.write(sys.platform)"' % self.pyexe)
|
||||
assert_(s == 0)
|
||||
assert_(o == 'win32')
|
||||
|
||||
def check_posix(self, **kws):
|
||||
s, o = exec_command.exec_command("echo Hello", **kws)
|
||||
assert_(s == 0)
|
||||
assert_(o == 'Hello')
|
||||
|
||||
s, o = exec_command.exec_command('echo $AAA', **kws)
|
||||
assert_(s == 0)
|
||||
assert_(o == '')
|
||||
|
||||
s, o = exec_command.exec_command('echo "$AAA"', AAA='Tere', **kws)
|
||||
assert_(s == 0)
|
||||
assert_(o == 'Tere')
|
||||
|
||||
s, o = exec_command.exec_command('echo "$AAA"', **kws)
|
||||
assert_(s == 0)
|
||||
assert_(o == '')
|
||||
|
||||
if 'BBB' not in os.environ:
|
||||
os.environ['BBB'] = 'Hi'
|
||||
s, o = exec_command.exec_command('echo "$BBB"', **kws)
|
||||
assert_(s == 0)
|
||||
assert_(o == 'Hi')
|
||||
|
||||
s, o = exec_command.exec_command('echo "$BBB"', BBB='Hey', **kws)
|
||||
assert_(s == 0)
|
||||
assert_(o == 'Hey')
|
||||
|
||||
s, o = exec_command.exec_command('echo "$BBB"', **kws)
|
||||
assert_(s == 0)
|
||||
assert_(o == 'Hi')
|
||||
|
||||
del os.environ['BBB']
|
||||
|
||||
s, o = exec_command.exec_command('echo "$BBB"', **kws)
|
||||
assert_(s == 0)
|
||||
assert_(o == '')
|
||||
|
||||
|
||||
s, o = exec_command.exec_command('this_is_not_a_command', **kws)
|
||||
assert_(s != 0)
|
||||
assert_(o != '')
|
||||
|
||||
s, o = exec_command.exec_command('echo path=$PATH', **kws)
|
||||
assert_(s == 0)
|
||||
assert_(o != '')
|
||||
|
||||
s, o = exec_command.exec_command(
|
||||
'"%s" -c "import sys,os;sys.stderr.write(os.name)"' %
|
||||
self.pyexe, **kws)
|
||||
assert_(s == 0)
|
||||
assert_(o == 'posix')
|
||||
|
||||
def check_basic(self, *kws):
|
||||
s, o = exec_command.exec_command(
|
||||
'"%s" -c "raise \'Ignore me.\'"' % self.pyexe, **kws)
|
||||
assert_(s != 0)
|
||||
assert_(o != '')
|
||||
|
||||
s, o = exec_command.exec_command(
|
||||
'"%s" -c "import sys;sys.stderr.write(\'0\');'
|
||||
'sys.stderr.write(\'1\');sys.stderr.write(\'2\')"' %
|
||||
self.pyexe, **kws)
|
||||
assert_(s == 0)
|
||||
assert_(o == '012')
|
||||
|
||||
s, o = exec_command.exec_command(
|
||||
'"%s" -c "import sys;sys.exit(15)"' % self.pyexe, **kws)
|
||||
assert_(s == 15)
|
||||
assert_(o == '')
|
||||
|
||||
s, o = exec_command.exec_command(
|
||||
'"%s" -c "print(\'Heipa\'")' % self.pyexe, **kws)
|
||||
assert_(s == 0)
|
||||
assert_(o == 'Heipa')
|
||||
|
||||
def check_execute_in(self, **kws):
|
||||
with tempdir() as tmpdir:
|
||||
fn = "file"
|
||||
tmpfile = os.path.join(tmpdir, fn)
|
||||
f = open(tmpfile, 'w')
|
||||
f.write('Hello')
|
||||
f.close()
|
||||
|
||||
s, o = exec_command.exec_command(
|
||||
'"%s" -c "f = open(\'%s\', \'r\'); f.close()"' %
|
||||
(self.pyexe, fn), **kws)
|
||||
assert_(s != 0)
|
||||
assert_(o != '')
|
||||
s, o = exec_command.exec_command(
|
||||
'"%s" -c "f = open(\'%s\', \'r\'); print(f.read()); '
|
||||
'f.close()"' % (self.pyexe, fn), execute_in=tmpdir, **kws)
|
||||
assert_(s == 0)
|
||||
assert_(o == 'Hello')
|
||||
|
||||
def test_basic(self):
|
||||
with redirect_stdout(StringIO()):
|
||||
with redirect_stderr(StringIO()):
|
||||
with assert_warns(DeprecationWarning):
|
||||
if os.name == "posix":
|
||||
self.check_posix(use_tee=0)
|
||||
self.check_posix(use_tee=1)
|
||||
elif os.name == "nt":
|
||||
self.check_nt(use_tee=0)
|
||||
self.check_nt(use_tee=1)
|
||||
self.check_execute_in(use_tee=0)
|
||||
self.check_execute_in(use_tee=1)
|
||||
@@ -0,0 +1,81 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import pytest
|
||||
|
||||
from numpy.testing import assert_, suppress_warnings
|
||||
import numpy.distutils.fcompiler
|
||||
|
||||
customizable_flags = [
|
||||
('f77', 'F77FLAGS'),
|
||||
('f90', 'F90FLAGS'),
|
||||
('free', 'FREEFLAGS'),
|
||||
('arch', 'FARCH'),
|
||||
('debug', 'FDEBUG'),
|
||||
('flags', 'FFLAGS'),
|
||||
('linker_so', 'LDFLAGS'),
|
||||
]
|
||||
|
||||
|
||||
def test_fcompiler_flags(monkeypatch):
|
||||
monkeypatch.setenv('NPY_DISTUTILS_APPEND_FLAGS', '0')
|
||||
fc = numpy.distutils.fcompiler.new_fcompiler(compiler='none')
|
||||
flag_vars = fc.flag_vars.clone(lambda *args, **kwargs: None)
|
||||
|
||||
for opt, envvar in customizable_flags:
|
||||
new_flag = '-dummy-{}-flag'.format(opt)
|
||||
prev_flags = getattr(flag_vars, opt)
|
||||
|
||||
monkeypatch.setenv(envvar, new_flag)
|
||||
new_flags = getattr(flag_vars, opt)
|
||||
|
||||
monkeypatch.delenv(envvar)
|
||||
assert_(new_flags == [new_flag])
|
||||
|
||||
monkeypatch.setenv('NPY_DISTUTILS_APPEND_FLAGS', '1')
|
||||
|
||||
for opt, envvar in customizable_flags:
|
||||
new_flag = '-dummy-{}-flag'.format(opt)
|
||||
prev_flags = getattr(flag_vars, opt)
|
||||
monkeypatch.setenv(envvar, new_flag)
|
||||
new_flags = getattr(flag_vars, opt)
|
||||
|
||||
monkeypatch.delenv(envvar)
|
||||
if prev_flags is None:
|
||||
assert_(new_flags == [new_flag])
|
||||
else:
|
||||
assert_(new_flags == prev_flags + [new_flag])
|
||||
|
||||
|
||||
def test_fcompiler_flags_append_warning(monkeypatch):
|
||||
# Test to check that the warning for append behavior changing in future
|
||||
# is triggered. Need to use a real compiler instance so that we have
|
||||
# non-empty flags to start with (otherwise the "if var and append" check
|
||||
# will always be false).
|
||||
try:
|
||||
with suppress_warnings() as sup:
|
||||
sup.record()
|
||||
fc = numpy.distutils.fcompiler.new_fcompiler(compiler='gnu95')
|
||||
fc.customize()
|
||||
except numpy.distutils.fcompiler.CompilerNotFound:
|
||||
pytest.skip("gfortran not found, so can't execute this test")
|
||||
|
||||
# Ensure NPY_DISTUTILS_APPEND_FLAGS not defined
|
||||
monkeypatch.delenv('NPY_DISTUTILS_APPEND_FLAGS', raising=False)
|
||||
|
||||
for opt, envvar in customizable_flags:
|
||||
new_flag = '-dummy-{}-flag'.format(opt)
|
||||
with suppress_warnings() as sup:
|
||||
sup.record()
|
||||
prev_flags = getattr(fc.flag_vars, opt)
|
||||
|
||||
monkeypatch.setenv(envvar, new_flag)
|
||||
with suppress_warnings() as sup:
|
||||
sup.record()
|
||||
new_flags = getattr(fc.flag_vars, opt)
|
||||
if prev_flags:
|
||||
# Check that warning was issued
|
||||
assert len(sup.log) == 1
|
||||
|
||||
monkeypatch.delenv(envvar)
|
||||
assert_(new_flags == [new_flag])
|
||||
|
||||
@@ -0,0 +1,57 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
from numpy.testing import assert_
|
||||
|
||||
import numpy.distutils.fcompiler
|
||||
|
||||
g77_version_strings = [
|
||||
('GNU Fortran 0.5.25 20010319 (prerelease)', '0.5.25'),
|
||||
('GNU Fortran (GCC 3.2) 3.2 20020814 (release)', '3.2'),
|
||||
('GNU Fortran (GCC) 3.3.3 20040110 (prerelease) (Debian)', '3.3.3'),
|
||||
('GNU Fortran (GCC) 3.3.3 (Debian 20040401)', '3.3.3'),
|
||||
('GNU Fortran (GCC 3.2.2 20030222 (Red Hat Linux 3.2.2-5)) 3.2.2'
|
||||
' 20030222 (Red Hat Linux 3.2.2-5)', '3.2.2'),
|
||||
]
|
||||
|
||||
gfortran_version_strings = [
|
||||
('GNU Fortran 95 (GCC 4.0.3 20051023 (prerelease) (Debian 4.0.2-3))',
|
||||
'4.0.3'),
|
||||
('GNU Fortran 95 (GCC) 4.1.0', '4.1.0'),
|
||||
('GNU Fortran 95 (GCC) 4.2.0 20060218 (experimental)', '4.2.0'),
|
||||
('GNU Fortran (GCC) 4.3.0 20070316 (experimental)', '4.3.0'),
|
||||
('GNU Fortran (rubenvb-4.8.0) 4.8.0', '4.8.0'),
|
||||
('4.8.0', '4.8.0'),
|
||||
('4.0.3-7', '4.0.3'),
|
||||
("gfortran: warning: couldn't understand kern.osversion '14.1.0\n4.9.1",
|
||||
'4.9.1'),
|
||||
("gfortran: warning: couldn't understand kern.osversion '14.1.0\n"
|
||||
"gfortran: warning: yet another warning\n4.9.1",
|
||||
'4.9.1'),
|
||||
('GNU Fortran (crosstool-NG 8a21ab48) 7.2.0', '7.2.0')
|
||||
]
|
||||
|
||||
class TestG77Versions(object):
|
||||
def test_g77_version(self):
|
||||
fc = numpy.distutils.fcompiler.new_fcompiler(compiler='gnu')
|
||||
for vs, version in g77_version_strings:
|
||||
v = fc.version_match(vs)
|
||||
assert_(v == version, (vs, v))
|
||||
|
||||
def test_not_g77(self):
|
||||
fc = numpy.distutils.fcompiler.new_fcompiler(compiler='gnu')
|
||||
for vs, _ in gfortran_version_strings:
|
||||
v = fc.version_match(vs)
|
||||
assert_(v is None, (vs, v))
|
||||
|
||||
class TestGFortranVersions(object):
|
||||
def test_gfortran_version(self):
|
||||
fc = numpy.distutils.fcompiler.new_fcompiler(compiler='gnu95')
|
||||
for vs, version in gfortran_version_strings:
|
||||
v = fc.version_match(vs)
|
||||
assert_(v == version, (vs, v))
|
||||
|
||||
def test_not_gfortran(self):
|
||||
fc = numpy.distutils.fcompiler.new_fcompiler(compiler='gnu95')
|
||||
for vs, _ in g77_version_strings:
|
||||
v = fc.version_match(vs)
|
||||
assert_(v is None, (vs, v))
|
||||
@@ -0,0 +1,32 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import numpy.distutils.fcompiler
|
||||
from numpy.testing import assert_
|
||||
|
||||
|
||||
intel_32bit_version_strings = [
|
||||
("Intel(R) Fortran Intel(R) 32-bit Compiler Professional for applications"
|
||||
"running on Intel(R) 32, Version 11.1", '11.1'),
|
||||
]
|
||||
|
||||
intel_64bit_version_strings = [
|
||||
("Intel(R) Fortran IA-64 Compiler Professional for applications"
|
||||
"running on IA-64, Version 11.0", '11.0'),
|
||||
("Intel(R) Fortran Intel(R) 64 Compiler Professional for applications"
|
||||
"running on Intel(R) 64, Version 11.1", '11.1')
|
||||
]
|
||||
|
||||
class TestIntelFCompilerVersions(object):
|
||||
def test_32bit_version(self):
|
||||
fc = numpy.distutils.fcompiler.new_fcompiler(compiler='intel')
|
||||
for vs, version in intel_32bit_version_strings:
|
||||
v = fc.version_match(vs)
|
||||
assert_(v == version)
|
||||
|
||||
|
||||
class TestIntelEM64TFCompilerVersions(object):
|
||||
def test_64bit_version(self):
|
||||
fc = numpy.distutils.fcompiler.new_fcompiler(compiler='intelem')
|
||||
for vs, version in intel_64bit_version_strings:
|
||||
v = fc.version_match(vs)
|
||||
assert_(v == version)
|
||||
@@ -0,0 +1,24 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
from numpy.testing import assert_
|
||||
import numpy.distutils.fcompiler
|
||||
|
||||
nag_version_strings = [('nagfor', 'NAG Fortran Compiler Release '
|
||||
'6.2(Chiyoda) Build 6200', '6.2'),
|
||||
('nagfor', 'NAG Fortran Compiler Release '
|
||||
'6.1(Tozai) Build 6136', '6.1'),
|
||||
('nagfor', 'NAG Fortran Compiler Release '
|
||||
'6.0(Hibiya) Build 1021', '6.0'),
|
||||
('nagfor', 'NAG Fortran Compiler Release '
|
||||
'5.3.2(971)', '5.3.2'),
|
||||
('nag', 'NAGWare Fortran 95 compiler Release 5.1'
|
||||
'(347,355-367,375,380-383,389,394,399,401-402,407,'
|
||||
'431,435,437,446,459-460,463,472,494,496,503,508,'
|
||||
'511,517,529,555,557,565)', '5.1')]
|
||||
|
||||
class TestNagFCompilerVersions(object):
|
||||
def test_version_match(self):
|
||||
for comp, vs, version in nag_version_strings:
|
||||
fc = numpy.distutils.fcompiler.new_fcompiler(compiler=comp)
|
||||
v = fc.version_match(vs)
|
||||
assert_(v == version)
|
||||
@@ -0,0 +1,44 @@
|
||||
|
||||
from numpy.distutils.from_template import process_str
|
||||
from numpy.testing import assert_equal
|
||||
|
||||
|
||||
pyf_src = """
|
||||
python module foo
|
||||
<_rd=real,double precision>
|
||||
interface
|
||||
subroutine <s,d>foosub(tol)
|
||||
<_rd>, intent(in,out) :: tol
|
||||
end subroutine <s,d>foosub
|
||||
end interface
|
||||
end python module foo
|
||||
"""
|
||||
|
||||
expected_pyf = """
|
||||
python module foo
|
||||
interface
|
||||
subroutine sfoosub(tol)
|
||||
real, intent(in,out) :: tol
|
||||
end subroutine sfoosub
|
||||
subroutine dfoosub(tol)
|
||||
double precision, intent(in,out) :: tol
|
||||
end subroutine dfoosub
|
||||
end interface
|
||||
end python module foo
|
||||
"""
|
||||
|
||||
|
||||
def normalize_whitespace(s):
|
||||
"""
|
||||
Remove leading and trailing whitespace, and convert internal
|
||||
stretches of whitespace to a single space.
|
||||
"""
|
||||
return ' '.join(s.split())
|
||||
|
||||
|
||||
def test_from_template():
|
||||
"""Regression test for gh-10712."""
|
||||
pyf = process_str(pyf_src)
|
||||
normalized_pyf = normalize_whitespace(pyf)
|
||||
normalized_expected_pyf = normalize_whitespace(expected_pyf)
|
||||
assert_equal(normalized_pyf, normalized_expected_pyf)
|
||||
@@ -0,0 +1,84 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
from os.path import join, sep, dirname
|
||||
|
||||
from numpy.distutils.misc_util import (
|
||||
appendpath, minrelpath, gpaths, get_shared_lib_extension, get_info
|
||||
)
|
||||
from numpy.testing import (
|
||||
assert_, assert_equal
|
||||
)
|
||||
|
||||
ajoin = lambda *paths: join(*((sep,)+paths))
|
||||
|
||||
class TestAppendpath(object):
|
||||
|
||||
def test_1(self):
|
||||
assert_equal(appendpath('prefix', 'name'), join('prefix', 'name'))
|
||||
assert_equal(appendpath('/prefix', 'name'), ajoin('prefix', 'name'))
|
||||
assert_equal(appendpath('/prefix', '/name'), ajoin('prefix', 'name'))
|
||||
assert_equal(appendpath('prefix', '/name'), join('prefix', 'name'))
|
||||
|
||||
def test_2(self):
|
||||
assert_equal(appendpath('prefix/sub', 'name'),
|
||||
join('prefix', 'sub', 'name'))
|
||||
assert_equal(appendpath('prefix/sub', 'sup/name'),
|
||||
join('prefix', 'sub', 'sup', 'name'))
|
||||
assert_equal(appendpath('/prefix/sub', '/prefix/name'),
|
||||
ajoin('prefix', 'sub', 'name'))
|
||||
|
||||
def test_3(self):
|
||||
assert_equal(appendpath('/prefix/sub', '/prefix/sup/name'),
|
||||
ajoin('prefix', 'sub', 'sup', 'name'))
|
||||
assert_equal(appendpath('/prefix/sub/sub2', '/prefix/sup/sup2/name'),
|
||||
ajoin('prefix', 'sub', 'sub2', 'sup', 'sup2', 'name'))
|
||||
assert_equal(appendpath('/prefix/sub/sub2', '/prefix/sub/sup/name'),
|
||||
ajoin('prefix', 'sub', 'sub2', 'sup', 'name'))
|
||||
|
||||
class TestMinrelpath(object):
|
||||
|
||||
def test_1(self):
|
||||
n = lambda path: path.replace('/', sep)
|
||||
assert_equal(minrelpath(n('aa/bb')), n('aa/bb'))
|
||||
assert_equal(minrelpath('..'), '..')
|
||||
assert_equal(minrelpath(n('aa/..')), '')
|
||||
assert_equal(minrelpath(n('aa/../bb')), 'bb')
|
||||
assert_equal(minrelpath(n('aa/bb/..')), 'aa')
|
||||
assert_equal(minrelpath(n('aa/bb/../..')), '')
|
||||
assert_equal(minrelpath(n('aa/bb/../cc/../dd')), n('aa/dd'))
|
||||
assert_equal(minrelpath(n('.././..')), n('../..'))
|
||||
assert_equal(minrelpath(n('aa/bb/.././../dd')), n('dd'))
|
||||
|
||||
class TestGpaths(object):
|
||||
|
||||
def test_gpaths(self):
|
||||
local_path = minrelpath(join(dirname(__file__), '..'))
|
||||
ls = gpaths('command/*.py', local_path)
|
||||
assert_(join(local_path, 'command', 'build_src.py') in ls, repr(ls))
|
||||
f = gpaths('system_info.py', local_path)
|
||||
assert_(join(local_path, 'system_info.py') == f[0], repr(f))
|
||||
|
||||
class TestSharedExtension(object):
|
||||
|
||||
def test_get_shared_lib_extension(self):
|
||||
import sys
|
||||
ext = get_shared_lib_extension(is_python_ext=False)
|
||||
if sys.platform.startswith('linux'):
|
||||
assert_equal(ext, '.so')
|
||||
elif sys.platform.startswith('gnukfreebsd'):
|
||||
assert_equal(ext, '.so')
|
||||
elif sys.platform.startswith('darwin'):
|
||||
assert_equal(ext, '.dylib')
|
||||
elif sys.platform.startswith('win'):
|
||||
assert_equal(ext, '.dll')
|
||||
# just check for no crash
|
||||
assert_(get_shared_lib_extension(is_python_ext=True))
|
||||
|
||||
|
||||
def test_installed_npymath_ini():
|
||||
# Regression test for gh-7707. If npymath.ini wasn't installed, then this
|
||||
# will give an error.
|
||||
info = get_info('npymath')
|
||||
|
||||
assert isinstance(info, dict)
|
||||
assert "define_macros" in info
|
||||
@@ -0,0 +1,86 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import os
|
||||
|
||||
from numpy.distutils.npy_pkg_config import read_config, parse_flags
|
||||
from numpy.testing import temppath, assert_
|
||||
|
||||
simple = """\
|
||||
[meta]
|
||||
Name = foo
|
||||
Description = foo lib
|
||||
Version = 0.1
|
||||
|
||||
[default]
|
||||
cflags = -I/usr/include
|
||||
libs = -L/usr/lib
|
||||
"""
|
||||
simple_d = {'cflags': '-I/usr/include', 'libflags': '-L/usr/lib',
|
||||
'version': '0.1', 'name': 'foo'}
|
||||
|
||||
simple_variable = """\
|
||||
[meta]
|
||||
Name = foo
|
||||
Description = foo lib
|
||||
Version = 0.1
|
||||
|
||||
[variables]
|
||||
prefix = /foo/bar
|
||||
libdir = ${prefix}/lib
|
||||
includedir = ${prefix}/include
|
||||
|
||||
[default]
|
||||
cflags = -I${includedir}
|
||||
libs = -L${libdir}
|
||||
"""
|
||||
simple_variable_d = {'cflags': '-I/foo/bar/include', 'libflags': '-L/foo/bar/lib',
|
||||
'version': '0.1', 'name': 'foo'}
|
||||
|
||||
class TestLibraryInfo(object):
|
||||
def test_simple(self):
|
||||
with temppath('foo.ini') as path:
|
||||
with open(path, 'w') as f:
|
||||
f.write(simple)
|
||||
pkg = os.path.splitext(path)[0]
|
||||
out = read_config(pkg)
|
||||
|
||||
assert_(out.cflags() == simple_d['cflags'])
|
||||
assert_(out.libs() == simple_d['libflags'])
|
||||
assert_(out.name == simple_d['name'])
|
||||
assert_(out.version == simple_d['version'])
|
||||
|
||||
def test_simple_variable(self):
|
||||
with temppath('foo.ini') as path:
|
||||
with open(path, 'w') as f:
|
||||
f.write(simple_variable)
|
||||
pkg = os.path.splitext(path)[0]
|
||||
out = read_config(pkg)
|
||||
|
||||
assert_(out.cflags() == simple_variable_d['cflags'])
|
||||
assert_(out.libs() == simple_variable_d['libflags'])
|
||||
assert_(out.name == simple_variable_d['name'])
|
||||
assert_(out.version == simple_variable_d['version'])
|
||||
out.vars['prefix'] = '/Users/david'
|
||||
assert_(out.cflags() == '-I/Users/david/include')
|
||||
|
||||
class TestParseFlags(object):
|
||||
def test_simple_cflags(self):
|
||||
d = parse_flags("-I/usr/include")
|
||||
assert_(d['include_dirs'] == ['/usr/include'])
|
||||
|
||||
d = parse_flags("-I/usr/include -DFOO")
|
||||
assert_(d['include_dirs'] == ['/usr/include'])
|
||||
assert_(d['macros'] == ['FOO'])
|
||||
|
||||
d = parse_flags("-I /usr/include -DFOO")
|
||||
assert_(d['include_dirs'] == ['/usr/include'])
|
||||
assert_(d['macros'] == ['FOO'])
|
||||
|
||||
def test_simple_lflags(self):
|
||||
d = parse_flags("-L/usr/lib -lfoo -L/usr/lib -lbar")
|
||||
assert_(d['library_dirs'] == ['/usr/lib', '/usr/lib'])
|
||||
assert_(d['libraries'] == ['foo', 'bar'])
|
||||
|
||||
d = parse_flags("-L /usr/lib -lfoo -L/usr/lib -lbar")
|
||||
assert_(d['library_dirs'] == ['/usr/lib', '/usr/lib'])
|
||||
assert_(d['libraries'] == ['foo', 'bar'])
|
||||
@@ -0,0 +1,79 @@
|
||||
from __future__ import division, absolute_import, print_function
|
||||
|
||||
import pytest
|
||||
import subprocess
|
||||
import os
|
||||
import json
|
||||
import sys
|
||||
|
||||
from numpy.distutils import _shell_utils
|
||||
|
||||
argv_cases = [
|
||||
[r'exe'],
|
||||
[r'path/exe'],
|
||||
[r'path\exe'],
|
||||
[r'\\server\path\exe'],
|
||||
[r'path to/exe'],
|
||||
[r'path to\exe'],
|
||||
|
||||
[r'exe', '--flag'],
|
||||
[r'path/exe', '--flag'],
|
||||
[r'path\exe', '--flag'],
|
||||
[r'path to/exe', '--flag'],
|
||||
[r'path to\exe', '--flag'],
|
||||
|
||||
# flags containing literal quotes in their name
|
||||
[r'path to/exe', '--flag-"quoted"'],
|
||||
[r'path to\exe', '--flag-"quoted"'],
|
||||
[r'path to/exe', '"--flag-quoted"'],
|
||||
[r'path to\exe', '"--flag-quoted"'],
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(params=[
|
||||
_shell_utils.WindowsParser,
|
||||
_shell_utils.PosixParser
|
||||
])
|
||||
def Parser(request):
|
||||
return request.param
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def runner(Parser):
|
||||
if Parser != _shell_utils.NativeParser:
|
||||
pytest.skip('Unable to run with non-native parser')
|
||||
|
||||
if Parser == _shell_utils.WindowsParser:
|
||||
return lambda cmd: subprocess.check_output(cmd)
|
||||
elif Parser == _shell_utils.PosixParser:
|
||||
# posix has no non-shell string parsing
|
||||
return lambda cmd: subprocess.check_output(cmd, shell=True)
|
||||
else:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
@pytest.mark.parametrize('argv', argv_cases)
|
||||
def test_join_matches_subprocess(Parser, runner, argv):
|
||||
"""
|
||||
Test that join produces strings understood by subprocess
|
||||
"""
|
||||
# invoke python to return its arguments as json
|
||||
cmd = [
|
||||
sys.executable, '-c',
|
||||
'import json, sys; print(json.dumps(sys.argv[1:]))'
|
||||
]
|
||||
joined = Parser.join(cmd + argv)
|
||||
json_out = runner(joined).decode()
|
||||
assert json.loads(json_out) == argv
|
||||
|
||||
|
||||
@pytest.mark.parametrize('argv', argv_cases)
|
||||
def test_roundtrip(Parser, argv):
|
||||
"""
|
||||
Test that split is the inverse operation of join
|
||||
"""
|
||||
try:
|
||||
joined = Parser.join(argv)
|
||||
assert argv == Parser.split(joined)
|
||||
except NotImplementedError:
|
||||
pytest.skip("Not implemented")
|
||||
@@ -0,0 +1,257 @@
|
||||
from __future__ import division, print_function
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import pytest
|
||||
from tempfile import mkstemp, mkdtemp
|
||||
from subprocess import Popen, PIPE
|
||||
from distutils.errors import DistutilsError
|
||||
|
||||
from numpy.testing import assert_, assert_equal, assert_raises
|
||||
from numpy.distutils import ccompiler, customized_ccompiler
|
||||
from numpy.distutils.system_info import system_info, ConfigParser
|
||||
from numpy.distutils.system_info import AliasedOptionError
|
||||
from numpy.distutils.system_info import default_lib_dirs, default_include_dirs
|
||||
from numpy.distutils import _shell_utils
|
||||
|
||||
|
||||
def get_class(name, notfound_action=1):
|
||||
"""
|
||||
notfound_action:
|
||||
0 - do nothing
|
||||
1 - display warning message
|
||||
2 - raise error
|
||||
"""
|
||||
cl = {'temp1': Temp1Info,
|
||||
'temp2': Temp2Info,
|
||||
'duplicate_options': DuplicateOptionInfo,
|
||||
}.get(name.lower(), _system_info)
|
||||
return cl()
|
||||
|
||||
simple_site = """
|
||||
[ALL]
|
||||
library_dirs = {dir1:s}{pathsep:s}{dir2:s}
|
||||
libraries = {lib1:s},{lib2:s}
|
||||
extra_compile_args = -I/fake/directory -I"/path with/spaces" -Os
|
||||
runtime_library_dirs = {dir1:s}
|
||||
|
||||
[temp1]
|
||||
library_dirs = {dir1:s}
|
||||
libraries = {lib1:s}
|
||||
runtime_library_dirs = {dir1:s}
|
||||
|
||||
[temp2]
|
||||
library_dirs = {dir2:s}
|
||||
libraries = {lib2:s}
|
||||
extra_link_args = -Wl,-rpath={lib2_escaped:s}
|
||||
rpath = {dir2:s}
|
||||
|
||||
[duplicate_options]
|
||||
mylib_libs = {lib1:s}
|
||||
libraries = {lib2:s}
|
||||
"""
|
||||
site_cfg = simple_site
|
||||
|
||||
fakelib_c_text = """
|
||||
/* This file is generated from numpy/distutils/testing/test_system_info.py */
|
||||
#include<stdio.h>
|
||||
void foo(void) {
|
||||
printf("Hello foo");
|
||||
}
|
||||
void bar(void) {
|
||||
printf("Hello bar");
|
||||
}
|
||||
"""
|
||||
|
||||
def have_compiler():
|
||||
""" Return True if there appears to be an executable compiler
|
||||
"""
|
||||
compiler = customized_ccompiler()
|
||||
try:
|
||||
cmd = compiler.compiler # Unix compilers
|
||||
except AttributeError:
|
||||
try:
|
||||
if not compiler.initialized:
|
||||
compiler.initialize() # MSVC is different
|
||||
except (DistutilsError, ValueError):
|
||||
return False
|
||||
cmd = [compiler.cc]
|
||||
try:
|
||||
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
|
||||
p.stdout.close()
|
||||
p.stderr.close()
|
||||
p.wait()
|
||||
except OSError:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
HAVE_COMPILER = have_compiler()
|
||||
|
||||
|
||||
class _system_info(system_info):
|
||||
|
||||
def __init__(self,
|
||||
default_lib_dirs=default_lib_dirs,
|
||||
default_include_dirs=default_include_dirs,
|
||||
verbosity=1,
|
||||
):
|
||||
self.__class__.info = {}
|
||||
self.local_prefixes = []
|
||||
defaults = {'library_dirs': '',
|
||||
'include_dirs': '',
|
||||
'runtime_library_dirs': '',
|
||||
'rpath': '',
|
||||
'src_dirs': '',
|
||||
'search_static_first': "0",
|
||||
'extra_compile_args': '',
|
||||
'extra_link_args': ''}
|
||||
self.cp = ConfigParser(defaults)
|
||||
# We have to parse the config files afterwards
|
||||
# to have a consistent temporary filepath
|
||||
|
||||
def _check_libs(self, lib_dirs, libs, opt_libs, exts):
|
||||
"""Override _check_libs to return with all dirs """
|
||||
info = {'libraries': libs, 'library_dirs': lib_dirs}
|
||||
return info
|
||||
|
||||
|
||||
class Temp1Info(_system_info):
|
||||
"""For testing purposes"""
|
||||
section = 'temp1'
|
||||
|
||||
|
||||
class Temp2Info(_system_info):
|
||||
"""For testing purposes"""
|
||||
section = 'temp2'
|
||||
|
||||
class DuplicateOptionInfo(_system_info):
|
||||
"""For testing purposes"""
|
||||
section = 'duplicate_options'
|
||||
|
||||
|
||||
class TestSystemInfoReading(object):
    # End-to-end tests for system_info configuration reading: each test
    # writes a temporary site.cfg plus two fake C sources/libraries, then
    # checks that the per-section options are parsed as expected.

    def setup(self):
        """ Create the libraries """
        # Create 2 sources and 2 libraries
        self._dir1 = mkdtemp()
        self._src1 = os.path.join(self._dir1, 'foo.c')
        self._lib1 = os.path.join(self._dir1, 'libfoo.so')
        self._dir2 = mkdtemp()
        self._src2 = os.path.join(self._dir2, 'bar.c')
        self._lib2 = os.path.join(self._dir2, 'libbar.so')
        # Update local site.cfg
        global simple_site, site_cfg
        site_cfg = simple_site.format(**{
            'dir1': self._dir1,
            'lib1': self._lib1,
            'dir2': self._dir2,
            'lib2': self._lib2,
            'pathsep': os.pathsep,
            'lib2_escaped': _shell_utils.NativeParser.join([self._lib2])
        })
        # Write site.cfg
        fd, self._sitecfg = mkstemp()
        os.close(fd)
        with open(self._sitecfg, 'w') as fd:
            fd.write(site_cfg)
        # Write the sources
        with open(self._src1, 'w') as fd:
            fd.write(fakelib_c_text)
        with open(self._src2, 'w') as fd:
            fd.write(fakelib_c_text)
        # We create all class-instances

        def site_and_parse(c, site_cfg):
            # Point the instance at the temporary site.cfg and parse it.
            c.files = [site_cfg]
            c.parse_config_files()
            return c
        self.c_default = site_and_parse(get_class('default'), self._sitecfg)
        self.c_temp1 = site_and_parse(get_class('temp1'), self._sitecfg)
        self.c_temp2 = site_and_parse(get_class('temp2'), self._sitecfg)
        self.c_dup_options = site_and_parse(get_class('duplicate_options'),
                                            self._sitecfg)

    def teardown(self):
        # Do each removal separately; a failure in one must not prevent
        # the cleanup of the others.
        try:
            shutil.rmtree(self._dir1)
        except Exception:
            pass
        try:
            shutil.rmtree(self._dir2)
        except Exception:
            pass
        try:
            os.remove(self._sitecfg)
        except Exception:
            pass

    def test_all(self):
        # Read in all information in the ALL block
        tsi = self.c_default
        assert_equal(tsi.get_lib_dirs(), [self._dir1, self._dir2])
        assert_equal(tsi.get_libraries(), [self._lib1, self._lib2])
        assert_equal(tsi.get_runtime_lib_dirs(), [self._dir1])
        extra = tsi.calc_extra_info()
        assert_equal(extra['extra_compile_args'], ['-I/fake/directory', '-I/path with/spaces', '-Os'])

    def test_temp1(self):
        # Read in all information in the temp1 block
        tsi = self.c_temp1
        assert_equal(tsi.get_lib_dirs(), [self._dir1])
        assert_equal(tsi.get_libraries(), [self._lib1])
        assert_equal(tsi.get_runtime_lib_dirs(), [self._dir1])

    def test_temp2(self):
        # Read in all information in the temp2 block
        tsi = self.c_temp2
        assert_equal(tsi.get_lib_dirs(), [self._dir2])
        assert_equal(tsi.get_libraries(), [self._lib2])
        # Now from rpath and not runtime_library_dirs
        assert_equal(tsi.get_runtime_lib_dirs(key='rpath'), [self._dir2])
        extra = tsi.calc_extra_info()
        assert_equal(extra['extra_link_args'], ['-Wl,-rpath=' + self._lib2])

    def test_duplicate_options(self):
        # Ensure that duplicates are raising an AliasedOptionError
        tsi = self.c_dup_options
        assert_raises(AliasedOptionError, tsi.get_option_single, "mylib_libs", "libraries")
        assert_equal(tsi.get_libs("mylib_libs", [self._lib1]), [self._lib1])
        assert_equal(tsi.get_libs("libraries", [self._lib2]), [self._lib2])

    @pytest.mark.skipif(not HAVE_COMPILER, reason="Missing compiler")
    def test_compile1(self):
        # Compile source and link the first source
        c = customized_ccompiler()
        previousDir = os.getcwd()
        try:
            # Change directory to not screw up directories
            os.chdir(self._dir1)
            c.compile([os.path.basename(self._src1)], output_dir=self._dir1)
            # Ensure that the object exists
            assert_(os.path.isfile(self._src1.replace('.c', '.o')) or
                    os.path.isfile(self._src1.replace('.c', '.obj')))
        finally:
            os.chdir(previousDir)

    @pytest.mark.skipif(not HAVE_COMPILER, reason="Missing compiler")
    @pytest.mark.skipif('msvc' in repr(ccompiler.new_compiler()),
                         reason="Fails with MSVC compiler ")
    def test_compile2(self):
        # Compile source and link the second source
        tsi = self.c_temp2
        c = customized_ccompiler()
        extra_link_args = tsi.calc_extra_info()['extra_link_args']
        previousDir = os.getcwd()
        try:
            # Change directory to not screw up directories
            os.chdir(self._dir2)
            c.compile([os.path.basename(self._src2)], output_dir=self._dir2,
                      extra_postargs=extra_link_args)
            # Ensure that the object exists
            assert_(os.path.isfile(self._src2.replace('.c', '.o')))
        finally:
            os.chdir(previousDir)
|
||||
@@ -0,0 +1,139 @@
|
||||
"""
unixccompiler - can handle very long argument lists for ar.

"""
from __future__ import division, absolute_import, print_function

import os
# Fix: ``sys`` is used below but was previously only available because it
# leaked in via ``from distutils.unixccompiler import *``; import it
# explicitly so the module does not depend on that side effect.
import sys

from distutils.errors import DistutilsExecError, CompileError
from distutils.unixccompiler import *
from numpy.distutils.ccompiler import replace_method
from numpy.distutils.compat import get_exception
from numpy.distutils.misc_util import _commandline_dep_string

# Python 2 uses a relative import of the log module; Python 3 an absolute one.
if sys.version_info[0] < 3:
    from . import log
else:
    from numpy.distutils import log
|
||||
|
||||
# Note that UnixCCompiler._compile appeared in Python 2.3
def UnixCCompiler__compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
    """Compile a single source files with a Unix-style compiler.

    Replacement for ``UnixCCompiler._compile``: same contract as the
    distutils original, plus the aCC/OPT workarounds and optional gcc-style
    dependency-file generation below.  Raises CompileError on failure.
    """
    # HP ad-hoc fix, see ticket 1383
    ccomp = self.compiler_so
    if ccomp[0] == 'aCC':
        # remove flags that will trigger ANSI-C mode for aCC
        if '-Ae' in ccomp:
            ccomp.remove('-Ae')
        if '-Aa' in ccomp:
            ccomp.remove('-Aa')
        # add flags for (almost) sane C++ handling
        ccomp += ['-AA']
        self.compiler_so = ccomp
    # ensure OPT environment variable is read
    if 'OPT' in os.environ:
        # Substitute the configure-time OPT flags baked into compiler_so /
        # linker_so with the user's $OPT value, guarding against inserting
        # them twice.  Done via joined-string replace because the baked-in
        # flags may span several list elements.
        from distutils.sysconfig import get_config_vars
        opt = " ".join(os.environ['OPT'].split())
        gcv_opt = " ".join(get_config_vars('OPT')[0].split())
        ccomp_s = " ".join(self.compiler_so)
        if opt not in ccomp_s:
            ccomp_s = ccomp_s.replace(gcv_opt, opt)
            self.compiler_so = ccomp_s.split()
        llink_s = " ".join(self.linker_so)
        if opt not in llink_s:
            self.linker_so = llink_s.split() + opt.split()

    display = '%s: %s' % (os.path.basename(self.compiler_so[0]), src)

    # gcc style automatic dependencies, outputs a makefile (-MF) that lists
    # all headers needed by a c file as a side effect of compilation (-MMD)
    if getattr(self, '_auto_depends', False):
        deps = ['-MMD', '-MF', obj + '.d']
    else:
        deps = []

    try:
        self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + deps +
                   extra_postargs, display = display)
    except DistutilsExecError:
        # Re-raise as the exception type distutils callers expect.
        msg = str(get_exception())
        raise CompileError(msg)

    # add commandline flags to dependency file
    if deps:
        # Appended so the make-style rules emitted by -MMD are preserved.
        with open(obj + '.d', 'a') as f:
            f.write(_commandline_dep_string(cc_args, extra_postargs, pp_opts))
|
||||
|
||||
# Monkey-patch distutils' UnixCCompiler so the _compile defined above is used.
replace_method(UnixCCompiler, '_compile', UnixCCompiler__compile)
|
||||
|
||||
|
||||
def UnixCCompiler_create_static_lib(self, objects, output_libname,
                                    output_dir=None, debug=0, target_lang=None):
    """
    Build a static library in a separate sub-process.

    Parameters
    ----------
    objects : list or tuple of str
        List of paths to object files used to build the static library.
    output_libname : str
        The library name as an absolute or relative (if `output_dir` is used)
        path.
    output_dir : str, optional
        The path to the output directory. Default is None, in which case
        the ``output_dir`` attribute of the UnixCCompiler instance.
    debug : bool, optional
        This parameter is not used.
    target_lang : str, optional
        This parameter is not used.

    Returns
    -------
    None

    """
    objects, output_dir = self._fix_object_args(objects, output_dir)

    output_filename = \
        self.library_filename(output_libname, output_dir=output_dir)

    # Guard clause: nothing to do when the archive is already current.
    if not self._need_link(objects, output_filename):
        log.debug("skipping %s (up-to-date)", output_filename)
        return

    try:
        # previous .a may be screwed up; best to remove it first
        # and recreate.
        # Also, ar on OS X doesn't handle updating universal archives
        os.unlink(output_filename)
    except (IOError, OSError):
        pass
    self.mkpath(os.path.dirname(output_filename))

    # Feed the archiver at most 50 objects per invocation so very long
    # object lists never exceed platform command-line length limits.
    all_objects = objects + self.objects
    for start in range(0, len(all_objects), 50):
        batch = all_objects[start:start + 50]
        display = '%s: adding %d object files to %s' % (
            os.path.basename(self.archiver[0]),
            len(batch), output_filename)
        self.spawn(self.archiver + [output_filename] + batch,
                   display=display)

    # Not many Unices required ranlib anymore -- SunOS 4.x is, I
    # think the only major Unix that does. Maybe we need some
    # platform intelligence here to skip ranlib if it's not
    # needed -- or maybe Python's configure script took care of
    # it for us, hence the check for leading colon.
    if self.ranlib:
        display = '%s:@ %s' % (os.path.basename(self.ranlib[0]),
                               output_filename)
        try:
            self.spawn(self.ranlib + [output_filename],
                       display=display)
        except DistutilsExecError:
            msg = str(get_exception())
            raise LibError(msg)
    return
|
||||
|
||||
# Monkey-patch distutils' UnixCCompiler so the chunked create_static_lib
# defined above is used when building static libraries.
replace_method(UnixCCompiler, 'create_static_lib',
               UnixCCompiler_create_static_lib)
|
||||
Reference in New Issue
Block a user