External: add macholib and altgraph, needed to relocate Mach-O binaries on Linux (#12909)
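Context for reviewers: the vendored macholib package parses Mach-O load commands and can rewrite dylib install names without running on macOS, which is what cross-platform relocation needs. The sketch below is illustrative only — the helper name, path handling, and prefix mapping are assumptions, not code from this commit — but it uses only APIs defined in the vendored MachO.py (MachO, rewriteLoadCommands, write).

    from macholib.MachO import MachO

    def relocate(binary_path, old_prefix, new_prefix):
        # Parse the Mach-O headers of an existing binary (illustrative helper).
        macho = MachO(binary_path)

        def change(install_name):
            # Return the rewritten install name, or None to leave it untouched.
            if install_name and install_name.startswith(old_prefix):
                return install_name.replace(old_prefix, new_prefix)
            return None

        # Rewrite LC_ID_DYLIB / LC_LOAD_DYLIB-style commands in place.
        if macho.rewriteLoadCommands(change):
            with open(binary_path, 'rb+') as f:
                macho.write(f)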
435
lib/spack/external/macholib/MachO.py
vendored
Normal file
@@ -0,0 +1,435 @@
|
||||
"""
|
||||
Utilities for reading and writing Mach-O headers
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
import sys
|
||||
import struct
|
||||
import os
|
||||
|
||||
from .mach_o import MH_FILETYPE_SHORTNAMES, LC_DYSYMTAB, LC_SYMTAB
|
||||
from .mach_o import load_command, S_ZEROFILL, section_64, section
|
||||
from .mach_o import LC_REGISTRY, LC_ID_DYLIB, LC_SEGMENT, fat_header
|
||||
from .mach_o import LC_SEGMENT_64, MH_CIGAM_64, MH_MAGIC_64, FAT_MAGIC
|
||||
from .mach_o import mach_header, fat_arch64, FAT_MAGIC_64, fat_arch
|
||||
from .mach_o import LC_REEXPORT_DYLIB, LC_PREBOUND_DYLIB, LC_LOAD_WEAK_DYLIB
|
||||
from .mach_o import LC_LOAD_UPWARD_DYLIB, LC_LOAD_DYLIB, mach_header_64
|
||||
from .mach_o import MH_CIGAM, MH_MAGIC
|
||||
from .ptypes import sizeof
|
||||
|
||||
from macholib.util import fileview
|
||||
try:
|
||||
from macholib.compat import bytes
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
try:
|
||||
unicode
|
||||
except NameError:
|
||||
unicode = str
|
||||
|
||||
if sys.version_info[0] == 2:
|
||||
range = xrange # noqa: F821
|
||||
|
||||
__all__ = ['MachO']
|
||||
|
||||
_RELOCATABLE = set((
|
||||
# relocatable commands that should be used for dependency walking
|
||||
LC_LOAD_DYLIB,
|
||||
LC_LOAD_UPWARD_DYLIB,
|
||||
LC_LOAD_WEAK_DYLIB,
|
||||
LC_PREBOUND_DYLIB,
|
||||
LC_REEXPORT_DYLIB,
|
||||
))
|
||||
|
||||
_RELOCATABLE_NAMES = {
|
||||
LC_LOAD_DYLIB: 'load_dylib',
|
||||
LC_LOAD_UPWARD_DYLIB: 'load_upward_dylib',
|
||||
LC_LOAD_WEAK_DYLIB: 'load_weak_dylib',
|
||||
LC_PREBOUND_DYLIB: 'prebound_dylib',
|
||||
LC_REEXPORT_DYLIB: 'reexport_dylib',
|
||||
}
|
||||
|
||||
|
||||
def _shouldRelocateCommand(cmd):
|
||||
"""
|
||||
Should this command id be investigated for relocation?
|
||||
"""
|
||||
return cmd in _RELOCATABLE
|
||||
|
||||
|
||||
def lc_str_value(offset, cmd_info):
|
||||
"""
|
||||
Fetch the actual value of a field of type "lc_str"
|
||||
"""
|
||||
cmd_load, cmd_cmd, cmd_data = cmd_info
|
||||
|
||||
offset -= sizeof(cmd_load) + sizeof(cmd_cmd)
|
||||
return cmd_data[offset:].strip(b'\x00')
|
||||
|
||||
|
||||
class MachO(object):
|
||||
"""
|
||||
Provides reading/writing the Mach-O header of a specific existing file
|
||||
"""
|
||||
# filename - the original filename of this mach-o
|
||||
# sizediff - the current deviation from the initial mach-o size
|
||||
# header - the mach-o header
|
||||
# commands - a list of (load_command, somecommand, data)
|
||||
# data is either a str, or a list of segment structures
|
||||
# total_size - the current mach-o header size (including header)
|
||||
# low_offset - essentially, the maximum mach-o header size
|
||||
# id_cmd - the index of my id command, or None
|
||||
|
||||
def __init__(self, filename):
|
||||
|
||||
# supports the ObjectGraph protocol
|
||||
self.graphident = filename
|
||||
self.filename = filename
|
||||
self.loader_path = os.path.dirname(filename)
|
||||
|
||||
# initialized by load
|
||||
self.fat = None
|
||||
self.headers = []
|
||||
with open(filename, 'rb') as fp:
|
||||
self.load(fp)
|
||||
|
||||
def __repr__(self):
|
||||
return "<MachO filename=%r>" % (self.filename,)
|
||||
|
||||
def load(self, fh):
|
||||
assert fh.tell() == 0
|
||||
header = struct.unpack('>I', fh.read(4))[0]
|
||||
fh.seek(0)
|
||||
if header in (FAT_MAGIC, FAT_MAGIC_64):
|
||||
self.load_fat(fh)
|
||||
else:
|
||||
fh.seek(0, 2)
|
||||
size = fh.tell()
|
||||
fh.seek(0)
|
||||
self.load_header(fh, 0, size)
|
||||
|
||||
def load_fat(self, fh):
|
||||
self.fat = fat_header.from_fileobj(fh)
|
||||
if self.fat.magic == FAT_MAGIC:
|
||||
archs = [fat_arch.from_fileobj(fh)
|
||||
for i in range(self.fat.nfat_arch)]
|
||||
elif self.fat.magic == FAT_MAGIC_64:
|
||||
archs = [fat_arch64.from_fileobj(fh)
|
||||
for i in range(self.fat.nfat_arch)]
|
||||
else:
|
||||
raise ValueError("Unknown fat header magic: %r" % (self.fat.magic))
|
||||
|
||||
for arch in archs:
|
||||
self.load_header(fh, arch.offset, arch.size)
|
||||
|
||||
def rewriteLoadCommands(self, *args, **kw):
|
||||
changed = False
|
||||
for header in self.headers:
|
||||
if header.rewriteLoadCommands(*args, **kw):
|
||||
changed = True
|
||||
return changed
|
||||
|
||||
def load_header(self, fh, offset, size):
|
||||
fh.seek(offset)
|
||||
header = struct.unpack('>I', fh.read(4))[0]
|
||||
fh.seek(offset)
|
||||
if header == MH_MAGIC:
|
||||
magic, hdr, endian = MH_MAGIC, mach_header, '>'
|
||||
elif header == MH_CIGAM:
|
||||
magic, hdr, endian = MH_CIGAM, mach_header, '<'
|
||||
elif header == MH_MAGIC_64:
|
||||
magic, hdr, endian = MH_MAGIC_64, mach_header_64, '>'
|
||||
elif header == MH_CIGAM_64:
|
||||
magic, hdr, endian = MH_CIGAM_64, mach_header_64, '<'
|
||||
else:
|
||||
raise ValueError("Unknown Mach-O header: 0x%08x in %r" % (
|
||||
header, fh))
|
||||
hdr = MachOHeader(self, fh, offset, size, magic, hdr, endian)
|
||||
self.headers.append(hdr)
|
||||
|
||||
def write(self, f):
|
||||
for header in self.headers:
|
||||
header.write(f)
|
||||
|
||||
|
||||
class MachOHeader(object):
|
||||
"""
|
||||
Provides reading/writing the Mach-O header of a specific existing file
|
||||
"""
|
||||
# filename - the original filename of this mach-o
|
||||
# sizediff - the current deviation from the initial mach-o size
|
||||
# header - the mach-o header
|
||||
# commands - a list of (load_command, somecommand, data)
|
||||
# data is either a str, or a list of segment structures
|
||||
# total_size - the current mach-o header size (including header)
|
||||
# low_offset - essentially, the maximum mach-o header size
|
||||
# id_cmd - the index of my id command, or None
|
||||
|
||||
def __init__(self, parent, fh, offset, size, magic, hdr, endian):
|
||||
self.MH_MAGIC = magic
|
||||
self.mach_header = hdr
|
||||
|
||||
# These are all initialized by self.load()
|
||||
self.parent = parent
|
||||
self.offset = offset
|
||||
self.size = size
|
||||
|
||||
self.endian = endian
|
||||
self.header = None
|
||||
self.commands = None
|
||||
self.id_cmd = None
|
||||
self.sizediff = None
|
||||
self.total_size = None
|
||||
self.low_offset = None
|
||||
self.filetype = None
|
||||
self.headers = []
|
||||
|
||||
self.load(fh)
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s filename=%r offset=%d size=%d endian=%r>" % (
|
||||
type(self).__name__, self.parent.filename, self.offset, self.size,
|
||||
self.endian)
|
||||
|
||||
def load(self, fh):
|
||||
fh = fileview(fh, self.offset, self.size)
|
||||
fh.seek(0)
|
||||
|
||||
self.sizediff = 0
|
||||
kw = {'_endian_': self.endian}
|
||||
header = self.mach_header.from_fileobj(fh, **kw)
|
||||
self.header = header
|
||||
# if header.magic != self.MH_MAGIC:
|
||||
# raise ValueError("header has magic %08x, expecting %08x" % (
|
||||
# header.magic, self.MH_MAGIC))
|
||||
|
||||
cmd = self.commands = []
|
||||
|
||||
self.filetype = self.get_filetype_shortname(header.filetype)
|
||||
|
||||
read_bytes = 0
|
||||
low_offset = sys.maxsize
|
||||
for i in range(header.ncmds):
|
||||
# read the load command
|
||||
cmd_load = load_command.from_fileobj(fh, **kw)
|
||||
|
||||
# read the specific command
|
||||
klass = LC_REGISTRY.get(cmd_load.cmd, None)
|
||||
if klass is None:
|
||||
raise ValueError("Unknown load command: %d" % (cmd_load.cmd,))
|
||||
cmd_cmd = klass.from_fileobj(fh, **kw)
|
||||
|
||||
if cmd_load.cmd == LC_ID_DYLIB:
|
||||
# remember where this command was
|
||||
if self.id_cmd is not None:
|
||||
raise ValueError("This dylib already has an id")
|
||||
self.id_cmd = i
|
||||
|
||||
if cmd_load.cmd in (LC_SEGMENT, LC_SEGMENT_64):
|
||||
# for segment commands, read the list of segments
|
||||
segs = []
|
||||
# assert that the size makes sense
|
||||
if cmd_load.cmd == LC_SEGMENT:
|
||||
section_cls = section
|
||||
else: # LC_SEGMENT_64
|
||||
section_cls = section_64
|
||||
|
||||
expected_size = (
|
||||
sizeof(klass) + sizeof(load_command) +
|
||||
(sizeof(section_cls) * cmd_cmd.nsects)
|
||||
)
|
||||
if cmd_load.cmdsize != expected_size:
|
||||
raise ValueError("Segment size mismatch")
|
||||
# this is a zero block or something
|
||||
# so the beginning is wherever the fileoff of this command is
|
||||
if cmd_cmd.nsects == 0:
|
||||
if cmd_cmd.filesize != 0:
|
||||
low_offset = min(low_offset, cmd_cmd.fileoff)
|
||||
else:
|
||||
# this one has multiple segments
|
||||
for j in range(cmd_cmd.nsects):
|
||||
# read the segment
|
||||
seg = section_cls.from_fileobj(fh, **kw)
|
||||
# if the segment has a size and is not zero filled
|
||||
# then its beginning is the offset of this segment
|
||||
not_zerofill = ((seg.flags & S_ZEROFILL) != S_ZEROFILL)
|
||||
if seg.offset > 0 and seg.size > 0 and not_zerofill:
|
||||
low_offset = min(low_offset, seg.offset)
|
||||
if not_zerofill:
|
||||
c = fh.tell()
|
||||
fh.seek(seg.offset)
|
||||
sd = fh.read(seg.size)
|
||||
seg.add_section_data(sd)
|
||||
fh.seek(c)
|
||||
segs.append(seg)
|
||||
# data is a list of segments
|
||||
cmd_data = segs
|
||||
|
||||
# XXX: Disabled for now because writing back doesn't work
|
||||
# elif cmd_load.cmd == LC_CODE_SIGNATURE:
|
||||
# c = fh.tell()
|
||||
# fh.seek(cmd_cmd.dataoff)
|
||||
# cmd_data = fh.read(cmd_cmd.datasize)
|
||||
# fh.seek(c)
|
||||
# elif cmd_load.cmd == LC_SYMTAB:
|
||||
# c = fh.tell()
|
||||
# fh.seek(cmd_cmd.stroff)
|
||||
# cmd_data = fh.read(cmd_cmd.strsize)
|
||||
# fh.seek(c)
|
||||
|
||||
else:
|
||||
# data is a raw str
|
||||
data_size = (
|
||||
cmd_load.cmdsize - sizeof(klass) - sizeof(load_command)
|
||||
)
|
||||
cmd_data = fh.read(data_size)
|
||||
cmd.append((cmd_load, cmd_cmd, cmd_data))
|
||||
read_bytes += cmd_load.cmdsize
|
||||
|
||||
# make sure the header made sense
|
||||
if read_bytes != header.sizeofcmds:
|
||||
raise ValueError("Read %d bytes, header reports %d bytes" % (
|
||||
read_bytes, header.sizeofcmds))
|
||||
self.total_size = sizeof(self.mach_header) + read_bytes
|
||||
self.low_offset = low_offset
|
||||
|
||||
def walkRelocatables(self, shouldRelocateCommand=_shouldRelocateCommand):
|
||||
"""
|
||||
for all relocatable commands
|
||||
yield (command_index, command_name, filename)
|
||||
"""
|
||||
for (idx, (lc, cmd, data)) in enumerate(self.commands):
|
||||
if shouldRelocateCommand(lc.cmd):
|
||||
name = _RELOCATABLE_NAMES[lc.cmd]
|
||||
ofs = cmd.name - sizeof(lc.__class__) - sizeof(cmd.__class__)
|
||||
yield idx, name, data[ofs:data.find(b'\x00', ofs)].decode(
|
||||
sys.getfilesystemencoding())
|
||||
|
||||
def rewriteInstallNameCommand(self, loadcmd):
|
||||
"""Rewrite the load command of this dylib"""
|
||||
if self.id_cmd is not None:
|
||||
self.rewriteDataForCommand(self.id_cmd, loadcmd)
|
||||
return True
|
||||
return False
|
||||
|
||||
def changedHeaderSizeBy(self, bytes):
|
||||
self.sizediff += bytes
|
||||
if (self.total_size + self.sizediff) > self.low_offset:
|
||||
print(
|
||||
"WARNING: Mach-O header in %r may be too large to relocate" % (
|
||||
self.parent.filename,))
|
||||
|
||||
def rewriteLoadCommands(self, changefunc):
|
||||
"""
|
||||
Rewrite the load commands based upon a change dictionary
|
||||
"""
|
||||
data = changefunc(self.parent.filename)
|
||||
changed = False
|
||||
if data is not None:
|
||||
if self.rewriteInstallNameCommand(
|
||||
data.encode(sys.getfilesystemencoding())):
|
||||
changed = True
|
||||
for idx, name, filename in self.walkRelocatables():
|
||||
data = changefunc(filename)
|
||||
if data is not None:
|
||||
if self.rewriteDataForCommand(idx, data.encode(
|
||||
sys.getfilesystemencoding())):
|
||||
changed = True
|
||||
return changed
|
||||
|
||||
def rewriteDataForCommand(self, idx, data):
|
||||
lc, cmd, old_data = self.commands[idx]
|
||||
hdrsize = sizeof(lc.__class__) + sizeof(cmd.__class__)
|
||||
align = struct.calcsize('Q')
|
||||
data = data + (b'\x00' * (align - (len(data) % align)))
|
||||
newsize = hdrsize + len(data)
|
||||
self.commands[idx] = (lc, cmd, data)
|
||||
self.changedHeaderSizeBy(newsize - lc.cmdsize)
|
||||
lc.cmdsize, cmd.name = newsize, hdrsize
|
||||
return True
|
||||
|
||||
def synchronize_size(self):
|
||||
if (self.total_size + self.sizediff) > self.low_offset:
|
||||
raise ValueError(
|
||||
("New Mach-O header is too large to relocate in %r "
|
||||
"(new size=%r, max size=%r, delta=%r)") % (
|
||||
self.parent.filename, self.total_size + self.sizediff,
|
||||
self.low_offset, self.sizediff))
|
||||
self.header.sizeofcmds += self.sizediff
|
||||
self.total_size = sizeof(self.mach_header) + self.header.sizeofcmds
|
||||
self.sizediff = 0
|
||||
|
||||
def write(self, fileobj):
|
||||
fileobj = fileview(fileobj, self.offset, self.size)
|
||||
fileobj.seek(0)
|
||||
|
||||
# serialize all the mach-o commands
|
||||
self.synchronize_size()
|
||||
|
||||
self.header.to_fileobj(fileobj)
|
||||
for lc, cmd, data in self.commands:
|
||||
lc.to_fileobj(fileobj)
|
||||
cmd.to_fileobj(fileobj)
|
||||
|
||||
if sys.version_info[0] == 2:
|
||||
if isinstance(data, unicode):
|
||||
fileobj.write(data.encode(sys.getfilesystemencoding()))
|
||||
|
||||
elif isinstance(data, (bytes, str)):
|
||||
fileobj.write(data)
|
||||
else:
|
||||
# segments..
|
||||
for obj in data:
|
||||
obj.to_fileobj(fileobj)
|
||||
else:
|
||||
if isinstance(data, str):
|
||||
fileobj.write(data.encode(sys.getfilesystemencoding()))
|
||||
|
||||
elif isinstance(data, bytes):
|
||||
fileobj.write(data)
|
||||
|
||||
else:
|
||||
# segments..
|
||||
for obj in data:
|
||||
obj.to_fileobj(fileobj)
|
||||
|
||||
# zero out the unused space, doubt this is strictly necessary
|
||||
# and is generally probably already the case
|
||||
fileobj.write(b'\x00' * (self.low_offset - fileobj.tell()))
|
||||
|
||||
def getSymbolTableCommand(self):
|
||||
for lc, cmd, data in self.commands:
|
||||
if lc.cmd == LC_SYMTAB:
|
||||
return cmd
|
||||
return None
|
||||
|
||||
def getDynamicSymbolTableCommand(self):
|
||||
for lc, cmd, data in self.commands:
|
||||
if lc.cmd == LC_DYSYMTAB:
|
||||
return cmd
|
||||
return None
|
||||
|
||||
def get_filetype_shortname(self, filetype):
|
||||
if filetype in MH_FILETYPE_SHORTNAMES:
|
||||
return MH_FILETYPE_SHORTNAMES[filetype]
|
||||
else:
|
||||
return 'unknown'
|
||||
|
||||
|
||||
def main(fn):
|
||||
m = MachO(fn)
|
||||
seen = set()
|
||||
for header in m.headers:
|
||||
for idx, name, other in header.walkRelocatables():
|
||||
if other not in seen:
|
||||
seen.add(other)
|
||||
print('\t' + name + ": " + other)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
files = sys.argv[1:] or ['/bin/ls']
|
||||
for fn in files:
|
||||
print(fn)
|
||||
main(fn)
|
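As a usage note: the module-level main() above prints each binary's relocatable load commands; an equivalent minimal sketch that just collects the referenced install names (the helper name and example path are illustrative) looks like this:

    from macholib.MachO import MachO

    def list_dependencies(path):
        # Collect the unique install names referenced by every architecture
        # slice (fat binaries produce one header per arch).
        deps = set()
        for header in MachO(path).headers:
            for idx, name, other in header.walkRelocatables():
                deps.add(other)
        return sorted(deps)

    # e.g. list_dependencies('/bin/ls') on macOS returns libSystem and friends.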
138
lib/spack/external/macholib/MachOGraph.py
vendored
Normal file
@@ -0,0 +1,138 @@
|
||||
"""
|
||||
Utilities for reading and writing Mach-O headers
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from altgraph.ObjectGraph import ObjectGraph
|
||||
|
||||
from macholib.dyld import dyld_find
|
||||
from macholib.MachO import MachO
|
||||
from macholib.itergraphreport import itergraphreport
|
||||
|
||||
__all__ = ['MachOGraph']
|
||||
|
||||
try:
|
||||
unicode
|
||||
except NameError:
|
||||
unicode = str
|
||||
|
||||
|
||||
class MissingMachO(object):
|
||||
def __init__(self, filename):
|
||||
self.graphident = filename
|
||||
self.headers = ()
|
||||
|
||||
def __repr__(self):
|
||||
return '<%s graphident=%r>' % (type(self).__name__, self.graphident)
|
||||
|
||||
|
||||
class MachOGraph(ObjectGraph):
|
||||
"""
|
||||
Graph data structure of Mach-O dependencies
|
||||
"""
|
||||
def __init__(self, debug=0, graph=None, env=None, executable_path=None):
|
||||
super(MachOGraph, self).__init__(debug=debug, graph=graph)
|
||||
self.env = env
|
||||
self.trans_table = {}
|
||||
self.executable_path = executable_path
|
||||
|
||||
def locate(self, filename, loader=None):
|
||||
if not isinstance(filename, (str, unicode)):
|
||||
raise TypeError("%r is not a string" % (filename,))
|
||||
if filename.startswith('@loader_path/') and loader is not None:
|
||||
fn = self.trans_table.get((loader.filename, filename))
|
||||
if fn is None:
|
||||
loader_path = loader.loader_path
|
||||
|
||||
try:
|
||||
fn = dyld_find(
|
||||
filename, env=self.env,
|
||||
executable_path=self.executable_path,
|
||||
loader_path=loader_path)
|
||||
self.trans_table[(loader.filename, filename)] = fn
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
else:
|
||||
fn = self.trans_table.get(filename)
|
||||
if fn is None:
|
||||
try:
|
||||
fn = dyld_find(
|
||||
filename, env=self.env,
|
||||
executable_path=self.executable_path)
|
||||
self.trans_table[filename] = fn
|
||||
except ValueError:
|
||||
return None
|
||||
return fn
|
||||
|
||||
def findNode(self, name, loader=None):
|
||||
assert isinstance(name, (str, unicode))
|
||||
data = super(MachOGraph, self).findNode(name)
|
||||
if data is not None:
|
||||
return data
|
||||
newname = self.locate(name, loader=loader)
|
||||
if newname is not None and newname != name:
|
||||
return self.findNode(newname)
|
||||
return None
|
||||
|
||||
def run_file(self, pathname, caller=None):
|
||||
assert isinstance(pathname, (str, unicode))
|
||||
self.msgin(2, "run_file", pathname)
|
||||
m = self.findNode(pathname, loader=caller)
|
||||
if m is None:
|
||||
if not os.path.exists(pathname):
|
||||
raise ValueError('%r does not exist' % (pathname,))
|
||||
m = self.createNode(MachO, pathname)
|
||||
self.createReference(caller, m, edge_data='run_file')
|
||||
self.scan_node(m)
|
||||
self.msgout(2, '')
|
||||
return m
|
||||
|
||||
def load_file(self, name, caller=None):
|
||||
assert isinstance(name, (str, unicode))
|
||||
self.msgin(2, "load_file", name, caller)
|
||||
m = self.findNode(name, loader=caller)
|
||||
if m is None:
|
||||
newname = self.locate(name, loader=caller)
|
||||
if newname is not None and newname != name:
|
||||
return self.load_file(newname, caller=caller)
|
||||
if os.path.exists(name):
|
||||
m = self.createNode(MachO, name)
|
||||
self.scan_node(m)
|
||||
else:
|
||||
m = self.createNode(MissingMachO, name)
|
||||
self.msgout(2, '')
|
||||
return m
|
||||
|
||||
def scan_node(self, node):
|
||||
self.msgin(2, 'scan_node', node)
|
||||
for header in node.headers:
|
||||
for idx, name, filename in header.walkRelocatables():
|
||||
assert isinstance(name, (str, unicode))
|
||||
assert isinstance(filename, (str, unicode))
|
||||
m = self.load_file(filename, caller=node)
|
||||
self.createReference(node, m, edge_data=name)
|
||||
self.msgout(2, '', node)
|
||||
|
||||
def itergraphreport(self, name='G'):
|
||||
nodes = map(self.graph.describe_node, self.graph.iterdfs(self))
|
||||
describe_edge = self.graph.describe_edge
|
||||
return itergraphreport(nodes, describe_edge, name=name)
|
||||
|
||||
def graphreport(self, fileobj=None):
|
||||
if fileobj is None:
|
||||
fileobj = sys.stdout
|
||||
fileobj.writelines(self.itergraphreport())
|
||||
|
||||
|
||||
def main(args):
|
||||
g = MachOGraph()
|
||||
for arg in args:
|
||||
g.run_file(arg)
|
||||
g.graphreport()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main(sys.argv[1:] or ['/bin/ls'])
|
169
lib/spack/external/macholib/MachOStandalone.py
vendored
Normal file
@@ -0,0 +1,169 @@
|
||||
import os
|
||||
|
||||
from macholib.MachOGraph import MachOGraph, MissingMachO
|
||||
from macholib.util import iter_platform_files, in_system_path, mergecopy, \
|
||||
mergetree, flipwritable, has_filename_filter
|
||||
from macholib.dyld import framework_info
|
||||
from collections import deque
|
||||
|
||||
|
||||
class ExcludedMachO(MissingMachO):
|
||||
pass
|
||||
|
||||
|
||||
class FilteredMachOGraph(MachOGraph):
|
||||
def __init__(self, delegate, *args, **kwargs):
|
||||
super(FilteredMachOGraph, self).__init__(*args, **kwargs)
|
||||
self.delegate = delegate
|
||||
|
||||
def createNode(self, cls, name):
|
||||
cls = self.delegate.getClass(name, cls)
|
||||
res = super(FilteredMachOGraph, self).createNode(cls, name)
|
||||
return self.delegate.update_node(res)
|
||||
|
||||
def locate(self, filename, loader=None):
|
||||
newname = super(FilteredMachOGraph, self).locate(filename, loader)
|
||||
print("locate", filename, loader, "->", newname)
|
||||
if newname is None:
|
||||
return None
|
||||
return self.delegate.locate(newname, loader=loader)
|
||||
|
||||
|
||||
class MachOStandalone(object):
|
||||
def __init__(
|
||||
self, base, dest=None, graph=None, env=None,
|
||||
executable_path=None):
|
||||
self.base = os.path.join(os.path.abspath(base), '')
|
||||
if dest is None:
|
||||
dest = os.path.join(self.base, 'Contents', 'Frameworks')
|
||||
self.dest = dest
|
||||
self.mm = FilteredMachOGraph(
|
||||
self, graph=graph, env=env, executable_path=executable_path)
|
||||
self.changemap = {}
|
||||
self.excludes = []
|
||||
self.pending = deque()
|
||||
|
||||
def update_node(self, m):
|
||||
return m
|
||||
|
||||
def getClass(self, name, cls):
|
||||
if in_system_path(name):
|
||||
return ExcludedMachO
|
||||
for base in self.excludes:
|
||||
if name.startswith(base):
|
||||
return ExcludedMachO
|
||||
return cls
|
||||
|
||||
def locate(self, filename, loader=None):
|
||||
if in_system_path(filename):
|
||||
return filename
|
||||
if filename.startswith(self.base):
|
||||
return filename
|
||||
for base in self.excludes:
|
||||
if filename.startswith(base):
|
||||
return filename
|
||||
if filename in self.changemap:
|
||||
return self.changemap[filename]
|
||||
info = framework_info(filename)
|
||||
if info is None:
|
||||
res = self.copy_dylib(filename)
|
||||
self.changemap[filename] = res
|
||||
return res
|
||||
else:
|
||||
res = self.copy_framework(info)
|
||||
self.changemap[filename] = res
|
||||
return res
|
||||
|
||||
def copy_dylib(self, filename):
|
||||
# When the filename is a symlink use the basename of the target of
|
||||
# the link as the name in standalone bundle. This avoids problems
|
||||
# when two libraries link to the same dylib but using different
|
||||
# symlinks.
|
||||
if os.path.islink(filename):
|
||||
dest = os.path.join(
|
||||
self.dest, os.path.basename(os.path.realpath(filename)))
|
||||
else:
|
||||
dest = os.path.join(self.dest, os.path.basename(filename))
|
||||
|
||||
if not os.path.exists(dest):
|
||||
self.mergecopy(filename, dest)
|
||||
return dest
|
||||
|
||||
def mergecopy(self, src, dest):
|
||||
return mergecopy(src, dest)
|
||||
|
||||
def mergetree(self, src, dest):
|
||||
return mergetree(src, dest)
|
||||
|
||||
def copy_framework(self, info):
|
||||
dest = os.path.join(self.dest, info['shortname'] + '.framework')
|
||||
destfn = os.path.join(self.dest, info['name'])
|
||||
src = os.path.join(info['location'], info['shortname'] + '.framework')
|
||||
if not os.path.exists(dest):
|
||||
self.mergetree(src, dest)
|
||||
self.pending.append((destfn, iter_platform_files(dest)))
|
||||
return destfn
|
||||
|
||||
def run(self, platfiles=None, contents=None):
|
||||
mm = self.mm
|
||||
if contents is None:
|
||||
contents = '@executable_path/..'
|
||||
if platfiles is None:
|
||||
platfiles = iter_platform_files(self.base)
|
||||
|
||||
for fn in platfiles:
|
||||
mm.run_file(fn)
|
||||
|
||||
while self.pending:
|
||||
fmwk, files = self.pending.popleft()
|
||||
ref = mm.findNode(fmwk)
|
||||
for fn in files:
|
||||
mm.run_file(fn, caller=ref)
|
||||
|
||||
changemap = {}
|
||||
skipcontents = os.path.join(os.path.dirname(self.dest), '')
|
||||
machfiles = []
|
||||
|
||||
for node in mm.flatten(has_filename_filter):
|
||||
machfiles.append(node)
|
||||
dest = os.path.join(
|
||||
contents, os.path.normpath(node.filename[len(skipcontents):]))
|
||||
changemap[node.filename] = dest
|
||||
|
||||
def changefunc(path):
|
||||
if path.startswith('@loader_path/'):
|
||||
# XXX: This is a quick hack for py2app: In that
|
||||
# usecase paths like this are found in the load
|
||||
# commands of relocatable wheels. Those don't
|
||||
# need rewriting.
|
||||
return path
|
||||
|
||||
res = mm.locate(path)
|
||||
rv = changemap.get(res)
|
||||
if rv is None and path.startswith('@loader_path/'):
|
||||
rv = changemap.get(mm.locate(mm.trans_table.get(
|
||||
(node.filename, path))))
|
||||
return rv
|
||||
|
||||
for node in machfiles:
|
||||
fn = mm.locate(node.filename)
|
||||
if fn is None:
|
||||
continue
|
||||
rewroteAny = False
|
||||
for header in node.headers:
|
||||
if node.rewriteLoadCommands(changefunc):
|
||||
rewroteAny = True
|
||||
if rewroteAny:
|
||||
old_mode = flipwritable(fn)
|
||||
try:
|
||||
with open(fn, 'rb+') as f:
|
||||
for header in node.headers:
|
||||
f.seek(0)
|
||||
node.write(f)
|
||||
f.seek(0, 2)
|
||||
f.flush()
|
||||
finally:
|
||||
flipwritable(fn, old_mode)
|
||||
|
||||
allfiles = [mm.locate(node.filename) for node in machfiles]
|
||||
return set(filter(None, allfiles))
|
86
lib/spack/external/macholib/SymbolTable.py
vendored
Normal file
@@ -0,0 +1,86 @@
|
||||
"""
|
||||
Class to read the symbol table from a Mach-O header
|
||||
"""
|
||||
from __future__ import with_statement
|
||||
|
||||
from macholib.mach_o import relocation_info, dylib_reference, dylib_module
|
||||
from macholib.mach_o import dylib_table_of_contents, nlist, nlist_64
|
||||
from macholib.mach_o import MH_CIGAM_64, MH_MAGIC_64
|
||||
import sys
|
||||
|
||||
__all__ = ['SymbolTable']
|
||||
|
||||
if sys.version_info[0] == 2:
|
||||
range = xrange # noqa: F821
|
||||
|
||||
|
||||
class SymbolTable(object):
|
||||
def __init__(self, macho, header=None, openfile=None):
|
||||
if openfile is None:
|
||||
openfile = open
|
||||
if header is None:
|
||||
header = macho.headers[0]
|
||||
self.macho_header = header
|
||||
with openfile(macho.filename, 'rb') as fh:
|
||||
self.symtab = header.getSymbolTableCommand()
|
||||
self.dysymtab = header.getDynamicSymbolTableCommand()
|
||||
|
||||
if self.symtab is not None:
|
||||
self.nlists = self.readSymbolTable(fh)
|
||||
|
||||
if self.dysymtab is not None:
|
||||
self.readDynamicSymbolTable(fh)
|
||||
|
||||
def readSymbolTable(self, fh):
|
||||
cmd = self.symtab
|
||||
fh.seek(self.macho_header.offset + cmd.stroff)
|
||||
strtab = fh.read(cmd.strsize)
|
||||
fh.seek(self.macho_header.offset + cmd.symoff)
|
||||
nlists = []
|
||||
|
||||
if self.macho_header.MH_MAGIC in [MH_MAGIC_64, MH_CIGAM_64]:
|
||||
cls = nlist_64
|
||||
else:
|
||||
cls = nlist
|
||||
|
||||
for i in range(cmd.nsyms):
|
||||
cmd = cls.from_fileobj(fh, _endian_=self.macho_header.endian)
|
||||
if cmd.n_un == 0:
|
||||
nlists.append((cmd, ''))
|
||||
else:
|
||||
nlists.append(
|
||||
(cmd, strtab[cmd.n_un:strtab.find(b'\x00', cmd.n_un)]))
|
||||
return nlists
|
||||
|
||||
def readDynamicSymbolTable(self, fh):
|
||||
cmd = self.dysymtab
|
||||
nlists = self.nlists
|
||||
|
||||
self.localsyms = nlists[cmd.ilocalsym:cmd.ilocalsym+cmd.nlocalsym]
|
||||
self.extdefsyms = nlists[cmd.iextdefsym:cmd.iextdefsym+cmd.nextdefsym]
|
||||
self.undefsyms = nlists[cmd.iundefsym:cmd.iundefsym+cmd.nundefsym]
|
||||
if cmd.tocoff == 0:
|
||||
self.toc = None
|
||||
else:
|
||||
self.toc = self.readtoc(fh, cmd.tocoff, cmd.ntoc)
|
||||
|
||||
def readtoc(self, fh, off, n):
|
||||
fh.seek(self.macho_header.offset + off)
|
||||
return [dylib_table_of_contents.from_fileobj(fh) for i in range(n)]
|
||||
|
||||
def readmodtab(self, fh, off, n):
|
||||
fh.seek(self.macho_header.offset + off)
|
||||
return [dylib_module.from_fileobj(fh) for i in range(n)]
|
||||
|
||||
def readsym(self, fh, off, n):
|
||||
fh.seek(self.macho_header.offset + off)
|
||||
refs = []
|
||||
for i in range(n):
|
||||
ref = dylib_reference.from_fileobj(fh)
|
||||
isym, flags = divmod(ref.isym_flags, 256)
|
||||
refs.append((self.nlists[isym], flags))
|
||||
return refs
|
||||
|
||||
def readrel(self, fh, off, n):
|
||||
fh.seek(self.macho_header.offset + off)
|
||||
return [relocation_info.from_fileobj(fh) for i in range(n)]
|
8
lib/spack/external/macholib/__init__.py
vendored
Normal file
@@ -0,0 +1,8 @@
"""
Enough Mach-O to make your head spin.

See the relevant header files in /usr/include/mach-o

And also Apple's documentation.
"""
__version__ = '1.10'
83
lib/spack/external/macholib/__main__.py
vendored
Normal file
@@ -0,0 +1,83 @@
|
||||
from __future__ import print_function, absolute_import
|
||||
import os
|
||||
import sys
|
||||
|
||||
from macholib.util import is_platform_file
|
||||
from macholib import macho_dump
|
||||
from macholib import macho_standalone
|
||||
|
||||
gCommand = None
|
||||
|
||||
|
||||
def check_file(fp, path, callback):
|
||||
if not os.path.exists(path):
|
||||
print(
|
||||
'%s: %s: No such file or directory' % (gCommand, path),
|
||||
file=sys.stderr)
|
||||
return 1
|
||||
|
||||
try:
|
||||
is_plat = is_platform_file(path)
|
||||
|
||||
except IOError as msg:
|
||||
print('%s: %s: %s' % (gCommand, path, msg), file=sys.stderr)
|
||||
return 1
|
||||
|
||||
else:
|
||||
if is_plat:
|
||||
callback(fp, path)
|
||||
return 0
|
||||
|
||||
|
||||
def walk_tree(callback, paths):
|
||||
err = 0
|
||||
|
||||
for base in paths:
|
||||
if os.path.isdir(base):
|
||||
for root, dirs, files in os.walk(base):
|
||||
for fn in files:
|
||||
err |= check_file(
|
||||
sys.stdout, os.path.join(root, fn), callback)
|
||||
else:
|
||||
err |= check_file(sys.stdout, base, callback)
|
||||
|
||||
return err
|
||||
|
||||
|
||||
def print_usage(fp):
|
||||
print("Usage:", file=fp)
|
||||
print(" python -mmacholib [help|--help]", file=fp)
|
||||
print(" python -mmacholib dump FILE ...", file=fp)
|
||||
print(" python -mmacholib find DIR ...", file=fp)
|
||||
print(" python -mmacholib standalone DIR ...", file=fp)
|
||||
|
||||
|
||||
def main():
|
||||
global gCommand
|
||||
if len(sys.argv) < 3:
|
||||
print_usage(sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
gCommand = sys.argv[1]
|
||||
|
||||
if gCommand == 'dump':
|
||||
walk_tree(macho_dump.print_file, sys.argv[2:])
|
||||
|
||||
elif gCommand == 'find':
|
||||
walk_tree(lambda fp, path: print(path, file=fp), sys.argv[2:])
|
||||
|
||||
elif gCommand == 'standalone':
|
||||
for dn in sys.argv[2:]:
|
||||
macho_standalone.standaloneApp(dn)
|
||||
|
||||
elif gCommand in ('help', '--help'):
|
||||
print_usage(sys.stdout)
|
||||
sys.exit(0)
|
||||
|
||||
else:
|
||||
print_usage(sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
48
lib/spack/external/macholib/_cmdline.py
vendored
Normal file
@@ -0,0 +1,48 @@
"""
Internal helpers for basic commandline tools
"""
from __future__ import print_function, absolute_import
import os
import sys

from macholib.util import is_platform_file


def check_file(fp, path, callback):
    if not os.path.exists(path):
        print('%s: %s: No such file or directory' % (
            sys.argv[0], path), file=sys.stderr)
        return 1

    try:
        is_plat = is_platform_file(path)

    except IOError as msg:
        print('%s: %s: %s' % (sys.argv[0], path, msg), file=sys.stderr)
        return 1

    else:
        if is_plat:
            callback(fp, path)
        return 0


def main(callback):
    args = sys.argv[1:]
    name = os.path.basename(sys.argv[0])
    err = 0

    if not args:
        print("Usage: %s filename..." % (name,), file=sys.stderr)
        return 1

    for base in args:
        if os.path.isdir(base):
            for root, dirs, files in os.walk(base):
                for fn in files:
                    err |= check_file(
                        sys.stdout, os.path.join(root, fn), callback)
        else:
            err |= check_file(sys.stdout, base, callback)

    return err
190
lib/spack/external/macholib/dyld.py
vendored
Normal file
@@ -0,0 +1,190 @@
|
||||
"""
|
||||
dyld emulation
|
||||
"""
|
||||
|
||||
from itertools import chain
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from macholib.framework import framework_info
|
||||
from macholib.dylib import dylib_info
|
||||
|
||||
__all__ = [
|
||||
'dyld_find', 'framework_find',
|
||||
'framework_info', 'dylib_info',
|
||||
]
|
||||
|
||||
# These are the defaults as per man dyld(1)
|
||||
#
|
||||
_DEFAULT_FRAMEWORK_FALLBACK = [
|
||||
os.path.expanduser("~/Library/Frameworks"),
|
||||
"/Library/Frameworks",
|
||||
"/Network/Library/Frameworks",
|
||||
"/System/Library/Frameworks",
|
||||
]
|
||||
|
||||
_DEFAULT_LIBRARY_FALLBACK = [
|
||||
os.path.expanduser("~/lib"),
|
||||
"/usr/local/lib",
|
||||
"/lib",
|
||||
"/usr/lib",
|
||||
]
|
||||
|
||||
# XXX: Is this function still needed?
|
||||
if sys.version_info[0] == 2:
|
||||
def _ensure_utf8(s):
|
||||
if isinstance(s, unicode): # noqa: F821
|
||||
return s.encode('utf8')
|
||||
return s
|
||||
else:
|
||||
def _ensure_utf8(s):
|
||||
if s is not None and not isinstance(s, str):
|
||||
raise ValueError(s)
|
||||
return s
|
||||
|
||||
|
||||
def _dyld_env(env, var):
|
||||
if env is None:
|
||||
env = os.environ
|
||||
rval = env.get(var)
|
||||
if rval is None or rval == '':
|
||||
return []
|
||||
return rval.split(':')
|
||||
|
||||
|
||||
def dyld_image_suffix(env=None):
|
||||
if env is None:
|
||||
env = os.environ
|
||||
return env.get('DYLD_IMAGE_SUFFIX')
|
||||
|
||||
|
||||
def dyld_framework_path(env=None):
|
||||
return _dyld_env(env, 'DYLD_FRAMEWORK_PATH')
|
||||
|
||||
|
||||
def dyld_library_path(env=None):
|
||||
return _dyld_env(env, 'DYLD_LIBRARY_PATH')
|
||||
|
||||
|
||||
def dyld_fallback_framework_path(env=None):
|
||||
return _dyld_env(env, 'DYLD_FALLBACK_FRAMEWORK_PATH')
|
||||
|
||||
|
||||
def dyld_fallback_library_path(env=None):
|
||||
return _dyld_env(env, 'DYLD_FALLBACK_LIBRARY_PATH')
|
||||
|
||||
|
||||
def dyld_image_suffix_search(iterator, env=None):
|
||||
"""For a potential path iterator, add DYLD_IMAGE_SUFFIX semantics"""
|
||||
suffix = dyld_image_suffix(env)
|
||||
if suffix is None:
|
||||
return iterator
|
||||
|
||||
def _inject(iterator=iterator, suffix=suffix):
|
||||
for path in iterator:
|
||||
if path.endswith('.dylib'):
|
||||
yield path[:-len('.dylib')] + suffix + '.dylib'
|
||||
else:
|
||||
yield path + suffix
|
||||
yield path
|
||||
|
||||
return _inject()
|
||||
|
||||
|
||||
def dyld_override_search(name, env=None):
|
||||
# If DYLD_FRAMEWORK_PATH is set and this dylib_name is a
|
||||
# framework name, use the first file that exists in the framework
|
||||
# path if any. If there is none go on to search the DYLD_LIBRARY_PATH
|
||||
# if any.
|
||||
|
||||
framework = framework_info(name)
|
||||
|
||||
if framework is not None:
|
||||
for path in dyld_framework_path(env):
|
||||
yield os.path.join(path, framework['name'])
|
||||
|
||||
# If DYLD_LIBRARY_PATH is set then use the first file that exists
|
||||
# in the path. If none use the original name.
|
||||
for path in dyld_library_path(env):
|
||||
yield os.path.join(path, os.path.basename(name))
|
||||
|
||||
|
||||
def dyld_executable_path_search(name, executable_path=None):
|
||||
# If we haven't done any searching and found a library and the
|
||||
# dylib_name starts with "@executable_path/" then construct the
|
||||
# library name.
|
||||
if name.startswith('@executable_path/') and executable_path is not None:
|
||||
yield os.path.join(executable_path, name[len('@executable_path/'):])
|
||||
|
||||
|
||||
def dyld_loader_search(name, loader_path=None):
|
||||
# If we haven't done any searching and found a library and the
|
||||
# dylib_name starts with "@loader_path/" then construct the
|
||||
# library name.
|
||||
if name.startswith('@loader_path/') and loader_path is not None:
|
||||
yield os.path.join(loader_path, name[len('@loader_path/'):])
|
||||
|
||||
|
||||
def dyld_default_search(name, env=None):
|
||||
yield name
|
||||
|
||||
framework = framework_info(name)
|
||||
|
||||
if framework is not None:
|
||||
fallback_framework_path = dyld_fallback_framework_path(env)
|
||||
|
||||
if fallback_framework_path:
|
||||
for path in fallback_framework_path:
|
||||
yield os.path.join(path, framework['name'])
|
||||
|
||||
else:
|
||||
for path in _DEFAULT_FRAMEWORK_FALLBACK:
|
||||
yield os.path.join(path, framework['name'])
|
||||
|
||||
fallback_library_path = dyld_fallback_library_path(env)
|
||||
if fallback_library_path:
|
||||
for path in fallback_library_path:
|
||||
yield os.path.join(path, os.path.basename(name))
|
||||
|
||||
else:
|
||||
for path in _DEFAULT_LIBRARY_FALLBACK:
|
||||
yield os.path.join(path, os.path.basename(name))
|
||||
|
||||
|
||||
def dyld_find(name, executable_path=None, env=None, loader_path=None):
|
||||
"""
|
||||
Find a library or framework using dyld semantics
|
||||
"""
|
||||
name = _ensure_utf8(name)
|
||||
executable_path = _ensure_utf8(executable_path)
|
||||
for path in dyld_image_suffix_search(chain(
|
||||
dyld_override_search(name, env),
|
||||
dyld_executable_path_search(name, executable_path),
|
||||
dyld_loader_search(name, loader_path),
|
||||
dyld_default_search(name, env),
|
||||
), env):
|
||||
if os.path.isfile(path):
|
||||
return path
|
||||
raise ValueError("dylib %s could not be found" % (name,))
|
||||
|
||||
|
||||
def framework_find(fn, executable_path=None, env=None):
|
||||
"""
|
||||
Find a framework using dyld semantics in a very loose manner.
|
||||
|
||||
Will take input such as:
|
||||
Python
|
||||
Python.framework
|
||||
Python.framework/Versions/Current
|
||||
"""
|
||||
try:
|
||||
return dyld_find(fn, executable_path=executable_path, env=env)
|
||||
except ValueError:
|
||||
pass
|
||||
fmwk_index = fn.rfind('.framework')
|
||||
if fmwk_index == -1:
|
||||
fmwk_index = len(fn)
|
||||
fn += '.framework'
|
||||
fn = os.path.join(fn, os.path.basename(fn[:fmwk_index]))
|
||||
return dyld_find(fn, executable_path=executable_path, env=env)
|
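Usage note: dyld_find() chains the override, executable-path, loader-path, and default searches defined above and returns the first candidate that exists on disk, raising ValueError otherwise. A small illustrative call follows (the library name and search paths are made up; on a non-macOS host this typically falls into the except branch):

    from macholib.dyld import dyld_find

    try:
        # Resolve an install name the way dyld would, honoring
        # DYLD_LIBRARY_PATH / fallback paths and @executable_path.
        path = dyld_find('libz.1.dylib',
                         executable_path='/usr/bin',
                         env={'DYLD_LIBRARY_PATH': '/opt/local/lib'})
        print(path)
    except ValueError:
        # Raised when no candidate file exists, e.g. on a Linux host.
        print('libz.1.dylib not found')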
43
lib/spack/external/macholib/dylib.py
vendored
Normal file
@@ -0,0 +1,43 @@
"""
Generic dylib path manipulation
"""

import re

__all__ = ['dylib_info']

_DYLIB_RE = re.compile(r"""(?x)
(?P<location>^.*)(?:^|/)
(?P<name>
    (?P<shortname>\w+?)
    (?:\.(?P<version>[^._]+))?
    (?:_(?P<suffix>[^._]+))?
    \.dylib$
)
""")


def dylib_info(filename):
    """
    A dylib name can take one of the following four forms:
        Location/Name.SomeVersion_Suffix.dylib
        Location/Name.SomeVersion.dylib
        Location/Name_Suffix.dylib
        Location/Name.dylib

    returns None if not found or a mapping equivalent to:
        dict(
            location='Location',
            name='Name.SomeVersion_Suffix.dylib',
            shortname='Name',
            version='SomeVersion',
            suffix='Suffix',
        )

    Note that SomeVersion and Suffix are optional and may be None
    if not present.
    """
    is_dylib = _DYLIB_RE.match(filename)
    if not is_dylib:
        return None
    return is_dylib.groupdict()
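A worked example of the mapping described in the docstring above (the path is made up):

    from macholib.dylib import dylib_info

    info = dylib_info('lib/libFoo.A_debug.dylib')
    # -> {'location': 'lib', 'name': 'libFoo.A_debug.dylib',
    #     'shortname': 'libFoo', 'version': 'A', 'suffix': 'debug'}
    print(info)

    # Paths that do not end in .dylib simply return None.
    assert dylib_info('completely/invalid') is None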
43
lib/spack/external/macholib/framework.py
vendored
Normal file
@@ -0,0 +1,43 @@
"""
Generic framework path manipulation
"""

import re

__all__ = ['framework_info']

_STRICT_FRAMEWORK_RE = re.compile(r"""(?x)
(?P<location>^.*)(?:^|/)
(?P<name>
    (?P<shortname>[-_A-Za-z0-9]+).framework/
    (?:Versions/(?P<version>[^/]+)/)?
    (?P=shortname)
    (?:_(?P<suffix>[^_]+))?
)$
""")


def framework_info(filename):
    """
    A framework name can take one of the following four forms:
        Location/Name.framework/Versions/SomeVersion/Name_Suffix
        Location/Name.framework/Versions/SomeVersion/Name
        Location/Name.framework/Name_Suffix
        Location/Name.framework/Name

    returns None if not found, or a mapping equivalent to:
        dict(
            location='Location',
            name='Name.framework/Versions/SomeVersion/Name_Suffix',
            shortname='Name',
            version='SomeVersion',
            suffix='Suffix',
        )

    Note that SomeVersion and Suffix are optional and may be None
    if not present
    """
    is_framework = _STRICT_FRAMEWORK_RE.match(filename)
    if not is_framework:
        return None
    return is_framework.groupdict()
73
lib/spack/external/macholib/itergraphreport.py
vendored
Normal file
@@ -0,0 +1,73 @@
|
||||
"""
|
||||
Utilities for creating dot output from a MachOGraph
|
||||
|
||||
XXX: need to rewrite this based on altgraph.Dot
|
||||
"""
|
||||
|
||||
from collections import deque
|
||||
|
||||
try:
|
||||
from itertools import imap
|
||||
except ImportError:
|
||||
imap = map
|
||||
|
||||
__all__ = ['itergraphreport']
|
||||
|
||||
|
||||
def itergraphreport(nodes, describe_edge, name='G'):
|
||||
edges = deque()
|
||||
nodetoident = {}
|
||||
|
||||
def nodevisitor(node, data, outgoing, incoming):
|
||||
return {'label': str(node)}
|
||||
|
||||
def edgevisitor(edge, data, head, tail):
|
||||
return {}
|
||||
|
||||
yield 'digraph %s {\n' % (name,)
|
||||
attr = dict(rankdir='LR', concentrate='true')
|
||||
cpatt = '%s="%s"'
|
||||
for item in attr.iteritems():
|
||||
yield '\t%s;\n' % (cpatt % item,)
|
||||
|
||||
# find all packages (subgraphs)
|
||||
for (node, data, outgoing, incoming) in nodes:
|
||||
nodetoident[node] = getattr(data, 'identifier', node)
|
||||
|
||||
# create sets for subgraph, write out descriptions
|
||||
for (node, data, outgoing, incoming) in nodes:
|
||||
# update edges
|
||||
for edge in imap(describe_edge, outgoing):
|
||||
edges.append(edge)
|
||||
|
||||
# describe node
|
||||
yield '\t"%s" [%s];\n' % (
|
||||
node,
|
||||
','.join([
|
||||
(cpatt % item) for item in
|
||||
nodevisitor(node, data, outgoing, incoming).iteritems()
|
||||
]),
|
||||
)
|
||||
|
||||
graph = []
|
||||
|
||||
while edges:
|
||||
edge, data, head, tail = edges.popleft()
|
||||
if data in ('run_file', 'load_dylib'):
|
||||
graph.append((edge, data, head, tail))
|
||||
|
||||
def do_graph(edges, tabs):
|
||||
edgestr = tabs + '"%s" -> "%s" [%s];\n'
|
||||
# describe edge
|
||||
for (edge, data, head, tail) in edges:
|
||||
attribs = edgevisitor(edge, data, head, tail)
|
||||
yield edgestr % (
|
||||
head,
|
||||
tail,
|
||||
','.join([(cpatt % item) for item in attribs.iteritems()]),
|
||||
)
|
||||
|
||||
for s in do_graph(graph, '\t'):
|
||||
yield s
|
||||
|
||||
yield '}\n'
|
1665
lib/spack/external/macholib/mach_o.py
vendored
Normal file
File diff suppressed because it is too large
58
lib/spack/external/macholib/macho_dump.py
vendored
Normal file
@@ -0,0 +1,58 @@
#!/usr/bin/env python

from __future__ import print_function

import sys

from macholib._cmdline import main as _main
from macholib.MachO import MachO
from macholib.mach_o import get_cpu_subtype, CPU_TYPE_NAMES
from macholib.mach_o import MH_CIGAM_64, MH_MAGIC_64

ARCH_MAP = {
    ('<', '64-bit'): 'x86_64',
    ('<', '32-bit'): 'i386',
    ('>', '64-bit'): 'ppc64',
    ('>', '32-bit'): 'ppc',
}


def print_file(fp, path):
    print(path, file=fp)
    m = MachO(path)
    for header in m.headers:
        seen = set()

        if header.MH_MAGIC == MH_MAGIC_64 or header.MH_MAGIC == MH_CIGAM_64:
            sz = '64-bit'
        else:
            sz = '32-bit'

        arch = CPU_TYPE_NAMES.get(
            header.header.cputype, header.header.cputype)

        subarch = get_cpu_subtype(
            header.header.cputype, header.header.cpusubtype)

        print(' [%s endian=%r size=%r arch=%r subarch=%r]' % (
            header.__class__.__name__, header.endian, sz, arch, subarch),
            file=fp)
        for idx, name, other in header.walkRelocatables():
            if other not in seen:
                seen.add(other)
                print('\t' + other, file=fp)
        print('', file=fp)


def main():
    print(
        "WARNING: 'macho_dump' is deprecated, use 'python -mmacholib dump' "
        "instead")
    _main(print_file)


if __name__ == '__main__':
    try:
        sys.exit(main())
    except KeyboardInterrupt:
        pass
21
lib/spack/external/macholib/macho_find.py
vendored
Normal file
@@ -0,0 +1,21 @@
#!/usr/bin/env python
from __future__ import print_function
from macholib._cmdline import main as _main


def print_file(fp, path):
    print(path, file=fp)


def main():
    print(
        "WARNING: 'macho_find' is deprecated, "
        "use 'python -mmacholib dump' instead")
    _main(print_file)


if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        pass
31
lib/spack/external/macholib/macho_standalone.py
vendored
Normal file
@@ -0,0 +1,31 @@
#!/usr/bin/env python

import os
import sys

from macholib.MachOStandalone import MachOStandalone
from macholib.util import strip_files


def standaloneApp(path):
    if not (os.path.isdir(path) and os.path.exists(
            os.path.join(path, 'Contents'))):
        print(
            '%s: %s does not look like an app bundle' % (sys.argv[0], path))
        sys.exit(1)
    files = MachOStandalone(path).run()
    strip_files(files)


def main():
    print(
        "WARNING: 'macho_standalone' is deprecated, use "
        "'python -mmacholib standalone' instead")
    if not sys.argv[1:]:
        raise SystemExit('usage: %s [appbundle ...]' % (sys.argv[0],))
    for fn in sys.argv[1:]:
        standaloneApp(fn)


if __name__ == '__main__':
    main()
331
lib/spack/external/macholib/ptypes.py
vendored
Normal file
@@ -0,0 +1,331 @@
|
||||
"""
|
||||
This module defines packable types, that is types than can be easily
|
||||
converted to a binary format as used in MachO headers.
|
||||
"""
|
||||
import struct
|
||||
import sys
|
||||
|
||||
try:
|
||||
from itertools import izip, imap
|
||||
except ImportError:
|
||||
izip, imap = zip, map
|
||||
from itertools import chain, starmap
|
||||
|
||||
__all__ = """
|
||||
sizeof
|
||||
BasePackable
|
||||
Structure
|
||||
pypackable
|
||||
p_char
|
||||
p_byte
|
||||
p_ubyte
|
||||
p_short
|
||||
p_ushort
|
||||
p_int
|
||||
p_uint
|
||||
p_long
|
||||
p_ulong
|
||||
p_longlong
|
||||
p_ulonglong
|
||||
p_int8
|
||||
p_uint8
|
||||
p_int16
|
||||
p_uint16
|
||||
p_int32
|
||||
p_uint32
|
||||
p_int64
|
||||
p_uint64
|
||||
p_float
|
||||
p_double
|
||||
""".split()
|
||||
|
||||
|
||||
def sizeof(s):
|
||||
"""
|
||||
Return the size of an object when packed
|
||||
"""
|
||||
if hasattr(s, '_size_'):
|
||||
return s._size_
|
||||
|
||||
elif isinstance(s, bytes):
|
||||
return len(s)
|
||||
|
||||
raise ValueError(s)
|
||||
|
||||
|
||||
class MetaPackable(type):
|
||||
"""
|
||||
Fixed size struct.unpack-able types use from_tuple as their designated
|
||||
initializer
|
||||
"""
|
||||
def from_mmap(cls, mm, ptr, **kw):
|
||||
return cls.from_str(mm[ptr:ptr+cls._size_], **kw)
|
||||
|
||||
def from_fileobj(cls, f, **kw):
|
||||
return cls.from_str(f.read(cls._size_), **kw)
|
||||
|
||||
def from_str(cls, s, **kw):
|
||||
endian = kw.get('_endian_', cls._endian_)
|
||||
return cls.from_tuple(struct.unpack(endian + cls._format_, s), **kw)
|
||||
|
||||
def from_tuple(cls, tpl, **kw):
|
||||
return cls(tpl[0], **kw)
|
||||
|
||||
|
||||
class BasePackable(object):
|
||||
_endian_ = '>'
|
||||
|
||||
def to_str(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def to_fileobj(self, f):
|
||||
f.write(self.to_str())
|
||||
|
||||
def to_mmap(self, mm, ptr):
|
||||
mm[ptr:ptr+self._size_] = self.to_str()
|
||||
|
||||
|
||||
# This defines a class with a custom metaclass, we'd normally
|
||||
# use "class Packable(BasePackable, metaclass=MetaPackage)",
|
||||
# but that syntax is not valid in Python 2 (and likewise the
|
||||
# python 2 syntax is not valid in Python 3)
|
||||
def _make():
|
||||
def to_str(self):
|
||||
cls = type(self)
|
||||
endian = getattr(self, '_endian_', cls._endian_)
|
||||
return struct.pack(endian + cls._format_, self)
|
||||
return MetaPackable("Packable", (BasePackable,), {'to_str': to_str})
|
||||
|
||||
|
||||
Packable = _make()
|
||||
del _make
|
||||
|
||||
|
||||
def pypackable(name, pytype, format):
|
||||
"""
|
||||
Create a "mix-in" class with a python type and a
|
||||
Packable with the given struct format
|
||||
"""
|
||||
size, items = _formatinfo(format)
|
||||
|
||||
def __new__(cls, *args, **kwds):
|
||||
if '_endian_' in kwds:
|
||||
_endian_ = kwds.pop('_endian_')
|
||||
else:
|
||||
_endian_ = cls._endian_
|
||||
|
||||
result = pytype.__new__(cls, *args, **kwds)
|
||||
result._endian_ = _endian_
|
||||
return result
|
||||
|
||||
return type(Packable)(name, (pytype, Packable), {
|
||||
'_format_': format,
|
||||
'_size_': size,
|
||||
'_items_': items,
|
||||
'__new__': __new__,
|
||||
})
|
||||
|
||||
|
||||
def _formatinfo(format):
|
||||
"""
|
||||
Calculate the size and number of items in a struct format.
|
||||
"""
|
||||
size = struct.calcsize(format)
|
||||
return size, len(struct.unpack(format, b'\x00' * size))
|
||||
|
||||
|
||||
class MetaStructure(MetaPackable):
|
||||
"""
|
||||
The metaclass of Structure objects that does all the magic.
|
||||
|
||||
Since we can assume that all Structures have a fixed size,
|
||||
we can do a bunch of calculations up front and pack or
|
||||
unpack the whole thing in one struct call.
|
||||
"""
|
||||
def __new__(cls, clsname, bases, dct):
|
||||
fields = dct['_fields_']
|
||||
names = []
|
||||
types = []
|
||||
structmarks = []
|
||||
format = ''
|
||||
items = 0
|
||||
size = 0
|
||||
|
||||
def struct_property(name, typ):
|
||||
|
||||
def _get(self):
|
||||
return self._objects_[name]
|
||||
|
||||
def _set(self, obj):
|
||||
if type(obj) is not typ:
|
||||
obj = typ(obj)
|
||||
self._objects_[name] = obj
|
||||
|
||||
return property(_get, _set, typ.__name__)
|
||||
|
||||
for name, typ in fields:
|
||||
dct[name] = struct_property(name, typ)
|
||||
names.append(name)
|
||||
types.append(typ)
|
||||
format += typ._format_
|
||||
size += typ._size_
|
||||
if (typ._items_ > 1):
|
||||
structmarks.append((items, typ._items_, typ))
|
||||
items += typ._items_
|
||||
|
||||
dct['_structmarks_'] = structmarks
|
||||
dct['_names_'] = names
|
||||
dct['_types_'] = types
|
||||
dct['_size_'] = size
|
||||
dct['_items_'] = items
|
||||
dct['_format_'] = format
|
||||
return super(MetaStructure, cls).__new__(cls, clsname, bases, dct)
|
||||
|
||||
def from_tuple(cls, tpl, **kw):
|
||||
values = []
|
||||
current = 0
|
||||
for begin, length, typ in cls._structmarks_:
|
||||
if begin > current:
|
||||
values.extend(tpl[current:begin])
|
||||
current = begin + length
|
||||
values.append(typ.from_tuple(tpl[begin:current], **kw))
|
||||
values.extend(tpl[current:])
|
||||
return cls(*values, **kw)
|
||||
|
||||
|
||||
# See metaclass discussion earlier in this file
|
||||
def _make():
|
||||
class_dict = {}
|
||||
class_dict['_fields_'] = ()
|
||||
|
||||
def as_method(function):
|
||||
class_dict[function.__name__] = function
|
||||
|
||||
@as_method
|
||||
def __init__(self, *args, **kwargs):
|
||||
if len(args) == 1 and not kwargs and type(args[0]) is type(self):
|
||||
kwargs = args[0]._objects_
|
||||
args = ()
|
||||
self._objects_ = {}
|
||||
iargs = chain(izip(self._names_, args), kwargs.items())
|
||||
for key, value in iargs:
|
||||
if key not in self._names_ and key != "_endian_":
|
||||
raise TypeError
|
||||
setattr(self, key, value)
|
||||
for key, typ in izip(self._names_, self._types_):
|
||||
if key not in self._objects_:
|
||||
self._objects_[key] = typ()
|
||||
|
||||
@as_method
|
||||
def _get_packables(self):
|
||||
for obj in imap(self._objects_.__getitem__, self._names_):
|
||||
if hasattr(obj, '_get_packables'):
|
||||
for obj in obj._get_packables():
|
||||
yield obj
|
||||
|
||||
else:
|
||||
yield obj
|
||||
|
||||
@as_method
|
||||
def to_str(self):
|
||||
return struct.pack(
|
||||
self._endian_ + self._format_, *self._get_packables())
|
||||
|
||||
@as_method
|
||||
def __cmp__(self, other):
|
||||
if type(other) is not type(self):
|
||||
raise TypeError(
|
||||
'Cannot compare objects of type %r to objects of type %r' % (
|
||||
type(other), type(self)))
|
||||
if sys.version_info[0] == 2:
|
||||
_cmp = cmp # noqa: F821
|
||||
else:
|
||||
def _cmp(a, b):
|
||||
if a < b:
|
||||
return -1
|
||||
elif a > b:
|
||||
return 1
|
||||
elif a == b:
|
||||
return 0
|
||||
else:
|
||||
raise TypeError()
|
||||
|
||||
for cmpval in starmap(
|
||||
_cmp, izip(self._get_packables(), other._get_packables())):
|
||||
if cmpval != 0:
|
||||
return cmpval
|
||||
return 0
|
||||
|
||||
@as_method
|
||||
def __eq__(self, other):
|
||||
r = self.__cmp__(other)
|
||||
return r == 0
|
||||
|
||||
@as_method
|
||||
def __ne__(self, other):
|
||||
r = self.__cmp__(other)
|
||||
return r != 0
|
||||
|
||||
@as_method
|
||||
def __lt__(self, other):
|
||||
r = self.__cmp__(other)
|
||||
return r < 0
|
||||
|
||||
@as_method
|
||||
def __le__(self, other):
|
||||
r = self.__cmp__(other)
|
||||
return r <= 0
|
||||
|
||||
@as_method
|
||||
def __gt__(self, other):
|
||||
r = self.__cmp__(other)
|
||||
return r > 0
|
||||
|
||||
@as_method
|
||||
def __ge__(self, other):
|
||||
r = self.__cmp__(other)
|
||||
return r >= 0
|
||||
|
||||
@as_method
|
||||
def __repr__(self):
|
||||
result = []
|
||||
result.append('<')
|
||||
result.append(type(self).__name__)
|
||||
for nm in self._names_:
|
||||
result.append(' %s=%r' % (nm, getattr(self, nm)))
|
||||
result.append('>')
|
||||
return ''.join(result)
|
||||
|
||||
return MetaStructure("Structure", (BasePackable,), class_dict)
|
||||
|
||||
|
||||
Structure = _make()
|
||||
del _make
|
||||
|
||||
try:
|
||||
long
|
||||
except NameError:
|
||||
long = int
|
||||
|
||||
# export common packables with predictable names
|
||||
p_char = pypackable('p_char', bytes, 'c')
|
||||
p_int8 = pypackable('p_int8', int, 'b')
|
||||
p_uint8 = pypackable('p_uint8', int, 'B')
|
||||
p_int16 = pypackable('p_int16', int, 'h')
|
||||
p_uint16 = pypackable('p_uint16', int, 'H')
|
||||
p_int32 = pypackable('p_int32', int, 'i')
|
||||
p_uint32 = pypackable('p_uint32', long, 'I')
|
||||
p_int64 = pypackable('p_int64', long, 'q')
|
||||
p_uint64 = pypackable('p_uint64', long, 'Q')
|
||||
p_float = pypackable('p_float', float, 'f')
|
||||
p_double = pypackable('p_double', float, 'd')
|
||||
|
||||
# Deprecated names, need trick to emit deprecation warning.
|
||||
p_byte = p_int8
|
||||
p_ubyte = p_uint8
|
||||
p_short = p_int16
|
||||
p_ushort = p_uint16
|
||||
p_int = p_long = p_int32
|
||||
p_uint = p_ulong = p_uint32
|
||||
p_longlong = p_int64
|
||||
p_ulonglong = p_uint64
|
258
lib/spack/external/macholib/util.py
vendored
Normal file
@@ -0,0 +1,258 @@
|
||||
import os
|
||||
import sys
|
||||
import stat
|
||||
import struct
|
||||
import shutil
|
||||
|
||||
from macholib import mach_o
|
||||
|
||||
MAGIC = [
|
||||
struct.pack('!L', getattr(mach_o, 'MH_' + _))
|
||||
for _ in ['MAGIC', 'CIGAM', 'MAGIC_64', 'CIGAM_64']
|
||||
]
|
||||
FAT_MAGIC_BYTES = struct.pack('!L', mach_o.FAT_MAGIC)
|
||||
MAGIC_LEN = 4
|
||||
STRIPCMD = ['/usr/bin/strip', '-x', '-S', '-']
|
||||
|
||||
try:
|
||||
unicode
|
||||
except NameError:
|
||||
unicode = str
|
||||
|
||||
|
||||
def fsencoding(s, encoding=sys.getfilesystemencoding()):
|
||||
"""
|
||||
Ensure the given argument is in filesystem encoding (not unicode)
|
||||
"""
|
||||
if isinstance(s, unicode):
|
||||
s = s.encode(encoding)
|
||||
return s
|
||||
|
||||
|
||||
def move(src, dst):
|
||||
"""
|
||||
move that ensures filesystem encoding of paths
|
||||
"""
|
||||
shutil.move(fsencoding(src), fsencoding(dst))
|
||||
|
||||
|
||||
def copy2(src, dst):
|
||||
"""
|
||||
copy2 that ensures filesystem encoding of paths
|
||||
"""
|
||||
shutil.copy2(fsencoding(src), fsencoding(dst))
|
||||
|
||||
|
||||
def flipwritable(fn, mode=None):
|
||||
"""
|
||||
Flip the writability of a file and return the old mode. Returns None
|
||||
if the file is already writable.
|
||||
"""
|
||||
if os.access(fn, os.W_OK):
|
||||
return None
|
||||
old_mode = os.stat(fn).st_mode
|
||||
os.chmod(fn, stat.S_IWRITE | old_mode)
|
||||
return old_mode
|
||||
|
||||
|
||||
class fileview(object):
|
||||
"""
|
||||
A proxy for file-like objects that exposes a given view of a file
|
||||
"""
|
||||
|
||||
def __init__(self, fileobj, start, size):
|
||||
self._fileobj = fileobj
|
||||
self._start = start
|
||||
self._end = start + size
|
||||
|
||||
def __repr__(self):
|
||||
return '<fileview [%d, %d] %r>' % (
|
||||
self._start, self._end, self._fileobj)
|
||||
|
||||
def tell(self):
|
||||
return self._fileobj.tell() - self._start
|
||||
|
||||
def _checkwindow(self, seekto, op):
|
||||
if not (self._start <= seekto <= self._end):
|
||||
raise IOError("%s to offset %d is outside window [%d, %d]" % (
|
||||
op, seekto, self._start, self._end))
|
||||
|
||||
def seek(self, offset, whence=0):
|
||||
seekto = offset
|
||||
if whence == 0:
|
||||
seekto += self._start
|
||||
elif whence == 1:
|
||||
seekto += self._fileobj.tell()
|
||||
elif whence == 2:
|
||||
seekto += self._end
|
||||
else:
|
||||
raise IOError("Invalid whence argument to seek: %r" % (whence,))
|
||||
self._checkwindow(seekto, 'seek')
|
||||
self._fileobj.seek(seekto)
|
||||
|
||||
def write(self, bytes):
|
||||
here = self._fileobj.tell()
|
||||
self._checkwindow(here, 'write')
|
||||
self._checkwindow(here + len(bytes), 'write')
|
||||
self._fileobj.write(bytes)
|
||||
|
||||
def read(self, size=sys.maxsize):
|
||||
if size < 0:
|
||||
raise ValueError(
|
||||
"Invalid size %s while reading from %s", size, self._fileobj)
|
||||
here = self._fileobj.tell()
|
||||
self._checkwindow(here, 'read')
|
||||
bytes = min(size, self._end - here)
|
||||
return self._fileobj.read(bytes)
|
||||
|
||||
|
||||
def mergecopy(src, dest):
|
||||
"""
|
||||
copy2, but only if the destination isn't up to date
|
||||
"""
|
||||
if os.path.exists(dest) and \
|
||||
os.stat(dest).st_mtime >= os.stat(src).st_mtime:
|
||||
return
|
||||
|
||||
copy2(src, dest)
|
||||
|
||||
|
||||
def mergetree(src, dst, condition=None, copyfn=mergecopy, srcbase=None):
|
||||
"""
|
||||
Recursively merge a directory tree using mergecopy().
|
||||
"""
|
||||
src = fsencoding(src)
|
||||
dst = fsencoding(dst)
|
||||
if srcbase is None:
|
||||
srcbase = src
|
||||
names = map(fsencoding, os.listdir(src))
|
||||
try:
|
||||
os.makedirs(dst)
|
||||
except OSError:
|
||||
pass
|
||||
errors = []
|
||||
for name in names:
|
||||
srcname = os.path.join(src, name)
|
||||
dstname = os.path.join(dst, name)
|
||||
if condition is not None and not condition(srcname):
|
||||
continue
|
||||
try:
|
||||
if os.path.islink(srcname):
|
||||
# XXX: This is naive at best, should check srcbase(?)
|
||||
realsrc = os.readlink(srcname)
|
||||
os.symlink(realsrc, dstname)
|
||||
elif os.path.isdir(srcname):
|
||||
mergetree(
|
||||
srcname, dstname,
|
||||
condition=condition, copyfn=copyfn, srcbase=srcbase)
|
||||
else:
|
||||
copyfn(srcname, dstname)
|
||||
except (IOError, os.error) as why:
|
||||
errors.append((srcname, dstname, why))
|
||||
if errors:
|
||||
raise IOError(errors)
|
||||
|
||||
|
||||
def sdk_normalize(filename):
|
||||
"""
|
||||
Normalize a path to strip out the SDK portion, normally so that it
|
||||
can be decided whether it is in a system path or not.
|
||||
"""
|
||||
if filename.startswith('/Developer/SDKs/'):
|
||||
pathcomp = filename.split('/')
|
||||
del pathcomp[1:4]
|
||||
filename = '/'.join(pathcomp)
|
||||
return filename
|
||||
|
||||
|
||||
NOT_SYSTEM_FILES = []
|
||||
|
||||
|
||||
def in_system_path(filename):
|
||||
"""
|
||||
Return True if the file is in a system path
|
||||
"""
|
||||
fn = sdk_normalize(os.path.realpath(filename))
|
||||
if fn.startswith('/usr/local/'):
|
||||
return False
|
||||
elif fn.startswith('/System/') or fn.startswith('/usr/'):
|
||||
if fn in NOT_SYSTEM_FILES:
|
||||
return False
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
def has_filename_filter(module):
|
||||
"""
|
||||
Return False if the module does not have a filename attribute
|
||||
"""
|
||||
return getattr(module, 'filename', None) is not None
|
||||
|
||||
|
||||
def get_magic():
|
||||
"""
|
||||
Get a list of valid Mach-O header signatures, not including the fat header
|
||||
"""
|
||||
return MAGIC
|
||||
|
||||
|
||||
def is_platform_file(path):
|
||||
"""
|
||||
Return True if the file is Mach-O
|
||||
"""
|
||||
if not os.path.exists(path) or os.path.islink(path):
|
||||
return False
|
||||
# If the header is fat, we need to read into the first arch
|
||||
with open(path, 'rb') as fileobj:
|
||||
bytes = fileobj.read(MAGIC_LEN)
|
||||
if bytes == FAT_MAGIC_BYTES:
|
||||
# Read in the fat header
|
||||
fileobj.seek(0)
|
||||
header = mach_o.fat_header.from_fileobj(fileobj, _endian_='>')
|
||||
if header.nfat_arch < 1:
|
||||
return False
|
||||
# Read in the first fat arch header
|
||||
arch = mach_o.fat_arch.from_fileobj(fileobj, _endian_='>')
|
||||
fileobj.seek(arch.offset)
|
||||
# Read magic off the first header
|
||||
bytes = fileobj.read(MAGIC_LEN)
|
||||
for magic in MAGIC:
|
||||
if bytes == magic:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def iter_platform_files(dst):
|
||||
"""
|
||||
Walk a directory and yield each full path that is a Mach-O file
|
||||
"""
|
||||
for root, dirs, files in os.walk(dst):
|
||||
for fn in files:
|
||||
fn = os.path.join(root, fn)
|
||||
if is_platform_file(fn):
|
||||
yield fn
|
||||
|
||||
|
||||
def strip_files(files, argv_max=(256 * 1024)):
|
||||
"""
|
||||
Strip a list of files
|
||||
"""
|
||||
tostrip = [(fn, flipwritable(fn)) for fn in files]
|
||||
while tostrip:
|
||||
cmd = list(STRIPCMD)
|
||||
flips = []
|
||||
pathlen = sum([len(s) + 1 for s in cmd])
|
||||
while pathlen < argv_max:
|
||||
if not tostrip:
|
||||
break
|
||||
added, flip = tostrip.pop()
|
||||
pathlen += len(added) + 1
|
||||
cmd.append(added)
|
||||
flips.append((added, flip))
|
||||
else:
|
||||
cmd.pop()
|
||||
tostrip.append(flips.pop())
|
||||
os.spawnv(os.P_WAIT, cmd[0], cmd)
|
||||
for args in flips:
|
||||
flipwritable(*args)
|