macholib, altgraph: update vendored dependency (#28664)

Author: Massimiliano Culpo
Date: 2022-01-28 19:55:12 +01:00 (committed by GitHub)
Commit: bc06c1206d (parent 4bd761d1d5)
25 changed files with 1355 additions and 1263 deletions

View File

@@ -11,7 +11,7 @@
* Homepage: https://altgraph.readthedocs.io/en/latest/index.html
* Usage: dependency of macholib
* Version: 0.16.1
* Version: 0.17.2
archspec
--------
@@ -96,7 +96,7 @@
* Homepage: https://macholib.readthedocs.io/en/latest/index.html#
* Usage: Manipulation of Mach-o binaries for relocating macOS buildcaches on Linux
* Version: 1.12
* Version: 1.15.2
markupsafe
----------

View File

@@ -1,4 +1,4 @@
'''
"""
altgraph.Dot - Interface to the dot language
============================================
@@ -107,7 +107,7 @@
- for more details on how to control the graph drawing process see the
`graphviz reference
<http://www.research.att.com/sw/tools/graphviz/refs.html>`_.
'''
"""
import os
import warnings
@@ -115,25 +115,34 @@
class Dot(object):
'''
"""
A class providing a **graphviz** (dot language) representation
allowing a fine grained control over how the graph is being
displayed.
If the :command:`dot` and :command:`dotty` programs are not in the current
system path their location needs to be specified in the contructor.
'''
"""
def __init__(
self, graph=None, nodes=None, edgefn=None, nodevisitor=None,
edgevisitor=None, name="G", dot='dot', dotty='dotty',
neato='neato', graphtype="digraph"):
'''
self,
graph=None,
nodes=None,
edgefn=None,
nodevisitor=None,
edgevisitor=None,
name="G",
dot="dot",
dotty="dotty",
neato="neato",
graphtype="digraph",
):
"""
Initialization.
'''
"""
self.name, self.attr = name, {}
assert graphtype in ['graph', 'digraph']
assert graphtype in ["graph", "digraph"]
self.type = graphtype
self.temp_dot = "tmp_dot.dot"
@@ -148,8 +157,10 @@ def __init__(
if graph is not None and nodes is None:
nodes = graph
if graph is not None and edgefn is None:
def edgefn(node, graph=graph):
return graph.out_nbrs(node)
if nodes is None:
nodes = ()
@@ -177,20 +188,19 @@ def edgefn(node, graph=graph):
self.edge_style(head, tail, **edgestyle)
def style(self, **attr):
'''
"""
Changes the overall style
'''
"""
self.attr = attr
def display(self, mode='dot'):
'''
def display(self, mode="dot"):
"""
Displays the current graph via dotty
'''
"""
if mode == 'neato':
if mode == "neato":
self.save_dot(self.temp_neo)
neato_cmd = "%s -o %s %s" % (
self.neato, self.temp_dot, self.temp_neo)
neato_cmd = "%s -o %s %s" % (self.neato, self.temp_dot, self.temp_neo)
os.system(neato_cmd)
else:
self.save_dot(self.temp_dot)
@@ -199,24 +209,24 @@ def display(self, mode='dot'):
os.system(plot_cmd)
def node_style(self, node, **kwargs):
'''
"""
Modifies a node style to the dot representation.
'''
"""
if node not in self.edges:
self.edges[node] = {}
self.nodes[node] = kwargs
def all_node_style(self, **kwargs):
'''
"""
Modifies all node styles
'''
"""
for node in self.nodes:
self.node_style(node, **kwargs)
def edge_style(self, head, tail, **kwargs):
'''
"""
Modifies an edge style to the dot representation.
'''
"""
if tail not in self.nodes:
raise GraphError("invalid node %s" % (tail,))
@@ -229,10 +239,10 @@ def edge_style(self, head, tail, **kwargs):
def iterdot(self):
# write graph title
if self.type == 'digraph':
yield 'digraph %s {\n' % (self.name,)
elif self.type == 'graph':
yield 'graph %s {\n' % (self.name,)
if self.type == "digraph":
yield "digraph %s {\n" % (self.name,)
elif self.type == "graph":
yield "graph %s {\n" % (self.name,)
else:
raise GraphError("unsupported graphtype %s" % (self.type,))
@@ -240,11 +250,11 @@ def iterdot(self):
# write overall graph attributes
for attr_name, attr_value in sorted(self.attr.items()):
yield '%s="%s";' % (attr_name, attr_value)
yield '\n'
yield "\n"
# some reusable patterns
cpatt = '%s="%s",' # to separate attributes
epatt = '];\n' # to end attributes
cpatt = '%s="%s",' # to separate attributes
epatt = "];\n" # to end attributes
# write node attributes
for node_name, node_attr in sorted(self.nodes.items()):
@@ -256,25 +266,24 @@ def iterdot(self):
# write edge attributes
for head in sorted(self.edges):
for tail in sorted(self.edges[head]):
if self.type == 'digraph':
if self.type == "digraph":
yield '\t"%s" -> "%s" [' % (head, tail)
else:
yield '\t"%s" -- "%s" [' % (head, tail)
for attr_name, attr_value in \
sorted(self.edges[head][tail].items()):
for attr_name, attr_value in sorted(self.edges[head][tail].items()):
yield cpatt % (attr_name, attr_value)
yield epatt
# finish file
yield '}\n'
yield "}\n"
def __iter__(self):
return self.iterdot()
def save_dot(self, file_name=None):
'''
"""
Saves the current graph representation into a file
'''
"""
if not file_name:
warnings.warn(DeprecationWarning, "always pass a file_name")
@@ -284,19 +293,18 @@ def save_dot(self, file_name=None):
for chunk in self.iterdot():
fp.write(chunk)
def save_img(self, file_name=None, file_type="gif", mode='dot'):
'''
def save_img(self, file_name=None, file_type="gif", mode="dot"):
"""
Saves the dot file as an image file
'''
"""
if not file_name:
warnings.warn(DeprecationWarning, "always pass a file_name")
file_name = "out"
if mode == 'neato':
if mode == "neato":
self.save_dot(self.temp_neo)
neato_cmd = "%s -o %s %s" % (
self.neato, self.temp_dot, self.temp_neo)
neato_cmd = "%s -o %s %s" % (self.neato, self.temp_dot, self.temp_neo)
os.system(neato_cmd)
plot_cmd = self.dot
else:
@@ -305,5 +313,9 @@ def save_img(self, file_name=None, file_type="gif", mode='dot'):
file_name = "%s.%s" % (file_name, file_type)
create_cmd = "%s -T%s %s -o %s" % (
plot_cmd, file_type, self.temp_dot, file_name)
plot_cmd,
file_type,
self.temp_dot,
file_name,
)
os.system(create_cmd)
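
For reference, a minimal usage sketch of the Dot class reformatted above (the graph contents and the output file name are illustrative, not part of this diff):

    from altgraph import Dot, Graph

    graph = Graph.Graph([(1, 2), (1, 3), (2, 4)])  # edges given as (head, tail) pairs
    dot = Dot.Dot(graph, graphtype="digraph")
    dot.style(rankdir="LR")            # overall graph attributes
    dot.node_style(1, shape="box")
    dot.edge_style(1, 2, color="red")
    dot.save_dot("example.dot")        # writes the dot source; save_img() shells out to graphviz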

View File

@@ -13,9 +13,10 @@
#--Nathan Denny, May 27, 1999
"""
from altgraph import GraphError
from collections import deque
from altgraph import GraphError
class Graph(object):
"""
@@ -58,8 +59,10 @@ def __init__(self, edges=None):
raise GraphError("Cannot create edge from %s" % (item,))
def __repr__(self):
return '<Graph: %d nodes, %d edges>' % (
self.number_of_nodes(), self.number_of_edges())
return "<Graph: %d nodes, %d edges>" % (
self.number_of_nodes(),
self.number_of_edges(),
)
def add_node(self, node, node_data=None):
"""
@@ -111,7 +114,7 @@ def add_edge(self, head_id, tail_id, edge_data=1, create_nodes=True):
self.nodes[tail_id][0].append(edge)
self.nodes[head_id][1].append(edge)
except KeyError:
raise GraphError('Invalid nodes %s -> %s' % (head_id, tail_id))
raise GraphError("Invalid nodes %s -> %s" % (head_id, tail_id))
# store edge information
self.edges[edge] = (head_id, tail_id, edge_data)
@@ -124,13 +127,12 @@ def hide_edge(self, edge):
time.
"""
try:
head_id, tail_id, edge_data = \
self.hidden_edges[edge] = self.edges[edge]
head_id, tail_id, edge_data = self.hidden_edges[edge] = self.edges[edge]
self.nodes[tail_id][0].remove(edge)
self.nodes[head_id][1].remove(edge)
del self.edges[edge]
except KeyError:
raise GraphError('Invalid edge %s' % edge)
raise GraphError("Invalid edge %s" % edge)
def hide_node(self, node):
"""
@@ -144,7 +146,7 @@ def hide_node(self, node):
self.hide_edge(edge)
del self.nodes[node]
except KeyError:
raise GraphError('Invalid node %s' % node)
raise GraphError("Invalid node %s" % node)
def restore_node(self, node):
"""
@@ -157,7 +159,7 @@ def restore_node(self, node):
self.restore_edge(edge)
del self.hidden_nodes[node]
except KeyError:
raise GraphError('Invalid node %s' % node)
raise GraphError("Invalid node %s" % node)
def restore_edge(self, edge):
"""
@@ -170,7 +172,7 @@ def restore_edge(self, edge):
self.edges[edge] = head_id, tail_id, data
del self.hidden_edges[edge]
except KeyError:
raise GraphError('Invalid edge %s' % edge)
raise GraphError("Invalid edge %s" % edge)
def restore_all_edges(self):
"""
@@ -203,7 +205,7 @@ def edge_by_id(self, edge):
head, tail, data = self.edges[edge]
except KeyError:
head, tail = None, None
raise GraphError('Invalid edge %s' % edge)
raise GraphError("Invalid edge %s" % edge)
return (head, tail)
@@ -339,7 +341,7 @@ def out_edges(self, node):
try:
return list(self.nodes[node][1])
except KeyError:
raise GraphError('Invalid node %s' % node)
raise GraphError("Invalid node %s" % node)
def inc_edges(self, node):
"""
@@ -348,7 +350,7 @@ def inc_edges(self, node):
try:
return list(self.nodes[node][0])
except KeyError:
raise GraphError('Invalid node %s' % node)
raise GraphError("Invalid node %s" % node)
def all_edges(self, node):
"""
@@ -488,7 +490,7 @@ def iterdfs(self, start, end=None, forward=True):
The forward parameter specifies whether it is a forward or backward
traversal.
"""
visited, stack = set([start]), deque([start])
visited, stack = {start}, deque([start])
if forward:
get_edges = self.out_edges
@@ -515,7 +517,7 @@ def iterdata(self, start, end=None, forward=True, condition=None):
condition callback is only called when node_data is not None.
"""
visited, stack = set([start]), deque([start])
visited, stack = {start}, deque([start])
if forward:
get_edges = self.out_edges
@@ -547,7 +549,7 @@ def _iterbfs(self, start, end=None, forward=True):
traversal. Returns a list of tuples where the first value is the hop
value the second value is the node id.
"""
queue, visited = deque([(start, 0)]), set([start])
queue, visited = deque([(start, 0)]), {start}
# the direction of the bfs depends on the edges that are sampled
if forward:
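
As a quick reminder of the Graph API touched above, a minimal sketch (node names are arbitrary):

    from altgraph import Graph

    g = Graph.Graph()
    for head, tail in [("a", "b"), ("b", "c"), ("a", "c")]:
        g.add_edge(head, tail)       # nodes are created on demand (create_nodes=True)
    print(g)                         # <Graph: 3 nodes, 3 edges>
    print(list(g.iterdfs("a")))      # depth-first order of the nodes reachable from "a"
    print(g.out_nbrs("a"))           # successors of "a": ["b", "c"]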

View File

@@ -1,7 +1,7 @@
'''
"""
altgraph.GraphAlgo - Graph algorithms
=====================================
'''
"""
from altgraph import GraphError
@@ -28,9 +28,9 @@ def dijkstra(graph, start, end=None):
Adapted to altgraph by Istvan Albert, Pennsylvania State University -
June, 9 2004
"""
D = {} # dictionary of final distances
P = {} # dictionary of predecessors
Q = _priorityDictionary() # estimated distances of non-final vertices
D = {} # dictionary of final distances
P = {} # dictionary of predecessors
Q = _priorityDictionary() # estimated distances of non-final vertices
Q[start] = 0
for v in Q:
@@ -44,7 +44,8 @@ def dijkstra(graph, start, end=None):
if w in D:
if vwLength < D[w]:
raise GraphError(
"Dijkstra: found better path to already-final vertex")
"Dijkstra: found better path to already-final vertex"
)
elif w not in Q or vwLength < Q[w]:
Q[w] = vwLength
P[w] = v
@@ -76,7 +77,7 @@ def shortest_path(graph, start, end):
# Utility classes and functions
#
class _priorityDictionary(dict):
'''
"""
Priority dictionary using binary heaps (internal use only)
David Eppstein, UC Irvine, 8 Mar 2002
@@ -92,22 +93,22 @@ class _priorityDictionary(dict):
order. Each item is not removed until the next item is requested,
so D[x] will still return a useful value until the next iteration
of the for-loop. Each operation takes logarithmic amortized time.
'''
"""
def __init__(self):
'''
"""
Initialize priorityDictionary by creating binary heap of pairs
(value,key). Note that changing or removing a dict entry will not
remove the old pair from the heap until it is found by smallest()
or until the heap is rebuilt.
'''
"""
self.__heap = []
dict.__init__(self)
def smallest(self):
'''
"""
Find smallest item after removing deleted items from front of heap.
'''
"""
if len(self) == 0:
raise IndexError("smallest of empty priorityDictionary")
heap = self.__heap
@@ -115,9 +116,11 @@ def smallest(self):
lastItem = heap.pop()
insertionPoint = 0
while 1:
smallChild = 2*insertionPoint+1
if smallChild+1 < len(heap) and \
heap[smallChild] > heap[smallChild+1]:
smallChild = 2 * insertionPoint + 1
if (
smallChild + 1 < len(heap)
and heap[smallChild] > heap[smallChild + 1]
):
smallChild += 1
if smallChild >= len(heap) or lastItem <= heap[smallChild]:
heap[insertionPoint] = lastItem
@@ -127,22 +130,24 @@ def smallest(self):
return heap[0][1]
def __iter__(self):
'''
"""
Create destructive sorted iterator of priorityDictionary.
'''
"""
def iterfn():
while len(self) > 0:
x = self.smallest()
yield x
del self[x]
return iterfn()
def __setitem__(self, key, val):
'''
"""
Change value stored in dictionary and add corresponding pair to heap.
Rebuilds the heap if the number of deleted items gets large, to avoid
memory leakage.
'''
"""
dict.__setitem__(self, key, val)
heap = self.__heap
if len(heap) > 2 * len(self):
@@ -152,15 +157,15 @@ def __setitem__(self, key, val):
newPair = (val, key)
insertionPoint = len(heap)
heap.append(None)
while insertionPoint > 0 and newPair < heap[(insertionPoint-1)//2]:
heap[insertionPoint] = heap[(insertionPoint-1)//2]
insertionPoint = (insertionPoint-1)//2
while insertionPoint > 0 and newPair < heap[(insertionPoint - 1) // 2]:
heap[insertionPoint] = heap[(insertionPoint - 1) // 2]
insertionPoint = (insertionPoint - 1) // 2
heap[insertionPoint] = newPair
def setdefault(self, key, val):
'''
"""
Reimplement setdefault to pass through our customized __setitem__.
'''
"""
if key not in self:
self[key] = val
return self[key]
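
A minimal sketch of the shortest-path helper above; add_edge() defaults edge_data to 1, which dijkstra() uses as the edge length:

    from altgraph import Graph, GraphAlgo

    g = Graph.Graph([(1, 2), (2, 3), (1, 3), (3, 4)])
    print(GraphAlgo.shortest_path(g, 1, 4))   # [1, 3, 4]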

View File

@@ -1,11 +1,11 @@
'''
"""
altgraph.GraphStat - Functions providing various graph statistics
=================================================================
'''
"""
def degree_dist(graph, limits=(0, 0), bin_num=10, mode='out'):
'''
def degree_dist(graph, limits=(0, 0), bin_num=10, mode="out"):
"""
Computes the degree distribution for a graph.
Returns a list of tuples where the first element of the tuple is the
@@ -15,10 +15,10 @@ def degree_dist(graph, limits=(0, 0), bin_num=10, mode='out'):
Example::
....
'''
"""
deg = []
if mode == 'inc':
if mode == "inc":
get_deg = graph.inc_degree
else:
get_deg = graph.out_degree
@@ -34,38 +34,38 @@ def degree_dist(graph, limits=(0, 0), bin_num=10, mode='out'):
return results
_EPS = 1.0/(2.0**32)
_EPS = 1.0 / (2.0 ** 32)
def _binning(values, limits=(0, 0), bin_num=10):
'''
"""
Bins data that falls between certain limits, if the limits are (0, 0) the
minimum and maximum values are used.
Returns a list of tuples where the first element of the tuple is the
center of the bin and the second element of the tuple are the counts.
'''
"""
if limits == (0, 0):
min_val, max_val = min(values) - _EPS, max(values) + _EPS
else:
min_val, max_val = limits
# get bin size
bin_size = (max_val - min_val)/float(bin_num)
bin_size = (max_val - min_val) / float(bin_num)
bins = [0] * (bin_num)
# will ignore these outliers for now
for value in values:
try:
if (value - min_val) >= 0:
index = int((value - min_val)/float(bin_size))
index = int((value - min_val) / float(bin_size))
bins[index] += 1
except IndexError:
pass
# make it ready for an x,y plot
result = []
center = (bin_size/2) + min_val
center = (bin_size / 2) + min_val
for i, y in enumerate(bins):
x = center + bin_size * i
result.append((x, y))
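
A minimal sketch of degree_dist() on a hand-built graph (values are illustrative):

    from altgraph import Graph, GraphStat

    g = Graph.Graph([(1, 2), (1, 3), (1, 4), (2, 3)])
    # a list of (bin center, node count) pairs over the out-degree distribution
    print(GraphStat.degree_dist(g, bin_num=4, mode="out"))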

View File

@@ -1,31 +1,29 @@
'''
"""
altgraph.GraphUtil - Utility classes and functions
==================================================
'''
"""
import random
from collections import deque
from altgraph import Graph
from altgraph import GraphError
from altgraph import Graph, GraphError
def generate_random_graph(
node_num, edge_num, self_loops=False, multi_edges=False):
'''
def generate_random_graph(node_num, edge_num, self_loops=False, multi_edges=False):
"""
Generates and returns a :py:class:`~altgraph.Graph.Graph` instance with
*node_num* nodes randomly connected by *edge_num* edges.
'''
"""
g = Graph.Graph()
if not multi_edges:
if self_loops:
max_edges = node_num * node_num
else:
max_edges = node_num * (node_num-1)
max_edges = node_num * (node_num - 1)
if edge_num > max_edges:
raise GraphError(
"inconsistent arguments to 'generate_random_graph'")
raise GraphError("inconsistent arguments to 'generate_random_graph'")
nodes = range(node_num)
@@ -52,17 +50,16 @@ def generate_random_graph(
return g
def generate_scale_free_graph(
steps, growth_num, self_loops=False, multi_edges=False):
'''
def generate_scale_free_graph(steps, growth_num, self_loops=False, multi_edges=False):
"""
Generates and returns a :py:class:`~altgraph.Graph.Graph` instance that
will have *steps* \* *growth_num* nodes and a scale free (powerlaw)
will have *steps* \\* *growth_num* nodes and a scale free (powerlaw)
connectivity. Starting with a fully connected graph with *growth_num*
nodes at every step *growth_num* nodes are added to the graph and are
connected to existing nodes with a probability proportional to the degree
of these existing nodes.
'''
# FIXME: The code doesn't seem to do what the documentation claims.
"""
# The code doesn't seem to do what the documentation claims.
graph = Graph.Graph()
# initialize the graph
@@ -113,7 +110,7 @@ def filter_stack(graph, head, filters):
in *removes*.
"""
visited, removes, orphans = set([head]), set(), set()
visited, removes, orphans = {head}, set(), set()
stack = deque([(head, head)])
get_data = graph.node_data
get_edges = graph.out_edges
@@ -137,8 +134,6 @@ def filter_stack(graph, head, filters):
visited.add(tail)
stack.append((last_good, tail))
orphans = [
(lg, tl)
for (lg, tl) in orphans if tl not in removes]
orphans = [(lg, tl) for (lg, tl) in orphans if tl not in removes]
return visited, removes, orphans
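
A minimal sketch of the random-graph generator reformatted above (sizes are arbitrary):

    from altgraph import GraphUtil

    # a Graph instance with 20 nodes randomly connected by 50 edges
    g = GraphUtil.generate_random_graph(20, 50)
    print(g.number_of_nodes(), g.number_of_edges())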

View File

@@ -27,7 +27,7 @@ def __init__(self, graph=None, debug=0):
graph.add_node(self, None)
def __repr__(self):
return '<%s>' % (type(self).__name__,)
return "<%s>" % (type(self).__name__,)
def flatten(self, condition=None, start=None):
"""
@@ -58,6 +58,7 @@ def iter_edges(lst, n):
if ident not in seen:
yield self.findNode(ident)
seen.add(ident)
return iter_edges(outraw, 3), iter_edges(incraw, 2)
def edgeData(self, fromNode, toNode):
@@ -87,12 +88,12 @@ def filterStack(self, filters):
visited, removes, orphans = filter_stack(self.graph, self, filters)
for last_good, tail in orphans:
self.graph.add_edge(last_good, tail, edge_data='orphan')
self.graph.add_edge(last_good, tail, edge_data="orphan")
for node in removes:
self.graph.hide_node(node)
return len(visited)-1, len(removes), len(orphans)
return len(visited) - 1, len(removes), len(orphans)
def removeNode(self, node):
"""
@@ -135,7 +136,7 @@ def getRawIdent(self, node):
"""
if node is self:
return node
ident = getattr(node, 'graphident', None)
ident = getattr(node, "graphident", None)
return ident
def __contains__(self, node):
@@ -192,8 +193,7 @@ def msg(self, level, s, *args):
Print a debug message with the given level
"""
if s and level <= self.debug:
print("%s%s %s" % (
" " * self.indent, s, ' '.join(map(repr, args))))
print("%s%s %s" % (" " * self.indent, s, " ".join(map(repr, args))))
def msgin(self, level, s, *args):
"""

View File

@@ -1,4 +1,4 @@
'''
"""
altgraph - a python graph library
=================================
@@ -138,13 +138,11 @@
@newfield contributor: Contributors:
@contributor: U{Reka Albert <http://www.phys.psu.edu/~ralbert/>}
'''
# import pkg_resources
# __version__ = pkg_resources.require('altgraph')[0].version
# pkg_resources is not finding the altgraph import despite the fact that it is in sys.path
# there is no .dist-info or .egg-info for pkg_resources to query the version from
# so it must be set manually
__version__ = '0.16.1'
"""
import pkg_resources
__version__ = pkg_resources.require("altgraph")[0].version
class GraphError(ValueError):
pass

View File

@@ -3,21 +3,43 @@
"""
from __future__ import print_function
import sys
import struct
import os
from .mach_o import MH_FILETYPE_SHORTNAMES, LC_DYSYMTAB, LC_SYMTAB
from .mach_o import load_command, S_ZEROFILL, section_64, section
from .mach_o import LC_REGISTRY, LC_ID_DYLIB, LC_SEGMENT, fat_header
from .mach_o import LC_SEGMENT_64, MH_CIGAM_64, MH_MAGIC_64, FAT_MAGIC
from .mach_o import mach_header, fat_arch64, FAT_MAGIC_64, fat_arch
from .mach_o import LC_REEXPORT_DYLIB, LC_PREBOUND_DYLIB, LC_LOAD_WEAK_DYLIB
from .mach_o import LC_LOAD_UPWARD_DYLIB, LC_LOAD_DYLIB, mach_header_64
from .mach_o import MH_CIGAM, MH_MAGIC
from .ptypes import sizeof
import struct
import sys
from macholib.util import fileview
from .mach_o import (
FAT_MAGIC,
FAT_MAGIC_64,
LC_DYSYMTAB,
LC_ID_DYLIB,
LC_LOAD_DYLIB,
LC_LOAD_UPWARD_DYLIB,
LC_LOAD_WEAK_DYLIB,
LC_PREBOUND_DYLIB,
LC_REEXPORT_DYLIB,
LC_REGISTRY,
LC_SEGMENT,
LC_SEGMENT_64,
LC_SYMTAB,
MH_CIGAM,
MH_CIGAM_64,
MH_FILETYPE_SHORTNAMES,
MH_MAGIC,
MH_MAGIC_64,
S_ZEROFILL,
fat_arch,
fat_arch64,
fat_header,
load_command,
mach_header,
mach_header_64,
section,
section_64,
)
from .ptypes import sizeof
try:
from macholib.compat import bytes
except ImportError:
@@ -31,23 +53,23 @@
if sys.version_info[0] == 2:
range = xrange # noqa: F821
__all__ = ['MachO']
__all__ = ["MachO"]
_RELOCATABLE = set((
_RELOCATABLE = {
# relocatable commands that should be used for dependency walking
LC_LOAD_DYLIB,
LC_LOAD_UPWARD_DYLIB,
LC_LOAD_WEAK_DYLIB,
LC_PREBOUND_DYLIB,
LC_REEXPORT_DYLIB,
))
}
_RELOCATABLE_NAMES = {
LC_LOAD_DYLIB: 'load_dylib',
LC_LOAD_UPWARD_DYLIB: 'load_upward_dylib',
LC_LOAD_WEAK_DYLIB: 'load_weak_dylib',
LC_PREBOUND_DYLIB: 'prebound_dylib',
LC_REEXPORT_DYLIB: 'reexport_dylib',
LC_LOAD_DYLIB: "load_dylib",
LC_LOAD_UPWARD_DYLIB: "load_upward_dylib",
LC_LOAD_WEAK_DYLIB: "load_weak_dylib",
LC_PREBOUND_DYLIB: "prebound_dylib",
LC_REEXPORT_DYLIB: "reexport_dylib",
}
@@ -65,13 +87,14 @@ def lc_str_value(offset, cmd_info):
cmd_load, cmd_cmd, cmd_data = cmd_info
offset -= sizeof(cmd_load) + sizeof(cmd_cmd)
return cmd_data[offset:].strip(b'\x00')
return cmd_data[offset:].strip(b"\x00")
class MachO(object):
"""
Provides reading/writing the Mach-O header of a specific existing file
"""
# filename - the original filename of this mach-o
# sizediff - the current deviation from the initial mach-o size
# header - the mach-o header
@@ -91,7 +114,7 @@ def __init__(self, filename):
# initialized by load
self.fat = None
self.headers = []
with open(filename, 'rb') as fp:
with open(filename, "rb") as fp:
self.load(fp)
def __repr__(self):
@@ -99,7 +122,7 @@ def __repr__(self):
def load(self, fh):
assert fh.tell() == 0
header = struct.unpack('>I', fh.read(4))[0]
header = struct.unpack(">I", fh.read(4))[0]
fh.seek(0)
if header in (FAT_MAGIC, FAT_MAGIC_64):
self.load_fat(fh)
@@ -112,11 +135,9 @@ def load(self, fh):
def load_fat(self, fh):
self.fat = fat_header.from_fileobj(fh)
if self.fat.magic == FAT_MAGIC:
archs = [fat_arch.from_fileobj(fh)
for i in range(self.fat.nfat_arch)]
archs = [fat_arch.from_fileobj(fh) for i in range(self.fat.nfat_arch)]
elif self.fat.magic == FAT_MAGIC_64:
archs = [fat_arch64.from_fileobj(fh)
for i in range(self.fat.nfat_arch)]
archs = [fat_arch64.from_fileobj(fh) for i in range(self.fat.nfat_arch)]
else:
raise ValueError("Unknown fat header magic: %r" % (self.fat.magic))
@@ -132,19 +153,18 @@ def rewriteLoadCommands(self, *args, **kw):
def load_header(self, fh, offset, size):
fh.seek(offset)
header = struct.unpack('>I', fh.read(4))[0]
header = struct.unpack(">I", fh.read(4))[0]
fh.seek(offset)
if header == MH_MAGIC:
magic, hdr, endian = MH_MAGIC, mach_header, '>'
magic, hdr, endian = MH_MAGIC, mach_header, ">"
elif header == MH_CIGAM:
magic, hdr, endian = MH_CIGAM, mach_header, '<'
magic, hdr, endian = MH_CIGAM, mach_header, "<"
elif header == MH_MAGIC_64:
magic, hdr, endian = MH_MAGIC_64, mach_header_64, '>'
magic, hdr, endian = MH_MAGIC_64, mach_header_64, ">"
elif header == MH_CIGAM_64:
magic, hdr, endian = MH_CIGAM_64, mach_header_64, '<'
magic, hdr, endian = MH_CIGAM_64, mach_header_64, "<"
else:
raise ValueError("Unknown Mach-O header: 0x%08x in %r" % (
header, fh))
raise ValueError("Unknown Mach-O header: 0x%08x in %r" % (header, fh))
hdr = MachOHeader(self, fh, offset, size, magic, hdr, endian)
self.headers.append(hdr)
@@ -157,6 +177,7 @@ class MachOHeader(object):
"""
Provides reading/writing the Mach-O header of a specific existing file
"""
# filename - the original filename of this mach-o
# sizediff - the current deviation from the initial mach-o size
# header - the mach-o header
@@ -189,15 +210,19 @@ def __init__(self, parent, fh, offset, size, magic, hdr, endian):
def __repr__(self):
return "<%s filename=%r offset=%d size=%d endian=%r>" % (
type(self).__name__, self.parent.filename, self.offset, self.size,
self.endian)
type(self).__name__,
self.parent.filename,
self.offset,
self.size,
self.endian,
)
def load(self, fh):
fh = fileview(fh, self.offset, self.size)
fh.seek(0)
self.sizediff = 0
kw = {'_endian_': self.endian}
kw = {"_endian_": self.endian}
header = self.mach_header.from_fileobj(fh, **kw)
self.header = header
# if header.magic != self.MH_MAGIC:
@@ -236,8 +261,9 @@ def load(self, fh):
section_cls = section_64
expected_size = (
sizeof(klass) + sizeof(load_command) +
(sizeof(section_cls) * cmd_cmd.nsects)
sizeof(klass)
+ sizeof(load_command)
+ (sizeof(section_cls) * cmd_cmd.nsects)
)
if cmd_load.cmdsize != expected_size:
raise ValueError("Segment size mismatch")
@@ -248,12 +274,12 @@ def load(self, fh):
low_offset = min(low_offset, cmd_cmd.fileoff)
else:
# this one has multiple segments
for j in range(cmd_cmd.nsects):
for _j in range(cmd_cmd.nsects):
# read the segment
seg = section_cls.from_fileobj(fh, **kw)
# if the segment has a size and is not zero filled
# then its beginning is the offset of this segment
not_zerofill = ((seg.flags & S_ZEROFILL) != S_ZEROFILL)
not_zerofill = (seg.flags & S_ZEROFILL) != S_ZEROFILL
if seg.offset > 0 and seg.size > 0 and not_zerofill:
low_offset = min(low_offset, seg.offset)
if not_zerofill:
@@ -266,7 +292,7 @@ def load(self, fh):
# data is a list of segments
cmd_data = segs
# XXX: Disabled for now because writing back doesn't work
# These are disabled for now because writing back doesn't work
# elif cmd_load.cmd == LC_CODE_SIGNATURE:
# c = fh.tell()
# fh.seek(cmd_cmd.dataoff)
@@ -280,17 +306,17 @@ def load(self, fh):
else:
# data is a raw str
data_size = (
cmd_load.cmdsize - sizeof(klass) - sizeof(load_command)
)
data_size = cmd_load.cmdsize - sizeof(klass) - sizeof(load_command)
cmd_data = fh.read(data_size)
cmd.append((cmd_load, cmd_cmd, cmd_data))
read_bytes += cmd_load.cmdsize
# make sure the header made sense
if read_bytes != header.sizeofcmds:
raise ValueError("Read %d bytes, header reports %d bytes" % (
read_bytes, header.sizeofcmds))
raise ValueError(
"Read %d bytes, header reports %d bytes"
% (read_bytes, header.sizeofcmds)
)
self.total_size = sizeof(self.mach_header) + read_bytes
self.low_offset = low_offset
@@ -303,8 +329,9 @@ def walkRelocatables(self, shouldRelocateCommand=_shouldRelocateCommand):
if shouldRelocateCommand(lc.cmd):
name = _RELOCATABLE_NAMES[lc.cmd]
ofs = cmd.name - sizeof(lc.__class__) - sizeof(cmd.__class__)
yield idx, name, data[ofs:data.find(b'\x00', ofs)].decode(
sys.getfilesystemencoding())
yield idx, name, data[
ofs : data.find(b"\x00", ofs) # noqa: E203
].decode(sys.getfilesystemencoding())
def rewriteInstallNameCommand(self, loadcmd):
"""Rewrite the load command of this dylib"""
@@ -317,8 +344,9 @@ def changedHeaderSizeBy(self, bytes):
self.sizediff += bytes
if (self.total_size + self.sizediff) > self.low_offset:
print(
"WARNING: Mach-O header in %r may be too large to relocate" % (
self.parent.filename,))
"WARNING: Mach-O header in %r may be too large to relocate"
% (self.parent.filename,)
)
def rewriteLoadCommands(self, changefunc):
"""
@@ -327,22 +355,22 @@ def rewriteLoadCommands(self, changefunc):
data = changefunc(self.parent.filename)
changed = False
if data is not None:
if self.rewriteInstallNameCommand(
data.encode(sys.getfilesystemencoding())):
if self.rewriteInstallNameCommand(data.encode(sys.getfilesystemencoding())):
changed = True
for idx, name, filename in self.walkRelocatables():
for idx, _name, filename in self.walkRelocatables():
data = changefunc(filename)
if data is not None:
if self.rewriteDataForCommand(idx, data.encode(
sys.getfilesystemencoding())):
if self.rewriteDataForCommand(
idx, data.encode(sys.getfilesystemencoding())
):
changed = True
return changed
def rewriteDataForCommand(self, idx, data):
lc, cmd, old_data = self.commands[idx]
hdrsize = sizeof(lc.__class__) + sizeof(cmd.__class__)
align = struct.calcsize('Q')
data = data + (b'\x00' * (align - (len(data) % align)))
align = struct.calcsize("Q")
data = data + (b"\x00" * (align - (len(data) % align)))
newsize = hdrsize + len(data)
self.commands[idx] = (lc, cmd, data)
self.changedHeaderSizeBy(newsize - lc.cmdsize)
@@ -352,10 +380,17 @@ def rewriteDataForCommand(self, idx, data):
def synchronize_size(self):
if (self.total_size + self.sizediff) > self.low_offset:
raise ValueError(
("New Mach-O header is too large to relocate in %r "
"(new size=%r, max size=%r, delta=%r)") % (
self.parent.filename, self.total_size + self.sizediff,
self.low_offset, self.sizediff))
(
"New Mach-O header is too large to relocate in %r "
"(new size=%r, max size=%r, delta=%r)"
)
% (
self.parent.filename,
self.total_size + self.sizediff,
self.low_offset,
self.sizediff,
)
)
self.header.sizeofcmds += self.sizediff
self.total_size = sizeof(self.mach_header) + self.header.sizeofcmds
self.sizediff = 0
@@ -396,16 +431,16 @@ def write(self, fileobj):
# zero out the unused space, doubt this is strictly necessary
# and is generally probably already the case
fileobj.write(b'\x00' * (self.low_offset - fileobj.tell()))
fileobj.write(b"\x00" * (self.low_offset - fileobj.tell()))
def getSymbolTableCommand(self):
for lc, cmd, data in self.commands:
for lc, cmd, _data in self.commands:
if lc.cmd == LC_SYMTAB:
return cmd
return None
def getDynamicSymbolTableCommand(self):
for lc, cmd, data in self.commands:
for lc, cmd, _data in self.commands:
if lc.cmd == LC_DYSYMTAB:
return cmd
return None
@@ -414,22 +449,23 @@ def get_filetype_shortname(self, filetype):
if filetype in MH_FILETYPE_SHORTNAMES:
return MH_FILETYPE_SHORTNAMES[filetype]
else:
return 'unknown'
return "unknown"
def main(fn):
m = MachO(fn)
seen = set()
for header in m.headers:
for idx, name, other in header.walkRelocatables():
for _idx, name, other in header.walkRelocatables():
if other not in seen:
seen.add(other)
print('\t' + name + ": " + other)
print("\t" + name + ": " + other)
if __name__ == '__main__':
if __name__ == "__main__":
import sys
files = sys.argv[1:] or ['/bin/ls']
files = sys.argv[1:] or ["/bin/ls"]
for fn in files:
print(fn)
main(fn)
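
The main() above already shows the typical read-only use of this class; spelled out as a sketch (the path is illustrative and assumes a Mach-O binary, e.g. on a macOS host):

    from macholib.MachO import MachO

    m = MachO("/bin/ls")                 # parses thin and fat Mach-O files alike
    for header in m.headers:             # one MachOHeader per architecture slice
        for _idx, name, other in header.walkRelocatables():
            print(name, other)           # e.g. load_dylib /usr/lib/libSystem.B.dylib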

View File

@@ -8,10 +8,10 @@
from altgraph.ObjectGraph import ObjectGraph
from macholib.dyld import dyld_find
from macholib.MachO import MachO
from macholib.itergraphreport import itergraphreport
from macholib.MachO import MachO
__all__ = ['MachOGraph']
__all__ = ["MachOGraph"]
try:
unicode
@@ -25,13 +25,14 @@ def __init__(self, filename):
self.headers = ()
def __repr__(self):
return '<%s graphident=%r>' % (type(self).__name__, self.graphident)
return "<%s graphident=%r>" % (type(self).__name__, self.graphident)
class MachOGraph(ObjectGraph):
"""
Graph data structure of Mach-O dependencies
"""
def __init__(self, debug=0, graph=None, env=None, executable_path=None):
super(MachOGraph, self).__init__(debug=debug, graph=graph)
self.env = env
@@ -41,16 +42,18 @@ def __init__(self, debug=0, graph=None, env=None, executable_path=None):
def locate(self, filename, loader=None):
if not isinstance(filename, (str, unicode)):
raise TypeError("%r is not a string" % (filename,))
if filename.startswith('@loader_path/') and loader is not None:
if filename.startswith("@loader_path/") and loader is not None:
fn = self.trans_table.get((loader.filename, filename))
if fn is None:
loader_path = loader.loader_path
try:
fn = dyld_find(
filename, env=self.env,
filename,
env=self.env,
executable_path=self.executable_path,
loader_path=loader_path)
loader_path=loader_path,
)
self.trans_table[(loader.filename, filename)] = fn
except ValueError:
return None
@@ -60,8 +63,8 @@ def locate(self, filename, loader=None):
if fn is None:
try:
fn = dyld_find(
filename, env=self.env,
executable_path=self.executable_path)
filename, env=self.env, executable_path=self.executable_path
)
self.trans_table[filename] = fn
except ValueError:
return None
@@ -83,11 +86,11 @@ def run_file(self, pathname, caller=None):
m = self.findNode(pathname, loader=caller)
if m is None:
if not os.path.exists(pathname):
raise ValueError('%r does not exist' % (pathname,))
raise ValueError("%r does not exist" % (pathname,))
m = self.createNode(MachO, pathname)
self.createReference(caller, m, edge_data='run_file')
self.createReference(caller, m, edge_data="run_file")
self.scan_node(m)
self.msgout(2, '')
self.msgout(2, "")
return m
def load_file(self, name, caller=None):
@@ -103,20 +106,20 @@ def load_file(self, name, caller=None):
self.scan_node(m)
else:
m = self.createNode(MissingMachO, name)
self.msgout(2, '')
self.msgout(2, "")
return m
def scan_node(self, node):
self.msgin(2, 'scan_node', node)
self.msgin(2, "scan_node", node)
for header in node.headers:
for idx, name, filename in header.walkRelocatables():
for _idx, name, filename in header.walkRelocatables():
assert isinstance(name, (str, unicode))
assert isinstance(filename, (str, unicode))
m = self.load_file(filename, caller=node)
self.createReference(node, m, edge_data=name)
self.msgout(2, '', node)
self.msgout(2, "", node)
def itergraphreport(self, name='G'):
def itergraphreport(self, name="G"):
nodes = map(self.graph.describe_node, self.graph.iterdfs(self))
describe_edge = self.graph.describe_edge
return itergraphreport(nodes, describe_edge, name=name)
@@ -134,5 +137,5 @@ def main(args):
g.graphreport()
if __name__ == '__main__':
main(sys.argv[1:] or ['/bin/ls'])
if __name__ == "__main__":
main(sys.argv[1:] or ["/bin/ls"])
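
Mirroring main() above, a minimal sketch of building a dependency graph (again assuming a Mach-O binary such as /bin/ls on a macOS host):

    from macholib.MachOGraph import MachOGraph

    g = MachOGraph()
    g.run_file("/bin/ls")                # parse the binary and recursively load the dylibs it references
    print("".join(g.itergraphreport()))  # dependency graph in dot format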

View File

@@ -1,11 +1,17 @@
import os
from macholib.MachOGraph import MachOGraph, MissingMachO
from macholib.util import iter_platform_files, in_system_path, mergecopy, \
mergetree, flipwritable, has_filename_filter
from macholib.dyld import framework_info
from collections import deque
from macholib.dyld import framework_info
from macholib.MachOGraph import MachOGraph, MissingMachO
from macholib.util import (
flipwritable,
has_filename_filter,
in_system_path,
iter_platform_files,
mergecopy,
mergetree,
)
class ExcludedMachO(MissingMachO):
pass
@@ -23,22 +29,20 @@ def createNode(self, cls, name):
def locate(self, filename, loader=None):
newname = super(FilteredMachOGraph, self).locate(filename, loader)
print("locate", filename, loader, "->", newname)
if newname is None:
return None
return self.delegate.locate(newname, loader=loader)
class MachOStandalone(object):
def __init__(
self, base, dest=None, graph=None, env=None,
executable_path=None):
self.base = os.path.join(os.path.abspath(base), '')
def __init__(self, base, dest=None, graph=None, env=None, executable_path=None):
self.base = os.path.join(os.path.abspath(base), "")
if dest is None:
dest = os.path.join(self.base, 'Contents', 'Frameworks')
dest = os.path.join(self.base, "Contents", "Frameworks")
self.dest = dest
self.mm = FilteredMachOGraph(
self, graph=graph, env=env, executable_path=executable_path)
self, graph=graph, env=env, executable_path=executable_path
)
self.changemap = {}
self.excludes = []
self.pending = deque()
@@ -80,8 +84,7 @@ def copy_dylib(self, filename):
# when two libraries link to the same dylib but using different
# symlinks.
if os.path.islink(filename):
dest = os.path.join(
self.dest, os.path.basename(os.path.realpath(filename)))
dest = os.path.join(self.dest, os.path.basename(os.path.realpath(filename)))
else:
dest = os.path.join(self.dest, os.path.basename(filename))
@@ -96,9 +99,9 @@ def mergetree(self, src, dest):
return mergetree(src, dest)
def copy_framework(self, info):
dest = os.path.join(self.dest, info['shortname'] + '.framework')
destfn = os.path.join(self.dest, info['name'])
src = os.path.join(info['location'], info['shortname'] + '.framework')
dest = os.path.join(self.dest, info["shortname"] + ".framework")
destfn = os.path.join(self.dest, info["name"])
src = os.path.join(info["location"], info["shortname"] + ".framework")
if not os.path.exists(dest):
self.mergetree(src, dest)
self.pending.append((destfn, iter_platform_files(dest)))
@@ -107,7 +110,7 @@ def copy_framework(self, info):
def run(self, platfiles=None, contents=None):
mm = self.mm
if contents is None:
contents = '@executable_path/..'
contents = "@executable_path/.."
if platfiles is None:
platfiles = iter_platform_files(self.base)
@@ -121,18 +124,20 @@ def run(self, platfiles=None, contents=None):
mm.run_file(fn, caller=ref)
changemap = {}
skipcontents = os.path.join(os.path.dirname(self.dest), '')
skipcontents = os.path.join(os.path.dirname(self.dest), "")
machfiles = []
for node in mm.flatten(has_filename_filter):
machfiles.append(node)
dest = os.path.join(
contents, os.path.normpath(node.filename[len(skipcontents):]))
contents,
os.path.normpath(node.filename[len(skipcontents) :]), # noqa: E203
)
changemap[node.filename] = dest
def changefunc(path):
if path.startswith('@loader_path/'):
# XXX: This is a quick hack for py2app: In that
if path.startswith("@loader_path/"):
# This is a quick hack for py2app: In that
# usecase paths like this are found in the load
# commands of relocatable wheels. Those don't
# need rewriting.
@@ -140,9 +145,8 @@ def changefunc(path):
res = mm.locate(path)
rv = changemap.get(res)
if rv is None and path.startswith('@loader_path/'):
rv = changemap.get(mm.locate(mm.trans_table.get(
(node.filename, path))))
if rv is None and path.startswith("@loader_path/"):
rv = changemap.get(mm.locate(mm.trans_table.get((node.filename, path))))
return rv
for node in machfiles:
@@ -150,14 +154,14 @@ def changefunc(path):
if fn is None:
continue
rewroteAny = False
for header in node.headers:
for _header in node.headers:
if node.rewriteLoadCommands(changefunc):
rewroteAny = True
if rewroteAny:
old_mode = flipwritable(fn)
try:
with open(fn, 'rb+') as f:
for header in node.headers:
with open(fn, "rb+") as f:
for _header in node.headers:
f.seek(0)
node.write(f)
f.seek(0, 2)
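
For reference, macho_standalone.py (further down in this diff) drives this class roughly as follows; the bundle path here is illustrative and strip_files is assumed to come from macholib.util:

    from macholib.MachOStandalone import MachOStandalone
    from macholib.util import strip_files

    # copy non-system dylibs into Contents/Frameworks of the bundle and
    # rewrite the load commands of every Mach-O file that was touched
    files = MachOStandalone("/path/to/My.app").run()
    strip_files(files)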

View File

@@ -3,12 +3,20 @@
"""
from __future__ import with_statement
from macholib.mach_o import relocation_info, dylib_reference, dylib_module
from macholib.mach_o import dylib_table_of_contents, nlist, nlist_64
from macholib.mach_o import MH_CIGAM_64, MH_MAGIC_64
import sys
__all__ = ['SymbolTable']
from macholib.mach_o import (
MH_CIGAM_64,
MH_MAGIC_64,
dylib_module,
dylib_reference,
dylib_table_of_contents,
nlist,
nlist_64,
relocation_info,
)
__all__ = ["SymbolTable"]
if sys.version_info[0] == 2:
range = xrange # noqa: F821
@@ -21,7 +29,7 @@ def __init__(self, macho, header=None, openfile=None):
if header is None:
header = macho.headers[0]
self.macho_header = header
with openfile(macho.filename, 'rb') as fh:
with openfile(macho.filename, "rb") as fh:
self.symtab = header.getSymbolTableCommand()
self.dysymtab = header.getDynamicSymbolTableCommand()
@@ -43,22 +51,32 @@ def readSymbolTable(self, fh):
else:
cls = nlist
for i in range(cmd.nsyms):
for _i in range(cmd.nsyms):
cmd = cls.from_fileobj(fh, _endian_=self.macho_header.endian)
if cmd.n_un == 0:
nlists.append((cmd, ''))
nlists.append((cmd, ""))
else:
nlists.append(
(cmd, strtab[cmd.n_un:strtab.find(b'\x00', cmd.n_un)]))
(
cmd,
strtab[cmd.n_un : strtab.find(b"\x00", cmd.n_un)], # noqa: E203
)
)
return nlists
def readDynamicSymbolTable(self, fh):
cmd = self.dysymtab
nlists = self.nlists
self.localsyms = nlists[cmd.ilocalsym:cmd.ilocalsym+cmd.nlocalsym]
self.extdefsyms = nlists[cmd.iextdefsym:cmd.iextdefsym+cmd.nextdefsym]
self.undefsyms = nlists[cmd.iundefsym:cmd.iundefsym+cmd.nundefsym]
self.localsyms = nlists[
cmd.ilocalsym : cmd.ilocalsym + cmd.nlocalsym # noqa: E203
]
self.extdefsyms = nlists[
cmd.iextdefsym : cmd.iextdefsym + cmd.nextdefsym # noqa: E203
]
self.undefsyms = nlists[
cmd.iundefsym : cmd.iundefsym + cmd.nundefsym # noqa: E203
]
if cmd.tocoff == 0:
self.toc = None
else:
@@ -75,7 +93,7 @@ def readmodtab(self, fh, off, n):
def readsym(self, fh, off, n):
fh.seek(self.macho_header.offset + off)
refs = []
for i in range(n):
for _i in range(n):
ref = dylib_reference.from_fileobj(fh)
isym, flags = divmod(ref.isym_flags, 256)
refs.append((self.nlists[isym], flags))
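
A minimal sketch of reading a symbol table with the class above (path illustrative; assumes the binary carries LC_SYMTAB/LC_DYSYMTAB load commands):

    from macholib.MachO import MachO
    from macholib.SymbolTable import SymbolTable

    m = MachO("/bin/ls")
    st = SymbolTable(m)                    # parses the symbol tables of the first header
    print(len(st.nlists), "symbols")
    print(len(st.undefsyms), "undefined symbols")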

View File

@@ -5,4 +5,4 @@
And also Apple's documentation.
"""
__version__ = '1.10'
__version__ = "1.15.2"

View File

@@ -1,26 +1,24 @@
from __future__ import print_function, absolute_import
from __future__ import absolute_import, print_function
import os
import sys
from macholib import macho_dump, macho_standalone
from macholib.util import is_platform_file
from macholib import macho_dump
from macholib import macho_standalone
gCommand = None
def check_file(fp, path, callback):
if not os.path.exists(path):
print(
'%s: %s: No such file or directory' % (gCommand, path),
file=sys.stderr)
print("%s: %s: No such file or directory" % (gCommand, path), file=sys.stderr)
return 1
try:
is_plat = is_platform_file(path)
except IOError as msg:
print('%s: %s: %s' % (gCommand, path, msg), file=sys.stderr)
print("%s: %s: %s" % (gCommand, path, msg), file=sys.stderr)
return 1
else:
@@ -34,10 +32,9 @@ def walk_tree(callback, paths):
for base in paths:
if os.path.isdir(base):
for root, dirs, files in os.walk(base):
for root, _dirs, files in os.walk(base):
for fn in files:
err |= check_file(
sys.stdout, os.path.join(root, fn), callback)
err |= check_file(sys.stdout, os.path.join(root, fn), callback)
else:
err |= check_file(sys.stdout, base, callback)
@@ -60,17 +57,17 @@ def main():
gCommand = sys.argv[1]
if gCommand == 'dump':
if gCommand == "dump":
walk_tree(macho_dump.print_file, sys.argv[2:])
elif gCommand == 'find':
elif gCommand == "find":
walk_tree(lambda fp, path: print(path, file=fp), sys.argv[2:])
elif gCommand == 'standalone':
elif gCommand == "standalone":
for dn in sys.argv[2:]:
macho_standalone.standaloneApp(dn)
elif gCommand in ('help', '--help'):
elif gCommand in ("help", "--help"):
print_usage(sys.stdout)
sys.exit(0)

View File

@@ -1,7 +1,8 @@
"""
Internal helpers for basic commandline tools
"""
from __future__ import print_function, absolute_import
from __future__ import absolute_import, print_function
import os
import sys
@@ -10,15 +11,16 @@
def check_file(fp, path, callback):
if not os.path.exists(path):
print('%s: %s: No such file or directory' % (
sys.argv[0], path), file=sys.stderr)
print(
"%s: %s: No such file or directory" % (sys.argv[0], path), file=sys.stderr
)
return 1
try:
is_plat = is_platform_file(path)
except IOError as msg:
print('%s: %s: %s' % (sys.argv[0], path, msg), file=sys.stderr)
print("%s: %s: %s" % (sys.argv[0], path, msg), file=sys.stderr)
return 1
else:
@@ -38,10 +40,9 @@ def main(callback):
for base in args:
if os.path.isdir(base):
for root, dirs, files in os.walk(base):
for root, _dirs, files in os.walk(base):
for fn in files:
err |= check_file(
sys.stdout, os.path.join(root, fn), callback)
err |= check_file(sys.stdout, os.path.join(root, fn), callback)
else:
err |= check_file(sys.stdout, base, callback)

View File

@@ -2,18 +2,45 @@
dyld emulation
"""
import ctypes
import os
import platform
import sys
from itertools import chain
import os
import sys
from macholib.framework import framework_info
from macholib.dylib import dylib_info
from macholib.framework import framework_info
__all__ = [
'dyld_find', 'framework_find',
'framework_info', 'dylib_info',
]
__all__ = ["dyld_find", "framework_find", "framework_info", "dylib_info"]
if sys.platform == "darwin" and [
int(x) for x in platform.mac_ver()[0].split(".")[:2]
] >= [10, 16]:
try:
libc = ctypes.CDLL("libSystem.dylib")
except OSError:
_dyld_shared_cache_contains_path = None
else:
try:
_dyld_shared_cache_contains_path = libc._dyld_shared_cache_contains_path
except AttributeError:
_dyld_shared_cache_contains_path = None
else:
_dyld_shared_cache_contains_path.restype = ctypes.c_bool
_dyld_shared_cache_contains_path.argtypes = [ctypes.c_char_p]
if sys.version_info[0] != 2:
__dyld_shared_cache_contains_path = _dyld_shared_cache_contains_path
def _dyld_shared_cache_contains_path(path):
return __dyld_shared_cache_contains_path(path.encode())
else:
_dyld_shared_cache_contains_path = None
# These are the defaults as per man dyld(1)
#
@@ -31,13 +58,16 @@
"/usr/lib",
]
# XXX: Is this function still needed?
if sys.version_info[0] == 2:
def _ensure_utf8(s):
if isinstance(s, unicode): # noqa: F821
return s.encode('utf8')
return s.encode("utf8")
return s
else:
def _ensure_utf8(s):
if s is not None and not isinstance(s, str):
raise ValueError(s)
@@ -48,31 +78,31 @@ def _dyld_env(env, var):
if env is None:
env = os.environ
rval = env.get(var)
if rval is None or rval == '':
if rval is None or rval == "":
return []
return rval.split(':')
return rval.split(":")
def dyld_image_suffix(env=None):
if env is None:
env = os.environ
return env.get('DYLD_IMAGE_SUFFIX')
return env.get("DYLD_IMAGE_SUFFIX")
def dyld_framework_path(env=None):
return _dyld_env(env, 'DYLD_FRAMEWORK_PATH')
return _dyld_env(env, "DYLD_FRAMEWORK_PATH")
def dyld_library_path(env=None):
return _dyld_env(env, 'DYLD_LIBRARY_PATH')
return _dyld_env(env, "DYLD_LIBRARY_PATH")
def dyld_fallback_framework_path(env=None):
return _dyld_env(env, 'DYLD_FALLBACK_FRAMEWORK_PATH')
return _dyld_env(env, "DYLD_FALLBACK_FRAMEWORK_PATH")
def dyld_fallback_library_path(env=None):
return _dyld_env(env, 'DYLD_FALLBACK_LIBRARY_PATH')
return _dyld_env(env, "DYLD_FALLBACK_LIBRARY_PATH")
def dyld_image_suffix_search(iterator, env=None):
@@ -83,8 +113,8 @@ def dyld_image_suffix_search(iterator, env=None):
def _inject(iterator=iterator, suffix=suffix):
for path in iterator:
if path.endswith('.dylib'):
yield path[:-len('.dylib')] + suffix + '.dylib'
if path.endswith(".dylib"):
yield path[: -len(".dylib")] + suffix + ".dylib"
else:
yield path + suffix
yield path
@@ -102,7 +132,7 @@ def dyld_override_search(name, env=None):
if framework is not None:
for path in dyld_framework_path(env):
yield os.path.join(path, framework['name'])
yield os.path.join(path, framework["name"])
# If DYLD_LIBRARY_PATH is set then use the first file that exists
# in the path. If none use the original name.
@@ -114,16 +144,18 @@ def dyld_executable_path_search(name, executable_path=None):
# If we haven't done any searching and found a library and the
# dylib_name starts with "@executable_path/" then construct the
# library name.
if name.startswith('@executable_path/') and executable_path is not None:
yield os.path.join(executable_path, name[len('@executable_path/'):])
if name.startswith("@executable_path/") and executable_path is not None:
yield os.path.join(
executable_path, name[len("@executable_path/") :] # noqa: E203
)
def dyld_loader_search(name, loader_path=None):
# If we haven't done any searching and found a library and the
# dylib_name starts with "@loader_path/" then construct the
# library name.
if name.startswith('@loader_path/') and loader_path is not None:
yield os.path.join(loader_path, name[len('@loader_path/'):])
if name.startswith("@loader_path/") and loader_path is not None:
yield os.path.join(loader_path, name[len("@loader_path/") :]) # noqa: E203
def dyld_default_search(name, env=None):
@@ -136,11 +168,11 @@ def dyld_default_search(name, env=None):
if fallback_framework_path:
for path in fallback_framework_path:
yield os.path.join(path, framework['name'])
yield os.path.join(path, framework["name"])
else:
for path in _DEFAULT_FRAMEWORK_FALLBACK:
yield os.path.join(path, framework['name'])
yield os.path.join(path, framework["name"])
fallback_library_path = dyld_fallback_library_path(env)
if fallback_library_path:
@@ -158,12 +190,20 @@ def dyld_find(name, executable_path=None, env=None, loader_path=None):
"""
name = _ensure_utf8(name)
executable_path = _ensure_utf8(executable_path)
for path in dyld_image_suffix_search(chain(
dyld_override_search(name, env),
dyld_executable_path_search(name, executable_path),
dyld_loader_search(name, loader_path),
dyld_default_search(name, env),
), env):
for path in dyld_image_suffix_search(
chain(
dyld_override_search(name, env),
dyld_executable_path_search(name, executable_path),
dyld_loader_search(name, loader_path),
dyld_default_search(name, env),
),
env,
):
if (
_dyld_shared_cache_contains_path is not None
and _dyld_shared_cache_contains_path(path)
):
return path
if os.path.isfile(path):
return path
raise ValueError("dylib %s could not be found" % (name,))
@@ -182,9 +222,9 @@ def framework_find(fn, executable_path=None, env=None):
return dyld_find(fn, executable_path=executable_path, env=env)
except ValueError:
pass
fmwk_index = fn.rfind('.framework')
fmwk_index = fn.rfind(".framework")
if fmwk_index == -1:
fmwk_index = len(fn)
fn += '.framework'
fn += ".framework"
fn = os.path.join(fn, os.path.basename(fn[:fmwk_index]))
return dyld_find(fn, executable_path=executable_path, env=env)
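
A minimal sketch of the resolution helpers above; they consult the dyld environment and, after this update, the shared library cache, so the calls are only meaningful on a macOS host:

    from macholib.dyld import dyld_find, framework_find

    print(dyld_find("/usr/lib/libSystem.B.dylib"))
    print(framework_find("CoreFoundation"))   # resolves to the binary inside the framework bundle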

View File

@@ -4,9 +4,10 @@
import re
__all__ = ['dylib_info']
__all__ = ["dylib_info"]
_DYLIB_RE = re.compile(r"""(?x)
_DYLIB_RE = re.compile(
r"""(?x)
(?P<location>^.*)(?:^|/)
(?P<name>
(?P<shortname>\w+?)
@@ -14,7 +15,8 @@
(?:_(?P<suffix>[^._]+))?
\.dylib$
)
""")
"""
)
def dylib_info(filename):
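
dylib_info() is a pure string parser, so a quick sketch of its output (no filesystem access involved):

    from macholib.dylib import dylib_info

    print(dylib_info("/usr/lib/libSystem.B.dylib"))
    # {'location': '/usr/lib', 'name': 'libSystem.B.dylib',
    #  'shortname': 'libSystem', 'version': 'B', 'suffix': None}
    print(dylib_info("not-a-dylib"))          # None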

View File

@@ -4,9 +4,10 @@
import re
__all__ = ['framework_info']
__all__ = ["framework_info"]
_STRICT_FRAMEWORK_RE = re.compile(r"""(?x)
_STRICT_FRAMEWORK_RE = re.compile(
r"""(?x)
(?P<location>^.*)(?:^|/)
(?P<name>
(?P<shortname>[-_A-Za-z0-9]+).framework/
@@ -14,7 +15,8 @@
(?P=shortname)
(?:_(?P<suffix>[^_]+))?
)$
""")
"""
)
def framework_info(filename):

View File

@@ -1,7 +1,5 @@
"""
Utilities for creating dot output from a MachOGraph
XXX: need to rewrite this based on altgraph.Dot
"""
from collections import deque
@@ -11,28 +9,28 @@
except ImportError:
imap = map
__all__ = ['itergraphreport']
__all__ = ["itergraphreport"]
def itergraphreport(nodes, describe_edge, name='G'):
def itergraphreport(nodes, describe_edge, name="G"):
edges = deque()
nodetoident = {}
def nodevisitor(node, data, outgoing, incoming):
return {'label': str(node)}
return {"label": str(node)}
def edgevisitor(edge, data, head, tail):
return {}
yield 'digraph %s {\n' % (name,)
attr = dict(rankdir='LR', concentrate='true')
yield "digraph %s {\n" % (name,)
attr = {"rankdir": "LR", "concentrate": "true"}
cpatt = '%s="%s"'
for item in attr.iteritems():
yield '\t%s;\n' % (cpatt % item,)
for item in attr.items():
yield "\t%s;\n" % (cpatt % item,)
# find all packages (subgraphs)
for (node, data, outgoing, incoming) in nodes:
nodetoident[node] = getattr(data, 'identifier', node)
for (node, data, _outgoing, _incoming) in nodes:
nodetoident[node] = getattr(data, "identifier", node)
# create sets for subgraph, write out descriptions
for (node, data, outgoing, incoming) in nodes:
@@ -43,17 +41,19 @@ def edgevisitor(edge, data, head, tail):
# describe node
yield '\t"%s" [%s];\n' % (
node,
','.join([
(cpatt % item) for item in
nodevisitor(node, data, outgoing, incoming).iteritems()
]),
",".join(
[
(cpatt % item)
for item in nodevisitor(node, data, outgoing, incoming).items()
]
),
)
graph = []
while edges:
edge, data, head, tail = edges.popleft()
if data in ('run_file', 'load_dylib'):
if data in ("run_file", "load_dylib"):
graph.append((edge, data, head, tail))
def do_graph(edges, tabs):
@@ -64,10 +64,10 @@ def do_graph(edges, tabs):
yield edgestr % (
head,
tail,
','.join([(cpatt % item) for item in attribs.iteritems()]),
",".join([(cpatt % item) for item in attribs.items()]),
)
for s in do_graph(graph, '\t'):
for s in do_graph(graph, "\t"):
yield s
yield '}\n'
yield "}\n"

File diff suppressed because it is too large

View File

@@ -5,15 +5,14 @@
import sys
from macholib._cmdline import main as _main
from macholib.mach_o import CPU_TYPE_NAMES, MH_CIGAM_64, MH_MAGIC_64, get_cpu_subtype
from macholib.MachO import MachO
from macholib.mach_o import get_cpu_subtype, CPU_TYPE_NAMES
from macholib.mach_o import MH_CIGAM_64, MH_MAGIC_64
ARCH_MAP = {
('<', '64-bit'): 'x86_64',
('<', '32-bit'): 'i386',
('>', '64-bit'): 'ppc64',
('>', '32-bit'): 'ppc',
("<", "64-bit"): "x86_64",
("<", "32-bit"): "i386",
(">", "64-bit"): "ppc64",
(">", "32-bit"): "ppc",
}
@@ -24,34 +23,34 @@ def print_file(fp, path):
seen = set()
if header.MH_MAGIC == MH_MAGIC_64 or header.MH_MAGIC == MH_CIGAM_64:
sz = '64-bit'
sz = "64-bit"
else:
sz = '32-bit'
sz = "32-bit"
arch = CPU_TYPE_NAMES.get(
header.header.cputype, header.header.cputype)
arch = CPU_TYPE_NAMES.get(header.header.cputype, header.header.cputype)
subarch = get_cpu_subtype(
header.header.cputype, header.header.cpusubtype)
subarch = get_cpu_subtype(header.header.cputype, header.header.cpusubtype)
print(' [%s endian=%r size=%r arch=%r subarch=%r]' % (
header.__class__.__name__, header.endian, sz, arch, subarch),
file=fp)
for idx, name, other in header.walkRelocatables():
print(
" [%s endian=%r size=%r arch=%r subarch=%r]"
% (header.__class__.__name__, header.endian, sz, arch, subarch),
file=fp,
)
for _idx, _name, other in header.walkRelocatables():
if other not in seen:
seen.add(other)
print('\t' + other, file=fp)
print('', file=fp)
print("\t" + other, file=fp)
print("", file=fp)
def main():
print(
"WARNING: 'macho_dump' is deprecated, use 'python -mmacholib dump' "
"instead")
"WARNING: 'macho_dump' is deprecated, use 'python -mmacholib dump' " "instead"
)
_main(print_file)
if __name__ == '__main__':
if __name__ == "__main__":
try:
sys.exit(main())
except KeyboardInterrupt:

View File

@@ -1,5 +1,6 @@
#!/usr/bin/env python
from __future__ import print_function
from macholib._cmdline import main as _main
@@ -9,12 +10,12 @@ def print_file(fp, path):
def main():
print(
"WARNING: 'macho_find' is deprecated, "
"use 'python -mmacholib dump' instead")
"WARNING: 'macho_find' is deprecated, " "use 'python -mmacholib dump' instead"
)
_main(print_file)
if __name__ == '__main__':
if __name__ == "__main__":
try:
main()
except KeyboardInterrupt:

View File

@@ -8,10 +8,8 @@
def standaloneApp(path):
if not (os.path.isdir(path) and os.path.exists(
os.path.join(path, 'Contents'))):
print(
'%s: %s does not look like an app bundle' % (sys.argv[0], path))
if not (os.path.isdir(path) and os.path.exists(os.path.join(path, "Contents"))):
print("%s: %s does not look like an app bundle" % (sys.argv[0], path))
sys.exit(1)
files = MachOStandalone(path).run()
strip_files(files)
@@ -20,12 +18,13 @@ def standaloneApp(path):
def main():
print(
"WARNING: 'macho_standalone' is deprecated, use "
"'python -mmacholib standalone' instead")
"'python -mmacholib standalone' instead"
)
if not sys.argv[1:]:
raise SystemExit('usage: %s [appbundle ...]' % (sys.argv[0],))
raise SystemExit("usage: %s [appbundle ...]" % (sys.argv[0],))
for fn in sys.argv[1:]:
standaloneApp(fn)
if __name__ == '__main__':
if __name__ == "__main__":
main()

View File

@@ -4,12 +4,12 @@
"""
import struct
import sys
from itertools import chain, starmap
try:
from itertools import izip, imap
from itertools import imap, izip
except ImportError:
izip, imap = zip, map
from itertools import chain, starmap
__all__ = """
sizeof
@@ -44,7 +44,7 @@ def sizeof(s):
"""
Return the size of an object when packed
"""
if hasattr(s, '_size_'):
if hasattr(s, "_size_"):
return s._size_
elif isinstance(s, bytes):
@@ -58,14 +58,15 @@ class MetaPackable(type):
Fixed size struct.unpack-able types use from_tuple as their designated
initializer
"""
def from_mmap(cls, mm, ptr, **kw):
return cls.from_str(mm[ptr:ptr+cls._size_], **kw)
return cls.from_str(mm[ptr : ptr + cls._size_], **kw) # noqa: E203
def from_fileobj(cls, f, **kw):
return cls.from_str(f.read(cls._size_), **kw)
def from_str(cls, s, **kw):
endian = kw.get('_endian_', cls._endian_)
endian = kw.get("_endian_", cls._endian_)
return cls.from_tuple(struct.unpack(endian + cls._format_, s), **kw)
def from_tuple(cls, tpl, **kw):
@@ -73,7 +74,7 @@ def from_tuple(cls, tpl, **kw):
class BasePackable(object):
_endian_ = '>'
_endian_ = ">"
def to_str(self):
raise NotImplementedError
@@ -82,7 +83,7 @@ def to_fileobj(self, f):
f.write(self.to_str())
def to_mmap(self, mm, ptr):
mm[ptr:ptr+self._size_] = self.to_str()
mm[ptr : ptr + self._size_] = self.to_str() # noqa: E203
# This defines a class with a custom metaclass, we'd normally
@@ -92,9 +93,10 @@ def to_mmap(self, mm, ptr):
def _make():
def to_str(self):
cls = type(self)
endian = getattr(self, '_endian_', cls._endian_)
endian = getattr(self, "_endian_", cls._endian_)
return struct.pack(endian + cls._format_, self)
return MetaPackable("Packable", (BasePackable,), {'to_str': to_str})
return MetaPackable("Packable", (BasePackable,), {"to_str": to_str})
Packable = _make()
@@ -109,8 +111,8 @@ def pypackable(name, pytype, format):
size, items = _formatinfo(format)
def __new__(cls, *args, **kwds):
if '_endian_' in kwds:
_endian_ = kwds.pop('_endian_')
if "_endian_" in kwds:
_endian_ = kwds.pop("_endian_")
else:
_endian_ = cls._endian_
@@ -118,12 +120,11 @@ def __new__(cls, *args, **kwds):
result._endian_ = _endian_
return result
return type(Packable)(name, (pytype, Packable), {
'_format_': format,
'_size_': size,
'_items_': items,
'__new__': __new__,
})
return type(Packable)(
name,
(pytype, Packable),
{"_format_": format, "_size_": size, "_items_": items, "__new__": __new__},
)
def _formatinfo(format):
@@ -131,7 +132,7 @@ def _formatinfo(format):
Calculate the size and number of items in a struct format.
"""
size = struct.calcsize(format)
return size, len(struct.unpack(format, b'\x00' * size))
return size, len(struct.unpack(format, b"\x00" * size))
class MetaStructure(MetaPackable):
@@ -142,17 +143,17 @@ class MetaStructure(MetaPackable):
we can do a bunch of calculations up front and pack or
unpack the whole thing in one struct call.
"""
def __new__(cls, clsname, bases, dct):
fields = dct['_fields_']
fields = dct["_fields_"]
names = []
types = []
structmarks = []
format = ''
format = ""
items = 0
size = 0
def struct_property(name, typ):
def _get(self):
return self._objects_[name]
@@ -169,16 +170,16 @@ def _set(self, obj):
types.append(typ)
format += typ._format_
size += typ._size_
if (typ._items_ > 1):
if typ._items_ > 1:
structmarks.append((items, typ._items_, typ))
items += typ._items_
dct['_structmarks_'] = structmarks
dct['_names_'] = names
dct['_types_'] = types
dct['_size_'] = size
dct['_items_'] = items
dct['_format_'] = format
dct["_structmarks_"] = structmarks
dct["_names_"] = names
dct["_types_"] = types
dct["_size_"] = size
dct["_items_"] = items
dct["_format_"] = format
return super(MetaStructure, cls).__new__(cls, clsname, bases, dct)
def from_tuple(cls, tpl, **kw):
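# --- Illustrative sketch, not part of the upstream diff: subclasses only
# declare _fields_; MetaStructure derives _format_, _size_ and per-field
# properties from them (mach_o.py defines its headers and load commands this
# way).  The class name below is hypothetical.
from macholib.ptypes import Structure, p_uint32

class example_header(Structure):
    _fields_ = (("magic", p_uint32), ("nfat_arch", p_uint32))

h = example_header.from_str(b"\xca\xfe\xba\xbe\x00\x00\x00\x02")
assert h.magic == 0xCAFEBABE and h.nfat_arch == 2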
@@ -196,7 +197,7 @@ def from_tuple(cls, tpl, **kw):
# See metaclass discussion earlier in this file
def _make():
class_dict = {}
class_dict['_fields_'] = ()
class_dict["_fields_"] = ()
def as_method(function):
class_dict[function.__name__] = function
@@ -219,7 +220,7 @@ def __init__(self, *args, **kwargs):
@as_method
def _get_packables(self):
for obj in imap(self._objects_.__getitem__, self._names_):
if hasattr(obj, '_get_packables'):
if hasattr(obj, "_get_packables"):
for obj in obj._get_packables():
yield obj
@@ -228,18 +229,19 @@ def _get_packables(self):
@as_method
def to_str(self):
return struct.pack(
self._endian_ + self._format_, *self._get_packables())
return struct.pack(self._endian_ + self._format_, *self._get_packables())
@as_method
def __cmp__(self, other):
if type(other) is not type(self):
raise TypeError(
'Cannot compare objects of type %r to objects of type %r' % (
type(other), type(self)))
"Cannot compare objects of type %r to objects of type %r"
% (type(other), type(self))
)
if sys.version_info[0] == 2:
_cmp = cmp # noqa: F821
else:
def _cmp(a, b):
if a < b:
return -1
@@ -251,7 +253,8 @@ def _cmp(a, b):
raise TypeError()
for cmpval in starmap(
_cmp, izip(self._get_packables(), other._get_packables())):
_cmp, izip(self._get_packables(), other._get_packables())
):
if cmpval != 0:
return cmpval
return 0
@@ -289,12 +292,12 @@ def __ge__(self, other):
@as_method
def __repr__(self):
result = []
result.append('<')
result.append("<")
result.append(type(self).__name__)
for nm in self._names_:
result.append(' %s=%r' % (nm, getattr(self, nm)))
result.append('>')
return ''.join(result)
result.append(" %s=%r" % (nm, getattr(self, nm)))
result.append(">")
return "".join(result)
return MetaStructure("Structure", (BasePackable,), class_dict)
@@ -308,17 +311,17 @@ def __repr__(self):
long = int
# export common packables with predictable names
p_char = pypackable('p_char', bytes, 'c')
p_int8 = pypackable('p_int8', int, 'b')
p_uint8 = pypackable('p_uint8', int, 'B')
p_int16 = pypackable('p_int16', int, 'h')
p_uint16 = pypackable('p_uint16', int, 'H')
p_int32 = pypackable('p_int32', int, 'i')
p_uint32 = pypackable('p_uint32', long, 'I')
p_int64 = pypackable('p_int64', long, 'q')
p_uint64 = pypackable('p_uint64', long, 'Q')
p_float = pypackable('p_float', float, 'f')
p_double = pypackable('p_double', float, 'd')
p_char = pypackable("p_char", bytes, "c")
p_int8 = pypackable("p_int8", int, "b")
p_uint8 = pypackable("p_uint8", int, "B")
p_int16 = pypackable("p_int16", int, "h")
p_uint16 = pypackable("p_uint16", int, "H")
p_int32 = pypackable("p_int32", int, "i")
p_uint32 = pypackable("p_uint32", long, "I")
p_int64 = pypackable("p_int64", long, "q")
p_uint64 = pypackable("p_uint64", long, "Q")
p_float = pypackable("p_float", float, "f")
p_double = pypackable("p_double", float, "d")
# Deprecated names, need trick to emit deprecation warning.
p_byte = p_int8
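# --- Illustrative sketch, not part of the upstream diff: each p_* type is an
# ordinary Python value that also knows its struct layout.
from macholib.ptypes import p_int16, sizeof

v = p_int16(-2, _endian_="<")        # endianness can be overridden per instance
assert sizeof(v) == 2                # sizeof() reads the generated _size_
assert v.to_str() == b"\xfe\xff"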

View File

@@ -1,18 +1,18 @@
import os
import sys
import shutil
import stat
import struct
import shutil
import sys
from macholib import mach_o
MAGIC = [
struct.pack('!L', getattr(mach_o, 'MH_' + _))
for _ in ['MAGIC', 'CIGAM', 'MAGIC_64', 'CIGAM_64']
struct.pack("!L", getattr(mach_o, "MH_" + _))
for _ in ["MAGIC", "CIGAM", "MAGIC_64", "CIGAM_64"]
]
FAT_MAGIC_BYTES = struct.pack('!L', mach_o.FAT_MAGIC)
FAT_MAGIC_BYTES = struct.pack("!L", mach_o.FAT_MAGIC)
MAGIC_LEN = 4
STRIPCMD = ['/usr/bin/strip', '-x', '-S', '-']
STRIPCMD = ["/usr/bin/strip", "-x", "-S", "-"]
try:
unicode
@@ -20,7 +20,7 @@
unicode = str
def fsencoding(s, encoding=sys.getfilesystemencoding()):
def fsencoding(s, encoding=sys.getfilesystemencoding()): # noqa: M511,B008
"""
Ensure the given argument is in filesystem encoding (not unicode)
"""
@@ -66,16 +66,17 @@ def __init__(self, fileobj, start, size):
self._end = start + size
def __repr__(self):
return '<fileview [%d, %d] %r>' % (
self._start, self._end, self._fileobj)
return "<fileview [%d, %d] %r>" % (self._start, self._end, self._fileobj)
def tell(self):
return self._fileobj.tell() - self._start
def _checkwindow(self, seekto, op):
if not (self._start <= seekto <= self._end):
raise IOError("%s to offset %d is outside window [%d, %d]" % (
op, seekto, self._start, self._end))
raise IOError(
"%s to offset %d is outside window [%d, %d]"
% (op, seekto, self._start, self._end)
)
def seek(self, offset, whence=0):
seekto = offset
@@ -87,21 +88,22 @@ def seek(self, offset, whence=0):
seekto += self._end
else:
raise IOError("Invalid whence argument to seek: %r" % (whence,))
self._checkwindow(seekto, 'seek')
self._checkwindow(seekto, "seek")
self._fileobj.seek(seekto)
def write(self, bytes):
here = self._fileobj.tell()
self._checkwindow(here, 'write')
self._checkwindow(here + len(bytes), 'write')
self._checkwindow(here, "write")
self._checkwindow(here + len(bytes), "write")
self._fileobj.write(bytes)
def read(self, size=sys.maxsize):
if size < 0:
raise ValueError(
"Invalid size %s while reading from %s", size, self._fileobj)
"Invalid size %s while reading from %s", size, self._fileobj
)
here = self._fileobj.tell()
self._checkwindow(here, 'read')
self._checkwindow(here, "read")
bytes = min(size, self._end - here)
return self._fileobj.read(bytes)
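# --- Illustrative sketch, not part of the upstream diff: the class above
# (macholib.util.fileview, as named upstream) exposes a [start, start + size)
# window of a larger file, e.g. one architecture slice of a fat binary.
import io
from macholib.util import fileview

raw = io.BytesIO(b"headerPAYLOADtrailer")
view = fileview(raw, 6, 7)            # window over b"PAYLOAD"
view.seek(0)                          # offset 0 of the view == offset 6 of raw
assert view.read() == b"PAYLOAD"      # reads are clipped to the window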
@@ -110,8 +112,7 @@ def mergecopy(src, dest):
"""
copy2, but only if the destination isn't up to date
"""
if os.path.exists(dest) and \
os.stat(dest).st_mtime >= os.stat(src).st_mtime:
if os.path.exists(dest) and os.stat(dest).st_mtime >= os.stat(src).st_mtime:
return
copy2(src, dest)
@@ -138,13 +139,16 @@ def mergetree(src, dst, condition=None, copyfn=mergecopy, srcbase=None):
continue
try:
if os.path.islink(srcname):
# XXX: This is naive at best, should check srcbase(?)
realsrc = os.readlink(srcname)
os.symlink(realsrc, dstname)
elif os.path.isdir(srcname):
mergetree(
srcname, dstname,
condition=condition, copyfn=copyfn, srcbase=srcbase)
srcname,
dstname,
condition=condition,
copyfn=copyfn,
srcbase=srcbase,
)
else:
copyfn(srcname, dstname)
except (IOError, os.error) as why:
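# --- Illustrative sketch, not part of the upstream diff: typical use of the
# merge helpers above (paths and the condition are hypothetical).
from macholib.util import mergecopy, mergetree

mergetree(
    "build/My.app",                                  # source tree
    "dist/My.app",                                   # destination tree
    condition=lambda src: not src.endswith(".pyc"),  # skip unwanted entries
    copyfn=mergecopy,                                # only copy out-of-date files
)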
@@ -158,10 +162,10 @@ def sdk_normalize(filename):
Normalize a path to strip out the SDK portion, normally so that it
can be decided whether it is in a system path or not.
"""
if filename.startswith('/Developer/SDKs/'):
pathcomp = filename.split('/')
if filename.startswith("/Developer/SDKs/"):
pathcomp = filename.split("/")
del pathcomp[1:4]
filename = '/'.join(pathcomp)
filename = "/".join(pathcomp)
return filename
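# --- Illustrative sketch, not part of the upstream diff: deleting path
# components 1-3 strips the SDK prefix, so an SDK copy of a system library
# normalizes to its runtime path (the SDK name here is hypothetical).
from macholib.util import sdk_normalize

p = "/Developer/SDKs/MacOSX10.15.sdk/usr/lib/libSystem.B.dylib"
assert sdk_normalize(p) == "/usr/lib/libSystem.B.dylib"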
@@ -173,9 +177,9 @@ def in_system_path(filename):
Return True if the file is in a system path
"""
fn = sdk_normalize(os.path.realpath(filename))
if fn.startswith('/usr/local/'):
if fn.startswith("/usr/local/"):
return False
elif fn.startswith('/System/') or fn.startswith('/usr/'):
elif fn.startswith("/System/") or fn.startswith("/usr/"):
if fn in NOT_SYSTEM_FILES:
return False
return True
@@ -187,7 +191,7 @@ def has_filename_filter(module):
"""
Return False if the module does not have a filename attribute
"""
return getattr(module, 'filename', None) is not None
return getattr(module, "filename", None) is not None
def get_magic():
@@ -204,16 +208,16 @@ def is_platform_file(path):
if not os.path.exists(path) or os.path.islink(path):
return False
# If the header is fat, we need to read into the first arch
with open(path, 'rb') as fileobj:
with open(path, "rb") as fileobj:
bytes = fileobj.read(MAGIC_LEN)
if bytes == FAT_MAGIC_BYTES:
# Read in the fat header
fileobj.seek(0)
header = mach_o.fat_header.from_fileobj(fileobj, _endian_='>')
header = mach_o.fat_header.from_fileobj(fileobj, _endian_=">")
if header.nfat_arch < 1:
return False
# Read in the first fat arch header
arch = mach_o.fat_arch.from_fileobj(fileobj, _endian_='>')
arch = mach_o.fat_arch.from_fileobj(fileobj, _endian_=">")
fileobj.seek(arch.offset)
# Read magic off the first header
bytes = fileobj.read(MAGIC_LEN)
@@ -227,7 +231,7 @@ def iter_platform_files(dst):
"""
Walk a directory and yield each full path that is a Mach-O file
"""
for root, dirs, files in os.walk(dst):
for root, _dirs, files in os.walk(dst):
for fn in files:
fn = os.path.join(root, fn)
if is_platform_file(fn):
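# --- Illustrative sketch, not part of the upstream diff: combining the two
# helpers above to list every Mach-O file under a tree (path hypothetical).
from macholib.util import iter_platform_files

for binary in iter_platform_files("/path/to/My.app"):
    print(binary)   # each path whose magic bytes identify it as Mach-O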
@@ -242,7 +246,7 @@ def strip_files(files, argv_max=(256 * 1024)):
while tostrip:
cmd = list(STRIPCMD)
flips = []
pathlen = sum([len(s) + 1 for s in cmd])
pathlen = sum(len(s) + 1 for s in cmd)
while pathlen < argv_max:
if not tostrip:
break
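# --- Illustrative sketch, not part of the upstream diff: a minimal standalone
# version of the batching pattern used above, packing as many paths as fit
# under argv_max into each command line (the helper name is ours, not macholib's).
def batch_by_argv(cmd, paths, argv_max=256 * 1024):
    pending = list(paths)
    while pending:
        batch = [pending.pop(0)]                     # always take at least one
        length = sum(len(s) + 1 for s in cmd + batch)
        while pending and length + len(pending[0]) + 1 < argv_max:
            length += len(pending[0]) + 1
            batch.append(pending.pop(0))
        yield cmd + batch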