summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorPatrick Gartung <gartung@fnal.gov>2019-09-26 11:48:22 -0500
committerGitHub <noreply@github.com>2019-09-26 11:48:22 -0500
commit321e956fa94d3e5faf9c96c249f25275c7aa60ca (patch)
tree62a05197403ff3e1c7ce8e246f212c8f33e38f54
parent90236bc9f54a0fccbf0e133d09ce258b379e5ae3 (diff)
downloadspack-321e956fa94d3e5faf9c96c249f25275c7aa60ca.tar.gz
spack-321e956fa94d3e5faf9c96c249f25275c7aa60ca.tar.bz2
spack-321e956fa94d3e5faf9c96c249f25275c7aa60ca.tar.xz
spack-321e956fa94d3e5faf9c96c249f25275c7aa60ca.zip
External: add macholib and altgraph needed to relocate Mach-o binaries on Linux (#12909)
-rw-r--r--COPYRIGHT8
-rw-r--r--lib/spack/external/__init__.py14
-rw-r--r--lib/spack/external/altgraph/Dot.py309
-rw-r--r--lib/spack/external/altgraph/Graph.py680
-rw-r--r--lib/spack/external/altgraph/GraphAlgo.py166
-rw-r--r--lib/spack/external/altgraph/GraphStat.py73
-rw-r--r--lib/spack/external/altgraph/GraphUtil.py144
-rw-r--r--lib/spack/external/altgraph/ObjectGraph.py212
-rw-r--r--lib/spack/external/altgraph/__init__.py147
-rw-r--r--lib/spack/external/macholib/MachO.py435
-rw-r--r--lib/spack/external/macholib/MachOGraph.py138
-rw-r--r--lib/spack/external/macholib/MachOStandalone.py169
-rw-r--r--lib/spack/external/macholib/SymbolTable.py86
-rw-r--r--lib/spack/external/macholib/__init__.py8
-rw-r--r--lib/spack/external/macholib/__main__.py83
-rw-r--r--lib/spack/external/macholib/_cmdline.py48
-rw-r--r--lib/spack/external/macholib/dyld.py190
-rw-r--r--lib/spack/external/macholib/dylib.py43
-rw-r--r--lib/spack/external/macholib/framework.py43
-rw-r--r--lib/spack/external/macholib/itergraphreport.py73
-rw-r--r--lib/spack/external/macholib/mach_o.py1665
-rw-r--r--lib/spack/external/macholib/macho_dump.py58
-rw-r--r--lib/spack/external/macholib/macho_find.py21
-rw-r--r--lib/spack/external/macholib/macho_standalone.py31
-rw-r--r--lib/spack/external/macholib/ptypes.py331
-rw-r--r--lib/spack/external/macholib/util.py258
26 files changed, 5433 insertions, 0 deletions
diff --git a/COPYRIGHT b/COPYRIGHT
index d2cdb2a0f4..ba792d2a9f 100644
--- a/COPYRIGHT
+++ b/COPYRIGHT
@@ -83,3 +83,11 @@ PackageLicenseDeclared: MIT
PackageName: six
PackageHomePage: https://pypi.python.org/pypi/six
PackageLicenseDeclared: MIT
+
+PackageName: macholib
+PackageHomePage: https://macholib.readthedocs.io/en/latest/index.html
+PackageLicenseDeclared: MIT
+
+PackageName: altgraph
+PackageHomePage: https://altgraph.readthedocs.io/en/latest/index.html
+PackageLicenseDeclared: MIT
diff --git a/lib/spack/external/__init__.py b/lib/spack/external/__init__.py
index f791bc3067..1001ff5fff 100644
--- a/lib/spack/external/__init__.py
+++ b/lib/spack/external/__init__.py
@@ -119,4 +119,18 @@ six
* Homepage: https://pypi.python.org/pypi/six
* Usage: Python 2 and 3 compatibility utilities.
* Version: 1.11.0
+
+macholib
+--------
+
+* Homepage: https://macholib.readthedocs.io/en/latest/index.html#
+* Usage: Manipulation of Mach-o binaries for relocating macOS buildcaches on Linux
+* Version: 1.12
+
+altgraph
+--------
+
+* Homepage: https://altgraph.readthedocs.io/en/latest/index.html
+* Usage: dependency of macholib
+* Version: 0.16.1
"""
diff --git a/lib/spack/external/altgraph/Dot.py b/lib/spack/external/altgraph/Dot.py
new file mode 100644
index 0000000000..3ef04d4c5b
--- /dev/null
+++ b/lib/spack/external/altgraph/Dot.py
@@ -0,0 +1,309 @@
+'''
+altgraph.Dot - Interface to the dot language
+============================================
+
+The :py:mod:`~altgraph.Dot` module provides a simple interface to the
+file format used in the
+`graphviz <http://www.research.att.com/sw/tools/graphviz/>`_
+program. The module is intended to offload the most tedious part of the process
+(the **dot** file generation) while transparently exposing most of its
+features.
+
+To display the graphs or to generate image files the
+`graphviz <http://www.research.att.com/sw/tools/graphviz/>`_
+package needs to be installed on the system, moreover the :command:`dot` and
+:command:`dotty` programs must be accessible in the program path so that they
+can be run from processes spawned within the module.
+
+Example usage
+-------------
+
+Here is a typical usage::
+
+ from altgraph import Graph, Dot
+
+ # create a graph
+ edges = [ (1,2), (1,3), (3,4), (3,5), (4,5), (5,4) ]
+ graph = Graph.Graph(edges)
+
+ # create a dot representation of the graph
+ dot = Dot.Dot(graph)
+
+ # display the graph
+ dot.display()
+
+ # save the dot representation into the mydot.dot file
+ dot.save_dot(file_name='mydot.dot')
+
+ # save dot file as gif image into the graph.gif file
+ dot.save_img(file_name='graph', file_type='gif')
+
+Directed graph and non-directed graph
+-------------------------------------
+
+Dot class can use for both directed graph and non-directed graph
+by passing ``graphtype`` parameter.
+
+Example::
+
+ # create directed graph(default)
+ dot = Dot.Dot(graph, graphtype="digraph")
+
+ # create non-directed graph
+ dot = Dot.Dot(graph, graphtype="graph")
+
+Customizing the output
+----------------------
+
+The graph drawing process may be customized by passing
+valid :command:`dot` parameters for the nodes and edges. For a list of all
+parameters see the `graphviz <http://www.research.att.com/sw/tools/graphviz/>`_
+documentation.
+
+Example::
+
+ # customizing the way the overall graph is drawn
+ dot.style(size='10,10', rankdir='RL', page='5, 5' , ranksep=0.75)
+
+ # customizing node drawing
+ dot.node_style(1, label='BASE_NODE',shape='box', color='blue' )
+ dot.node_style(2, style='filled', fillcolor='red')
+
+ # customizing edge drawing
+ dot.edge_style(1, 2, style='dotted')
+ dot.edge_style(3, 5, arrowhead='dot', label='binds', labelangle='90')
+ dot.edge_style(4, 5, arrowsize=2, style='bold')
+
+
+.. note::
+
+ dotty (invoked via :py:func:`~altgraph.Dot.display`) may not be able to
+ display all graphics styles. To verify the output save it to an image file
+ and look at it that way.
+
+Valid attributes
+----------------
+
+ - dot styles, passed via the :py:meth:`Dot.style` method::
+
+ rankdir = 'LR' (draws the graph horizontally, left to right)
+ ranksep = number (rank separation in inches)
+
+ - node attributes, passed via the :py:meth:`Dot.node_style` method::
+
+ style = 'filled' | 'invisible' | 'diagonals' | 'rounded'
+ shape = 'box' | 'ellipse' | 'circle' | 'point' | 'triangle'
+
+ - edge attributes, passed via the :py:meth:`Dot.edge_style` method::
+
+ style = 'dashed' | 'dotted' | 'solid' | 'invis' | 'bold'
+ arrowhead = 'box' | 'crow' | 'diamond' | 'dot' | 'inv' | 'none'
+ | 'tee' | 'vee'
+ weight = number (the larger the number the closer the nodes will be)
+
+ - valid `graphviz colors
+ <http://www.research.att.com/~erg/graphviz/info/colors.html>`_
+
+ - for more details on how to control the graph drawing process see the
+ `graphviz reference
+ <http://www.research.att.com/sw/tools/graphviz/refs.html>`_.
+'''
+import os
+import warnings
+
+from altgraph import GraphError
+
+
+class Dot(object):
+ '''
+ A class providing a **graphviz** (dot language) representation
+ allowing a fine grained control over how the graph is being
+ displayed.
+
+ If the :command:`dot` and :command:`dotty` programs are not in the current
+    system path, their location needs to be specified in the constructor.
+ '''
+
+ def __init__(
+ self, graph=None, nodes=None, edgefn=None, nodevisitor=None,
+ edgevisitor=None, name="G", dot='dot', dotty='dotty',
+ neato='neato', graphtype="digraph"):
+ '''
+ Initialization.
+ '''
+ self.name, self.attr = name, {}
+
+ assert graphtype in ['graph', 'digraph']
+ self.type = graphtype
+
+ self.temp_dot = "tmp_dot.dot"
+ self.temp_neo = "tmp_neo.dot"
+
+ self.dot, self.dotty, self.neato = dot, dotty, neato
+
+ # self.nodes: node styles
+ # self.edges: edge styles
+ self.nodes, self.edges = {}, {}
+
+ if graph is not None and nodes is None:
+ nodes = graph
+ if graph is not None and edgefn is None:
+ def edgefn(node, graph=graph):
+ return graph.out_nbrs(node)
+ if nodes is None:
+ nodes = ()
+
+ seen = set()
+ for node in nodes:
+ if nodevisitor is None:
+ style = {}
+ else:
+ style = nodevisitor(node)
+ if style is not None:
+ self.nodes[node] = {}
+ self.node_style(node, **style)
+ seen.add(node)
+ if edgefn is not None:
+ for head in seen:
+ for tail in (n for n in edgefn(head) if n in seen):
+ if edgevisitor is None:
+ edgestyle = {}
+ else:
+ edgestyle = edgevisitor(head, tail)
+ if edgestyle is not None:
+ if head not in self.edges:
+ self.edges[head] = {}
+ self.edges[head][tail] = {}
+ self.edge_style(head, tail, **edgestyle)
+
+ def style(self, **attr):
+ '''
+ Changes the overall style
+ '''
+ self.attr = attr
+
+ def display(self, mode='dot'):
+ '''
+ Displays the current graph via dotty
+ '''
+
+ if mode == 'neato':
+ self.save_dot(self.temp_neo)
+ neato_cmd = "%s -o %s %s" % (
+ self.neato, self.temp_dot, self.temp_neo)
+ os.system(neato_cmd)
+ else:
+ self.save_dot(self.temp_dot)
+
+ plot_cmd = "%s %s" % (self.dotty, self.temp_dot)
+ os.system(plot_cmd)
+
+ def node_style(self, node, **kwargs):
+ '''
+ Modifies a node style to the dot representation.
+ '''
+ if node not in self.edges:
+ self.edges[node] = {}
+ self.nodes[node] = kwargs
+
+ def all_node_style(self, **kwargs):
+ '''
+ Modifies all node styles
+ '''
+ for node in self.nodes:
+ self.node_style(node, **kwargs)
+
+ def edge_style(self, head, tail, **kwargs):
+ '''
+ Modifies an edge style to the dot representation.
+ '''
+ if tail not in self.nodes:
+ raise GraphError("invalid node %s" % (tail,))
+
+ try:
+ if tail not in self.edges[head]:
+ self.edges[head][tail] = {}
+ self.edges[head][tail] = kwargs
+ except KeyError:
+ raise GraphError("invalid edge %s -> %s " % (head, tail))
+
+ def iterdot(self):
+ # write graph title
+ if self.type == 'digraph':
+ yield 'digraph %s {\n' % (self.name,)
+ elif self.type == 'graph':
+ yield 'graph %s {\n' % (self.name,)
+
+ else:
+ raise GraphError("unsupported graphtype %s" % (self.type,))
+
+ # write overall graph attributes
+ for attr_name, attr_value in sorted(self.attr.items()):
+ yield '%s="%s";' % (attr_name, attr_value)
+ yield '\n'
+
+ # some reusable patterns
+ cpatt = '%s="%s",' # to separate attributes
+ epatt = '];\n' # to end attributes
+
+ # write node attributes
+ for node_name, node_attr in sorted(self.nodes.items()):
+ yield '\t"%s" [' % (node_name,)
+ for attr_name, attr_value in sorted(node_attr.items()):
+ yield cpatt % (attr_name, attr_value)
+ yield epatt
+
+ # write edge attributes
+ for head in sorted(self.edges):
+ for tail in sorted(self.edges[head]):
+ if self.type == 'digraph':
+ yield '\t"%s" -> "%s" [' % (head, tail)
+ else:
+ yield '\t"%s" -- "%s" [' % (head, tail)
+ for attr_name, attr_value in \
+ sorted(self.edges[head][tail].items()):
+ yield cpatt % (attr_name, attr_value)
+ yield epatt
+
+ # finish file
+ yield '}\n'
+
+ def __iter__(self):
+ return self.iterdot()
+
+ def save_dot(self, file_name=None):
+ '''
+ Saves the current graph representation into a file
+ '''
+
+ if not file_name:
+ warnings.warn(DeprecationWarning, "always pass a file_name")
+ file_name = self.temp_dot
+
+ with open(file_name, "w") as fp:
+ for chunk in self.iterdot():
+ fp.write(chunk)
+
+ def save_img(self, file_name=None, file_type="gif", mode='dot'):
+ '''
+ Saves the dot file as an image file
+ '''
+
+ if not file_name:
+ warnings.warn(DeprecationWarning, "always pass a file_name")
+ file_name = "out"
+
+ if mode == 'neato':
+ self.save_dot(self.temp_neo)
+ neato_cmd = "%s -o %s %s" % (
+ self.neato, self.temp_dot, self.temp_neo)
+ os.system(neato_cmd)
+ plot_cmd = self.dot
+ else:
+ self.save_dot(self.temp_dot)
+ plot_cmd = self.dot
+
+ file_name = "%s.%s" % (file_name, file_type)
+ create_cmd = "%s -T%s %s -o %s" % (
+ plot_cmd, file_type, self.temp_dot, file_name)
+ os.system(create_cmd)
diff --git a/lib/spack/external/altgraph/Graph.py b/lib/spack/external/altgraph/Graph.py
new file mode 100644
index 0000000000..fc4f7e9743
--- /dev/null
+++ b/lib/spack/external/altgraph/Graph.py
@@ -0,0 +1,680 @@
+"""
+altgraph.Graph - Base Graph class
+=================================
+
+..
+ #--Version 2.1
+ #--Bob Ippolito October, 2004
+
+ #--Version 2.0
+ #--Istvan Albert June, 2004
+
+ #--Version 1.0
+ #--Nathan Denny, May 27, 1999
+"""
+
+from altgraph import GraphError
+from collections import deque
+
+
+class Graph(object):
+ """
+ The Graph class represents a directed graph with *N* nodes and *E* edges.
+
+ Naming conventions:
+
+ - the prefixes such as *out*, *inc* and *all* will refer to methods
+ that operate on the outgoing, incoming or all edges of that node.
+
+ For example: :py:meth:`inc_degree` will refer to the degree of the node
+ computed over the incoming edges (the number of neighbours linking to
+ the node).
+
+ - the prefixes such as *forw* and *back* will refer to the
+ orientation of the edges used in the method with respect to the node.
+
+ For example: :py:meth:`forw_bfs` will start at the node then use the
+ outgoing edges to traverse the graph (goes forward).
+ """
+
+ def __init__(self, edges=None):
+ """
+ Initialization
+ """
+
+ self.next_edge = 0
+ self.nodes, self.edges = {}, {}
+ self.hidden_edges, self.hidden_nodes = {}, {}
+
+ if edges is not None:
+ for item in edges:
+ if len(item) == 2:
+ head, tail = item
+ self.add_edge(head, tail)
+ elif len(item) == 3:
+ head, tail, data = item
+ self.add_edge(head, tail, data)
+ else:
+ raise GraphError("Cannot create edge from %s" % (item,))
+
+ def __repr__(self):
+ return '<Graph: %d nodes, %d edges>' % (
+ self.number_of_nodes(), self.number_of_edges())
+
+ def add_node(self, node, node_data=None):
+ """
+ Adds a new node to the graph. Arbitrary data can be attached to the
+ node via the node_data parameter. Adding the same node twice will be
+ silently ignored.
+
+ The node must be a hashable value.
+ """
+ #
+ # the nodes will contain tuples that will store incoming edges,
+ # outgoing edges and data
+ #
+ # index 0 -> incoming edges
+ # index 1 -> outgoing edges
+
+ if node in self.hidden_nodes:
+ # Node is present, but hidden
+ return
+
+ if node not in self.nodes:
+ self.nodes[node] = ([], [], node_data)
+
+ def add_edge(self, head_id, tail_id, edge_data=1, create_nodes=True):
+ """
+ Adds a directed edge going from head_id to tail_id.
+ Arbitrary data can be attached to the edge via edge_data.
+ It may create the nodes if adding edges between nonexisting ones.
+
+ :param head_id: head node
+ :param tail_id: tail node
+ :param edge_data: (optional) data attached to the edge
+ :param create_nodes: (optional) creates the head_id or tail_id
+ node in case they did not exist
+ """
+        # shortcut
+ edge = self.next_edge
+
+ # add nodes if on automatic node creation
+ if create_nodes:
+ self.add_node(head_id)
+ self.add_node(tail_id)
+
+ # update the corresponding incoming and outgoing lists in the nodes
+ # index 0 -> incoming edges
+ # index 1 -> outgoing edges
+
+ try:
+ self.nodes[tail_id][0].append(edge)
+ self.nodes[head_id][1].append(edge)
+ except KeyError:
+ raise GraphError('Invalid nodes %s -> %s' % (head_id, tail_id))
+
+ # store edge information
+ self.edges[edge] = (head_id, tail_id, edge_data)
+
+ self.next_edge += 1
+
+ def hide_edge(self, edge):
+ """
+ Hides an edge from the graph. The edge may be unhidden at some later
+ time.
+ """
+ try:
+ head_id, tail_id, edge_data = \
+ self.hidden_edges[edge] = self.edges[edge]
+ self.nodes[tail_id][0].remove(edge)
+ self.nodes[head_id][1].remove(edge)
+ del self.edges[edge]
+ except KeyError:
+ raise GraphError('Invalid edge %s' % edge)
+
+ def hide_node(self, node):
+ """
+ Hides a node from the graph. The incoming and outgoing edges of the
+ node will also be hidden. The node may be unhidden at some later time.
+ """
+ try:
+ all_edges = self.all_edges(node)
+ self.hidden_nodes[node] = (self.nodes[node], all_edges)
+ for edge in all_edges:
+ self.hide_edge(edge)
+ del self.nodes[node]
+ except KeyError:
+ raise GraphError('Invalid node %s' % node)
+
+ def restore_node(self, node):
+ """
+ Restores a previously hidden node back into the graph and restores
+ all of its incoming and outgoing edges.
+ """
+ try:
+ self.nodes[node], all_edges = self.hidden_nodes[node]
+ for edge in all_edges:
+ self.restore_edge(edge)
+ del self.hidden_nodes[node]
+ except KeyError:
+ raise GraphError('Invalid node %s' % node)
+
+ def restore_edge(self, edge):
+ """
+ Restores a previously hidden edge back into the graph.
+ """
+ try:
+ head_id, tail_id, data = self.hidden_edges[edge]
+ self.nodes[tail_id][0].append(edge)
+ self.nodes[head_id][1].append(edge)
+ self.edges[edge] = head_id, tail_id, data
+ del self.hidden_edges[edge]
+ except KeyError:
+ raise GraphError('Invalid edge %s' % edge)
+
+ def restore_all_edges(self):
+ """
+ Restores all hidden edges.
+ """
+ for edge in list(self.hidden_edges.keys()):
+ try:
+ self.restore_edge(edge)
+ except GraphError:
+ pass
+
+ def restore_all_nodes(self):
+ """
+ Restores all hidden nodes.
+ """
+ for node in list(self.hidden_nodes.keys()):
+ self.restore_node(node)
+
+ def __contains__(self, node):
+ """
+ Test whether a node is in the graph
+ """
+ return node in self.nodes
+
+ def edge_by_id(self, edge):
+ """
+        Returns the (head, tail) node pair connected by the given edge id
+ """
+ try:
+ head, tail, data = self.edges[edge]
+ except KeyError:
+ head, tail = None, None
+ raise GraphError('Invalid edge %s' % edge)
+
+ return (head, tail)
+
+ def edge_by_node(self, head, tail):
+ """
+ Returns the edge that connects the head_id and tail_id nodes
+ """
+ for edge in self.out_edges(head):
+ if self.tail(edge) == tail:
+ return edge
+ return None
+
+ def number_of_nodes(self):
+ """
+ Returns the number of nodes
+ """
+ return len(self.nodes)
+
+ def number_of_edges(self):
+ """
+ Returns the number of edges
+ """
+ return len(self.edges)
+
+ def __iter__(self):
+ """
+ Iterates over all nodes in the graph
+ """
+ return iter(self.nodes)
+
+ def node_list(self):
+ """
+ Return a list of the node ids for all visible nodes in the graph.
+ """
+ return list(self.nodes.keys())
+
+ def edge_list(self):
+ """
+        Returns a list of the edge ids for all visible edges in the graph.
+ """
+ return list(self.edges.keys())
+
+ def number_of_hidden_edges(self):
+ """
+ Returns the number of hidden edges
+ """
+ return len(self.hidden_edges)
+
+ def number_of_hidden_nodes(self):
+ """
+ Returns the number of hidden nodes
+ """
+ return len(self.hidden_nodes)
+
+ def hidden_node_list(self):
+ """
+ Returns the list with the hidden nodes
+ """
+ return list(self.hidden_nodes.keys())
+
+ def hidden_edge_list(self):
+ """
+ Returns a list with the hidden edges
+ """
+ return list(self.hidden_edges.keys())
+
+ def describe_node(self, node):
+ """
+ return node, node data, outgoing edges, incoming edges for node
+ """
+ incoming, outgoing, data = self.nodes[node]
+ return node, data, outgoing, incoming
+
+ def describe_edge(self, edge):
+ """
+ return edge, edge data, head, tail for edge
+ """
+ head, tail, data = self.edges[edge]
+ return edge, data, head, tail
+
+ def node_data(self, node):
+ """
+ Returns the data associated with a node
+ """
+ return self.nodes[node][2]
+
+ def edge_data(self, edge):
+ """
+ Returns the data associated with an edge
+ """
+ return self.edges[edge][2]
+
+ def update_edge_data(self, edge, edge_data):
+ """
+ Replace the edge data for a specific edge
+ """
+ self.edges[edge] = self.edges[edge][0:2] + (edge_data,)
+
+ def head(self, edge):
+ """
+ Returns the node of the head of the edge.
+ """
+ return self.edges[edge][0]
+
+ def tail(self, edge):
+ """
+ Returns node of the tail of the edge.
+ """
+ return self.edges[edge][1]
+
+ def out_nbrs(self, node):
+ """
+ List of nodes connected by outgoing edges
+ """
+ return [self.tail(n) for n in self.out_edges(node)]
+
+ def inc_nbrs(self, node):
+ """
+ List of nodes connected by incoming edges
+ """
+ return [self.head(n) for n in self.inc_edges(node)]
+
+ def all_nbrs(self, node):
+ """
+ List of nodes connected by incoming and outgoing edges
+ """
+ return list(dict.fromkeys(self.inc_nbrs(node) + self.out_nbrs(node)))
+
+ def out_edges(self, node):
+ """
+ Returns a list of the outgoing edges
+ """
+ try:
+ return list(self.nodes[node][1])
+ except KeyError:
+ raise GraphError('Invalid node %s' % node)
+
+ def inc_edges(self, node):
+ """
+ Returns a list of the incoming edges
+ """
+ try:
+ return list(self.nodes[node][0])
+ except KeyError:
+ raise GraphError('Invalid node %s' % node)
+
+ def all_edges(self, node):
+ """
+        Returns the set of incoming and outgoing edges.
+ """
+ return set(self.inc_edges(node) + self.out_edges(node))
+
+ def out_degree(self, node):
+ """
+ Returns the number of outgoing edges
+ """
+ return len(self.out_edges(node))
+
+ def inc_degree(self, node):
+ """
+ Returns the number of incoming edges
+ """
+ return len(self.inc_edges(node))
+
+ def all_degree(self, node):
+ """
+ The total degree of a node
+ """
+ return self.inc_degree(node) + self.out_degree(node)
+
+ def _topo_sort(self, forward=True):
+ """
+ Topological sort.
+
+ Returns a list of nodes where the successors (based on outgoing and
+ incoming edges selected by the forward parameter) of any given node
+ appear in the sequence after that node.
+ """
+ topo_list = []
+ queue = deque()
+ indeg = {}
+
+ # select the operation that will be performed
+ if forward:
+ get_edges = self.out_edges
+ get_degree = self.inc_degree
+ get_next = self.tail
+ else:
+ get_edges = self.inc_edges
+ get_degree = self.out_degree
+ get_next = self.head
+
+ for node in self.node_list():
+ degree = get_degree(node)
+ if degree:
+ indeg[node] = degree
+ else:
+ queue.append(node)
+
+ while queue:
+ curr_node = queue.popleft()
+ topo_list.append(curr_node)
+ for edge in get_edges(curr_node):
+ tail_id = get_next(edge)
+ if tail_id in indeg:
+ indeg[tail_id] -= 1
+ if indeg[tail_id] == 0:
+ queue.append(tail_id)
+
+ if len(topo_list) == len(self.node_list()):
+ valid = True
+ else:
+ # the graph has cycles, invalid topological sort
+ valid = False
+
+ return (valid, topo_list)
+
+ def forw_topo_sort(self):
+ """
+ Topological sort.
+
+ Returns a list of nodes where the successors (based on outgoing edges)
+ of any given node appear in the sequence after that node.
+ """
+ return self._topo_sort(forward=True)
+
+ def back_topo_sort(self):
+ """
+ Reverse topological sort.
+
+ Returns a list of nodes where the successors (based on incoming edges)
+ of any given node appear in the sequence after that node.
+ """
+ return self._topo_sort(forward=False)
+
+ def _bfs_subgraph(self, start_id, forward=True):
+ """
+ Private method creates a subgraph in a bfs order.
+
+ The forward parameter specifies whether it is a forward or backward
+ traversal.
+ """
+ if forward:
+ get_bfs = self.forw_bfs
+ get_nbrs = self.out_nbrs
+ else:
+ get_bfs = self.back_bfs
+ get_nbrs = self.inc_nbrs
+
+ g = Graph()
+ bfs_list = get_bfs(start_id)
+ for node in bfs_list:
+ g.add_node(node)
+
+ for node in bfs_list:
+ for nbr_id in get_nbrs(node):
+ if forward:
+ g.add_edge(node, nbr_id)
+ else:
+ g.add_edge(nbr_id, node)
+
+ return g
+
+ def forw_bfs_subgraph(self, start_id):
+ """
+ Creates and returns a subgraph consisting of the breadth first
+ reachable nodes based on their outgoing edges.
+ """
+ return self._bfs_subgraph(start_id, forward=True)
+
+ def back_bfs_subgraph(self, start_id):
+ """
+ Creates and returns a subgraph consisting of the breadth first
+ reachable nodes based on the incoming edges.
+ """
+ return self._bfs_subgraph(start_id, forward=False)
+
+ def iterdfs(self, start, end=None, forward=True):
+ """
+ Collecting nodes in some depth first traversal.
+
+ The forward parameter specifies whether it is a forward or backward
+ traversal.
+ """
+ visited, stack = set([start]), deque([start])
+
+ if forward:
+ get_edges = self.out_edges
+ get_next = self.tail
+ else:
+ get_edges = self.inc_edges
+ get_next = self.head
+
+ while stack:
+ curr_node = stack.pop()
+ yield curr_node
+ if curr_node == end:
+ break
+ for edge in sorted(get_edges(curr_node)):
+ tail = get_next(edge)
+ if tail not in visited:
+ visited.add(tail)
+ stack.append(tail)
+
+ def iterdata(self, start, end=None, forward=True, condition=None):
+ """
+ Perform a depth-first walk of the graph (as ``iterdfs``)
+ and yield the item data of every node where condition matches. The
+ condition callback is only called when node_data is not None.
+ """
+
+ visited, stack = set([start]), deque([start])
+
+ if forward:
+ get_edges = self.out_edges
+ get_next = self.tail
+ else:
+ get_edges = self.inc_edges
+ get_next = self.head
+
+ get_data = self.node_data
+
+ while stack:
+ curr_node = stack.pop()
+ curr_data = get_data(curr_node)
+ if curr_data is not None:
+ if condition is not None and not condition(curr_data):
+ continue
+ yield curr_data
+ if curr_node == end:
+ break
+ for edge in get_edges(curr_node):
+ tail = get_next(edge)
+ if tail not in visited:
+ visited.add(tail)
+ stack.append(tail)
+
+ def _iterbfs(self, start, end=None, forward=True):
+ """
+ The forward parameter specifies whether it is a forward or backward
+        traversal. Yields tuples where the first value is the node id and
+        the second value is the hop count from the start node.
+ """
+ queue, visited = deque([(start, 0)]), set([start])
+
+ # the direction of the bfs depends on the edges that are sampled
+ if forward:
+ get_edges = self.out_edges
+ get_next = self.tail
+ else:
+ get_edges = self.inc_edges
+ get_next = self.head
+
+ while queue:
+ curr_node, curr_step = queue.popleft()
+ yield (curr_node, curr_step)
+ if curr_node == end:
+ break
+ for edge in get_edges(curr_node):
+ tail = get_next(edge)
+ if tail not in visited:
+ visited.add(tail)
+ queue.append((tail, curr_step + 1))
+
+ def forw_bfs(self, start, end=None):
+ """
+ Returns a list of nodes in some forward BFS order.
+
+ Starting from the start node the breadth first search proceeds along
+ outgoing edges.
+ """
+ return [node for node, step in self._iterbfs(start, end, forward=True)]
+
+ def back_bfs(self, start, end=None):
+ """
+ Returns a list of nodes in some backward BFS order.
+
+ Starting from the start node the breadth first search proceeds along
+ incoming edges.
+ """
+ return [node for node, _ in self._iterbfs(start, end, forward=False)]
+
+ def forw_dfs(self, start, end=None):
+ """
+ Returns a list of nodes in some forward DFS order.
+
+ Starting with the start node the depth first search proceeds along
+ outgoing edges.
+ """
+ return list(self.iterdfs(start, end, forward=True))
+
+ def back_dfs(self, start, end=None):
+ """
+ Returns a list of nodes in some backward DFS order.
+
+ Starting from the start node the depth first search proceeds along
+ incoming edges.
+ """
+ return list(self.iterdfs(start, end, forward=False))
+
+ def connected(self):
+ """
+ Returns :py:data:`True` if the graph's every node can be reached from
+ every other node.
+ """
+ node_list = self.node_list()
+ for node in node_list:
+ bfs_list = self.forw_bfs(node)
+ if len(bfs_list) != len(node_list):
+ return False
+ return True
+
+ def clust_coef(self, node):
+ """
+ Computes and returns the local clustering coefficient of node.
+
+ The local cluster coefficient is proportion of the actual number of
+ edges between neighbours of node and the maximum number of edges
+ between those neighbours.
+
+ See "Local Clustering Coefficient" on
+ <http://en.wikipedia.org/wiki/Clustering_coefficient>
+ for a formal definition.
+ """
+ num = 0
+ nbr_set = set(self.out_nbrs(node))
+
+ if node in nbr_set:
+ nbr_set.remove(node) # loop defense
+
+ for nbr in nbr_set:
+ sec_set = set(self.out_nbrs(nbr))
+ if nbr in sec_set:
+ sec_set.remove(nbr) # loop defense
+ num += len(nbr_set & sec_set)
+
+ nbr_num = len(nbr_set)
+ if nbr_num:
+ clust_coef = float(num) / (nbr_num * (nbr_num - 1))
+ else:
+ clust_coef = 0.0
+ return clust_coef
+
+ def get_hops(self, start, end=None, forward=True):
+ """
+ Computes the hop distance to all nodes centered around a node.
+
+        First order neighbours are at hop 1, their neighbours are at hop 2, etc.
+ Uses :py:meth:`forw_bfs` or :py:meth:`back_bfs` depending on the value
+ of the forward parameter. If the distance between all neighbouring
+ nodes is 1 the hop number corresponds to the shortest distance between
+ the nodes.
+
+ :param start: the starting node
+ :param end: ending node (optional). When not specified will search the
+ whole graph.
+ :param forward: directionality parameter (optional).
+ If C{True} (default) it uses L{forw_bfs} otherwise L{back_bfs}.
+ :return: returns a list of tuples where each tuple contains the
+ node and the hop.
+
+ Typical usage::
+
+ >>> print (graph.get_hops(1, 8))
+ >>> [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)]
+ # node 1 is at 0 hops
+ # node 2 is at 1 hop
+ # ...
+ # node 8 is at 5 hops
+ """
+ if forward:
+ return list(self._iterbfs(start=start, end=end, forward=True))
+ else:
+ return list(self._iterbfs(start=start, end=end, forward=False))
diff --git a/lib/spack/external/altgraph/GraphAlgo.py b/lib/spack/external/altgraph/GraphAlgo.py
new file mode 100644
index 0000000000..b51e536314
--- /dev/null
+++ b/lib/spack/external/altgraph/GraphAlgo.py
@@ -0,0 +1,166 @@
+'''
+altgraph.GraphAlgo - Graph algorithms
+=====================================
+'''
+from altgraph import GraphError
+
+
+def dijkstra(graph, start, end=None):
+ """
+ Dijkstra's algorithm for shortest paths
+
+ `David Eppstein, UC Irvine, 4 April 2002
+ <http://www.ics.uci.edu/~eppstein/161/python/>`_
+
+ `Python Cookbook Recipe
+ <http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/119466>`_
+
+ Find shortest paths from the start node to all nodes nearer than or
+ equal to the end node.
+
+ Dijkstra's algorithm is only guaranteed to work correctly when all edge
+ lengths are positive. This code does not verify this property for all
+ edges (only the edges examined until the end vertex is reached), but will
+ correctly compute shortest paths even for some graphs with negative edges,
+ and will raise an exception if it discovers that a negative edge has
+ caused it to make a mistake.
+
+ Adapted to altgraph by Istvan Albert, Pennsylvania State University -
+ June, 9 2004
+ """
+ D = {} # dictionary of final distances
+ P = {} # dictionary of predecessors
+ Q = _priorityDictionary() # estimated distances of non-final vertices
+ Q[start] = 0
+
+ for v in Q:
+ D[v] = Q[v]
+ if v == end:
+ break
+
+ for w in graph.out_nbrs(v):
+ edge_id = graph.edge_by_node(v, w)
+ vwLength = D[v] + graph.edge_data(edge_id)
+ if w in D:
+ if vwLength < D[w]:
+ raise GraphError(
+ "Dijkstra: found better path to already-final vertex")
+ elif w not in Q or vwLength < Q[w]:
+ Q[w] = vwLength
+ P[w] = v
+
+ return (D, P)
+
+
+def shortest_path(graph, start, end):
+ """
+ Find a single shortest path from the *start* node to the *end* node.
+ The input has the same conventions as dijkstra(). The output is a list of
+ the nodes in order along the shortest path.
+
+ **Note that the distances must be stored in the edge data as numeric data**
+ """
+
+ D, P = dijkstra(graph, start, end)
+ Path = []
+ while 1:
+ Path.append(end)
+ if end == start:
+ break
+ end = P[end]
+ Path.reverse()
+ return Path
+
+
+#
+# Utility classes and functions
+#
+class _priorityDictionary(dict):
+ '''
+ Priority dictionary using binary heaps (internal use only)
+
+ David Eppstein, UC Irvine, 8 Mar 2002
+
+ Implements a data structure that acts almost like a dictionary, with
+ two modifications:
+
+ 1. D.smallest() returns the value x minimizing D[x]. For this to
+ work correctly, all values D[x] stored in the dictionary must be
+ comparable.
+
+ 2. iterating "for x in D" finds and removes the items from D in sorted
+ order. Each item is not removed until the next item is requested,
+ so D[x] will still return a useful value until the next iteration
+ of the for-loop. Each operation takes logarithmic amortized time.
+ '''
+
+ def __init__(self):
+ '''
+ Initialize priorityDictionary by creating binary heap of pairs
+ (value,key). Note that changing or removing a dict entry will not
+ remove the old pair from the heap until it is found by smallest()
+ or until the heap is rebuilt.
+ '''
+ self.__heap = []
+ dict.__init__(self)
+
+ def smallest(self):
+ '''
+ Find smallest item after removing deleted items from front of heap.
+ '''
+ if len(self) == 0:
+ raise IndexError("smallest of empty priorityDictionary")
+ heap = self.__heap
+ while heap[0][1] not in self or self[heap[0][1]] != heap[0][0]:
+ lastItem = heap.pop()
+ insertionPoint = 0
+ while 1:
+ smallChild = 2*insertionPoint+1
+ if smallChild+1 < len(heap) and \
+ heap[smallChild] > heap[smallChild+1]:
+ smallChild += 1
+ if smallChild >= len(heap) or lastItem <= heap[smallChild]:
+ heap[insertionPoint] = lastItem
+ break
+ heap[insertionPoint] = heap[smallChild]
+ insertionPoint = smallChild
+ return heap[0][1]
+
+ def __iter__(self):
+ '''
+ Create destructive sorted iterator of priorityDictionary.
+ '''
+ def iterfn():
+ while len(self) > 0:
+ x = self.smallest()
+ yield x
+ del self[x]
+ return iterfn()
+
+ def __setitem__(self, key, val):
+ '''
+ Change value stored in dictionary and add corresponding pair to heap.
+ Rebuilds the heap if the number of deleted items gets large, to avoid
+ memory leakage.
+ '''
+ dict.__setitem__(self, key, val)
+ heap = self.__heap
+ if len(heap) > 2 * len(self):
+ self.__heap = [(v, k) for k, v in self.items()]
+ self.__heap.sort()
+ else:
+ newPair = (val, key)
+ insertionPoint = len(heap)
+ heap.append(None)
+ while insertionPoint > 0 and newPair < heap[(insertionPoint-1)//2]:
+ heap[insertionPoint] = heap[(insertionPoint-1)//2]
+ insertionPoint = (insertionPoint-1)//2
+ heap[insertionPoint] = newPair
+
+ def setdefault(self, key, val):
+ '''
+ Reimplement setdefault to pass through our customized __setitem__.
+ '''
+ if key not in self:
+ self[key] = val
+ return self[key]
diff --git a/lib/spack/external/altgraph/GraphStat.py b/lib/spack/external/altgraph/GraphStat.py
new file mode 100644
index 0000000000..003b7167e7
--- /dev/null
+++ b/lib/spack/external/altgraph/GraphStat.py
@@ -0,0 +1,73 @@
+'''
+altgraph.GraphStat - Functions providing various graph statistics
+=================================================================
+'''
+
+
+def degree_dist(graph, limits=(0, 0), bin_num=10, mode='out'):
+ '''
+ Computes the degree distribution for a graph.
+
+ Returns a list of tuples where the first element of the tuple is the
+ center of the bin representing a range of degrees and the second element
+ of the tuple are the number of nodes with the degree falling in the range.
+
+ Example::
+
+ ....
+ '''
+
+ deg = []
+ if mode == 'inc':
+ get_deg = graph.inc_degree
+ else:
+ get_deg = graph.out_degree
+
+ for node in graph:
+ deg.append(get_deg(node))
+
+ if not deg:
+ return []
+
+ results = _binning(values=deg, limits=limits, bin_num=bin_num)
+
+ return results
+
+
+_EPS = 1.0/(2.0**32)
+
+
+def _binning(values, limits=(0, 0), bin_num=10):
+ '''
+ Bins data that falls between certain limits, if the limits are (0, 0) the
+ minimum and maximum values are used.
+
+ Returns a list of tuples where the first element of the tuple is the
+ center of the bin and the second element of the tuple are the counts.
+ '''
+ if limits == (0, 0):
+ min_val, max_val = min(values) - _EPS, max(values) + _EPS
+ else:
+ min_val, max_val = limits
+
+ # get bin size
+ bin_size = (max_val - min_val)/float(bin_num)
+ bins = [0] * (bin_num)
+
+ # will ignore these outliers for now
+ for value in values:
+ try:
+ if (value - min_val) >= 0:
+ index = int((value - min_val)/float(bin_size))
+ bins[index] += 1
+ except IndexError:
+ pass
+
+ # make it ready for an x,y plot
+ result = []
+ center = (bin_size/2) + min_val
+ for i, y in enumerate(bins):
+ x = center + bin_size * i
+ result.append((x, y))
+
+ return result
diff --git a/lib/spack/external/altgraph/GraphUtil.py b/lib/spack/external/altgraph/GraphUtil.py
new file mode 100644
index 0000000000..500a74b9f7
--- /dev/null
+++ b/lib/spack/external/altgraph/GraphUtil.py
@@ -0,0 +1,144 @@
+'''
+altgraph.GraphUtil - Utility classes and functions
+==================================================
+'''
+
+import random
+from collections import deque
+from altgraph import Graph
+from altgraph import GraphError
+
+
+def generate_random_graph(
+ node_num, edge_num, self_loops=False, multi_edges=False):
+ '''
+ Generates and returns a :py:class:`~altgraph.Graph.Graph` instance with
+ *node_num* nodes randomly connected by *edge_num* edges.
+ '''
+ g = Graph.Graph()
+
+ if not multi_edges:
+ if self_loops:
+ max_edges = node_num * node_num
+ else:
+ max_edges = node_num * (node_num-1)
+
+ if edge_num > max_edges:
+ raise GraphError(
+ "inconsistent arguments to 'generate_random_graph'")
+
+ nodes = range(node_num)
+
+ for node in nodes:
+ g.add_node(node)
+
+ while 1:
+ head = random.choice(nodes)
+ tail = random.choice(nodes)
+
+ # loop defense
+ if head == tail and not self_loops:
+ continue
+
+ # multiple edge defense
+ if g.edge_by_node(head, tail) is not None and not multi_edges:
+ continue
+
+ # add the edge
+ g.add_edge(head, tail)
+ if g.number_of_edges() >= edge_num:
+ break
+
+ return g
+
+
+def generate_scale_free_graph(
+ steps, growth_num, self_loops=False, multi_edges=False):
+ '''
+ Generates and returns a :py:class:`~altgraph.Graph.Graph` instance that
+ will have *steps* \* *growth_num* nodes and a scale free (powerlaw)
+ connectivity. Starting with a fully connected graph with *growth_num*
+ nodes at every step *growth_num* nodes are added to the graph and are
+ connected to existing nodes with a probability proportional to the degree
+ of these existing nodes.
+ '''
+ # FIXME: The code doesn't seem to do what the documentation claims.
+ graph = Graph.Graph()
+
+ # initialize the graph
+ store = []
+ for i in range(growth_num):
+ for j in range(i + 1, growth_num):
+ store.append(i)
+ store.append(j)
+ graph.add_edge(i, j)
+
+ # generate
+ for node in range(growth_num, steps * growth_num):
+ graph.add_node(node)
+ while graph.out_degree(node) < growth_num:
+ nbr = random.choice(store)
+
+ # loop defense
+ if node == nbr and not self_loops:
+ continue
+
+ # multi edge defense
+ if graph.edge_by_node(node, nbr) and not multi_edges:
+ continue
+
+ graph.add_edge(node, nbr)
+
+ for nbr in graph.out_nbrs(node):
+ store.append(node)
+ store.append(nbr)
+
+ return graph
+
+
+def filter_stack(graph, head, filters):
+ """
+ Perform a walk in a depth-first order starting
+ at *head*.
+
+ Returns (visited, removes, orphans).
+
+ * visited: the set of visited nodes
+ * removes: the list of nodes where the node
+    data does not pass all *filters*
+ * orphans: tuples of (last_good, node),
+ where node is not in removes, is directly
+ reachable from a node in *removes* and
+ *last_good* is the closest upstream node that is not
+ in *removes*.
+ """
+
+ visited, removes, orphans = set([head]), set(), set()
+ stack = deque([(head, head)])
+ get_data = graph.node_data
+ get_edges = graph.out_edges
+ get_tail = graph.tail
+
+ while stack:
+ last_good, node = stack.pop()
+ data = get_data(node)
+ if data is not None:
+ for filtfunc in filters:
+ if not filtfunc(data):
+ removes.add(node)
+ break
+ else:
+ last_good = node
+ for edge in get_edges(node):
+ tail = get_tail(edge)
+ if last_good is not node:
+ orphans.add((last_good, tail))
+ if tail not in visited:
+ visited.add(tail)
+ stack.append((last_good, tail))
+
+ orphans = [
+ (lg, tl)
+ for (lg, tl) in orphans if tl not in removes]
+
+ return visited, removes, orphans
diff --git a/lib/spack/external/altgraph/ObjectGraph.py b/lib/spack/external/altgraph/ObjectGraph.py
new file mode 100644
index 0000000000..f3d6fa187b
--- /dev/null
+++ b/lib/spack/external/altgraph/ObjectGraph.py
@@ -0,0 +1,212 @@
+"""
+altgraph.ObjectGraph - Graph of objects with an identifier
+==========================================================
+
+A graph of objects that have a "graphident" attribute.
+graphident is the key for the object in the graph
+"""
+
+from altgraph import GraphError
+from altgraph.Graph import Graph
+from altgraph.GraphUtil import filter_stack
+
+
+class ObjectGraph(object):
+ """
+ A graph of objects that have a "graphident" attribute.
+ graphident is the key for the object in the graph
+ """
+
+ def __init__(self, graph=None, debug=0):
+ if graph is None:
+ graph = Graph()
+ self.graphident = self
+ self.graph = graph
+ self.debug = debug
+ self.indent = 0
+ graph.add_node(self, None)
+
+ def __repr__(self):
+ return '<%s>' % (type(self).__name__,)
+
+ def flatten(self, condition=None, start=None):
+ """
+ Iterate over the subgraph that is entirely reachable by condition
+ starting from the given start node or the ObjectGraph root
+ """
+ if start is None:
+ start = self
+ start = self.getRawIdent(start)
+ return self.graph.iterdata(start=start, condition=condition)
+
+ def nodes(self):
+ for ident in self.graph:
+ node = self.graph.node_data(ident)
+ if node is not None:
+ yield self.graph.node_data(ident)
+
+ def get_edges(self, node):
+ if node is None:
+ node = self
+ start = self.getRawIdent(node)
+ _, _, outraw, incraw = self.graph.describe_node(start)
+
+ def iter_edges(lst, n):
+ seen = set()
+ for tpl in (self.graph.describe_edge(e) for e in lst):
+ ident = tpl[n]
+ if ident not in seen:
+ yield self.findNode(ident)
+ seen.add(ident)
+ return iter_edges(outraw, 3), iter_edges(incraw, 2)
+
+ def edgeData(self, fromNode, toNode):
+ if fromNode is None:
+ fromNode = self
+ start = self.getRawIdent(fromNode)
+ stop = self.getRawIdent(toNode)
+ edge = self.graph.edge_by_node(start, stop)
+ return self.graph.edge_data(edge)
+
+ def updateEdgeData(self, fromNode, toNode, edgeData):
+ if fromNode is None:
+ fromNode = self
+ start = self.getRawIdent(fromNode)
+ stop = self.getRawIdent(toNode)
+ edge = self.graph.edge_by_node(start, stop)
+ self.graph.update_edge_data(edge, edgeData)
+
+ def filterStack(self, filters):
+ """
+ Filter the ObjectGraph in-place by removing all edges to nodes that
+ do not match every filter in the given filter list
+
+ Returns a tuple containing the number of:
+ (nodes_visited, nodes_removed, nodes_orphaned)
+ """
+ visited, removes, orphans = filter_stack(self.graph, self, filters)
+
+ for last_good, tail in orphans:
+ self.graph.add_edge(last_good, tail, edge_data='orphan')
+
+ for node in removes:
+ self.graph.hide_node(node)
+
+ return len(visited)-1, len(removes), len(orphans)
+
+ def removeNode(self, node):
+ """
+ Remove the given node from the graph if it exists
+ """
+ ident = self.getIdent(node)
+ if ident is not None:
+ self.graph.hide_node(ident)
+
+ def removeReference(self, fromnode, tonode):
+ """
+ Remove all edges from fromnode to tonode
+ """
+ if fromnode is None:
+ fromnode = self
+ fromident = self.getIdent(fromnode)
+ toident = self.getIdent(tonode)
+ if fromident is not None and toident is not None:
+ while True:
+ edge = self.graph.edge_by_node(fromident, toident)
+ if edge is None:
+ break
+ self.graph.hide_edge(edge)
+
+ def getIdent(self, node):
+ """
+ Get the graph identifier for a node
+ """
+ ident = self.getRawIdent(node)
+ if ident is not None:
+ return ident
+ node = self.findNode(node)
+ if node is None:
+ return None
+ return node.graphident
+
+ def getRawIdent(self, node):
+ """
+ Get the identifier for a node object
+ """
+ if node is self:
+ return node
+ ident = getattr(node, 'graphident', None)
+ return ident
+
+ def __contains__(self, node):
+ return self.findNode(node) is not None
+
+ def findNode(self, node):
+ """
+ Find the node on the graph
+ """
+ ident = self.getRawIdent(node)
+ if ident is None:
+ ident = node
+ try:
+ return self.graph.node_data(ident)
+ except KeyError:
+ return None
+
+ def addNode(self, node):
+ """
+ Add a node to the graph referenced by the root
+ """
+ self.msg(4, "addNode", node)
+
+ try:
+ self.graph.restore_node(node.graphident)
+ except GraphError:
+ self.graph.add_node(node.graphident, node)
+
+ def createReference(self, fromnode, tonode, edge_data=None):
+ """
+ Create a reference from fromnode to tonode
+ """
+ if fromnode is None:
+ fromnode = self
+ fromident, toident = self.getIdent(fromnode), self.getIdent(tonode)
+ if fromident is None or toident is None:
+ return
+ self.msg(4, "createReference", fromnode, tonode, edge_data)
+ self.graph.add_edge(fromident, toident, edge_data=edge_data)
+
+ def createNode(self, cls, name, *args, **kw):
+ """
+ Add a node of type cls to the graph if it does not already exist
+ by the given name
+ """
+ m = self.findNode(name)
+ if m is None:
+ m = cls(name, *args, **kw)
+ self.addNode(m)
+ return m
+
+ def msg(self, level, s, *args):
+ """
+ Print a debug message with the given level
+ """
+ if s and level <= self.debug:
+ print("%s%s %s" % (
+ " " * self.indent, s, ' '.join(map(repr, args))))
+
+ def msgin(self, level, s, *args):
+ """
+ Print a debug message and indent
+ """
+ if level <= self.debug:
+ self.msg(level, s, *args)
+ self.indent = self.indent + 1
+
+ def msgout(self, level, s, *args):
+ """
+ Dedent and print a debug message
+ """
+ if level <= self.debug:
+ self.indent = self.indent - 1
+ self.msg(level, s, *args)
diff --git a/lib/spack/external/altgraph/__init__.py b/lib/spack/external/altgraph/__init__.py
new file mode 100644
index 0000000000..289c6408d1
--- /dev/null
+++ b/lib/spack/external/altgraph/__init__.py
@@ -0,0 +1,147 @@
+'''
+altgraph - a python graph library
+=================================
+
+altgraph is a fork of `graphlib <http://pygraphlib.sourceforge.net>`_ tailored
+to use newer Python 2.3+ features, including additional support used by the
+py2app suite (modulegraph and macholib, specifically).
+
+altgraph is a python based graph (network) representation and manipulation
+package. It has started out as an extension to the
+`graph_lib module
+<http://www.ece.arizona.edu/~denny/python_nest/graph_lib_1.0.1.html>`_
+written by Nathan Denny; it has been significantly optimized and expanded.
+
+The :class:`altgraph.Graph.Graph` class is loosely modeled after the
+`LEDA <http://www.algorithmic-solutions.com/enleda.htm>`_
+(Library of Efficient Datatypes) representation. The library
+includes methods for constructing graphs, BFS and DFS traversals,
+topological sort, finding connected components, shortest paths as well as a
+number of graph statistics functions. The library can also visualize graphs
+via `graphviz <http://www.research.att.com/sw/tools/graphviz/>`_.
+
+The package contains the following modules:
+
+ - the :py:mod:`altgraph.Graph` module contains the
+ :class:`~altgraph.Graph.Graph` class that stores the graph data
+
+ - the :py:mod:`altgraph.GraphAlgo` module implements graph algorithms
+    operating on graphs (:py:class:`~altgraph.Graph.Graph` instances)
+
+ - the :py:mod:`altgraph.GraphStat` module contains functions for
+ computing statistical measures on graphs
+
+ - the :py:mod:`altgraph.GraphUtil` module contains functions for
+ generating, reading and saving graphs
+
+ - the :py:mod:`altgraph.Dot` module contains functions for displaying
+ graphs via `graphviz <http://www.research.att.com/sw/tools/graphviz/>`_
+
+ - the :py:mod:`altgraph.ObjectGraph` module implements a graph of
+ objects with a unique identifier
+
+Installation
+------------
+
+Download and unpack the archive then type::
+
+ python setup.py install
+
+This will install the library in the default location. For instructions on
+how to customize the install procedure read the output of::
+
+ python setup.py --help install
+
+To verify that the code works run the test suite::
+
+ python setup.py test
+
+Example usage
+-------------
+
+Let's assume that we want to analyze the graph below (links to the full picture)
+GRAPH_IMG. Our script then might look the following way::
+
+ from altgraph import Graph, GraphAlgo, Dot
+
+ # these are the edges
+ edges = [ (1,2), (2,4), (1,3), (2,4), (3,4), (4,5), (6,5),
+ (6,14), (14,15), (6, 15), (5,7), (7, 8), (7,13), (12,8),
+ (8,13), (11,12), (11,9), (13,11), (9,13), (13,10) ]
+
+ # creates the graph
+ graph = Graph.Graph()
+ for head, tail in edges:
+ graph.add_edge(head, tail)
+
+ # do a forward bfs from 1 at most to 20
+ print(graph.forw_bfs(1))
+
+This will print the nodes in some breadth first order::
+
+ [1, 2, 3, 4, 5, 7, 8, 13, 11, 10, 12, 9]
+
+If we wanted to get the hop-distance from node 1 to node 8
+we could write::
+
+ print(graph.get_hops(1, 8))
+
+This will print the following::
+
+ [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)]
+
+Node 1 is at 0 hops since it is the starting node, nodes 2,3 are 1 hop away ...
+node 8 is 5 hops away. To find the shortest distance between two nodes you
+can use::
+
+ print(GraphAlgo.shortest_path(graph, 1, 12))
+
+It will print the nodes on one (of possibly several) shortest paths::
+
+ [1, 2, 4, 5, 7, 13, 11, 12]
+
+To display the graph we can use the GraphViz backend::
+
+ dot = Dot.Dot(graph)
+
+ # display the graph on the monitor
+ dot.display()
+
+ # save it in an image file
+ dot.save_img(file_name='graph', file_type='gif')
+
+
+
+..
+ @author: U{Istvan Albert<http://www.personal.psu.edu/staff/i/u/iua1/>}
+
+ @license: MIT License
+
+ Copyright (c) 2004 Istvan Albert unless otherwise noted.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to
+ deal in the Software without restriction, including without limitation the
+ rights to use, copy, modify, merge, publish, distribute, sublicense,
+ and/or sell copies of the Software, and to permit persons to whom the
+ Software is furnished to do so.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+ IN THE SOFTWARE.
+ @requires: Python 2.3 or higher
+
+ @newfield contributor: Contributors:
+ @contributor: U{Reka Albert <http://www.phys.psu.edu/~ralbert/>}
+
+'''
+import pkg_resources
+__version__ = pkg_resources.require('altgraph')[0].version
+
+
+class GraphError(ValueError):
+ pass
diff --git a/lib/spack/external/macholib/MachO.py b/lib/spack/external/macholib/MachO.py
new file mode 100644
index 0000000000..84b4e4b717
--- /dev/null
+++ b/lib/spack/external/macholib/MachO.py
@@ -0,0 +1,435 @@
+"""
+Utilities for reading and writing Mach-O headers
+"""
+from __future__ import print_function
+
+import sys
+import struct
+import os
+
+from .mach_o import MH_FILETYPE_SHORTNAMES, LC_DYSYMTAB, LC_SYMTAB
+from .mach_o import load_command, S_ZEROFILL, section_64, section
+from .mach_o import LC_REGISTRY, LC_ID_DYLIB, LC_SEGMENT, fat_header
+from .mach_o import LC_SEGMENT_64, MH_CIGAM_64, MH_MAGIC_64, FAT_MAGIC
+from .mach_o import mach_header, fat_arch64, FAT_MAGIC_64, fat_arch
+from .mach_o import LC_REEXPORT_DYLIB, LC_PREBOUND_DYLIB, LC_LOAD_WEAK_DYLIB
+from .mach_o import LC_LOAD_UPWARD_DYLIB, LC_LOAD_DYLIB, mach_header_64
+from .mach_o import MH_CIGAM, MH_MAGIC
+from .ptypes import sizeof
+
+from macholib.util import fileview
+try:
+ from macholib.compat import bytes
+except ImportError:
+ pass
+
+try:
+ unicode
+except NameError:
+ unicode = str
+
+if sys.version_info[0] == 2:
+ range = xrange # noqa: F821
+
+__all__ = ['MachO']
+
+_RELOCATABLE = set((
+ # relocatable commands that should be used for dependency walking
+ LC_LOAD_DYLIB,
+ LC_LOAD_UPWARD_DYLIB,
+ LC_LOAD_WEAK_DYLIB,
+ LC_PREBOUND_DYLIB,
+ LC_REEXPORT_DYLIB,
+))
+
+_RELOCATABLE_NAMES = {
+ LC_LOAD_DYLIB: 'load_dylib',
+ LC_LOAD_UPWARD_DYLIB: 'load_upward_dylib',
+ LC_LOAD_WEAK_DYLIB: 'load_weak_dylib',
+ LC_PREBOUND_DYLIB: 'prebound_dylib',
+ LC_REEXPORT_DYLIB: 'reexport_dylib',
+}
+
+
+def _shouldRelocateCommand(cmd):
+ """
+ Should this command id be investigated for relocation?
+ """
+ return cmd in _RELOCATABLE
+
+
+def lc_str_value(offset, cmd_info):
+ """
+ Fetch the actual value of a field of type "lc_str"
+ """
+ cmd_load, cmd_cmd, cmd_data = cmd_info
+
+ offset -= sizeof(cmd_load) + sizeof(cmd_cmd)
+ return cmd_data[offset:].strip(b'\x00')
+
+
+class MachO(object):
+ """
+ Provides reading/writing the Mach-O header of a specific existing file
+ """
+ # filename - the original filename of this mach-o
+ # sizediff - the current deviation from the initial mach-o size
+ # header - the mach-o header
+ # commands - a list of (load_command, somecommand, data)
+ # data is either a str, or a list of segment structures
+ # total_size - the current mach-o header size (including header)
+ # low_offset - essentially, the maximum mach-o header size
+ # id_cmd - the index of my id command, or None
+
+ def __init__(self, filename):
+
+ # supports the ObjectGraph protocol
+ self.graphident = filename
+ self.filename = filename
+ self.loader_path = os.path.dirname(filename)
+
+ # initialized by load
+ self.fat = None
+ self.headers = []
+ with open(filename, 'rb') as fp:
+ self.load(fp)
+
+ def __repr__(self):
+ return "<MachO filename=%r>" % (self.filename,)
+
+ def load(self, fh):
+ assert fh.tell() == 0
+ header = struct.unpack('>I', fh.read(4))[0]
+ fh.seek(0)
+ if header in (FAT_MAGIC, FAT_MAGIC_64):
+ self.load_fat(fh)
+ else:
+ fh.seek(0, 2)
+ size = fh.tell()
+ fh.seek(0)
+ self.load_header(fh, 0, size)
+
+ def load_fat(self, fh):
+ self.fat = fat_header.from_fileobj(fh)
+ if self.fat.magic == FAT_MAGIC:
+ archs = [fat_arch.from_fileobj(fh)
+ for i in range(self.fat.nfat_arch)]
+ elif self.fat.magic == FAT_MAGIC_64:
+ archs = [fat_arch64.from_fileobj(fh)
+ for i in range(self.fat.nfat_arch)]
+ else:
+ raise ValueError("Unknown fat header magic: %r" % (self.fat.magic))
+
+ for arch in archs:
+ self.load_header(fh, arch.offset, arch.size)
+
+ def rewriteLoadCommands(self, *args, **kw):
+ changed = False
+ for header in self.headers:
+ if header.rewriteLoadCommands(*args, **kw):
+ changed = True
+ return changed
+
+ def load_header(self, fh, offset, size):
+ fh.seek(offset)
+ header = struct.unpack('>I', fh.read(4))[0]
+ fh.seek(offset)
+ if header == MH_MAGIC:
+ magic, hdr, endian = MH_MAGIC, mach_header, '>'
+ elif header == MH_CIGAM:
+ magic, hdr, endian = MH_CIGAM, mach_header, '<'
+ elif header == MH_MAGIC_64:
+ magic, hdr, endian = MH_MAGIC_64, mach_header_64, '>'
+ elif header == MH_CIGAM_64:
+ magic, hdr, endian = MH_CIGAM_64, mach_header_64, '<'
+ else:
+ raise ValueError("Unknown Mach-O header: 0x%08x in %r" % (
+ header, fh))
+ hdr = MachOHeader(self, fh, offset, size, magic, hdr, endian)
+ self.headers.append(hdr)
+
+ def write(self, f):
+ for header in self.headers:
+ header.write(f)
+
+
+class MachOHeader(object):
+ """
+ Provides reading/writing the Mach-O header of a specific existing file
+ """
+ # filename - the original filename of this mach-o
+ # sizediff - the current deviation from the initial mach-o size
+ # header - the mach-o header
+ # commands - a list of (load_command, somecommand, data)
+ # data is either a str, or a list of segment structures
+ # total_size - the current mach-o header size (including header)
+ # low_offset - essentially, the maximum mach-o header size
+ # id_cmd - the index of my id command, or None
+
+ def __init__(self, parent, fh, offset, size, magic, hdr, endian):
+ self.MH_MAGIC = magic
+ self.mach_header = hdr
+
+ # These are all initialized by self.load()
+ self.parent = parent
+ self.offset = offset
+ self.size = size
+
+ self.endian = endian
+ self.header = None
+ self.commands = None
+ self.id_cmd = None
+ self.sizediff = None
+ self.total_size = None
+ self.low_offset = None
+ self.filetype = None
+ self.headers = []
+
+ self.load(fh)
+
+ def __repr__(self):
+ return "<%s filename=%r offset=%d size=%d endian=%r>" % (
+ type(self).__name__, self.parent.filename, self.offset, self.size,
+ self.endian)
+
+ def load(self, fh):
+ fh = fileview(fh, self.offset, self.size)
+ fh.seek(0)
+
+ self.sizediff = 0
+ kw = {'_endian_': self.endian}
+ header = self.mach_header.from_fileobj(fh, **kw)
+ self.header = header
+ # if header.magic != self.MH_MAGIC:
+ # raise ValueError("header has magic %08x, expecting %08x" % (
+ # header.magic, self.MH_MAGIC))
+
+ cmd = self.commands = []
+
+ self.filetype = self.get_filetype_shortname(header.filetype)
+
+ read_bytes = 0
+ low_offset = sys.maxsize
+ for i in range(header.ncmds):
+ # read the load command
+ cmd_load = load_command.from_fileobj(fh, **kw)
+
+ # read the specific command
+ klass = LC_REGISTRY.get(cmd_load.cmd, None)
+ if klass is None:
+ raise ValueError("Unknown load command: %d" % (cmd_load.cmd,))
+ cmd_cmd = klass.from_fileobj(fh, **kw)
+
+ if cmd_load.cmd == LC_ID_DYLIB:
+ # remember where this command was
+ if self.id_cmd is not None:
+ raise ValueError("This dylib already has an id")
+ self.id_cmd = i
+
+ if cmd_load.cmd in (LC_SEGMENT, LC_SEGMENT_64):
+ # for segment commands, read the list of segments
+ segs = []
+ # assert that the size makes sense
+ if cmd_load.cmd == LC_SEGMENT:
+ section_cls = section
+ else: # LC_SEGMENT_64
+ section_cls = section_64
+
+ expected_size = (
+ sizeof(klass) + sizeof(load_command) +
+ (sizeof(section_cls) * cmd_cmd.nsects)
+ )
+ if cmd_load.cmdsize != expected_size:
+ raise ValueError("Segment size mismatch")
+ # this is a zero block or something
+ # so the beginning is wherever the fileoff of this command is
+ if cmd_cmd.nsects == 0:
+ if cmd_cmd.filesize != 0:
+ low_offset = min(low_offset, cmd_cmd.fileoff)
+ else:
+ # this one has multiple segments
+ for j in range(cmd_cmd.nsects):
+ # read the segment
+ seg = section_cls.from_fileobj(fh, **kw)
+ # if the segment has a size and is not zero filled
+ # then its beginning is the offset of this segment
+ not_zerofill = ((seg.flags & S_ZEROFILL) != S_ZEROFILL)
+ if seg.offset > 0 and seg.size > 0 and not_zerofill:
+ low_offset = min(low_offset, seg.offset)
+ if not_zerofill:
+ c = fh.tell()
+ fh.seek(seg.offset)
+ sd = fh.read(seg.size)
+ seg.add_section_data(sd)
+ fh.seek(c)
+ segs.append(seg)
+ # data is a list of segments
+ cmd_data = segs
+
+ # XXX: Disabled for now because writing back doesn't work
+ # elif cmd_load.cmd == LC_CODE_SIGNATURE:
+ # c = fh.tell()
+ # fh.seek(cmd_cmd.dataoff)
+ # cmd_data = fh.read(cmd_cmd.datasize)
+ # fh.seek(c)
+ # elif cmd_load.cmd == LC_SYMTAB:
+ # c = fh.tell()
+ # fh.seek(cmd_cmd.stroff)
+ # cmd_data = fh.read(cmd_cmd.strsize)
+ # fh.seek(c)
+
+ else:
+ # data is a raw str
+ data_size = (
+ cmd_load.cmdsize - sizeof(klass) - sizeof(load_command)
+ )
+ cmd_data = fh.read(data_size)
+ cmd.append((cmd_load, cmd_cmd, cmd_data))
+ read_bytes += cmd_load.cmdsize
+
+ # make sure the header made sense
+ if read_bytes != header.sizeofcmds:
+ raise ValueError("Read %d bytes, header reports %d bytes" % (
+ read_bytes, header.sizeofcmds))
+ self.total_size = sizeof(self.mach_header) + read_bytes
+ self.low_offset = low_offset
+
+ def walkRelocatables(self, shouldRelocateCommand=_shouldRelocateCommand):
+ """
+ for all relocatable commands
+ yield (command_index, command_name, filename)
+ """
+ for (idx, (lc, cmd, data)) in enumerate(self.commands):
+ if shouldRelocateCommand(lc.cmd):
+ name = _RELOCATABLE_NAMES[lc.cmd]
+ ofs = cmd.name - sizeof(lc.__class__) - sizeof(cmd.__class__)
+ yield idx, name, data[ofs:data.find(b'\x00', ofs)].decode(
+ sys.getfilesystemencoding())
+
+ def rewriteInstallNameCommand(self, loadcmd):
+ """Rewrite the load command of this dylib"""
+ if self.id_cmd is not None:
+ self.rewriteDataForCommand(self.id_cmd, loadcmd)
+ return True
+ return False
+
+ def changedHeaderSizeBy(self, bytes):
+ self.sizediff += bytes
+ if (self.total_size + self.sizediff) > self.low_offset:
+ print(
+ "WARNING: Mach-O header in %r may be too large to relocate" % (
+ self.parent.filename,))
+
+ def rewriteLoadCommands(self, changefunc):
+ """
+ Rewrite the load commands based upon a change dictionary
+ """
+ data = changefunc(self.parent.filename)
+ changed = False
+ if data is not None:
+ if self.rewriteInstallNameCommand(
+ data.encode(sys.getfilesystemencoding())):
+ changed = True
+ for idx, name, filename in self.walkRelocatables():
+ data = changefunc(filename)
+ if data is not None:
+ if self.rewriteDataForCommand(idx, data.encode(
+ sys.getfilesystemencoding())):
+ changed = True
+ return changed
+
+ def rewriteDataForCommand(self, idx, data):
+ lc, cmd, old_data = self.commands[idx]
+ hdrsize = sizeof(lc.__class__) + sizeof(cmd.__class__)
+ align = struct.calcsize('Q')
+ data = data + (b'\x00' * (align - (len(data) % align)))
+ newsize = hdrsize + len(data)
+ self.commands[idx] = (lc, cmd, data)
+ self.changedHeaderSizeBy(newsize - lc.cmdsize)
+ lc.cmdsize, cmd.name = newsize, hdrsize
+ return True
+
+ def synchronize_size(self):
+ if (self.total_size + self.sizediff) > self.low_offset:
+ raise ValueError(
+ ("New Mach-O header is too large to relocate in %r "
+ "(new size=%r, max size=%r, delta=%r)") % (
+ self.parent.filename, self.total_size + self.sizediff,
+ self.low_offset, self.sizediff))
+ self.header.sizeofcmds += self.sizediff
+ self.total_size = sizeof(self.mach_header) + self.header.sizeofcmds
+ self.sizediff = 0
+
+ def write(self, fileobj):
+ fileobj = fileview(fileobj, self.offset, self.size)
+ fileobj.seek(0)
+
+ # serialize all the mach-o commands
+ self.synchronize_size()
+
+ self.header.to_fileobj(fileobj)
+ for lc, cmd, data in self.commands:
+ lc.to_fileobj(fileobj)
+ cmd.to_fileobj(fileobj)
+
+ if sys.version_info[0] == 2:
+ if isinstance(data, unicode):
+ fileobj.write(data.encode(sys.getfilesystemencoding()))
+
+ elif isinstance(data, (bytes, str)):
+ fileobj.write(data)
+ else:
+ # segments..
+ for obj in data:
+ obj.to_fileobj(fileobj)
+ else:
+ if isinstance(data, str):
+ fileobj.write(data.encode(sys.getfilesystemencoding()))
+
+ elif isinstance(data, bytes):
+ fileobj.write(data)
+
+ else:
+ # segments..
+ for obj in data:
+ obj.to_fileobj(fileobj)
+
+ # zero out the unused space, doubt this is strictly necessary
+ # and is generally probably already the case
+ fileobj.write(b'\x00' * (self.low_offset - fileobj.tell()))
+
+ def getSymbolTableCommand(self):
+ for lc, cmd, data in self.commands:
+ if lc.cmd == LC_SYMTAB:
+ return cmd
+ return None
+
+ def getDynamicSymbolTableCommand(self):
+ for lc, cmd, data in self.commands:
+ if lc.cmd == LC_DYSYMTAB:
+ return cmd
+ return None
+
+ def get_filetype_shortname(self, filetype):
+ if filetype in MH_FILETYPE_SHORTNAMES:
+ return MH_FILETYPE_SHORTNAMES[filetype]
+ else:
+ return 'unknown'
+
+
+def main(fn):
+ m = MachO(fn)
+ seen = set()
+ for header in m.headers:
+ for idx, name, other in header.walkRelocatables():
+ if other not in seen:
+ seen.add(other)
+ print('\t' + name + ": " + other)
+
+
+if __name__ == '__main__':
+ import sys
+ files = sys.argv[1:] or ['/bin/ls']
+ for fn in files:
+ print(fn)
+ main(fn)
diff --git a/lib/spack/external/macholib/MachOGraph.py b/lib/spack/external/macholib/MachOGraph.py
new file mode 100644
index 0000000000..5a733c3ed0
--- /dev/null
+++ b/lib/spack/external/macholib/MachOGraph.py
@@ -0,0 +1,138 @@
+"""
+Utilities for reading and writing Mach-O headers
+"""
+
+import os
+import sys
+
+from altgraph.ObjectGraph import ObjectGraph
+
+from macholib.dyld import dyld_find
+from macholib.MachO import MachO
+from macholib.itergraphreport import itergraphreport
+
+__all__ = ['MachOGraph']
+
+try:
+ unicode
+except NameError:
+ unicode = str
+
+
+class MissingMachO(object):
+ def __init__(self, filename):
+ self.graphident = filename
+ self.headers = ()
+
+ def __repr__(self):
+ return '<%s graphident=%r>' % (type(self).__name__, self.graphident)
+
+
+class MachOGraph(ObjectGraph):
+ """
+ Graph data structure of Mach-O dependencies
+ """
+ def __init__(self, debug=0, graph=None, env=None, executable_path=None):
+ super(MachOGraph, self).__init__(debug=debug, graph=graph)
+ self.env = env
+ self.trans_table = {}
+ self.executable_path = executable_path
+
+ def locate(self, filename, loader=None):
+ if not isinstance(filename, (str, unicode)):
+ raise TypeError("%r is not a string" % (filename,))
+ if filename.startswith('@loader_path/') and loader is not None:
+ fn = self.trans_table.get((loader.filename, filename))
+ if fn is None:
+ loader_path = loader.loader_path
+
+ try:
+ fn = dyld_find(
+ filename, env=self.env,
+ executable_path=self.executable_path,
+ loader_path=loader_path)
+ self.trans_table[(loader.filename, filename)] = fn
+ except ValueError:
+ return None
+
+ else:
+ fn = self.trans_table.get(filename)
+ if fn is None:
+ try:
+ fn = dyld_find(
+ filename, env=self.env,
+ executable_path=self.executable_path)
+ self.trans_table[filename] = fn
+ except ValueError:
+ return None
+ return fn
+
+ def findNode(self, name, loader=None):
+ assert isinstance(name, (str, unicode))
+ data = super(MachOGraph, self).findNode(name)
+ if data is not None:
+ return data
+ newname = self.locate(name, loader=loader)
+ if newname is not None and newname != name:
+ return self.findNode(newname)
+ return None
+
+ def run_file(self, pathname, caller=None):
+ assert isinstance(pathname, (str, unicode))
+ self.msgin(2, "run_file", pathname)
+ m = self.findNode(pathname, loader=caller)
+ if m is None:
+ if not os.path.exists(pathname):
+ raise ValueError('%r does not exist' % (pathname,))
+ m = self.createNode(MachO, pathname)
+ self.createReference(caller, m, edge_data='run_file')
+ self.scan_node(m)
+ self.msgout(2, '')
+ return m
+
+ def load_file(self, name, caller=None):
+ assert isinstance(name, (str, unicode))
+ self.msgin(2, "load_file", name, caller)
+ m = self.findNode(name, loader=caller)
+ if m is None:
+ newname = self.locate(name, loader=caller)
+ if newname is not None and newname != name:
+ return self.load_file(newname, caller=caller)
+ if os.path.exists(name):
+ m = self.createNode(MachO, name)
+ self.scan_node(m)
+ else:
+ m = self.createNode(MissingMachO, name)
+ self.msgout(2, '')
+ return m
+
+ def scan_node(self, node):
+ self.msgin(2, 'scan_node', node)
+ for header in node.headers:
+ for idx, name, filename in header.walkRelocatables():
+ assert isinstance(name, (str, unicode))
+ assert isinstance(filename, (str, unicode))
+ m = self.load_file(filename, caller=node)
+ self.createReference(node, m, edge_data=name)
+ self.msgout(2, '', node)
+
+ def itergraphreport(self, name='G'):
+ nodes = map(self.graph.describe_node, self.graph.iterdfs(self))
+ describe_edge = self.graph.describe_edge
+ return itergraphreport(nodes, describe_edge, name=name)
+
+ def graphreport(self, fileobj=None):
+ if fileobj is None:
+ fileobj = sys.stdout
+ fileobj.writelines(self.itergraphreport())
+
+
+def main(args):
+ g = MachOGraph()
+ for arg in args:
+ g.run_file(arg)
+ g.graphreport()
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:] or ['/bin/ls'])
diff --git a/lib/spack/external/macholib/MachOStandalone.py b/lib/spack/external/macholib/MachOStandalone.py
new file mode 100644
index 0000000000..c4f5b84d19
--- /dev/null
+++ b/lib/spack/external/macholib/MachOStandalone.py
@@ -0,0 +1,169 @@
+import os
+
+from macholib.MachOGraph import MachOGraph, MissingMachO
+from macholib.util import iter_platform_files, in_system_path, mergecopy, \
+ mergetree, flipwritable, has_filename_filter
+from macholib.dyld import framework_info
+from collections import deque
+
+
class ExcludedMachO(MissingMachO):
    """Placeholder node for binaries deliberately left out of the bundle
    (system libraries and paths matching MachOStandalone.excludes)."""
    pass
+
+
class FilteredMachOGraph(MachOGraph):
    """MachOGraph that routes node creation and path resolution through a
    delegate (a MachOStandalone instance).

    The delegate decides which node class represents a binary (e.g. the
    ExcludedMachO marker for system libraries) and may redirect located
    paths into the standalone bundle.
    """

    def __init__(self, delegate, *args, **kwargs):
        super(FilteredMachOGraph, self).__init__(*args, **kwargs)
        self.delegate = delegate

    def createNode(self, cls, name):
        # Let the delegate substitute the node class before creation.
        cls = self.delegate.getClass(name, cls)
        res = super(FilteredMachOGraph, self).createNode(cls, name)
        return self.delegate.update_node(res)

    def locate(self, filename, loader=None):
        # Bug fix: removed a stray debug print() that wrote every lookup
        # to stdout, polluting the output of library consumers.
        newname = super(FilteredMachOGraph, self).locate(filename, loader)
        if newname is None:
            return None
        return self.delegate.locate(newname, loader=loader)
+
+
class MachOStandalone(object):
    """Copy every non-system Mach-O dependency of a bundle into the bundle
    and rewrite load commands so the bundle is self-contained.

    ``base`` is the bundle root; copies land in ``dest`` (defaulting to
    ``<base>/Contents/Frameworks``).  Also serves as the delegate of a
    FilteredMachOGraph: getClass(), locate() and update_node() are the
    delegate hooks the graph calls back into.
    """

    def __init__(
            self, base, dest=None, graph=None, env=None,
            executable_path=None):
        # The trailing os.path.join(..., '') guarantees a trailing path
        # separator so the startswith() prefix checks below cannot match
        # sibling directories with a common name prefix.
        self.base = os.path.join(os.path.abspath(base), '')
        if dest is None:
            dest = os.path.join(self.base, 'Contents', 'Frameworks')
        self.dest = dest
        self.mm = FilteredMachOGraph(
            self, graph=graph, env=env, executable_path=executable_path)
        # Maps original dylib/framework path -> its path inside dest.
        self.changemap = {}
        # Path prefixes that must never be copied into the bundle.
        self.excludes = []
        # Work queue of (framework destination, platform-file iterator)
        # produced by copy_framework() and drained in run().
        self.pending = deque()

    def update_node(self, m):
        # Delegate hook: subclasses may post-process new graph nodes.
        return m

    def getClass(self, name, cls):
        # Delegate hook: use the ExcludedMachO marker for binaries that
        # stay outside the bundle.
        if in_system_path(name):
            return ExcludedMachO
        for base in self.excludes:
            if name.startswith(base):
                return ExcludedMachO
        return cls

    def locate(self, filename, loader=None):
        """Return the path a dependency will have in the standalone
        bundle, copying it into the bundle on first sight."""
        # System, excluded and already-bundled files keep their path.
        if in_system_path(filename):
            return filename
        if filename.startswith(self.base):
            return filename
        for base in self.excludes:
            if filename.startswith(base):
                return filename
        if filename in self.changemap:
            return self.changemap[filename]
        info = framework_info(filename)
        if info is None:
            res = self.copy_dylib(filename)
            self.changemap[filename] = res
            return res
        else:
            res = self.copy_framework(info)
            self.changemap[filename] = res
            return res

    def copy_dylib(self, filename):
        # When the filename is a symlink use the basename of the target of
        # the link as the name in standalone bundle. This avoids problems
        # when two libraries link to the same dylib but using different
        # symlinks.
        if os.path.islink(filename):
            dest = os.path.join(
                self.dest, os.path.basename(os.path.realpath(filename)))
        else:
            dest = os.path.join(self.dest, os.path.basename(filename))

        if not os.path.exists(dest):
            self.mergecopy(filename, dest)
        return dest

    def mergecopy(self, src, dest):
        # Overridable single-file copy primitive.
        return mergecopy(src, dest)

    def mergetree(self, src, dest):
        # Overridable directory-tree copy primitive.
        return mergetree(src, dest)

    def copy_framework(self, info):
        # Copy the whole .framework directory and queue its platform
        # files for dependency scanning (drained by run()).
        dest = os.path.join(self.dest, info['shortname'] + '.framework')
        destfn = os.path.join(self.dest, info['name'])
        src = os.path.join(info['location'], info['shortname'] + '.framework')
        if not os.path.exists(dest):
            self.mergetree(src, dest)
            self.pending.append((destfn, iter_platform_files(dest)))
        return destfn

    def run(self, platfiles=None, contents=None):
        """Scan, copy and rewrite; returns the set of bundled file
        paths whose load commands were considered."""
        mm = self.mm
        if contents is None:
            contents = '@executable_path/..'
        if platfiles is None:
            platfiles = iter_platform_files(self.base)

        for fn in platfiles:
            mm.run_file(fn)

        # Frameworks copied while scanning enqueue their own platform
        # files; keep scanning until no new work appears.
        while self.pending:
            fmwk, files = self.pending.popleft()
            ref = mm.findNode(fmwk)
            for fn in files:
                mm.run_file(fn, caller=ref)

        changemap = {}
        skipcontents = os.path.join(os.path.dirname(self.dest), '')
        machfiles = []

        for node in mm.flatten(has_filename_filter):
            machfiles.append(node)
            dest = os.path.join(
                contents, os.path.normpath(node.filename[len(skipcontents):]))
            changemap[node.filename] = dest

        def changefunc(path):
            # NOTE(review): relies on late binding -- `node` refers to the
            # current item of the `for node in machfiles` loop below when
            # this is invoked from rewriteLoadCommands().
            if path.startswith('@loader_path/'):
                # XXX: This is a quick hack for py2app: In that
                # usecase paths like this are found in the load
                # commands of relocatable wheels. Those don't
                # need rewriting.
                return path

            res = mm.locate(path)
            rv = changemap.get(res)
            # NOTE(review): this branch looks unreachable -- @loader_path
            # prefixed names already returned above; confirm intent.
            if rv is None and path.startswith('@loader_path/'):
                rv = changemap.get(mm.locate(mm.trans_table.get(
                    (node.filename, path))))
            return rv

        for node in machfiles:
            fn = mm.locate(node.filename)
            if fn is None:
                continue
            rewroteAny = False
            # NOTE(review): rewriteLoadCommands() is called once per
            # header; presumably it rewrites all headers each time --
            # confirm against MachO.rewriteLoadCommands.
            for header in node.headers:
                if node.rewriteLoadCommands(changefunc):
                    rewroteAny = True
            if rewroteAny:
                # Temporarily make the file writable while rewriting the
                # headers in place.
                old_mode = flipwritable(fn)
                try:
                    with open(fn, 'rb+') as f:
                        for header in node.headers:
                            f.seek(0)
                            node.write(f)
                        f.seek(0, 2)
                        f.flush()
                finally:
                    flipwritable(fn, old_mode)

        allfiles = [mm.locate(node.filename) for node in machfiles]
        return set(filter(None, allfiles))
diff --git a/lib/spack/external/macholib/SymbolTable.py b/lib/spack/external/macholib/SymbolTable.py
new file mode 100644
index 0000000000..bf4d383ab7
--- /dev/null
+++ b/lib/spack/external/macholib/SymbolTable.py
@@ -0,0 +1,86 @@
+"""
+Class to read the symbol table from a Mach-O header
+"""
+from __future__ import with_statement
+
+from macholib.mach_o import relocation_info, dylib_reference, dylib_module
+from macholib.mach_o import dylib_table_of_contents, nlist, nlist_64
+from macholib.mach_o import MH_CIGAM_64, MH_MAGIC_64
+import sys
+
+__all__ = ['SymbolTable']
+
+if sys.version_info[0] == 2:
+ range = xrange # noqa: F821
+
+
class SymbolTable(object):
    """Parse the symbol table (LC_SYMTAB) and dynamic symbol table
    (LC_DYSYMTAB) of one Mach-O header of *macho*.

    Populates:
      nlists -- list of (nlist, name) pairs (when LC_SYMTAB is present)
      localsyms / extdefsyms / undefsyms / toc -- from LC_DYSYMTAB
    """

    def __init__(self, macho, header=None, openfile=None):
        # openfile: injectable substitute for open(), mainly for testing.
        if openfile is None:
            openfile = open
        # Default to the first (or only) architecture in the file.
        if header is None:
            header = macho.headers[0]
        self.macho_header = header
        with openfile(macho.filename, 'rb') as fh:
            self.symtab = header.getSymbolTableCommand()
            self.dysymtab = header.getDynamicSymbolTableCommand()

            if self.symtab is not None:
                self.nlists = self.readSymbolTable(fh)

            if self.dysymtab is not None:
                self.readDynamicSymbolTable(fh)

    def readSymbolTable(self, fh):
        """Return a list of (nlist, name) pairs read from LC_SYMTAB."""
        cmd = self.symtab
        # All file offsets are relative to this header's offset within
        # the (possibly fat) file.
        fh.seek(self.macho_header.offset + cmd.stroff)
        strtab = fh.read(cmd.strsize)
        fh.seek(self.macho_header.offset + cmd.symoff)
        nlists = []

        if self.macho_header.MH_MAGIC in [MH_MAGIC_64, MH_CIGAM_64]:
            cls = nlist_64
        else:
            cls = nlist

        # range() is evaluated once, so rebinding `cmd` inside the loop
        # does not affect the iteration count.
        for i in range(cmd.nsyms):
            cmd = cls.from_fileobj(fh, _endian_=self.macho_header.endian)
            if cmd.n_un == 0:
                # n_un == 0 means "no name".
                nlists.append((cmd, ''))
            else:
                # n_un is an offset into the NUL-terminated string table.
                nlists.append(
                    (cmd, strtab[cmd.n_un:strtab.find(b'\x00', cmd.n_un)]))
        return nlists

    def readDynamicSymbolTable(self, fh):
        """Slice self.nlists into local/external/undefined groups and read
        the table of contents, per the LC_DYSYMTAB command."""
        cmd = self.dysymtab
        nlists = self.nlists

        self.localsyms = nlists[cmd.ilocalsym:cmd.ilocalsym+cmd.nlocalsym]
        self.extdefsyms = nlists[cmd.iextdefsym:cmd.iextdefsym+cmd.nextdefsym]
        self.undefsyms = nlists[cmd.iundefsym:cmd.iundefsym+cmd.nundefsym]
        if cmd.tocoff == 0:
            self.toc = None
        else:
            self.toc = self.readtoc(fh, cmd.tocoff, cmd.ntoc)

    def readtoc(self, fh, off, n):
        # Read *n* dylib_table_of_contents entries at header offset *off*.
        fh.seek(self.macho_header.offset + off)
        return [dylib_table_of_contents.from_fileobj(fh) for i in range(n)]

    def readmodtab(self, fh, off, n):
        # Read *n* dylib_module entries at header offset *off*.
        fh.seek(self.macho_header.offset + off)
        return [dylib_module.from_fileobj(fh) for i in range(n)]

    def readsym(self, fh, off, n):
        """Read *n* dylib_reference entries; returns a list of
        ((nlist, name), flags) pairs."""
        fh.seek(self.macho_header.offset + off)
        refs = []
        for i in range(n):
            ref = dylib_reference.from_fileobj(fh)
            # isym_flags packs the symbol index (high bits) with 8 flag
            # bits in the low byte.
            isym, flags = divmod(ref.isym_flags, 256)
            refs.append((self.nlists[isym], flags))
        return refs

    def readrel(self, fh, off, n):
        # Read *n* relocation_info entries at header offset *off*.
        fh.seek(self.macho_header.offset + off)
        return [relocation_info.from_fileobj(fh) for i in range(n)]
diff --git a/lib/spack/external/macholib/__init__.py b/lib/spack/external/macholib/__init__.py
new file mode 100644
index 0000000000..f77c79492b
--- /dev/null
+++ b/lib/spack/external/macholib/__init__.py
@@ -0,0 +1,8 @@
+"""
+Enough Mach-O to make your head spin.
+
+See the relevant header files in /usr/include/mach-o
+
+And also Apple's documentation.
+"""
+__version__ = '1.10'
diff --git a/lib/spack/external/macholib/__main__.py b/lib/spack/external/macholib/__main__.py
new file mode 100644
index 0000000000..66eaad865c
--- /dev/null
+++ b/lib/spack/external/macholib/__main__.py
@@ -0,0 +1,83 @@
+from __future__ import print_function, absolute_import
+import os
+import sys
+
+from macholib.util import is_platform_file
+from macholib import macho_dump
+from macholib import macho_standalone
+
+gCommand = None
+
+
def check_file(fp, path, callback):
    """Invoke ``callback(fp, path)`` if *path* is a Mach-O platform file.

    Returns 0 on success (including non-platform files, which are simply
    skipped) and 1 when the path is missing or unreadable; error messages
    go to stderr prefixed with the current sub-command name (module
    global ``gCommand``).
    """
    if not os.path.exists(path):
        print(
            '%s: %s: No such file or directory' % (gCommand, path),
            file=sys.stderr)
        return 1

    try:
        is_plat = is_platform_file(path)

    except IOError as msg:
        print('%s: %s: %s' % (gCommand, path, msg), file=sys.stderr)
        return 1

    else:
        if is_plat:
            callback(fp, path)
        return 0
+
+
def walk_tree(callback, paths):
    """Apply check_file() to every file named in *paths*; directories are
    walked recursively.  Returns the bitwise OR of all per-file results
    (non-zero when any file was missing or unreadable)."""
    status = 0
    for entry in paths:
        if not os.path.isdir(entry):
            status |= check_file(sys.stdout, entry, callback)
            continue
        for dirpath, _dirnames, filenames in os.walk(entry):
            for name in filenames:
                status |= check_file(
                    sys.stdout, os.path.join(dirpath, name), callback)
    return status
+
+
def print_usage(fp):
    """Write the command-line usage summary to the file object *fp*."""
    usage_lines = (
        "Usage:",
        " python -mmacholib [help|--help]",
        " python -mmacholib dump FILE ...",
        " python -mmacholib find DIR ...",
        " python -mmacholib standalone DIR ...",
    )
    for line in usage_lines:
        print(line, file=fp)
+
+
def main():
    """Dispatch the ``python -mmacholib`` sub-commands.

    Bug fix: ``help``/``--help`` previously required a dummy extra
    argument (the argv length was checked before the command was
    inspected) and the usage text went to stderr with exit status 1.
    Handle help before requiring file/directory arguments.
    """
    global gCommand
    if len(sys.argv) < 2:
        print_usage(sys.stderr)
        sys.exit(1)

    gCommand = sys.argv[1]

    # Help needs no further arguments.
    if gCommand in ('help', '--help'):
        print_usage(sys.stdout)
        sys.exit(0)

    # Every other sub-command requires at least one path argument.
    if len(sys.argv) < 3:
        print_usage(sys.stderr)
        sys.exit(1)

    if gCommand == 'dump':
        walk_tree(macho_dump.print_file, sys.argv[2:])

    elif gCommand == 'find':
        walk_tree(lambda fp, path: print(path, file=fp), sys.argv[2:])

    elif gCommand == 'standalone':
        for dn in sys.argv[2:]:
            macho_standalone.standaloneApp(dn)

    else:
        print_usage(sys.stderr)
        sys.exit(1)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/lib/spack/external/macholib/_cmdline.py b/lib/spack/external/macholib/_cmdline.py
new file mode 100644
index 0000000000..c6227334a1
--- /dev/null
+++ b/lib/spack/external/macholib/_cmdline.py
@@ -0,0 +1,48 @@
+"""
+Internal helpers for basic commandline tools
+"""
+from __future__ import print_function, absolute_import
+import os
+import sys
+
+from macholib.util import is_platform_file
+
+
def check_file(fp, path, callback):
    """Run ``callback(fp, path)`` when *path* is a Mach-O platform file.

    Returns 1 (with a message on stderr) when the path is missing or
    unreadable, 0 otherwise; non-platform files are silently skipped.
    """
    prog = sys.argv[0]

    if not os.path.exists(path):
        print('%s: %s: No such file or directory' % (prog, path),
              file=sys.stderr)
        return 1

    try:
        is_plat = is_platform_file(path)
    except IOError as msg:
        print('%s: %s: %s' % (prog, path, msg), file=sys.stderr)
        return 1

    if is_plat:
        callback(fp, path)
    return 0
+
+
def main(callback):
    """Run *callback* over every platform file named on the command line.

    Directories are walked recursively.  Returns non-zero when any path
    was missing or unreadable, and prints usage when no paths are given.
    """
    paths = sys.argv[1:]
    prog = os.path.basename(sys.argv[0])

    if not paths:
        print("Usage: %s filename..." % (prog,), file=sys.stderr)
        return 1

    status = 0
    for entry in paths:
        if os.path.isdir(entry):
            for dirpath, _dirs, filenames in os.walk(entry):
                for name in filenames:
                    status |= check_file(
                        sys.stdout, os.path.join(dirpath, name), callback)
        else:
            status |= check_file(sys.stdout, entry, callback)

    return status
diff --git a/lib/spack/external/macholib/dyld.py b/lib/spack/external/macholib/dyld.py
new file mode 100644
index 0000000000..1157ebf007
--- /dev/null
+++ b/lib/spack/external/macholib/dyld.py
@@ -0,0 +1,190 @@
+"""
+dyld emulation
+"""
+
+from itertools import chain
+
+import os
+import sys
+
+from macholib.framework import framework_info
+from macholib.dylib import dylib_info
+
+__all__ = [
+ 'dyld_find', 'framework_find',
+ 'framework_info', 'dylib_info',
+]
+
# These are the defaults as per man dyld(1)
#
# Framework locations searched when DYLD_FALLBACK_FRAMEWORK_PATH is not
# set in the environment.
_DEFAULT_FRAMEWORK_FALLBACK = [
    os.path.expanduser("~/Library/Frameworks"),
    "/Library/Frameworks",
    "/Network/Library/Frameworks",
    "/System/Library/Frameworks",
]

# Dylib locations searched when DYLD_FALLBACK_LIBRARY_PATH is not set in
# the environment.
_DEFAULT_LIBRARY_FALLBACK = [
    os.path.expanduser("~/lib"),
    "/usr/local/lib",
    "/lib",
    "/usr/lib",
]
+
# XXX: Is this function still needed?
if sys.version_info[0] == 2:
    def _ensure_utf8(s):
        # Python 2: normalize unicode strings to UTF-8 encoded bytes.
        if isinstance(s, unicode):  # noqa: F821
            return s.encode('utf8')
        return s
else:
    def _ensure_utf8(s):
        # Python 3: pass str (or None) through; reject anything else
        # (bytes paths are not supported here).
        if s is not None and not isinstance(s, str):
            raise ValueError(s)
        return s
+
+
+def _dyld_env(env, var):
+ if env is None:
+ env = os.environ
+ rval = env.get(var)
+ if rval is None or rval == '':
+ return []
+ return rval.split(':')
+
+
def dyld_image_suffix(env=None):
    """Return DYLD_IMAGE_SUFFIX from *env* (default os.environ), or None
    when it is not set."""
    mapping = os.environ if env is None else env
    return mapping.get('DYLD_IMAGE_SUFFIX')
+
+
def dyld_framework_path(env=None):
    # DYLD_FRAMEWORK_PATH: directories searched for frameworks first.
    return _dyld_env(env, 'DYLD_FRAMEWORK_PATH')


def dyld_library_path(env=None):
    # DYLD_LIBRARY_PATH: directories searched for dylibs first.
    return _dyld_env(env, 'DYLD_LIBRARY_PATH')


def dyld_fallback_framework_path(env=None):
    # DYLD_FALLBACK_FRAMEWORK_PATH: searched when a framework is not
    # found at its install path.
    return _dyld_env(env, 'DYLD_FALLBACK_FRAMEWORK_PATH')


def dyld_fallback_library_path(env=None):
    # DYLD_FALLBACK_LIBRARY_PATH: searched when a dylib is not found at
    # its install path.
    return _dyld_env(env, 'DYLD_FALLBACK_LIBRARY_PATH')
+
+
def dyld_image_suffix_search(iterator, env=None):
    """For a potential path iterator, add DYLD_IMAGE_SUFFIX semantics.

    When the suffix is set, each candidate is preceded by its suffixed
    variant (inserted before ``.dylib`` when present, appended
    otherwise); when unset, the iterator is returned unchanged.
    """
    suffix = dyld_image_suffix(env)
    if suffix is None:
        return iterator

    def with_suffix():
        for candidate in iterator:
            if candidate.endswith('.dylib'):
                stem = candidate[:-len('.dylib')]
                yield stem + suffix + '.dylib'
            else:
                yield candidate + suffix
            yield candidate

    return with_suffix()
+
+
def dyld_override_search(name, env=None):
    """Yield candidate paths from DYLD_FRAMEWORK_PATH (for names that
    parse as frameworks) and DYLD_LIBRARY_PATH, in dyld's override
    order."""
    info = framework_info(name)
    if info is not None:
        # Framework-style name: try each DYLD_FRAMEWORK_PATH entry first.
        for directory in dyld_framework_path(env):
            yield os.path.join(directory, info['name'])

    # Then every DYLD_LIBRARY_PATH entry with the bare file name.
    for directory in dyld_library_path(env):
        yield os.path.join(directory, os.path.basename(name))
+
+
def dyld_executable_path_search(name, executable_path=None):
    """Expand an ``@executable_path/`` prefixed *name* against
    *executable_path*, yielding at most one candidate path."""
    prefix = '@executable_path/'
    if executable_path is not None and name.startswith(prefix):
        yield os.path.join(executable_path, name[len(prefix):])
+
+
def dyld_loader_search(name, loader_path=None):
    """Expand an ``@loader_path/`` prefixed *name* against *loader_path*,
    yielding at most one candidate path."""
    prefix = '@loader_path/'
    if loader_path is not None and name.startswith(prefix):
        yield os.path.join(loader_path, name[len(prefix):])
+
+
def dyld_default_search(name, env=None):
    """Yield *name* itself, then the fallback framework and library
    locations (environment override when set, else the dyld(1)
    defaults)."""
    yield name

    info = framework_info(name)
    if info is not None:
        directories = dyld_fallback_framework_path(env) \
            or _DEFAULT_FRAMEWORK_FALLBACK
        for directory in directories:
            yield os.path.join(directory, info['name'])

    directories = dyld_fallback_library_path(env) or _DEFAULT_LIBRARY_FALLBACK
    for directory in directories:
        yield os.path.join(directory, os.path.basename(name))
+
+
def dyld_find(name, executable_path=None, env=None, loader_path=None):
    """
    Find a library or framework using dyld semantics
    """
    name = _ensure_utf8(name)
    executable_path = _ensure_utf8(executable_path)
    # Probe candidates in dyld's documented order: environment overrides,
    # @executable_path/@loader_path expansion, then default/fallback
    # locations; DYLD_IMAGE_SUFFIX variants are tried before each path.
    for path in dyld_image_suffix_search(chain(
        dyld_override_search(name, env),
        dyld_executable_path_search(name, executable_path),
        dyld_loader_search(name, loader_path),
        dyld_default_search(name, env),
    ), env):
        if os.path.isfile(path):
            return path
    raise ValueError("dylib %s could not be found" % (name,))
+
+
def framework_find(fn, executable_path=None, env=None):
    """
    Find a framework using dyld semantics in a very loose manner.

    Will take input such as:
        Python
        Python.framework
        Python.framework/Versions/Current
    """
    try:
        return dyld_find(fn, executable_path=executable_path, env=env)
    except ValueError:
        pass
    fmwk_index = fn.rfind('.framework')
    if fmwk_index == -1:
        # Bare name like 'Python': append the .framework suffix.  Bug
        # fix: the append must happen only in this branch -- doing it
        # unconditionally (the previous behavior) turned
        # 'Python.framework' into 'Python.framework.framework'.
        fmwk_index = len(fn)
        fn += '.framework'
    fn = os.path.join(fn, os.path.basename(fn[:fmwk_index]))
    return dyld_find(fn, executable_path=executable_path, env=env)
diff --git a/lib/spack/external/macholib/dylib.py b/lib/spack/external/macholib/dylib.py
new file mode 100644
index 0000000000..5b06b40e13
--- /dev/null
+++ b/lib/spack/external/macholib/dylib.py
@@ -0,0 +1,43 @@
+"""
+Generic dylib path manipulation
+"""
+
+import re
+
+__all__ = ['dylib_info']
+
# Verbose regex decomposing a dylib path into location / name /
# shortname / optional version / optional suffix.
_DYLIB_RE = re.compile(r"""(?x)
(?P<location>^.*)(?:^|/)
(?P<name>
    (?P<shortname>\w+?)
    (?:\.(?P<version>[^._]+))?
    (?:_(?P<suffix>[^._]+))?
    \.dylib$
)
""")


def dylib_info(filename):
    """Parse *filename* as a dylib path.

    Recognized forms:
        Location/Name.SomeVersion_Suffix.dylib
        Location/Name.SomeVersion.dylib
        Location/Name_Suffix.dylib
        Location/Name.dylib

    Returns None when the name does not look like a dylib, otherwise a
    dict with keys ``location``, ``name``, ``shortname``, ``version``
    and ``suffix`` (version and suffix may be None).
    """
    match = _DYLIB_RE.match(filename)
    return match.groupdict() if match else None
diff --git a/lib/spack/external/macholib/framework.py b/lib/spack/external/macholib/framework.py
new file mode 100644
index 0000000000..8f8632c209
--- /dev/null
+++ b/lib/spack/external/macholib/framework.py
@@ -0,0 +1,43 @@
+"""
+Generic framework path manipulation
+"""
+
+import re
+
+__all__ = ['framework_info']
+
# Verbose regex decomposing a framework path into location / name /
# shortname / optional version / optional suffix; the binary name must
# repeat the framework's short name.
_STRICT_FRAMEWORK_RE = re.compile(r"""(?x)
(?P<location>^.*)(?:^|/)
(?P<name>
    (?P<shortname>[-_A-Za-z0-9]+).framework/
    (?:Versions/(?P<version>[^/]+)/)?
    (?P=shortname)
    (?:_(?P<suffix>[^_]+))?
)$
""")


def framework_info(filename):
    """Parse *filename* as a macOS framework path.

    Recognized forms:
        Location/Name.framework/Versions/SomeVersion/Name_Suffix
        Location/Name.framework/Versions/SomeVersion/Name
        Location/Name.framework/Name_Suffix
        Location/Name.framework/Name

    Returns None when the path is not a framework, otherwise a dict with
    keys ``location``, ``name``, ``shortname``, ``version`` and
    ``suffix`` (version and suffix may be None).
    """
    match = _STRICT_FRAMEWORK_RE.match(filename)
    return match.groupdict() if match else None
diff --git a/lib/spack/external/macholib/itergraphreport.py b/lib/spack/external/macholib/itergraphreport.py
new file mode 100644
index 0000000000..eea3fd69bc
--- /dev/null
+++ b/lib/spack/external/macholib/itergraphreport.py
@@ -0,0 +1,73 @@
+"""
+Utilities for creating dot output from a MachOGraph
+
+XXX: need to rewrite this based on altgraph.Dot
+"""
+
+from collections import deque
+
+try:
+ from itertools import imap
+except ImportError:
+ imap = map
+
+__all__ = ['itergraphreport']
+
+
def itergraphreport(nodes, describe_edge, name='G'):
    """Yield the lines of a Graphviz "dot" digraph describing *nodes*.

    *nodes* is an iterable of (node, data, outgoing, incoming) tuples
    (as produced by Graph.describe_node); *describe_edge* maps an edge
    id to (edge, data, head, tail).  Only 'run_file' and 'load_dylib'
    edges are emitted.

    Python 3 fixes: ``dict.iteritems()`` no longer exists (use
    ``items()``), and *nodes* is materialized up front because it is
    traversed twice -- a one-shot iterator (e.g. ``map()``) would
    otherwise contribute nothing to the second pass.
    """
    # Materialize: the node sequence is iterated twice below.
    nodes = list(nodes)
    edges = deque()
    nodetoident = {}

    def nodevisitor(node, data, outgoing, incoming):
        return {'label': str(node)}

    def edgevisitor(edge, data, head, tail):
        return {}

    yield 'digraph %s {\n' % (name,)
    attr = dict(rankdir='LR', concentrate='true')
    cpatt = '%s="%s"'
    for item in attr.items():
        yield '\t%s;\n' % (cpatt % item,)

    # find all packages (subgraphs)
    for (node, data, outgoing, incoming) in nodes:
        nodetoident[node] = getattr(data, 'identifier', node)

    # create sets for subgraph, write out descriptions
    for (node, data, outgoing, incoming) in nodes:
        # update edges
        for edge in map(describe_edge, outgoing):
            edges.append(edge)

        # describe node
        yield '\t"%s" [%s];\n' % (
            node,
            ','.join([
                (cpatt % item) for item in
                nodevisitor(node, data, outgoing, incoming).items()
            ]),
        )

    graph = []

    # Keep only the dependency edges worth drawing.
    while edges:
        edge, data, head, tail = edges.popleft()
        if data in ('run_file', 'load_dylib'):
            graph.append((edge, data, head, tail))

    def do_graph(edges, tabs):
        edgestr = tabs + '"%s" -> "%s" [%s];\n'
        # describe edge
        for (edge, data, head, tail) in edges:
            attribs = edgevisitor(edge, data, head, tail)
            yield edgestr % (
                head,
                tail,
                ','.join([(cpatt % item) for item in attribs.items()]),
            )

    for s in do_graph(graph, '\t'):
        yield s

    yield '}\n'
diff --git a/lib/spack/external/macholib/mach_o.py b/lib/spack/external/macholib/mach_o.py
new file mode 100644
index 0000000000..7c5803d939
--- /dev/null
+++ b/lib/spack/external/macholib/mach_o.py
@@ -0,0 +1,1665 @@
+"""
+Other than changing the load commands in such a way that they do not
+contain the load command itself, this is largely a by-hand conversion
+of the C headers. Hopefully everything in here should be at least as
+obvious as the C headers, and you should be using the C headers as a real
+reference because the documentation didn't come along for the ride.
+
+Doing much of anything with the symbol tables or segments is really
+not covered at this point.
+
+See /usr/include/mach-o and friends.
+"""
+
+import time
+
+from macholib.ptypes import p_uint32, p_uint64, Structure, p_long, pypackable
+from macholib.ptypes import p_int64, p_short, p_uint8, p_int32, p_ulong
+
+_CPU_ARCH_ABI64 = 0x01000000
+
+CPU_TYPE_NAMES = {
+ -1: 'ANY',
+ 1: 'VAX',
+ 6: 'MC680x0',
+ 7: 'i386',
+ _CPU_ARCH_ABI64 | 7: 'x86_64',
+ 8: 'MIPS',
+ 10: 'MC98000',
+ 11: 'HPPA',
+ 12: 'ARM',
+ _CPU_ARCH_ABI64 | 12: 'ARM64',
+ 13: 'MC88000',
+ 14: 'SPARC',
+ 15: 'i860',
+ 16: 'Alpha',
+ 18: 'PowerPC',
+ _CPU_ARCH_ABI64 | 18: 'PowerPC64',
+}
+
+INTEL64_SUBTYPE = {
+ 3: "CPU_SUBTYPE_X86_64_ALL",
+ 4: "CPU_SUBTYPE_X86_ARCH1",
+ 8: "CPU_SUBTYPE_X86_64_H",
+}
+
+# define CPU_SUBTYPE_INTEL(f, m) ((cpu_subtype_t) (f) + ((m) << 4))
+INTEL_SUBTYPE = {
+ 0: "CPU_SUBTYPE_INTEL_MODEL_ALL",
+ 1: "CPU_THREADTYPE_INTEL_HTT",
+ 3: "CPU_SUBTYPE_I386_ALL",
+ 4: "CPU_SUBTYPE_486",
+ 5: "CPU_SUBTYPE_586",
+ 8: "CPU_SUBTYPE_PENTIUM_3",
+ 9: "CPU_SUBTYPE_PENTIUM_M",
+ 10: "CPU_SUBTYPE_PENTIUM_4",
+ 11: "CPU_SUBTYPE_ITANIUM",
+ 12: "CPU_SUBTYPE_XEON",
+ 34: "CPU_SUBTYPE_XEON_MP",
+ 42: "CPU_SUBTYPE_PENTIUM_4_M",
+ 43: "CPU_SUBTYPE_ITANIUM_2",
+ 38: "CPU_SUBTYPE_PENTPRO",
+ 40: "CPU_SUBTYPE_PENTIUM_3_M",
+ 52: "CPU_SUBTYPE_PENTIUM_3_XEON",
+ 102: "CPU_SUBTYPE_PENTII_M3",
+ 132: "CPU_SUBTYPE_486SX",
+ 166: "CPU_SUBTYPE_PENTII_M5",
+ 199: "CPU_SUBTYPE_CELERON",
+ 231: "CPU_SUBTYPE_CELERON_MOBILE"
+}
+
+MC680_SUBTYPE = {
+ 1: "CPU_SUBTYPE_MC680x0_ALL",
+ 2: "CPU_SUBTYPE_MC68040",
+ 3: "CPU_SUBTYPE_MC68030_ONLY"
+}
+
+MIPS_SUBTYPE = {
+ 0: "CPU_SUBTYPE_MIPS_ALL",
+ 1: "CPU_SUBTYPE_MIPS_R2300",
+ 2: "CPU_SUBTYPE_MIPS_R2600",
+ 3: "CPU_SUBTYPE_MIPS_R2800",
+ 4: "CPU_SUBTYPE_MIPS_R2000a",
+ 5: "CPU_SUBTYPE_MIPS_R2000",
+ 6: "CPU_SUBTYPE_MIPS_R3000a",
+ 7: "CPU_SUBTYPE_MIPS_R3000"
+}
+
+MC98000_SUBTYPE = {
+ 0: "CPU_SUBTYPE_MC98000_ALL",
+ 1: "CPU_SUBTYPE_MC98601"
+}
+
+HPPA_SUBTYPE = {
+ 0: "CPU_SUBTYPE_HPPA_7100",
+ 1: "CPU_SUBTYPE_HPPA_7100LC"
+}
+
+MC88_SUBTYPE = {
+ 0: "CPU_SUBTYPE_MC88000_ALL",
+ 1: "CPU_SUBTYPE_MC88100",
+ 2: "CPU_SUBTYPE_MC88110"
+}
+
+SPARC_SUBTYPE = {
+ 0: "CPU_SUBTYPE_SPARC_ALL"
+}
+
+I860_SUBTYPE = {
+ 0: "CPU_SUBTYPE_I860_ALL",
+ 1: "CPU_SUBTYPE_I860_860"
+}
+
+POWERPC_SUBTYPE = {
+ 0: "CPU_SUBTYPE_POWERPC_ALL",
+ 1: "CPU_SUBTYPE_POWERPC_601",
+ 2: "CPU_SUBTYPE_POWERPC_602",
+ 3: "CPU_SUBTYPE_POWERPC_603",
+ 4: "CPU_SUBTYPE_POWERPC_603e",
+ 5: "CPU_SUBTYPE_POWERPC_603ev",
+ 6: "CPU_SUBTYPE_POWERPC_604",
+ 7: "CPU_SUBTYPE_POWERPC_604e",
+ 8: "CPU_SUBTYPE_POWERPC_620",
+ 9: "CPU_SUBTYPE_POWERPC_750",
+ 10: "CPU_SUBTYPE_POWERPC_7400",
+ 11: "CPU_SUBTYPE_POWERPC_7450",
+ 100: "CPU_SUBTYPE_POWERPC_970"
+}
+
+ARM_SUBTYPE = {
+ 0: "CPU_SUBTYPE_ARM_ALL12",
+ 5: "CPU_SUBTYPE_ARM_V4T",
+ 6: "CPU_SUBTYPE_ARM_V6",
+ 7: "CPU_SUBTYPE_ARM_V5TEJ",
+ 8: "CPU_SUBTYPE_ARM_XSCALE",
+ 9: "CPU_SUBTYPE_ARM_V7",
+ 10: "CPU_SUBTYPE_ARM_V7F",
+ 11: "CPU_SUBTYPE_ARM_V7S",
+ 12: "CPU_SUBTYPE_ARM_V7K",
+ 13: "CPU_SUBTYPE_ARM_V8",
+ 14: "CPU_SUBTYPE_ARM_V6M",
+ 15: "CPU_SUBTYPE_ARM_V7M",
+ 16: "CPU_SUBTYPE_ARM_V7EM",
+}
+
+ARM64_SUBTYPE = {
+ 0: "CPU_SUBTYPE_ARM64_ALL",
+ 1: "CPU_SUBTYPE_ARM64_V8",
+}
+
+VAX_SUBTYPE = {
+ 0: "CPU_SUBTYPE_VAX_ALL",
+ 1: "CPU_SUBTYPE_VAX780",
+ 2: "CPU_SUBTYPE_VAX785",
+ 3: "CPU_SUBTYPE_VAX750",
+ 4: "CPU_SUBTYPE_VAX730",
+ 5: "CPU_SUBTYPE_UVAXI",
+ 6: "CPU_SUBTYPE_UVAXII",
+ 7: "CPU_SUBTYPE_VAX8200",
+ 8: "CPU_SUBTYPE_VAX8500",
+ 9: "CPU_SUBTYPE_VAX8600",
+ 10: "CPU_SUBTYPE_VAX8650",
+ 11: "CPU_SUBTYPE_VAX8800",
+ 12: "CPU_SUBTYPE_UVAXIII",
+}
+
+
def get_cpu_subtype(cpu_type, cpu_subtype):
    """Return the symbolic name for *cpu_subtype* under *cpu_type*.

    The capability bits in the top nibble are masked off first.  Unknown
    subtypes are returned as the (masked) integer; unknown cpu types are
    returned as the decimal string of the masked subtype.
    """
    st = cpu_subtype & 0x0fffffff

    # Per-architecture subtype name tables, keyed by cpu_type.
    # NOTE(review): cpu_type 16 is Alpha in CPU_TYPE_NAMES but was looked
    # up in MIPS_SUBTYPE by the original if/elif chain; preserved here.
    tables = {
        1: VAX_SUBTYPE,
        6: MC680_SUBTYPE,
        7: INTEL_SUBTYPE,
        7 | _CPU_ARCH_ABI64: INTEL64_SUBTYPE,
        8: MIPS_SUBTYPE,
        10: MC98000_SUBTYPE,
        11: HPPA_SUBTYPE,
        12: ARM_SUBTYPE,
        12 | _CPU_ARCH_ABI64: ARM64_SUBTYPE,
        13: MC88_SUBTYPE,
        14: SPARC_SUBTYPE,
        15: I860_SUBTYPE,
        16: MIPS_SUBTYPE,
        18: POWERPC_SUBTYPE,
        18 | _CPU_ARCH_ABI64: POWERPC_SUBTYPE,
    }

    try:
        table = tables[cpu_type]
    except KeyError:
        return str(st)
    return table.get(st, st)
+
+
+_MH_EXECUTE_SYM = "__mh_execute_header"
+MH_EXECUTE_SYM = "_mh_execute_header"
+_MH_BUNDLE_SYM = "__mh_bundle_header"
+MH_BUNDLE_SYM = "_mh_bundle_header"
+_MH_DYLIB_SYM = "__mh_dylib_header"
+MH_DYLIB_SYM = "_mh_dylib_header"
+_MH_DYLINKER_SYM = "__mh_dylinker_header"
+MH_DYLINKER_SYM = "_mh_dylinker_header"
+
+(
+ MH_OBJECT, MH_EXECUTE, MH_FVMLIB, MH_CORE, MH_PRELOAD, MH_DYLIB,
+ MH_DYLINKER, MH_BUNDLE, MH_DYLIB_STUB, MH_DSYM
+) = range(0x1, 0xb)
+
+(
+ MH_NOUNDEFS, MH_INCRLINK, MH_DYLDLINK, MH_BINDATLOAD, MH_PREBOUND,
+ MH_SPLIT_SEGS, MH_LAZY_INIT, MH_TWOLEVEL, MH_FORCE_FLAT, MH_NOMULTIDEFS,
+ MH_NOFIXPREBINDING, MH_PREBINDABLE, MH_ALLMODSBOUND,
+ MH_SUBSECTIONS_VIA_SYMBOLS, MH_CANONICAL, MH_WEAK_DEFINES,
+ MH_BINDS_TO_WEAK, MH_ALLOW_STACK_EXECUTION,
+ MH_ROOT_SAFE, MH_SETUID_SAFE, MH_NO_REEXPORTED_DYLIBS, MH_PIE,
+ MH_DEAD_STRIPPABLE_DYLIB, MH_HAS_TLV_DESCRIPTORS, MH_NO_HEAP_EXECUTION,
+ MH_APP_EXTENSION_SAFE
+) = map((1).__lshift__, range(26))
+
+MH_MAGIC = 0xfeedface
+MH_CIGAM = 0xcefaedfe
+MH_MAGIC_64 = 0xfeedfacf
+MH_CIGAM_64 = 0xcffaedfe
+
+integer_t = p_int32
+cpu_type_t = integer_t
+cpu_subtype_t = p_uint32
+
+MH_FILETYPE_NAMES = {
+ MH_OBJECT: 'relocatable object',
+ MH_EXECUTE: 'demand paged executable',
+ MH_FVMLIB: 'fixed vm shared library',
+ MH_CORE: 'core',
+ MH_PRELOAD: 'preloaded executable',
+ MH_DYLIB: 'dynamically bound shared library',
+ MH_DYLINKER: 'dynamic link editor',
+ MH_BUNDLE: 'dynamically bound bundle',
+ MH_DYLIB_STUB: 'shared library stub for static linking',
+ MH_DSYM: 'symbol information',
+}
+
+MH_FILETYPE_SHORTNAMES = {
+ MH_OBJECT: 'object',
+ MH_EXECUTE: 'execute',
+ MH_FVMLIB: 'fvmlib',
+ MH_CORE: 'core',
+ MH_PRELOAD: 'preload',
+ MH_DYLIB: 'dylib',
+ MH_DYLINKER: 'dylinker',
+ MH_BUNDLE: 'bundle',
+ MH_DYLIB_STUB: 'dylib_stub',
+ MH_DSYM: 'dsym',
+}
+
+MH_FLAGS_NAMES = {
+ MH_NOUNDEFS: 'MH_NOUNDEFS',
+ MH_INCRLINK: 'MH_INCRLINK',
+ MH_DYLDLINK: 'MH_DYLDLINK',
+ MH_BINDATLOAD: 'MH_BINDATLOAD',
+ MH_PREBOUND: 'MH_PREBOUND',
+ MH_SPLIT_SEGS: 'MH_SPLIT_SEGS',
+ MH_LAZY_INIT: 'MH_LAZY_INIT',
+ MH_TWOLEVEL: 'MH_TWOLEVEL',
+ MH_FORCE_FLAT: 'MH_FORCE_FLAT',
+ MH_NOMULTIDEFS: 'MH_NOMULTIDEFS',
+ MH_NOFIXPREBINDING: 'MH_NOFIXPREBINDING',
+ MH_PREBINDABLE: 'MH_PREBINDABLE',
+ MH_ALLMODSBOUND: 'MH_ALLMODSBOUND',
+ MH_SUBSECTIONS_VIA_SYMBOLS: 'MH_SUBSECTIONS_VIA_SYMBOLS',
+ MH_CANONICAL: 'MH_CANONICAL',
+ MH_WEAK_DEFINES: 'MH_WEAK_DEFINES',
+ MH_BINDS_TO_WEAK: 'MH_BINDS_TO_WEAK',
+ MH_ALLOW_STACK_EXECUTION: 'MH_ALLOW_STACK_EXECUTION',
+ MH_ROOT_SAFE: 'MH_ROOT_SAFE',
+ MH_SETUID_SAFE: 'MH_SETUID_SAFE',
+ MH_NO_REEXPORTED_DYLIBS: 'MH_NO_REEXPORTED_DYLIBS',
+ MH_PIE: 'MH_PIE',
+ MH_DEAD_STRIPPABLE_DYLIB: 'MH_DEAD_STRIPPABLE_DYLIB',
+ MH_HAS_TLV_DESCRIPTORS: 'MH_HAS_TLV_DESCRIPTORS',
+ MH_NO_HEAP_EXECUTION: 'MH_NO_HEAP_EXECUTION',
+ MH_APP_EXTENSION_SAFE: 'MH_APP_EXTENSION_SAFE',
+}
+
+MH_FLAGS_DESCRIPTIONS = {
+ MH_NOUNDEFS: 'no undefined references',
+ MH_INCRLINK: 'output of an incremental link',
+ MH_DYLDLINK: 'input for the dynamic linker',
+ MH_BINDATLOAD: 'undefined references bound dynamically when loaded',
+ MH_PREBOUND: 'dynamic undefined references prebound',
+ MH_SPLIT_SEGS: 'split read-only and read-write segments',
+ MH_LAZY_INIT: '(obsolete)',
+ MH_TWOLEVEL: 'using two-level name space bindings',
+ MH_FORCE_FLAT: 'forcing all imagges to use flat name space bindings',
+ MH_NOMULTIDEFS: 'umbrella guarantees no multiple definitions',
+ MH_NOFIXPREBINDING: 'do not notify prebinding agent about this executable',
+ MH_PREBINDABLE:
+ 'the binary is not prebound but can have its prebinding redone',
+ MH_ALLMODSBOUND:
+ 'indicates that this binary binds to all two-level namespace modules '
+ 'of its dependent libraries',
+ MH_SUBSECTIONS_VIA_SYMBOLS:
+ 'safe to divide up the sections into sub-sections via symbols for '
+ 'dead code stripping',
+ MH_CANONICAL:
+ 'the binary has been canonicalized via the unprebind operation',
+ MH_WEAK_DEFINES: 'the final linked image contains external weak symbols',
+ MH_BINDS_TO_WEAK: 'the final linked image uses weak symbols',
+ MH_ALLOW_STACK_EXECUTION:
+ 'all stacks in the task will be given stack execution privilege',
+ MH_ROOT_SAFE:
+ 'the binary declares it is safe for use in processes with uid zero',
+ MH_SETUID_SAFE:
+ 'the binary declares it is safe for use in processes when issetugid() '
+ 'is true',
+ MH_NO_REEXPORTED_DYLIBS:
+ 'the static linker does not need to examine dependent dylibs to see '
+ 'if any are re-exported',
+ MH_PIE: 'the OS will load the main executable at a random address',
+ MH_DEAD_STRIPPABLE_DYLIB:
+ 'the static linker will automatically not create a LC_LOAD_DYLIB load '
+ 'command to the dylib if no symbols are being referenced from the '
+ 'dylib',
+ MH_HAS_TLV_DESCRIPTORS:
+ 'contains a section of type S_THREAD_LOCAL_VARIABLES',
+ MH_NO_HEAP_EXECUTION:
+ 'the OS will run the main executable with a non-executable heap '
+ 'even on platforms that don\'t require it',
+ MH_APP_EXTENSION_SAFE:
+ 'the code was linked for use in an application extension.',
+}
+
+
class mach_version_helper(Structure):
    """An A.B.C version number packed into one 32-bit word: major in
    bits 16-31, minor in bits 8-15, rev in bits 0-7."""
    _fields_ = (
        ('_version', p_uint32),
    )

    @property
    def major(self):
        return self._version >> 16 & 0xffff

    @major.setter
    def major(self, v):
        self._version = (self._version & 0xffff) | (v << 16)

    @property
    def minor(self):
        return self._version >> 8 & 0xff

    @minor.setter
    def minor(self, v):
        self._version = (self._version & 0xffff00ff) | (v << 8)

    @property
    def rev(self):
        return self._version & 0xff

    @rev.setter
    def rev(self, v):
        # Bug fix: this setter previously *returned* the combined value
        # instead of storing it, so assigning to .rev silently did
        # nothing (a setter's return value is discarded).
        self._version = (self._version & 0xffffff00) | v

    def __str__(self):
        return '%s.%s.%s' % (self.major, self.minor, self.rev)
+
+
class mach_timestamp_helper(p_uint32):
    # Unsigned 32-bit Unix timestamp that prints as a human-readable date.
    def __str__(self):
        return time.ctime(self)
+
+
def read_struct(f, s, **kw):
    """Read one instance of Structure subclass *s* from file object *f*,
    forwarding any keyword arguments (e.g. ``_endian_``)."""
    return s.from_fileobj(f, **kw)
+
+
class mach_header(Structure):
    """The 32-bit Mach-O file header (see <mach-o/loader.h>)."""
    _fields_ = (
        ('magic', p_uint32),
        ('cputype', cpu_type_t),
        ('cpusubtype', cpu_subtype_t),
        ('filetype', p_uint32),
        ('ncmds', p_uint32),
        ('sizeofcmds', p_uint32),
        ('flags', p_uint32),
    )

    def _describe(self):
        """Return (key, value) pairs describing this header, pairing
        numeric fields with their symbolic names."""
        bit = 1
        flags = self.flags
        dflags = []
        while flags and bit < (1 << 32):
            if flags & bit:
                dflags.append({
                    'name': MH_FLAGS_NAMES.get(bit, str(bit)),
                    'description': MH_FLAGS_DESCRIPTIONS.get(bit, str(bit))
                })
                # Bug fix: clear the handled bit only when it was set
                # (previously the XOR ran unconditionally, toggling unset
                # bits on and defeating the `while flags` early exit; the
                # emitted flags were unaffected but the loop always ran
                # all 32 iterations).
                flags = flags ^ bit
            bit <<= 1
        return (
            ('magic', int(self.magic)),
            ('cputype_string', CPU_TYPE_NAMES.get(self.cputype, self.cputype)),
            ('cputype', int(self.cputype)),
            ('cpusubtype_string',
                get_cpu_subtype(self.cputype, self.cpusubtype)),
            ('cpusubtype', int(self.cpusubtype)),
            ('filetype_string',
                MH_FILETYPE_NAMES.get(self.filetype, self.filetype)),
            ('filetype', int(self.filetype)),
            ('ncmds', self.ncmds),
            ('sizeofcmds', self.sizeofcmds),
            ('flags', dflags),
            ('raw_flags', int(self.flags))
        )
+
+
+class mach_header_64(mach_header):
+ _fields_ = mach_header._fields_ + (('reserved', p_uint32),)
+
+
+class load_command(Structure):
+ _fields_ = (
+ ('cmd', p_uint32),
+ ('cmdsize', p_uint32),
+ )
+
+ def get_cmd_name(self):
+ return LC_NAMES.get(self.cmd, self.cmd)
+
+
# Load command type constants (see <mach-o/loader.h>).  Commands that the
# dynamic linker must understand to launch the program carry the
# LC_REQ_DYLD high bit.
LC_REQ_DYLD = 0x80000000

(
    LC_SEGMENT, LC_SYMTAB, LC_SYMSEG, LC_THREAD, LC_UNIXTHREAD, LC_LOADFVMLIB,
    LC_IDFVMLIB, LC_IDENT, LC_FVMFILE, LC_PREPAGE, LC_DYSYMTAB, LC_LOAD_DYLIB,
    LC_ID_DYLIB, LC_LOAD_DYLINKER, LC_ID_DYLINKER, LC_PREBOUND_DYLIB,
    LC_ROUTINES, LC_SUB_FRAMEWORK, LC_SUB_UMBRELLA, LC_SUB_CLIENT,
    LC_SUB_LIBRARY, LC_TWOLEVEL_HINTS, LC_PREBIND_CKSUM
) = range(0x1, 0x18)

LC_LOAD_WEAK_DYLIB = LC_REQ_DYLD | 0x18

LC_SEGMENT_64 = 0x19
LC_ROUTINES_64 = 0x1a
LC_UUID = 0x1b
LC_RPATH = (0x1c | LC_REQ_DYLD)
LC_CODE_SIGNATURE = 0x1d
LC_CODE_SEGMENT_SPLIT_INFO = 0x1e
LC_REEXPORT_DYLIB = 0x1f | LC_REQ_DYLD
LC_LAZY_LOAD_DYLIB = 0x20
LC_ENCRYPTION_INFO = 0x21
LC_DYLD_INFO = 0x22
LC_DYLD_INFO_ONLY = 0x22 | LC_REQ_DYLD
LC_LOAD_UPWARD_DYLIB = 0x23 | LC_REQ_DYLD
LC_VERSION_MIN_MACOSX = 0x24
LC_VERSION_MIN_IPHONEOS = 0x25
LC_FUNCTION_STARTS = 0x26
LC_DYLD_ENVIRONMENT = 0x27
LC_MAIN = 0x28 | LC_REQ_DYLD
LC_DATA_IN_CODE = 0x29
LC_SOURCE_VERSION = 0x2a
LC_DYLIB_CODE_SIGN_DRS = 0x2b
LC_ENCRYPTION_INFO_64 = 0x2c
LC_LINKER_OPTION = 0x2d
LC_LINKER_OPTIMIZATION_HINT = 0x2e
LC_VERSION_MIN_TVOS = 0x2f
LC_VERSION_MIN_WATCHOS = 0x30
LC_NOTE = 0x31
LC_BUILD_VERSION = 0x32


# this is really a union.. but whatever
class lc_str(p_uint32):
    """Offset (from the start of the load command) of a C string payload."""
    pass


# 16-byte fixed-width, NUL-padded byte string (segment/section names).
p_str16 = pypackable('p_str16', bytes, '16s')

# Virtual memory protection bitmask (read=1, write=2, execute=4).
vm_prot_t = p_int32
+
+
class segment_command(Structure):
    """LC_SEGMENT load command: a 32-bit segment to be mapped into memory."""
    _fields_ = (
        ('segname', p_str16),
        ('vmaddr', p_uint32),
        ('vmsize', p_uint32),
        ('fileoff', p_uint32),
        ('filesize', p_uint32),
        ('maxprot', vm_prot_t),
        ('initprot', vm_prot_t),
        ('nsects', p_uint32),  # read the section structures ?
        ('flags', p_uint32),
    )

    def describe(self):
        """Return a dict summary of this segment for reporting."""
        s = {}
        # BUG FIX: ``segname`` is a bytes value (p_str16 packs as '16s'),
        # so stripping with a str argument raises TypeError on Python 3.
        # b'\x00' behaves identically on Python 2, where str is bytes.
        s['segname'] = self.segname.rstrip(b'\x00')
        s['vmaddr'] = int(self.vmaddr)
        s['vmsize'] = int(self.vmsize)
        s['fileoff'] = int(self.fileoff)
        s['filesize'] = int(self.filesize)
        s['initprot'] = self.get_initial_virtual_memory_protections()
        s['initprot_raw'] = int(self.initprot)
        s['maxprot'] = self.get_max_virtual_memory_protections()
        s['maxprot_raw'] = int(self.maxprot)
        s['nsects'] = int(self.nsects)
        s['flags'] = self.flags
        return s

    @staticmethod
    def _vm_prot_names(prot):
        # Decode a vm_prot_t bitmask into its symbolic VM_PROT_* names.
        vm = []
        if prot == 0:
            vm.append("VM_PROT_NONE")
        if prot & 1:
            vm.append("VM_PROT_READ")
        if prot & 2:
            vm.append("VM_PROT_WRITE")
        if prot & 4:
            vm.append("VM_PROT_EXECUTE")
        return vm

    def get_initial_virtual_memory_protections(self):
        """Symbolic names for the protections the segment is mapped with."""
        return self._vm_prot_names(self.initprot)

    def get_max_virtual_memory_protections(self):
        """Symbolic names for the maximum protections the segment allows."""
        return self._vm_prot_names(self.maxprot)
+
+
class segment_command_64(Structure):
    """LC_SEGMENT_64 load command: a 64-bit segment to be mapped into memory."""
    _fields_ = (
        ('segname', p_str16),
        ('vmaddr', p_uint64),
        ('vmsize', p_uint64),
        ('fileoff', p_uint64),
        ('filesize', p_uint64),
        ('maxprot', vm_prot_t),
        ('initprot', vm_prot_t),
        ('nsects', p_uint32),  # read the section structures ?
        ('flags', p_uint32),
    )

    def describe(self):
        """Return a dict summary of this segment for reporting."""
        s = {}
        # BUG FIX: ``segname`` is a bytes value (p_str16 packs as '16s'),
        # so stripping with a str argument raises TypeError on Python 3.
        # b'\x00' behaves identically on Python 2, where str is bytes.
        s['segname'] = self.segname.rstrip(b'\x00')
        s['vmaddr'] = int(self.vmaddr)
        s['vmsize'] = int(self.vmsize)
        s['fileoff'] = int(self.fileoff)
        s['filesize'] = int(self.filesize)
        s['initprot'] = self.get_initial_virtual_memory_protections()
        s['initprot_raw'] = int(self.initprot)
        s['maxprot'] = self.get_max_virtual_memory_protections()
        s['maxprot_raw'] = int(self.maxprot)
        s['nsects'] = int(self.nsects)
        s['flags'] = self.flags
        return s

    @staticmethod
    def _vm_prot_names(prot):
        # Decode a vm_prot_t bitmask into its symbolic VM_PROT_* names.
        vm = []
        if prot == 0:
            vm.append("VM_PROT_NONE")
        if prot & 1:
            vm.append("VM_PROT_READ")
        if prot & 2:
            vm.append("VM_PROT_WRITE")
        if prot & 4:
            vm.append("VM_PROT_EXECUTE")
        return vm

    def get_initial_virtual_memory_protections(self):
        """Symbolic names for the protections the segment is mapped with."""
        return self._vm_prot_names(self.initprot)

    def get_max_virtual_memory_protections(self):
        """Symbolic names for the maximum protections the segment allows."""
        return self._vm_prot_names(self.maxprot)
+
+
# Segment flag bits (the ``flags`` field of segment_command).
SG_HIGHVM = 0x1
SG_FVMLIB = 0x2
SG_NORELOC = 0x4
SG_PROTECTED_VERSION_1 = 0x8
+
+
class section(Structure):
    """A 32-bit section header within a segment_command."""
    _fields_ = (
        ('sectname', p_str16),
        ('segname', p_str16),
        ('addr', p_uint32),
        ('size', p_uint32),
        ('offset', p_uint32),
        ('align', p_uint32),
        ('reloff', p_uint32),
        ('nreloc', p_uint32),
        ('flags', p_uint32),
        ('reserved1', p_uint32),
        ('reserved2', p_uint32),
    )

    def describe(self):
        """Return a dict summary of this section for reporting."""
        s = {}
        # BUG FIX: the names are bytes values (p_str16 packs as '16s'), so
        # stripping with a str argument raises TypeError on Python 3.
        # b'\x00' behaves identically on Python 2, where str is bytes.
        s['sectname'] = self.sectname.rstrip(b'\x00')
        s['segname'] = self.segname.rstrip(b'\x00')
        s['addr'] = int(self.addr)
        s['size'] = int(self.size)
        s['offset'] = int(self.offset)
        s['align'] = int(self.align)
        s['reloff'] = int(self.reloff)
        s['nreloc'] = int(self.nreloc)
        # The flags word packs a type (low byte) plus attribute bits.
        f = {}
        f['type'] = FLAG_SECTION_TYPES[int(self.flags) & 0xff]
        f['attributes'] = []
        for k in FLAG_SECTION_ATTRIBUTES:
            if k & self.flags:
                f['attributes'].append(FLAG_SECTION_ATTRIBUTES[k])
        if not f['attributes']:
            del f['attributes']
        s['flags'] = f
        s['reserved1'] = int(self.reserved1)
        s['reserved2'] = int(self.reserved2)
        return s

    def add_section_data(self, data):
        """Attach this section's raw contents (read separately)."""
        self.section_data = data
+
+
class section_64(Structure):
    """A 64-bit section header within a segment_command_64."""
    _fields_ = (
        ('sectname', p_str16),
        ('segname', p_str16),
        ('addr', p_uint64),
        ('size', p_uint64),
        ('offset', p_uint32),
        ('align', p_uint32),
        ('reloff', p_uint32),
        ('nreloc', p_uint32),
        ('flags', p_uint32),
        ('reserved1', p_uint32),
        ('reserved2', p_uint32),
        ('reserved3', p_uint32),
    )

    def describe(self):
        """Return a dict summary of this section for reporting."""
        s = {}
        # BUG FIX: the names are bytes values (p_str16 packs as '16s'), so
        # stripping with a str argument raises TypeError on Python 3.
        # b'\x00' behaves identically on Python 2, where str is bytes.
        s['sectname'] = self.sectname.rstrip(b'\x00')
        s['segname'] = self.segname.rstrip(b'\x00')
        s['addr'] = int(self.addr)
        s['size'] = int(self.size)
        s['offset'] = int(self.offset)
        s['align'] = int(self.align)
        s['reloff'] = int(self.reloff)
        s['nreloc'] = int(self.nreloc)
        # The flags word packs a type (low byte) plus attribute bits.
        f = {}
        f['type'] = FLAG_SECTION_TYPES[int(self.flags) & 0xff]
        f['attributes'] = []
        for k in FLAG_SECTION_ATTRIBUTES:
            if k & self.flags:
                f['attributes'].append(FLAG_SECTION_ATTRIBUTES[k])
        if not f['attributes']:
            del f['attributes']
        s['flags'] = f
        s['reserved1'] = int(self.reserved1)
        s['reserved2'] = int(self.reserved2)
        s['reserved3'] = int(self.reserved3)
        return s

    def add_section_data(self, data):
        """Attach this section's raw contents (read separately)."""
        self.section_data = data
+
+
# A section's ``flags`` word packs a type (low byte) and attribute bits
# (upper three bytes); these two masks split it apart.
SECTION_TYPE = 0xff
SECTION_ATTRIBUTES = 0xffffff00
# Section type values (mutually exclusive, stored in the low byte).
S_REGULAR = 0x0
S_ZEROFILL = 0x1
S_CSTRING_LITERALS = 0x2
S_4BYTE_LITERALS = 0x3
S_8BYTE_LITERALS = 0x4
S_LITERAL_POINTERS = 0x5
S_NON_LAZY_SYMBOL_POINTERS = 0x6
S_LAZY_SYMBOL_POINTERS = 0x7
S_SYMBOL_STUBS = 0x8
S_MOD_INIT_FUNC_POINTERS = 0x9
S_MOD_TERM_FUNC_POINTERS = 0xa
S_COALESCED = 0xb
S_GB_ZEROFILL = 0xc
S_INTERPOSING = 0xd
S_16BYTE_LITERALS = 0xe
S_DTRACE_DOF = 0xf
S_LAZY_DYLIB_SYMBOL_POINTERS = 0x10
S_THREAD_LOCAL_REGULAR = 0x11
S_THREAD_LOCAL_ZEROFILL = 0x12
S_THREAD_LOCAL_VARIABLES = 0x13
S_THREAD_LOCAL_VARIABLE_POINTERS = 0x14
S_THREAD_LOCAL_INIT_FUNCTION_POINTERS = 0x15

# Maps each section type value to its symbolic name (used by describe()).
FLAG_SECTION_TYPES = {
    S_REGULAR: "S_REGULAR",
    S_ZEROFILL: "S_ZEROFILL",
    S_CSTRING_LITERALS: "S_CSTRING_LITERALS",
    S_4BYTE_LITERALS: "S_4BYTE_LITERALS",
    S_8BYTE_LITERALS: "S_8BYTE_LITERALS",
    S_LITERAL_POINTERS: "S_LITERAL_POINTERS",
    S_NON_LAZY_SYMBOL_POINTERS: "S_NON_LAZY_SYMBOL_POINTERS",
    S_LAZY_SYMBOL_POINTERS: "S_LAZY_SYMBOL_POINTERS",
    S_SYMBOL_STUBS: "S_SYMBOL_STUBS",
    S_MOD_INIT_FUNC_POINTERS: "S_MOD_INIT_FUNC_POINTERS",
    S_MOD_TERM_FUNC_POINTERS: "S_MOD_TERM_FUNC_POINTERS",
    S_COALESCED: "S_COALESCED",
    S_GB_ZEROFILL: "S_GB_ZEROFILL",
    S_INTERPOSING: "S_INTERPOSING",
    S_16BYTE_LITERALS: "S_16BYTE_LITERALS",
    S_DTRACE_DOF: "S_DTRACE_DOF",
    S_LAZY_DYLIB_SYMBOL_POINTERS: "S_LAZY_DYLIB_SYMBOL_POINTERS",
    S_THREAD_LOCAL_REGULAR: "S_THREAD_LOCAL_REGULAR",
    S_THREAD_LOCAL_ZEROFILL: "S_THREAD_LOCAL_ZEROFILL",
    S_THREAD_LOCAL_VARIABLES: "S_THREAD_LOCAL_VARIABLES",
    S_THREAD_LOCAL_VARIABLE_POINTERS: "S_THREAD_LOCAL_VARIABLE_POINTERS",
    S_THREAD_LOCAL_INIT_FUNCTION_POINTERS:
        "S_THREAD_LOCAL_INIT_FUNCTION_POINTERS"
}

# User-settable section attribute bits (high byte of the flags word).
SECTION_ATTRIBUTES_USR = 0xff000000
S_ATTR_PURE_INSTRUCTIONS = 0x80000000
S_ATTR_NO_TOC = 0x40000000
S_ATTR_STRIP_STATIC_SYMS = 0x20000000
S_ATTR_NO_DEAD_STRIP = 0x10000000
S_ATTR_LIVE_SUPPORT = 0x08000000
S_ATTR_SELF_MODIFYING_CODE = 0x04000000
S_ATTR_DEBUG = 0x02000000

# System-settable section attribute bits.
SECTION_ATTRIBUTES_SYS = 0x00ffff00
S_ATTR_SOME_INSTRUCTIONS = 0x00000400
S_ATTR_EXT_RELOC = 0x00000200
S_ATTR_LOC_RELOC = 0x00000100

# Maps each attribute bit to its symbolic name (used by describe()).
FLAG_SECTION_ATTRIBUTES = {
    S_ATTR_PURE_INSTRUCTIONS: "S_ATTR_PURE_INSTRUCTIONS",
    S_ATTR_NO_TOC: "S_ATTR_NO_TOC",
    S_ATTR_STRIP_STATIC_SYMS: "S_ATTR_STRIP_STATIC_SYMS",
    S_ATTR_NO_DEAD_STRIP: "S_ATTR_NO_DEAD_STRIP",
    S_ATTR_LIVE_SUPPORT: "S_ATTR_LIVE_SUPPORT",
    S_ATTR_SELF_MODIFYING_CODE: "S_ATTR_SELF_MODIFYING_CODE",
    S_ATTR_DEBUG: "S_ATTR_DEBUG",
    S_ATTR_SOME_INSTRUCTIONS: "S_ATTR_SOME_INSTRUCTIONS",
    S_ATTR_EXT_RELOC: "S_ATTR_EXT_RELOC",
    S_ATTR_LOC_RELOC: "S_ATTR_LOC_RELOC"
}

# Conventional segment and section names.
SEG_PAGEZERO = "__PAGEZERO"
SEG_TEXT = "__TEXT"
SECT_TEXT = "__text"
SECT_FVMLIB_INIT0 = "__fvmlib_init0"
SECT_FVMLIB_INIT1 = "__fvmlib_init1"
SEG_DATA = "__DATA"
SECT_DATA = "__data"
SECT_BSS = "__bss"
SECT_COMMON = "__common"
SEG_OBJC = "__OBJC"
SECT_OBJC_SYMBOLS = "__symbol_table"
SECT_OBJC_MODULES = "__module_info"
SECT_OBJC_STRINGS = "__selector_strs"
SECT_OBJC_REFS = "__selector_refs"
SEG_ICON = "__ICON"
SECT_ICON_HEADER = "__header"
SECT_ICON_TIFF = "__tiff"
SEG_LINKEDIT = "__LINKEDIT"
SEG_UNIXSTACK = "__UNIXSTACK"
SEG_IMPORT = "__IMPORT"
+
+#
+# I really should remove all these _command classes because they
+# are no different. I decided to keep the load commands separate,
+# so classes like fvmlib and fvmlib_command are equivalent.
+#
+
+
class fvmlib(Structure):
    """Fixed virtual memory shared library description (obsolete)."""
    _fields_ = (
        ('name', lc_str),
        ('minor_version', mach_version_helper),
        ('header_addr', p_uint32),
    )


class fvmlib_command(Structure):
    """LC_IDFVMLIB / LC_LOADFVMLIB load command (obsolete)."""
    _fields_ = fvmlib._fields_

    def describe(self):
        # Only the numeric field is reported; the lc_str name is
        # resolved separately by the MachO reader.
        s = {}
        s['header_addr'] = int(self.header_addr)
        return s


class dylib(Structure):
    """Dynamically linked shared library description."""
    _fields_ = (
        ('name', lc_str),
        ('timestamp', mach_timestamp_helper),
        ('current_version', mach_version_helper),
        ('compatibility_version', mach_version_helper),
    )


# merged dylib structure
class dylib_command(Structure):
    """The LC_*_DYLIB family of load commands (load/id/weak/reexport/...)."""
    _fields_ = dylib._fields_

    def describe(self):
        s = {}
        s['timestamp'] = str(self.timestamp)
        s['current_version'] = str(self.current_version)
        s['compatibility_version'] = str(self.compatibility_version)
        return s


class sub_framework_command(Structure):
    """LC_SUB_FRAMEWORK: names the umbrella framework this one belongs to."""
    _fields_ = (
        ('umbrella', lc_str),
    )

    def describe(self):
        # The only payload is an lc_str, resolved by the caller.
        return {}


class sub_client_command(Structure):
    """LC_SUB_CLIENT: names a client allowed to link this subframework."""
    _fields_ = (
        ('client', lc_str),
    )

    def describe(self):
        # The only payload is an lc_str, resolved by the caller.
        return {}


class sub_umbrella_command(Structure):
    """LC_SUB_UMBRELLA: names a sub-umbrella of this umbrella framework."""
    _fields_ = (
        ('sub_umbrella', lc_str),
    )

    def describe(self):
        # The only payload is an lc_str, resolved by the caller.
        return {}


class sub_library_command(Structure):
    """LC_SUB_LIBRARY: names a sub-library of this umbrella framework."""
    _fields_ = (
        ('sub_library', lc_str),
    )

    def describe(self):
        # The only payload is an lc_str, resolved by the caller.
        return {}


class prebound_dylib_command(Structure):
    """LC_PREBOUND_DYLIB: prebinding information for a dependent library."""
    _fields_ = (
        ('name', lc_str),
        ('nmodules', p_uint32),
        ('linked_modules', lc_str),
    )

    def describe(self):
        return {'nmodules': int(self.nmodules)}


class dylinker_command(Structure):
    """LC_ID_DYLINKER / LC_LOAD_DYLINKER / LC_DYLD_ENVIRONMENT command."""
    _fields_ = (
        ('name', lc_str),
    )

    def describe(self):
        # The only payload is an lc_str, resolved by the caller.
        return {}
+
+
class thread_command(Structure):
    """LC_THREAD / LC_UNIXTHREAD: initial thread state (flavor + count)."""
    _fields_ = (
        ('flavor', p_uint32),  # architecture-specific state flavor
        ('count', p_uint32)    # number of 32-bit words of state that follow
    )

    def describe(self):
        s = {}
        s['flavor'] = int(self.flavor)
        s['count'] = int(self.count)
        return s


class entry_point_command(Structure):
    """LC_MAIN: file offset of the entry point and initial stack size."""
    _fields_ = (
        ('entryoff', p_uint64),
        ('stacksize', p_uint64),
    )

    def describe(self):
        s = {}
        s['entryoff'] = int(self.entryoff)
        s['stacksize'] = int(self.stacksize)
        return s
+
+
class routines_command(Structure):
    """LC_ROUTINES: address of the 32-bit dylib initialization routine."""
    _fields_ = (
        ('init_address', p_uint32),
        ('init_module', p_uint32),
        ('reserved1', p_uint32),
        ('reserved2', p_uint32),
        ('reserved3', p_uint32),
        ('reserved4', p_uint32),
        ('reserved5', p_uint32),
        ('reserved6', p_uint32),
    )

    def describe(self):
        s = {}
        s['init_address'] = int(self.init_address)
        s['init_module'] = int(self.init_module)
        s['reserved1'] = int(self.reserved1)
        s['reserved2'] = int(self.reserved2)
        s['reserved3'] = int(self.reserved3)
        s['reserved4'] = int(self.reserved4)
        s['reserved5'] = int(self.reserved5)
        s['reserved6'] = int(self.reserved6)
        return s


class routines_command_64(Structure):
    """LC_ROUTINES_64: address of the 64-bit dylib initialization routine."""
    _fields_ = (
        ('init_address', p_uint64),
        ('init_module', p_uint64),
        ('reserved1', p_uint64),
        ('reserved2', p_uint64),
        ('reserved3', p_uint64),
        ('reserved4', p_uint64),
        ('reserved5', p_uint64),
        ('reserved6', p_uint64),
    )

    def describe(self):
        s = {}
        s['init_address'] = int(self.init_address)
        s['init_module'] = int(self.init_module)
        s['reserved1'] = int(self.reserved1)
        s['reserved2'] = int(self.reserved2)
        s['reserved3'] = int(self.reserved3)
        s['reserved4'] = int(self.reserved4)
        s['reserved5'] = int(self.reserved5)
        s['reserved6'] = int(self.reserved6)
        return s


class symtab_command(Structure):
    """LC_SYMTAB: file offsets/sizes of the symbol and string tables."""
    _fields_ = (
        ('symoff', p_uint32),   # file offset of the symbol table
        ('nsyms', p_uint32),    # number of nlist entries
        ('stroff', p_uint32),   # file offset of the string table
        ('strsize', p_uint32),  # string table size in bytes
    )

    def describe(self):
        s = {}
        s['symoff'] = int(self.symoff)
        s['nsyms'] = int(self.nsyms)
        s['stroff'] = int(self.stroff)
        s['strsize'] = int(self.strsize)
        return s
+
+
class dysymtab_command(Structure):
    """LC_DYSYMTAB: symbol-table layout used by the dynamic linker.

    Records how the symbol table is partitioned into local, externally
    defined and undefined symbols, plus the offsets/counts of the
    auxiliary tables (table of contents, module table, reference and
    indirect symbol tables, external and local relocations).
    """
    _fields_ = (
        ('ilocalsym', p_uint32),
        ('nlocalsym', p_uint32),
        ('iextdefsym', p_uint32),
        ('nextdefsym', p_uint32),
        ('iundefsym', p_uint32),
        ('nundefsym', p_uint32),
        ('tocoff', p_uint32),
        ('ntoc', p_uint32),
        ('modtaboff', p_uint32),
        ('nmodtab', p_uint32),
        ('extrefsymoff', p_uint32),
        ('nextrefsyms', p_uint32),
        ('indirectsymoff', p_uint32),
        ('nindirectsyms', p_uint32),
        ('extreloff', p_uint32),
        ('nextrel', p_uint32),
        ('locreloff', p_uint32),
        ('nlocrel', p_uint32),
    )

    def describe(self):
        """Return every field as a plain int, keyed by its field name."""
        report = {}
        for field_name, _field_type in self._fields_:
            report[field_name] = int(getattr(self, field_name))
        return report
+
+
# Special values in the indirect symbol table.
INDIRECT_SYMBOL_LOCAL = 0x80000000
INDIRECT_SYMBOL_ABS = 0x40000000


class dylib_table_of_contents(Structure):
    """An entry in a dylib's table of contents."""
    _fields_ = (
        ('symbol_index', p_uint32),
        ('module_index', p_uint32),
    )


class dylib_module(Structure):
    """A 32-bit module table entry of a dynamically linked library."""
    _fields_ = (
        ('module_name', p_uint32),
        ('iextdefsym', p_uint32),
        ('nextdefsym', p_uint32),
        ('irefsym', p_uint32),
        ('nrefsym', p_uint32),
        ('ilocalsym', p_uint32),
        ('nlocalsym', p_uint32),
        ('iextrel', p_uint32),
        ('nextrel', p_uint32),
        ('iinit_iterm', p_uint32),
        ('ninit_nterm', p_uint32),
        ('objc_module_info_addr', p_uint32),
        ('objc_module_info_size', p_uint32),
    )


class dylib_module_64(Structure):
    """A 64-bit module table entry; note the addr/size field order differs
    from the 32-bit variant (the 64-bit address comes last)."""
    _fields_ = (
        ('module_name', p_uint32),
        ('iextdefsym', p_uint32),
        ('nextdefsym', p_uint32),
        ('irefsym', p_uint32),
        ('nrefsym', p_uint32),
        ('ilocalsym', p_uint32),
        ('nlocalsym', p_uint32),
        ('iextrel', p_uint32),
        ('nextrel', p_uint32),
        ('iinit_iterm', p_uint32),
        ('ninit_nterm', p_uint32),
        ('objc_module_info_size', p_uint32),
        ('objc_module_info_addr', p_uint64),
    )


class dylib_reference(Structure):
    """A reference table entry (a 24-bit index packed with 8 flag bits)."""
    _fields_ = (
        # XXX - ick, fix
        ('isym_flags', p_uint32),
        # ('isym', p_uint8 * 3),
        # ('flags', p_uint8),
    )


class twolevel_hints_command(Structure):
    """LC_TWOLEVEL_HINTS: location of the two-level namespace hint table."""
    _fields_ = (
        ('offset', p_uint32),  # file offset of the hint table
        ('nhints', p_uint32),  # number of twolevel_hint entries
    )

    def describe(self):
        s = {}
        s['offset'] = int(self.offset)
        s['nhints'] = int(self.nhints)
        return s


class twolevel_hint(Structure):
    """A two-level namespace lookup hint (packed sub-image + toc index)."""
    _fields_ = (
        # XXX - ick, fix
        ('isub_image_itoc', p_uint32),
        # ('isub_image', p_uint8),
        # ('itoc', p_uint8 * 3),
    )


class prebind_cksum_command(Structure):
    """LC_PREBIND_CKSUM: checksum of the prebound file."""
    _fields_ = (
        ('cksum', p_uint32),
    )

    def describe(self):
        return {'cksum': int(self.cksum)}
+
+
class symseg_command(Structure):
    """LC_SYMSEG (obsolete): location of the GDB symbol table segment."""
    _fields_ = (
        ('offset', p_uint32),
        ('size', p_uint32),
    )

    def describe(self):
        """Return a dict summary of this command.

        BUG FIX: the dict was previously built but never returned, so
        describe() always yielded None.
        """
        s = {}
        s['offset'] = int(self.offset)
        s['size'] = int(self.size)
        return s
+
+
class ident_command(Structure):
    """LC_IDENT (obsolete): free-format identification strings follow."""
    _fields_ = (
    )

    def describe(self):
        # No fixed fields to report.
        return {}


class fvmfile_command(Structure):
    """LC_FVMFILE (internal): fixed virtual memory file inclusion."""
    _fields_ = (
        ('name', lc_str),
        ('header_addr', p_uint32),
    )

    def describe(self):
        return {'header_addr': int(self.header_addr)}
+
+
class uuid_command (Structure):
    """LC_UUID: the 128-bit unique identifier of the binary."""
    _fields_ = (
        ('uuid', p_str16),
    )

    def describe(self):
        """Return the raw UUID bytes with NUL padding stripped."""
        # BUG FIX: ``uuid`` is a bytes value (p_str16 packs as '16s'), so
        # stripping with a str argument raises TypeError on Python 3.
        # b'\x00' behaves identically on Python 2, where str is bytes.
        return {'uuid': self.uuid.rstrip(b'\x00')}
+
+
class rpath_command (Structure):
    """LC_RPATH: adds a runtime search path for the dynamic linker."""
    _fields_ = (
        ('path', lc_str),
    )

    def describe(self):
        # The path string is an lc_str resolved by the MachO reader.
        return {}


class linkedit_data_command (Structure):
    """Generic load command pointing at a blob of data in __LINKEDIT
    (code signature, function starts, data-in-code, ...)."""
    _fields_ = (
        ('dataoff', p_uint32),   # file offset of the data
        ('datasize', p_uint32),  # size of the data in bytes
    )

    def describe(self):
        s = {}
        s['dataoff'] = int(self.dataoff)
        s['datasize'] = int(self.datasize)
        return s
+
+
class version_min_command (Structure):
    """LC_VERSION_MIN_*: minimum OS version and SDK the binary was built
    against, both packed as X.Y.Z in nibble groups ``xxxx.yy.zz``."""
    _fields_ = (
        ('version', p_uint32),  # X.Y.Z is encoded in nibbles xxxx.yy.zz
        ('sdk', p_uint32),
    )

    @staticmethod
    def _decode(packed):
        # Unpack the xxxx.yy.zz encoding into a "X.Y.Z" string.
        patch = packed & 0xFF
        minor = (packed >> 8) & 0xFF
        major = (packed >> 16) & 0xFFFF
        return '%d.%d.%d' % (major, minor, patch)

    def describe(self):
        return {
            'version': self._decode(int(self.version)),
            'sdk': self._decode(int(self.sdk)),
        }
+
+
class source_version_command (Structure):
    """LC_SOURCE_VERSION: version of the sources used to build the binary,
    packed as A.B.C.D.E in a24.b10.c10.d10.e10 bit fields."""
    _fields_ = (
        ('version', p_uint64),
    )

    def describe(self):
        packed = int(self.version)
        parts = [packed >> 40]
        for shift in (30, 20, 10, 0):
            parts.append((packed >> shift) & 0x3ff)
        return {'version': '.'.join(str(part) for part in parts)}
+
+
class note_command (Structure):
    """LC_NOTE: arbitrary data owned by ``data_owner`` at offset/size."""
    _fields_ = (
        ('data_owner', p_str16),
        ('offset', p_uint64),
        ('size', p_uint64),
    )


class build_version_command (Structure):
    """LC_BUILD_VERSION: target platform, min OS and SDK versions, followed
    by ``ntools`` build_tool_version entries."""
    _fields_ = (
        ('platform', p_uint32),  # one of the PLATFORM_* constants below
        ('minos', p_uint32),
        ('sdk', p_uint32),
        ('ntools', p_uint32),
    )

    # XXX: Add computed field for accessing 'tools' array


class build_tool_version (Structure):
    """One tool entry following a build_version_command."""
    _fields_ = (
        ('tool', p_uint32),     # one of the TOOL_* constants below
        ('version', p_uint32),
    )


class data_in_code_entry (Structure):
    """One entry of the LC_DATA_IN_CODE table: a data region inside __text."""
    _fields_ = (
        ('offset', p_uint32),
        ('length', p_uint32),
        ('kind', p_uint32),  # one of the DICE_KIND_* constants below
    )

    def describe(self):
        return {
            'offset': self.offset, 'length': self.length, 'kind': self.kind}


# Kinds of data-in-code entries.
DICE_KIND_DATA = 0x0001
DICE_KIND_JUMP_TABLE8 = 0x0002
DICE_KIND_JUMP_TABLE16 = 0x0003
DICE_KIND_JUMP_TABLE32 = 0x0004
DICE_KIND_ABS_JUMP_TABLE32 = 0x0005

DATA_IN_CODE_KINDS = {
    DICE_KIND_DATA: 'DICE_KIND_DATA',
    DICE_KIND_JUMP_TABLE8: 'DICE_KIND_JUMP_TABLE8',
    DICE_KIND_JUMP_TABLE16: 'DICE_KIND_JUMP_TABLE16',
    DICE_KIND_JUMP_TABLE32: 'DICE_KIND_JUMP_TABLE32',
    DICE_KIND_ABS_JUMP_TABLE32: 'DICE_KIND_ABS_JUMP_TABLE32',
}


class tlv_descriptor (Structure):
    """A thread-local variable descriptor."""
    _fields_ = (
        ('thunk', p_long),  # Actually a pointer to a function
        ('key', p_ulong),
        ('offset', p_ulong),
    )

    def describe(self):
        return {'thunk': self.thunk, 'key': self.key, 'offset': self.offset}
+
+
class encryption_info_command (Structure):
    """LC_ENCRYPTION_INFO: the encrypted range of a 32-bit binary."""
    _fields_ = (
        ('cryptoff', p_uint32),   # file offset of the encrypted range
        ('cryptsize', p_uint32),  # size of the encrypted range
        ('cryptid', p_uint32),    # encryption system, 0 = not yet encrypted
    )

    def describe(self):
        s = {}
        s['cryptoff'] = int(self.cryptoff)
        s['cryptsize'] = int(self.cryptsize)
        s['cryptid'] = int(self.cryptid)
        return s


class encryption_info_command_64 (Structure):
    """LC_ENCRYPTION_INFO_64: as above, with a padding word for alignment."""
    _fields_ = (
        ('cryptoff', p_uint32),
        ('cryptsize', p_uint32),
        ('cryptid', p_uint32),
        ('pad', p_uint32),
    )

    def describe(self):
        s = {}
        s['cryptoff'] = int(self.cryptoff)
        s['cryptsize'] = int(self.cryptsize)
        s['cryptid'] = int(self.cryptid)
        s['pad'] = int(self.pad)
        return s
+
+
class dyld_info_command (Structure):
    """LC_DYLD_INFO / LC_DYLD_INFO_ONLY: file offsets and sizes of the
    compressed rebase, binding and export information used by dyld."""
    _fields_ = (
        ('rebase_off', p_uint32),
        ('rebase_size', p_uint32),
        ('bind_off', p_uint32),
        ('bind_size', p_uint32),
        ('weak_bind_off', p_uint32),
        ('weak_bind_size', p_uint32),
        ('lazy_bind_off', p_uint32),
        ('lazy_bind_size', p_uint32),
        ('export_off', p_uint32),
        ('export_size', p_uint32),
    )

    def describe(self):
        """Return every field as a plain int, keyed by its field name."""
        report = {}
        for field_name, _field_type in self._fields_:
            report[field_name] = int(getattr(self, field_name))
        return report
+
+
class linker_option_command (Structure):
    """LC_LINKER_OPTION: count of linker option strings that follow."""
    _fields_ = (
        ('count', p_uint32),
    )

    def describe(self):
        return {'count': int(self.count)}
+
+
# Maps each load command type to the Structure subclass that parses its
# fixed-size payload.
LC_REGISTRY = {
    LC_SEGMENT: segment_command,
    LC_IDFVMLIB: fvmlib_command,
    LC_LOADFVMLIB: fvmlib_command,
    LC_ID_DYLIB: dylib_command,
    LC_LOAD_DYLIB: dylib_command,
    LC_LOAD_WEAK_DYLIB: dylib_command,
    LC_SUB_FRAMEWORK: sub_framework_command,
    LC_SUB_CLIENT: sub_client_command,
    LC_SUB_UMBRELLA: sub_umbrella_command,
    LC_SUB_LIBRARY: sub_library_command,
    LC_PREBOUND_DYLIB: prebound_dylib_command,
    LC_ID_DYLINKER: dylinker_command,
    LC_LOAD_DYLINKER: dylinker_command,
    LC_THREAD: thread_command,
    LC_UNIXTHREAD: thread_command,
    LC_ROUTINES: routines_command,
    LC_SYMTAB: symtab_command,
    LC_DYSYMTAB: dysymtab_command,
    LC_TWOLEVEL_HINTS: twolevel_hints_command,
    LC_PREBIND_CKSUM: prebind_cksum_command,
    LC_SYMSEG: symseg_command,
    LC_IDENT: ident_command,
    LC_FVMFILE: fvmfile_command,
    LC_SEGMENT_64: segment_command_64,
    LC_ROUTINES_64: routines_command_64,
    LC_UUID: uuid_command,
    LC_RPATH: rpath_command,
    LC_CODE_SIGNATURE: linkedit_data_command,
    LC_CODE_SEGMENT_SPLIT_INFO: linkedit_data_command,
    LC_REEXPORT_DYLIB: dylib_command,
    LC_LAZY_LOAD_DYLIB: dylib_command,
    LC_ENCRYPTION_INFO: encryption_info_command,
    LC_DYLD_INFO: dyld_info_command,
    LC_DYLD_INFO_ONLY: dyld_info_command,
    LC_LOAD_UPWARD_DYLIB: dylib_command,
    LC_VERSION_MIN_MACOSX: version_min_command,
    LC_VERSION_MIN_IPHONEOS: version_min_command,
    LC_FUNCTION_STARTS: linkedit_data_command,
    LC_DYLD_ENVIRONMENT: dylinker_command,
    LC_MAIN: entry_point_command,
    LC_DATA_IN_CODE: linkedit_data_command,
    LC_SOURCE_VERSION: source_version_command,
    LC_DYLIB_CODE_SIGN_DRS: linkedit_data_command,
    LC_ENCRYPTION_INFO_64: encryption_info_command_64,
    LC_LINKER_OPTION: linker_option_command,
    LC_LINKER_OPTIMIZATION_HINT: linkedit_data_command,
    LC_VERSION_MIN_TVOS: version_min_command,
    LC_VERSION_MIN_WATCHOS: version_min_command,
    LC_NOTE: note_command,
    LC_BUILD_VERSION: build_version_command,
}
+
# Human-readable names for each load command type.  Keep this table in
# sync with LC_REGISTRY above; load_command.get_cmd_name() falls back to
# the raw numeric value for any command missing here.
LC_NAMES = {
    LC_SEGMENT: 'LC_SEGMENT',
    LC_IDFVMLIB: 'LC_IDFVMLIB',
    LC_LOADFVMLIB: 'LC_LOADFVMLIB',
    LC_ID_DYLIB: 'LC_ID_DYLIB',
    LC_LOAD_DYLIB: 'LC_LOAD_DYLIB',
    LC_LOAD_WEAK_DYLIB: 'LC_LOAD_WEAK_DYLIB',
    LC_SUB_FRAMEWORK: 'LC_SUB_FRAMEWORK',
    LC_SUB_CLIENT: 'LC_SUB_CLIENT',
    LC_SUB_UMBRELLA: 'LC_SUB_UMBRELLA',
    LC_SUB_LIBRARY: 'LC_SUB_LIBRARY',
    LC_PREBOUND_DYLIB: 'LC_PREBOUND_DYLIB',
    LC_ID_DYLINKER: 'LC_ID_DYLINKER',
    LC_LOAD_DYLINKER: 'LC_LOAD_DYLINKER',
    LC_THREAD: 'LC_THREAD',
    LC_UNIXTHREAD: 'LC_UNIXTHREAD',
    LC_ROUTINES: 'LC_ROUTINES',
    LC_SYMTAB: 'LC_SYMTAB',
    LC_DYSYMTAB: 'LC_DYSYMTAB',
    LC_TWOLEVEL_HINTS: 'LC_TWOLEVEL_HINTS',
    LC_PREBIND_CKSUM: 'LC_PREBIND_CKSUM',
    LC_SYMSEG: 'LC_SYMSEG',
    LC_IDENT: 'LC_IDENT',
    LC_FVMFILE: 'LC_FVMFILE',
    LC_SEGMENT_64: 'LC_SEGMENT_64',
    LC_ROUTINES_64: 'LC_ROUTINES_64',
    LC_UUID: 'LC_UUID',
    LC_RPATH: 'LC_RPATH',
    LC_CODE_SIGNATURE: 'LC_CODE_SIGNATURE',
    LC_CODE_SEGMENT_SPLIT_INFO: 'LC_CODE_SEGMENT_SPLIT_INFO',
    LC_REEXPORT_DYLIB: 'LC_REEXPORT_DYLIB',
    LC_LAZY_LOAD_DYLIB: 'LC_LAZY_LOAD_DYLIB',
    LC_ENCRYPTION_INFO: 'LC_ENCRYPTION_INFO',
    LC_DYLD_INFO: 'LC_DYLD_INFO',
    LC_DYLD_INFO_ONLY: 'LC_DYLD_INFO_ONLY',
    LC_LOAD_UPWARD_DYLIB: 'LC_LOAD_UPWARD_DYLIB',
    LC_VERSION_MIN_MACOSX: 'LC_VERSION_MIN_MACOSX',
    LC_VERSION_MIN_IPHONEOS: 'LC_VERSION_MIN_IPHONEOS',
    LC_FUNCTION_STARTS: 'LC_FUNCTION_STARTS',
    LC_DYLD_ENVIRONMENT: 'LC_DYLD_ENVIRONMENT',
    LC_MAIN: 'LC_MAIN',
    LC_DATA_IN_CODE: 'LC_DATA_IN_CODE',
    LC_SOURCE_VERSION: 'LC_SOURCE_VERSION',
    LC_DYLIB_CODE_SIGN_DRS: 'LC_DYLIB_CODE_SIGN_DRS',
    # BUG FIX: these two commands are parsed via LC_REGISTRY but were
    # missing from this name table, so they were reported as raw numbers.
    LC_ENCRYPTION_INFO_64: 'LC_ENCRYPTION_INFO_64',
    LC_LINKER_OPTION: 'LC_LINKER_OPTION',
    LC_LINKER_OPTIMIZATION_HINT: 'LC_LINKER_OPTIMIZATION_HINT',
    LC_VERSION_MIN_TVOS: 'LC_VERSION_MIN_TVOS',
    LC_VERSION_MIN_WATCHOS: 'LC_VERSION_MIN_WATCHOS',
    LC_NOTE: 'LC_NOTE',
    LC_BUILD_VERSION: 'LC_BUILD_VERSION',
}
+
+
# this is another union.
class n_un(p_int32):
    """Offset into the string table of a symbol's name."""
    pass


class nlist(Structure):
    """A 32-bit symbol table entry (struct nlist)."""
    _fields_ = (
        ('n_un', n_un),
        ('n_type', p_uint8),
        ('n_sect', p_uint8),
        ('n_desc', p_short),
        ('n_value', p_uint32),
    )


class nlist_64(Structure):
    """A 64-bit symbol table entry (struct nlist_64)."""
    # NOTE(review): uses a list where the sibling structs use tuples —
    # harmless, but inconsistent.
    _fields_ = [
        ('n_un', n_un),
        ('n_type', p_uint8),
        ('n_sect', p_uint8),
        ('n_desc', p_short),
        ('n_value', p_int64),
    ]


# Masks splitting the n_type byte of a symbol table entry.
N_STAB = 0xe0
N_PEXT = 0x10
N_TYPE = 0x0e
N_EXT = 0x01

# Values of the N_TYPE bits.
N_UNDF = 0x0
N_ABS = 0x2
N_SECT = 0xe
N_PBUD = 0xc
N_INDR = 0xa

# Section numbers for the n_sect field.
NO_SECT = 0
MAX_SECT = 255
+
+
class relocation_info(Structure):
    """A relocation entry; the second word is an undecoded bitfield."""
    # XXX: Need to add code for decoding the bitfield!
    _fields_ = (
        ('r_address', p_uint32),
        ('_r_bitfield', p_uint32),
    )

    def _describe(self):
        return (
            ('r_address', self.r_address),
            ('_r_bitfield', self._r_bitfield),
        )
+
+
def GET_COMM_ALIGN(n_desc):
    """Extract the common-symbol alignment nibble from an n_desc value."""
    return (n_desc & 0x0f00) >> 8
+
+
def SET_COMM_ALIGN(n_desc, align):
    """Return *n_desc* with its alignment nibble replaced by *align*."""
    cleared = n_desc & 0xf0ff
    return cleared | ((align & 0x0f) << 8)
+
+
# Reference type, stored in the low nibble of a symbol's n_desc field.
REFERENCE_TYPE = 0xf
REFERENCE_FLAG_UNDEFINED_NON_LAZY = 0
REFERENCE_FLAG_UNDEFINED_LAZY = 1
REFERENCE_FLAG_DEFINED = 2
REFERENCE_FLAG_PRIVATE_DEFINED = 3
REFERENCE_FLAG_PRIVATE_UNDEFINED_NON_LAZY = 4
REFERENCE_FLAG_PRIVATE_UNDEFINED_LAZY = 5

REFERENCED_DYNAMICALLY = 0x0010
+
+
def GET_LIBRARY_ORDINAL(n_desc):
    """Extract the two-level-namespace library ordinal (high byte of n_desc)."""
    return (n_desc & 0xff00) >> 8
+
+
def SET_LIBRARY_ORDINAL(n_desc, ordinal):
    """Return *n_desc* with its high byte replaced by *ordinal*."""
    low_byte = n_desc & 0x00ff
    ordinal_byte = (ordinal & 0xff) << 8
    return low_byte | ordinal_byte
+
+
# Special library ordinal values.
SELF_LIBRARY_ORDINAL = 0x0
MAX_LIBRARY_ORDINAL = 0xfd
DYNAMIC_LOOKUP_ORDINAL = 0xfe
EXECUTABLE_ORDINAL = 0xff

# Additional n_desc flag bits.
N_NO_DEAD_STRIP = 0x0020
N_DESC_DISCARDED = 0x0020
N_WEAK_REF = 0x0040
N_WEAK_DEF = 0x0080
N_REF_TO_WEAK = 0x0080
N_ARM_THUMB_DEF = 0x0008
N_SYMBOL_RESOLVER = 0x0100
N_ALT_ENTRY = 0x0200

# /usr/include/mach-o/fat.h
# Magic numbers of universal ("fat") binaries, in both byte orders.
FAT_MAGIC = 0xcafebabe
FAT_CIGAM = 0xbebafeca
FAT_MAGIC_64 = 0xcafebabf
FAT_CIGAM_64 = 0xbfbafeca
+
+
class fat_header(Structure):
    """Header of a universal ("fat") binary: magic + architecture count."""
    _fields_ = (
        ('magic', p_uint32),
        ('nfat_arch', p_uint32),  # number of fat_arch entries that follow
    )


class fat_arch(Structure):
    """One 32-bit architecture slice of a universal binary."""
    _fields_ = (
        ('cputype', cpu_type_t),
        ('cpusubtype', cpu_subtype_t),
        ('offset', p_uint32),  # file offset of this slice
        ('size', p_uint32),    # size of this slice in bytes
        ('align', p_uint32),   # alignment as a power of two
    )


class fat_arch64(Structure):
    """One 64-bit architecture slice of a universal binary."""
    _fields_ = (
        ('cputype', cpu_type_t),
        ('cpusubtype', cpu_subtype_t),
        ('offset', p_uint64),
        ('size', p_uint64),
        ('align', p_uint32),
        ('reserved', p_uint32),
    )
+
+
# Rebase opcodes for the compressed dyld rebase information
# (dyld_info_command.rebase_off/rebase_size).
REBASE_TYPE_POINTER = 1                               # noqa: E221
REBASE_TYPE_TEXT_ABSOLUTE32 = 2                       # noqa: E221
REBASE_TYPE_TEXT_PCREL32 = 3                          # noqa: E221

REBASE_OPCODE_MASK = 0xF0                             # noqa: E221
REBASE_IMMEDIATE_MASK = 0x0F                          # noqa: E221
REBASE_OPCODE_DONE = 0x00                             # noqa: E221
REBASE_OPCODE_SET_TYPE_IMM = 0x10                     # noqa: E221
REBASE_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB = 0x20      # noqa: E221
REBASE_OPCODE_ADD_ADDR_ULEB = 0x30                    # noqa: E221
REBASE_OPCODE_ADD_ADDR_IMM_SCALED = 0x40              # noqa: E221
REBASE_OPCODE_DO_REBASE_IMM_TIMES = 0x50              # noqa: E221
REBASE_OPCODE_DO_REBASE_ULEB_TIMES = 0x60             # noqa: E221
REBASE_OPCODE_DO_REBASE_ADD_ADDR_ULEB = 0x70          # noqa: E221
REBASE_OPCODE_DO_REBASE_ULEB_TIMES_SKIPPING_ULEB = 0x80  # noqa: E221

# Bind opcodes for the compressed dyld binding information
# (dyld_info_command.bind_off/bind_size and friends).
BIND_TYPE_POINTER = 1                                 # noqa: E221
BIND_TYPE_TEXT_ABSOLUTE32 = 2                         # noqa: E221
BIND_TYPE_TEXT_PCREL32 = 3                            # noqa: E221

BIND_SPECIAL_DYLIB_SELF = 0                           # noqa: E221
BIND_SPECIAL_DYLIB_MAIN_EXECUTABLE = -1               # noqa: E221
BIND_SPECIAL_DYLIB_FLAT_LOOKUP = -2                   # noqa: E221

BIND_SYMBOL_FLAGS_WEAK_IMPORT = 0x1                   # noqa: E221
BIND_SYMBOL_FLAGS_NON_WEAK_DEFINITION = 0x8           # noqa: E221

BIND_OPCODE_MASK = 0xF0                               # noqa: E221
BIND_IMMEDIATE_MASK = 0x0F                            # noqa: E221
BIND_OPCODE_DONE = 0x00                               # noqa: E221
BIND_OPCODE_SET_DYLIB_ORDINAL_IMM = 0x10              # noqa: E221
BIND_OPCODE_SET_DYLIB_ORDINAL_ULEB = 0x20             # noqa: E221
BIND_OPCODE_SET_DYLIB_SPECIAL_IMM = 0x30              # noqa: E221
BIND_OPCODE_SET_SYMBOL_TRAILING_FLAGS_IMM = 0x40      # noqa: E221
BIND_OPCODE_SET_TYPE_IMM = 0x50                       # noqa: E221
BIND_OPCODE_SET_ADDEND_SLEB = 0x60                    # noqa: E221
BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB = 0x70        # noqa: E221
BIND_OPCODE_ADD_ADDR_ULEB = 0x80                      # noqa: E221
BIND_OPCODE_DO_BIND = 0x90                            # noqa: E221
BIND_OPCODE_DO_BIND_ADD_ADDR_ULEB = 0xA0              # noqa: E221
BIND_OPCODE_DO_BIND_ADD_ADDR_IMM_SCALED = 0xB0        # noqa: E221
BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB = 0xC0   # noqa: E221

# Flags stored in the dyld export information trie.
EXPORT_SYMBOL_FLAGS_KIND_MASK = 0x03                  # noqa: E221
EXPORT_SYMBOL_FLAGS_KIND_REGULAR = 0x00               # noqa: E221
EXPORT_SYMBOL_FLAGS_KIND_THREAD_LOCAL = 0x01          # noqa: E221
EXPORT_SYMBOL_FLAGS_WEAK_DEFINITION = 0x04            # noqa: E221
EXPORT_SYMBOL_FLAGS_REEXPORT = 0x08                   # noqa: E221
EXPORT_SYMBOL_FLAGS_STUB_AND_RESOLVER = 0x10          # noqa: E221

# Platform values for build_version_command.platform.
PLATFORM_MACOS = 1
PLATFORM_IOS = 2
PLATFORM_TVOS = 3
PLATFORM_WATCHOS = 4
PLATFORM_BRIDGEOS = 5
PLATFORM_IOSMAC = 6
PLATFORM_IOSSIMULATOR = 7
PLATFORM_TVOSSIMULATOR = 8
PLATFORM_WATCHOSSIMULATOR = 9

PLATFORM_NAMES = {
    PLATFORM_MACOS: 'macOS',
    PLATFORM_IOS: 'iOS',
    PLATFORM_TVOS: 'tvOS',
    PLATFORM_WATCHOS: 'watchOS',
    PLATFORM_BRIDGEOS: 'bridgeOS',
    PLATFORM_IOSMAC: 'ios-on-mac',
    PLATFORM_IOSSIMULATOR: 'iOS simulator',
    PLATFORM_TVOSSIMULATOR: 'tvOS simulator',
    PLATFORM_WATCHOSSIMULATOR: 'watchOS simulator',
}

# Tool values for build_tool_version.tool.
TOOL_CLANG = 1
TOOL_SWIFT = 2
TOOL_LD = 3

TOOL_NAMES = {
    TOOL_CLANG: 'clang',
    TOOL_SWIFT: 'swift',
    TOOL_LD: 'ld',
}
diff --git a/lib/spack/external/macholib/macho_dump.py b/lib/spack/external/macholib/macho_dump.py
new file mode 100644
index 0000000000..a30c2b92f8
--- /dev/null
+++ b/lib/spack/external/macholib/macho_dump.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+
+from __future__ import print_function
+
+import sys
+
+from macholib._cmdline import main as _main
+from macholib.MachO import MachO
+from macholib.mach_o import get_cpu_subtype, CPU_TYPE_NAMES
+from macholib.mach_o import MH_CIGAM_64, MH_MAGIC_64
+
# Map (struct byte-order prefix, word size) to the architecture name
# conventionally associated with that combination.
ARCH_MAP = {
    ('<', '64-bit'): 'x86_64',
    ('<', '32-bit'): 'i386',
    ('>', '64-bit'): 'ppc64',
    ('>', '32-bit'): 'ppc',
}
+
+
def print_file(fp, path):
    """Write a human-readable dump of each Mach-O header in *path* to *fp*."""
    print(path, file=fp)
    macho = MachO(path)
    for hdr in macho.headers:
        reported = set()

        # A 64-bit magic in either byte order means a 64-bit header.
        if hdr.MH_MAGIC in (MH_MAGIC_64, MH_CIGAM_64):
            sz = '64-bit'
        else:
            sz = '32-bit'

        cputype = hdr.header.cputype
        arch = CPU_TYPE_NAMES.get(cputype, cputype)
        subarch = get_cpu_subtype(cputype, hdr.header.cpusubtype)

        print(' [%s endian=%r size=%r arch=%r subarch=%r]' % (
            hdr.__class__.__name__, hdr.endian, sz, arch, subarch),
            file=fp)
        # Print each referenced library name only once per header.
        for _idx, _name, other in hdr.walkRelocatables():
            if other not in reported:
                reported.add(other)
                print('\t' + other, file=fp)
        print('', file=fp)
+
+
def main():
    """Entry point for the deprecated ``macho_dump`` console script."""
    warning = (
        "WARNING: 'macho_dump' is deprecated, use 'python -mmacholib dump' "
        "instead")
    print(warning)
    _main(print_file)
+
+
# Script entry point: exit with main()'s return value, swallowing
# Ctrl-C so an interrupted dump does not print a traceback.
if __name__ == '__main__':
    try:
        sys.exit(main())
    except KeyboardInterrupt:
        pass
diff --git a/lib/spack/external/macholib/macho_find.py b/lib/spack/external/macholib/macho_find.py
new file mode 100644
index 0000000000..8f8243e6db
--- /dev/null
+++ b/lib/spack/external/macholib/macho_find.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+from __future__ import print_function
+from macholib._cmdline import main as _main
+
+
def print_file(fp, path):
    """Write *path* followed by a newline to the file object *fp*."""
    fp.write(path)
    fp.write('\n')
+
+
def main():
    """Entry point for the deprecated ``macho_find`` console script."""
    warning = ("WARNING: 'macho_find' is deprecated, "
               "use 'python -mmacholib dump' instead")
    print(warning)
    _main(print_file)
+
+
# Script entry point; ignore Ctrl-C so interruption exits quietly.
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        pass
diff --git a/lib/spack/external/macholib/macho_standalone.py b/lib/spack/external/macholib/macho_standalone.py
new file mode 100644
index 0000000000..f9ce91e9bf
--- /dev/null
+++ b/lib/spack/external/macholib/macho_standalone.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+
+import os
+import sys
+
+from macholib.MachOStandalone import MachOStandalone
+from macholib.util import strip_files
+
+
def standaloneApp(path):
    """Make the app bundle at *path* standalone and strip its binaries.

    Exits the process with status 1 when *path* is not a directory
    containing a ``Contents`` subdirectory (i.e. not an app bundle).
    """
    looks_like_bundle = os.path.isdir(path) and os.path.exists(
        os.path.join(path, 'Contents'))
    if not looks_like_bundle:
        print(
            '%s: %s does not look like an app bundle' % (sys.argv[0], path))
        sys.exit(1)
    strip_files(MachOStandalone(path).run())
+
+
def main():
    """Entry point for the deprecated ``macho_standalone`` console script."""
    print(
        "WARNING: 'macho_standalone' is deprecated, use "
        "'python -mmacholib standalone' instead")
    bundles = sys.argv[1:]
    if not bundles:
        raise SystemExit('usage: %s [appbundle ...]' % (sys.argv[0],))
    for bundle in bundles:
        standaloneApp(bundle)
+
+
# Script entry point.
if __name__ == '__main__':
    main()
diff --git a/lib/spack/external/macholib/ptypes.py b/lib/spack/external/macholib/ptypes.py
new file mode 100644
index 0000000000..a5643696b6
--- /dev/null
+++ b/lib/spack/external/macholib/ptypes.py
@@ -0,0 +1,331 @@
+"""
+This module defines packable types, that is types than can be easily
+converted to a binary format as used in MachO headers.
+"""
+import struct
+import sys
+
+try:
+ from itertools import izip, imap
+except ImportError:
+ izip, imap = zip, map
+from itertools import chain, starmap
+
# Public API: the helpers plus every predefined packable type below.
__all__ = """
sizeof
BasePackable
Structure
pypackable
p_char
p_byte
p_ubyte
p_short
p_ushort
p_int
p_uint
p_long
p_ulong
p_longlong
p_ulonglong
p_int8
p_uint8
p_int16
p_uint16
p_int32
p_uint32
p_int64
p_uint64
p_float
p_double
""".split()
+
+
def sizeof(s):
    """
    Return the size of an object when packed.

    Packable types/instances report their own ``_size_``; raw byte
    strings pack to their own length.  Anything else is an error.
    """
    try:
        return s._size_
    except AttributeError:
        pass

    if isinstance(s, bytes):
        return len(s)

    raise ValueError(s)
+
+
class MetaPackable(type):
    """
    Metaclass for fixed-size struct.unpack-able types; ``from_tuple``
    is the designated initializer and the other constructors funnel
    into it.
    """
    def from_mmap(cls, mm, ptr, **kw):
        # Slice exactly one record out of the mapped buffer.
        return cls.from_str(mm[ptr:ptr + cls._size_], **kw)

    def from_fileobj(cls, f, **kw):
        return cls.from_str(f.read(cls._size_), **kw)

    def from_str(cls, s, **kw):
        # A per-call '_endian_' keyword overrides the class byte order.
        endian = kw.get('_endian_', cls._endian_)
        unpacked = struct.unpack(endian + cls._format_, s)
        return cls.from_tuple(unpacked, **kw)

    def from_tuple(cls, tpl, **kw):
        return cls(tpl[0], **kw)
+
+
class BasePackable(object):
    """Serialization helpers shared by all packables; subclasses must
    implement ``to_str``."""

    # Default byte order: big-endian.
    _endian_ = '>'

    def to_str(self):
        raise NotImplementedError

    def to_fileobj(self, f):
        # Route through to_str so subclasses implement only one method.
        f.write(self.to_str())

    def to_mmap(self, mm, ptr):
        mm[ptr:ptr + self._size_] = self.to_str()
+
+
# Python 2 and Python 3 spell "class with a custom metaclass" differently,
# so Packable is built by calling the metaclass directly instead of using
# either version's class-statement syntax.
def _make():
    def to_str(self):
        klass = type(self)
        byte_order = getattr(self, '_endian_', klass._endian_)
        return struct.pack(byte_order + klass._format_, self)
    return MetaPackable("Packable", (BasePackable,), {'to_str': to_str})


Packable = _make()
del _make
+
+
def pypackable(name, pytype, format):
    """
    Create a "mix-in" class combining the python type *pytype* with a
    Packable that uses the given struct *format*.
    """
    size, items = _formatinfo(format)

    def __new__(cls, *args, **kwds):
        # A per-instance byte order may be supplied via '_endian_'.
        _endian_ = kwds.pop('_endian_', cls._endian_)

        result = pytype.__new__(cls, *args, **kwds)
        result._endian_ = _endian_
        return result

    namespace = {
        '_format_': format,
        '_size_': size,
        '_items_': items,
        '__new__': __new__,
    }
    return type(Packable)(name, (pytype, Packable), namespace)
+
+
+def _formatinfo(format):
+ """
+ Calculate the size and number of items in a struct format.
+ """
+ size = struct.calcsize(format)
+ return size, len(struct.unpack(format, b'\x00' * size))
+
+
class MetaStructure(MetaPackable):
    """
    The metaclass of Structure objects that does all the magic.

    Since we can assume that all Structures have a fixed size,
    we can do a bunch of calculations up front and pack or
    unpack the whole thing in one struct call.
    """
    def __new__(cls, clsname, bases, dct):
        # Aggregate the metadata declared in '_fields_' into one combined
        # struct format plus bookkeeping for nested (multi-item) fields.
        fields = dct['_fields_']
        names = []
        types = []
        structmarks = []  # (item offset, item count, type) of nested fields
        format = ''
        items = 0
        size = 0

        def struct_property(name, typ):
            # Each field becomes a property backed by self._objects_,
            # coercing assigned values to the declared field type.

            def _get(self):
                return self._objects_[name]

            def _set(self, obj):
                if type(obj) is not typ:
                    obj = typ(obj)
                self._objects_[name] = obj

            # NOTE(review): the third positional argument of property() is
            # fdel, not doc — typ.__name__ here looks intended as a
            # docstring; confirm against upstream macholib.
            return property(_get, _set, typ.__name__)

        for name, typ in fields:
            dct[name] = struct_property(name, typ)
            names.append(name)
            types.append(typ)
            format += typ._format_
            size += typ._size_
            # Fields that unpack to more than one struct item must be
            # re-grouped in from_tuple below.
            if (typ._items_ > 1):
                structmarks.append((items, typ._items_, typ))
            items += typ._items_

        dct['_structmarks_'] = structmarks
        dct['_names_'] = names
        dct['_types_'] = types
        dct['_size_'] = size
        dct['_items_'] = items
        dct['_format_'] = format
        return super(MetaStructure, cls).__new__(cls, clsname, bases, dct)

    def from_tuple(cls, tpl, **kw):
        # Rebuild nested structure fields from the flat unpacked tuple,
        # passing plain items through unchanged.
        values = []
        current = 0
        for begin, length, typ in cls._structmarks_:
            if begin > current:
                values.extend(tpl[current:begin])
            current = begin + length
            values.append(typ.from_tuple(tpl[begin:current], **kw))
        values.extend(tpl[current:])
        return cls(*values, **kw)
+
+
# See metaclass discussion earlier in this file
def _make():
    # Build the Structure base class by calling MetaStructure directly,
    # since the class-statement metaclass syntax differs between
    # Python 2 and Python 3.
    class_dict = {}
    class_dict['_fields_'] = ()

    def as_method(function):
        # Decorator: register the function as a method of the class
        # being assembled in class_dict.
        class_dict[function.__name__] = function

    @as_method
    def __init__(self, *args, **kwargs):
        # Copy-construct when given a single instance of the same type.
        if len(args) == 1 and not kwargs and type(args[0]) is type(self):
            kwargs = args[0]._objects_
            args = ()
        self._objects_ = {}
        # Positional args map onto _names_ in declaration order; keyword
        # args must be field names (or the special '_endian_').
        iargs = chain(izip(self._names_, args), kwargs.items())
        for key, value in iargs:
            if key not in self._names_ and key != "_endian_":
                raise TypeError
            setattr(self, key, value)
        # Default-construct any field that was not given explicitly.
        for key, typ in izip(self._names_, self._types_):
            if key not in self._objects_:
                self._objects_[key] = typ()

    @as_method
    def _get_packables(self):
        # Yield the flat sequence of primitive packable values, recursing
        # into nested structures.
        for obj in imap(self._objects_.__getitem__, self._names_):
            if hasattr(obj, '_get_packables'):
                for obj in obj._get_packables():
                    yield obj

            else:
                yield obj

    @as_method
    def to_str(self):
        # Pack the entire structure with a single struct call.
        return struct.pack(
            self._endian_ + self._format_, *self._get_packables())

    @as_method
    def __cmp__(self, other):
        # Field-wise three-way comparison; only same-type comparisons
        # are meaningful.
        if type(other) is not type(self):
            raise TypeError(
                'Cannot compare objects of type %r to objects of type %r' % (
                    type(other), type(self)))
        if sys.version_info[0] == 2:
            _cmp = cmp # noqa: F821
        else:
            # Python 3 has no cmp(); emulate it.
            def _cmp(a, b):
                if a < b:
                    return -1
                elif a > b:
                    return 1
                elif a == b:
                    return 0
                else:
                    raise TypeError()

        for cmpval in starmap(
                _cmp, izip(self._get_packables(), other._get_packables())):
            if cmpval != 0:
                return cmpval
        return 0

    # The rich comparisons below all delegate to __cmp__ so Python 3
    # instances compare the same way Python 2 ones do.
    @as_method
    def __eq__(self, other):
        r = self.__cmp__(other)
        return r == 0

    @as_method
    def __ne__(self, other):
        r = self.__cmp__(other)
        return r != 0

    @as_method
    def __lt__(self, other):
        r = self.__cmp__(other)
        return r < 0

    @as_method
    def __le__(self, other):
        r = self.__cmp__(other)
        return r <= 0

    @as_method
    def __gt__(self, other):
        r = self.__cmp__(other)
        return r > 0

    @as_method
    def __ge__(self, other):
        r = self.__cmp__(other)
        return r >= 0

    @as_method
    def __repr__(self):
        # <TypeName field=value ...> listing every declared field.
        result = []
        result.append('<')
        result.append(type(self).__name__)
        for nm in self._names_:
            result.append(' %s=%r' % (nm, getattr(self, nm)))
        result.append('>')
        return ''.join(result)

    return MetaStructure("Structure", (BasePackable,), class_dict)


Structure = _make()
del _make
+
try:
    long
except NameError:
    # Python 3 has no separate 'long' type; use int.
    long = int

# export common packables with predictable names
p_char = pypackable('p_char', bytes, 'c')
p_int8 = pypackable('p_int8', int, 'b')
p_uint8 = pypackable('p_uint8', int, 'B')
p_int16 = pypackable('p_int16', int, 'h')
p_uint16 = pypackable('p_uint16', int, 'H')
p_int32 = pypackable('p_int32', int, 'i')
p_uint32 = pypackable('p_uint32', long, 'I')
p_int64 = pypackable('p_int64', long, 'q')
p_uint64 = pypackable('p_uint64', long, 'Q')
p_float = pypackable('p_float', float, 'f')
p_double = pypackable('p_double', float, 'd')

# Deprecated names, need trick to emit deprecation warning.
p_byte = p_int8
p_ubyte = p_uint8
p_short = p_int16
p_ushort = p_uint16
p_int = p_long = p_int32
p_uint = p_ulong = p_uint32
p_longlong = p_int64
p_ulonglong = p_uint64
diff --git a/lib/spack/external/macholib/util.py b/lib/spack/external/macholib/util.py
new file mode 100644
index 0000000000..99b682a27f
--- /dev/null
+++ b/lib/spack/external/macholib/util.py
@@ -0,0 +1,258 @@
+import os
+import sys
+import stat
+import struct
+import shutil
+
+from macholib import mach_o
+
# The four valid "thin" Mach-O magic numbers (32/64-bit, both byte
# orders), packed big-endian for direct comparison with file contents.
MAGIC = [
    struct.pack('!L', getattr(mach_o, 'MH_' + _))
    for _ in ['MAGIC', 'CIGAM', 'MAGIC_64', 'CIGAM_64']
]
# Magic number of a "fat" (multi-architecture) Mach-O file.
FAT_MAGIC_BYTES = struct.pack('!L', mach_o.FAT_MAGIC)
MAGIC_LEN = 4
# Command prefix used by strip_files() below.
STRIPCMD = ['/usr/bin/strip', '-x', '-S', '-']

try:
    unicode
except NameError:
    # Python 3 has no 'unicode'; its text type is str.
    unicode = str
+
+
def fsencoding(s, encoding=sys.getfilesystemencoding()):
    """
    Ensure the given argument is in filesystem encoding (not unicode).

    NOTE: the default *encoding* is captured once, at import time.
    """
    if not isinstance(s, unicode):
        return s
    return s.encode(encoding)
+
+
def move(src, dst):
    """shutil.move wrapper that forces both paths to filesystem encoding."""
    src, dst = fsencoding(src), fsencoding(dst)
    shutil.move(src, dst)
+
+
def copy2(src, dst):
    """shutil.copy2 wrapper that forces both paths to filesystem encoding."""
    src, dst = fsencoding(src), fsencoding(dst)
    shutil.copy2(src, dst)
+
+
def flipwritable(fn, mode=None):
    """
    Flip the writability of a file and return the old mode. Returns None
    if the file is already writable.

    The *mode* parameter is accepted for API compatibility; it is unused.
    """
    if os.access(fn, os.W_OK):
        # Already writable: nothing to change, nothing to restore.
        return None
    previous = os.stat(fn).st_mode
    os.chmod(fn, stat.S_IWRITE | previous)
    return previous
+
+
class fileview(object):
    """
    A proxy for file-like objects that exposes a given window
    [start, start + size] of the underlying file.
    """

    def __init__(self, fileobj, start, size):
        self._fileobj = fileobj
        self._start = start
        self._end = start + size

    def __repr__(self):
        return '<fileview [%d, %d] %r>' % (
            self._start, self._end, self._fileobj)

    def tell(self):
        # Positions reported to callers are relative to the window start.
        return self._fileobj.tell() - self._start

    def _checkwindow(self, seekto, op):
        # Reject any absolute position that falls outside the window.
        if self._start <= seekto <= self._end:
            return
        raise IOError("%s to offset %d is outside window [%d, %d]" % (
            op, seekto, self._start, self._end))

    def seek(self, offset, whence=0):
        if whence == 0:
            target = self._start + offset            # from window start
        elif whence == 1:
            target = self._fileobj.tell() + offset   # from current position
        elif whence == 2:
            target = self._end + offset              # from window end
        else:
            raise IOError("Invalid whence argument to seek: %r" % (whence,))
        self._checkwindow(target, 'seek')
        self._fileobj.seek(target)

    def write(self, bytes):
        here = self._fileobj.tell()
        # Both the start and the end of the write must stay inside.
        self._checkwindow(here, 'write')
        self._checkwindow(here + len(bytes), 'write')
        self._fileobj.write(bytes)

    def read(self, size=sys.maxsize):
        if size < 0:
            raise ValueError(
                "Invalid size %s while reading from %s", size, self._fileobj)
        here = self._fileobj.tell()
        self._checkwindow(here, 'read')
        # Clamp the read so it cannot run past the end of the window.
        return self._fileobj.read(min(size, self._end - here))
+
+
def mergecopy(src, dest):
    """
    copy2, but only if the destination isn't up to date
    """
    if os.path.exists(dest):
        # Skip the copy when the destination is at least as new.
        if os.stat(dest).st_mtime >= os.stat(src).st_mtime:
            return

    copy2(src, dest)
+
+
def mergetree(src, dst, condition=None, copyfn=mergecopy, srcbase=None):
    """
    Recursively merge a directory tree using mergecopy().

    condition: optional predicate; source paths it rejects are skipped.
    copyfn:    function used to copy individual files.
    srcbase:   root of the original tree (set automatically on recursion).

    Raises IOError carrying a list of (src, dst, exception) tuples when
    any individual copy fails.
    """
    src = fsencoding(src)
    dst = fsencoding(dst)
    if srcbase is None:
        srcbase = src
    names = map(fsencoding, os.listdir(src))
    try:
        os.makedirs(dst)
    except OSError:
        # Destination directory may already exist.
        pass
    errors = []
    for name in names:
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        if condition is not None and not condition(srcname):
            continue
        try:
            if os.path.islink(srcname):
                # XXX: This is naive at best, should check srcbase(?)
                realsrc = os.readlink(srcname)
                os.symlink(realsrc, dstname)
            elif os.path.isdir(srcname):
                mergetree(
                    srcname, dstname,
                    condition=condition, copyfn=copyfn, srcbase=srcbase)
            else:
                copyfn(srcname, dstname)
        except (IOError, os.error) as why:
            # Collect failures so the rest of the tree is still processed.
            errors.append((srcname, dstname, why))
    if errors:
        raise IOError(errors)
+
+
def sdk_normalize(filename):
    """
    Normalize a path to strip out the SDK portion, normally so that it
    can be decided whether it is in a system path or not.
    """
    if filename.startswith('/Developer/SDKs/'):
        # Drop the 'Developer/SDKs/<sdk>' components, leaving the path
        # as it would appear on a live system.
        parts = filename.split('/')
        filename = '/'.join(parts[:1] + parts[4:])
    return filename


# Paths under /System or /usr that should NOT be treated as system files.
NOT_SYSTEM_FILES = []


def in_system_path(filename):
    """
    Return True if the file is in a system path
    """
    fn = sdk_normalize(os.path.realpath(filename))
    if fn.startswith('/usr/local/'):
        # /usr/local is user-managed even though it lives under /usr.
        return False
    if fn.startswith('/System/') or fn.startswith('/usr/'):
        return fn not in NOT_SYSTEM_FILES
    return False
+
+
def has_filename_filter(module):
    """
    Return False if the module does not have a filename attribute
    """
    # A missing attribute and an explicit None are treated the same.
    filename = getattr(module, 'filename', None)
    return filename is not None
+
+
def get_magic():
    """
    Get a list of valid Mach-O header signatures, not including the fat header
    """
    # Returns the module-level MAGIC list (big-endian packed MH_* magics).
    return MAGIC
+
+
def is_platform_file(path):
    """
    Return True if the file is Mach-O

    Symlinks and missing files are rejected outright; fat files are
    judged by the magic of their first contained architecture.
    """
    if not os.path.exists(path) or os.path.islink(path):
        return False
    # If the header is fat, we need to read into the first arch
    with open(path, 'rb') as fileobj:
        bytes = fileobj.read(MAGIC_LEN)
        if bytes == FAT_MAGIC_BYTES:
            # Read in the fat header
            fileobj.seek(0)
            header = mach_o.fat_header.from_fileobj(fileobj, _endian_='>')
            if header.nfat_arch < 1:
                return False
            # Read in the first fat arch header
            arch = mach_o.fat_arch.from_fileobj(fileobj, _endian_='>')
            fileobj.seek(arch.offset)
            # Read magic off the first header
            bytes = fileobj.read(MAGIC_LEN)
    # Compare against the valid thin Mach-O magic numbers.
    for magic in MAGIC:
        if bytes == magic:
            return True
    return False
+
+
def iter_platform_files(dst):
    """
    Walk a directory and yield each full path that is a Mach-O file
    """
    for root, _dirs, filenames in os.walk(dst):
        for name in filenames:
            candidate = os.path.join(root, name)
            if is_platform_file(candidate):
                yield candidate
+
+
def strip_files(files, argv_max=(256 * 1024)):
    """
    Strip a list of files

    Runs STRIPCMD over the files in batches sized to keep the argument
    list under *argv_max* bytes, temporarily making read-only files
    writable and restoring their original mode afterwards.
    """
    # Pair each file with its original mode (None means already writable).
    tostrip = [(fn, flipwritable(fn)) for fn in files]
    while tostrip:
        cmd = list(STRIPCMD)
        flips = []
        pathlen = sum([len(s) + 1 for s in cmd])
        while pathlen < argv_max:
            if not tostrip:
                break
            added, flip = tostrip.pop()
            pathlen += len(added) + 1
            cmd.append(added)
            flips.append((added, flip))
        else:
            # Batch filled up: put the last file back for the next batch.
            cmd.pop()
            tostrip.append(flips.pop())
        os.spawnv(os.P_WAIT, cmd[0], cmd)
        # Restore the original writability of this batch's files.
        for args in flips:
            flipwritable(*args)