author    Massimiliano Culpo <massimiliano.culpo@gmail.com>  2022-11-16 15:41:16 +0100
committer Todd Gamblin <tgamblin@llnl.gov>  2023-01-04 09:43:04 -0800
commit    51751894122ff02f96a3df8fcdb37884deebf385 (patch)
tree      4dde53e04286df69bb3d2b22141f507efc3404ca /lib/spack/external
parent    86378502f9fda95725c535614d587d4c345aad08 (diff)
Delete outdated externals
Diffstat (limited to 'lib/spack/external')
-rw-r--r--  lib/spack/external/altgraph/Dot.py  321
-rw-r--r--  lib/spack/external/altgraph/Graph.py  682
-rw-r--r--  lib/spack/external/altgraph/GraphAlgo.py  171
-rw-r--r--  lib/spack/external/altgraph/GraphStat.py  73
-rw-r--r--  lib/spack/external/altgraph/GraphUtil.py  139
-rw-r--r--  lib/spack/external/altgraph/ObjectGraph.py  212
-rw-r--r--  lib/spack/external/altgraph/__init__.py  148
-rw-r--r--  lib/spack/external/attr/LICENSE  21
-rw-r--r--  lib/spack/external/attr/__init__.py  78
-rw-r--r--  lib/spack/external/attr/_cmp.py  152
-rw-r--r--  lib/spack/external/attr/_compat.py  242
-rw-r--r--  lib/spack/external/attr/_config.py  23
-rw-r--r--  lib/spack/external/attr/_funcs.py  395
-rw-r--r--  lib/spack/external/attr/_make.py  3052
-rw-r--r--  lib/spack/external/attr/_next_gen.py  158
-rw-r--r--  lib/spack/external/attr/_version_info.py  85
-rw-r--r--  lib/spack/external/attr/converters.py  111
-rw-r--r--  lib/spack/external/attr/exceptions.py  92
-rw-r--r--  lib/spack/external/attr/filters.py  52
-rw-r--r--  lib/spack/external/attr/setters.py  77
-rw-r--r--  lib/spack/external/attr/validators.py  379
-rw-r--r--  lib/spack/external/distro.py  1386
-rw-r--r--  lib/spack/external/jinja2/LICENSE.rst  28
-rw-r--r--  lib/spack/external/jinja2/__init__.py  44
-rw-r--r--  lib/spack/external/jinja2/_compat.py  132
-rw-r--r--  lib/spack/external/jinja2/_identifier.py  6
-rw-r--r--  lib/spack/external/jinja2/asyncfilters.py  158
-rw-r--r--  lib/spack/external/jinja2/asyncsupport.py  264
-rw-r--r--  lib/spack/external/jinja2/bccache.py  350
-rw-r--r--  lib/spack/external/jinja2/compiler.py  1843
-rw-r--r--  lib/spack/external/jinja2/constants.py  21
-rw-r--r--  lib/spack/external/jinja2/debug.py  268
-rw-r--r--  lib/spack/external/jinja2/defaults.py  44
-rw-r--r--  lib/spack/external/jinja2/environment.py  1362
-rw-r--r--  lib/spack/external/jinja2/exceptions.py  177
-rw-r--r--  lib/spack/external/jinja2/ext.py  704
-rw-r--r--  lib/spack/external/jinja2/filters.py  1382
-rw-r--r--  lib/spack/external/jinja2/idtracking.py  290
-rw-r--r--  lib/spack/external/jinja2/lexer.py  848
-rw-r--r--  lib/spack/external/jinja2/loaders.py  504
-rw-r--r--  lib/spack/external/jinja2/meta.py  101
-rw-r--r--  lib/spack/external/jinja2/nativetypes.py  94
-rw-r--r--  lib/spack/external/jinja2/nodes.py  1088
-rw-r--r--  lib/spack/external/jinja2/optimizer.py  41
-rw-r--r--  lib/spack/external/jinja2/parser.py  939
-rw-r--r--  lib/spack/external/jinja2/runtime.py  1011
-rw-r--r--  lib/spack/external/jinja2/sandbox.py  510
-rw-r--r--  lib/spack/external/jinja2/tests.py  215
-rw-r--r--  lib/spack/external/jinja2/utils.py  737
-rw-r--r--  lib/spack/external/jinja2/visitor.py  81
-rw-r--r--  lib/spack/external/jsonschema/COPYING  19
-rw-r--r--  lib/spack/external/jsonschema/__init__.py  37
-rw-r--r--  lib/spack/external/jsonschema/__main__.py  2
-rw-r--r--  lib/spack/external/jsonschema/_format.py  425
-rw-r--r--  lib/spack/external/jsonschema/_legacy_validators.py  141
-rw-r--r--  lib/spack/external/jsonschema/_reflect.py  155
-rw-r--r--  lib/spack/external/jsonschema/_types.py  188
-rw-r--r--  lib/spack/external/jsonschema/_utils.py  212
-rw-r--r--  lib/spack/external/jsonschema/_validators.py  373
-rw-r--r--  lib/spack/external/jsonschema/cli.py  90
-rw-r--r--  lib/spack/external/jsonschema/compat.py  55
-rw-r--r--  lib/spack/external/jsonschema/exceptions.py  374
-rw-r--r--  lib/spack/external/jsonschema/schemas/draft3.json  199
-rw-r--r--  lib/spack/external/jsonschema/schemas/draft4.json  222
-rw-r--r--  lib/spack/external/jsonschema/schemas/draft6.json  153
-rw-r--r--  lib/spack/external/jsonschema/schemas/draft7.json  166
-rw-r--r--  lib/spack/external/jsonschema/validators.py  970
-rw-r--r--  lib/spack/external/macholib/MachO.py  471
-rw-r--r--  lib/spack/external/macholib/MachOGraph.py  141
-rw-r--r--  lib/spack/external/macholib/MachOStandalone.py  173
-rw-r--r--  lib/spack/external/macholib/SymbolTable.py  104
-rw-r--r--  lib/spack/external/macholib/__init__.py  8
-rw-r--r--  lib/spack/external/macholib/__main__.py  80
-rw-r--r--  lib/spack/external/macholib/_cmdline.py  49
-rw-r--r--  lib/spack/external/macholib/dyld.py  230
-rw-r--r--  lib/spack/external/macholib/dylib.py  45
-rw-r--r--  lib/spack/external/macholib/framework.py  45
-rw-r--r--  lib/spack/external/macholib/itergraphreport.py  73
-rw-r--r--  lib/spack/external/macholib/mach_o.py  1636
-rw-r--r--  lib/spack/external/macholib/macho_dump.py  57
-rw-r--r--  lib/spack/external/macholib/macho_find.py  22
-rw-r--r--  lib/spack/external/macholib/macho_standalone.py  30
-rw-r--r--  lib/spack/external/macholib/ptypes.py  334
-rw-r--r--  lib/spack/external/macholib/util.py  262
-rw-r--r--  lib/spack/external/markupsafe/LICENSE.rst  28
-rw-r--r--  lib/spack/external/markupsafe/README.rst  69
-rw-r--r--  lib/spack/external/markupsafe/__init__.py  327
-rw-r--r--  lib/spack/external/markupsafe/_compat.py  33
-rw-r--r--  lib/spack/external/markupsafe/_constants.py  264
-rw-r--r--  lib/spack/external/markupsafe/_native.py  69
-rw-r--r--  lib/spack/external/pyrsistent/LICENSE  22
-rw-r--r--  lib/spack/external/pyrsistent/__init__.py  6
-rw-r--r--  lib/spack/external/pyrsistent/_compat.py  31
-rw-r--r--  lib/spack/external/pyrsistent/_pmap.py  460
-rw-r--r--  lib/spack/external/pyrsistent/_pvector.py  713
-rw-r--r--  lib/spack/external/pyrsistent/_transformations.py  143
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/LICENSE  21
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/__init__.py  8
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/_argcomplete.py  106
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/_code/__init__.py  10
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/_code/_py2traceback.py  85
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/_code/code.py  908
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/_code/source.py  416
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/_pluggy.py  11
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/_version.py  4
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/assertion/__init__.py  148
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/assertion/rewrite.py  952
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/assertion/truncate.py  102
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/assertion/util.py  310
-rwxr-xr-x  lib/spack/external/pytest-fallback/_pytest/cacheprovider.py  260
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/capture.py  577
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/compat.py  326
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/config.py  1398
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/debugging.py  123
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/deprecated.py  42
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/doctest.py  362
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/fixtures.py  1135
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/freeze_support.py  43
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/helpconfig.py  184
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/hookspec.py  423
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/junitxml.py  453
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/main.py  838
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/mark.py  465
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/monkeypatch.py  259
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/nodes.py  37
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/nose.py  73
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/outcomes.py  140
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/pastebin.py  100
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/pytester.py  1167
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/python.py  1173
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/python_api.py  629
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/recwarn.py  205
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/resultlog.py  113
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/runner.py  508
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/setuponly.py  74
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/setupplan.py  25
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/skipping.py  372
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/terminal.py  650
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/tmpdir.py  126
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/unittest.py  239
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/vendored_packages/README.md  13
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/vendored_packages/__init__.py  0
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/DESCRIPTION.rst  11
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/INSTALLER  1
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/LICENSE.txt  22
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/METADATA  40
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/RECORD  9
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/WHEEL  6
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/metadata.json  1
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/top_level.txt  1
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy.py  782
-rw-r--r--  lib/spack/external/pytest-fallback/_pytest/warnings.py  94
-rw-r--r--  lib/spack/external/pytest-fallback/py/__init__.py  152
-rw-r--r--  lib/spack/external/pytest-fallback/py/__metainfo.py  2
-rw-r--r--  lib/spack/external/pytest-fallback/py/_apipkg.py  181
-rw-r--r--  lib/spack/external/pytest-fallback/py/_builtin.py  248
-rw-r--r--  lib/spack/external/pytest-fallback/py/_code/__init__.py  1
-rw-r--r--  lib/spack/external/pytest-fallback/py/_code/_assertionnew.py  339
-rw-r--r--  lib/spack/external/pytest-fallback/py/_code/_assertionold.py  555
-rw-r--r--  lib/spack/external/pytest-fallback/py/_code/_py2traceback.py  79
-rw-r--r--  lib/spack/external/pytest-fallback/py/_code/assertion.py  94
-rw-r--r--  lib/spack/external/pytest-fallback/py/_code/code.py  787
-rw-r--r--  lib/spack/external/pytest-fallback/py/_code/source.py  411
-rw-r--r--  lib/spack/external/pytest-fallback/py/_error.py  89
-rw-r--r--  lib/spack/external/pytest-fallback/py/_iniconfig.py  162
-rw-r--r--  lib/spack/external/pytest-fallback/py/_io/__init__.py  1
-rw-r--r--  lib/spack/external/pytest-fallback/py/_io/capture.py  371
-rw-r--r--  lib/spack/external/pytest-fallback/py/_io/saferepr.py  71
-rw-r--r--  lib/spack/external/pytest-fallback/py/_io/terminalwriter.py  357
-rw-r--r--  lib/spack/external/pytest-fallback/py/_log/__init__.py  2
-rw-r--r--  lib/spack/external/pytest-fallback/py/_log/log.py  186
-rw-r--r--  lib/spack/external/pytest-fallback/py/_log/warning.py  76
-rw-r--r--  lib/spack/external/pytest-fallback/py/_path/__init__.py  1
-rw-r--r--  lib/spack/external/pytest-fallback/py/_path/cacheutil.py  114
-rw-r--r--  lib/spack/external/pytest-fallback/py/_path/common.py  445
-rw-r--r--  lib/spack/external/pytest-fallback/py/_path/local.py  930
-rw-r--r--  lib/spack/external/pytest-fallback/py/_path/svnurl.py  380
-rw-r--r--  lib/spack/external/pytest-fallback/py/_path/svnwc.py  1240
-rw-r--r--  lib/spack/external/pytest-fallback/py/_process/__init__.py  1
-rw-r--r--  lib/spack/external/pytest-fallback/py/_process/cmdexec.py  49
-rw-r--r--  lib/spack/external/pytest-fallback/py/_process/forkedfunc.py  120
-rw-r--r--  lib/spack/external/pytest-fallback/py/_process/killproc.py  23
-rw-r--r--  lib/spack/external/pytest-fallback/py/_std.py  18
-rw-r--r--  lib/spack/external/pytest-fallback/py/_xmlgen.py  255
-rw-r--r--  lib/spack/external/pytest-fallback/py/test.py  10
-rw-r--r--  lib/spack/external/pytest-fallback/pytest.py  100
-rw-r--r--  lib/spack/external/six.py  998
187 files changed, 0 insertions, 56425 deletions
diff --git a/lib/spack/external/altgraph/Dot.py b/lib/spack/external/altgraph/Dot.py
deleted file mode 100644
index f265a7121c..0000000000
--- a/lib/spack/external/altgraph/Dot.py
+++ /dev/null
@@ -1,321 +0,0 @@
-"""
-altgraph.Dot - Interface to the dot language
-============================================
-
-The :py:mod:`~altgraph.Dot` module provides a simple interface to the
-file format used in the
-`graphviz <http://www.research.att.com/sw/tools/graphviz/>`_
-program. The module is intended to offload the most tedious part of the process
-(the **dot** file generation) while transparently exposing most of its
-features.
-
-To display the graphs or to generate image files the
-`graphviz <http://www.research.att.com/sw/tools/graphviz/>`_
-package needs to be installed on the system; moreover, the :command:`dot` and
-:command:`dotty` programs must be accessible on the program path so that they
-can be run from processes spawned within the module.
-
-Example usage
--------------
-
-Here is a typical usage::
-
- from altgraph import Graph, Dot
-
- # create a graph
- edges = [ (1,2), (1,3), (3,4), (3,5), (4,5), (5,4) ]
- graph = Graph.Graph(edges)
-
- # create a dot representation of the graph
- dot = Dot.Dot(graph)
-
- # display the graph
- dot.display()
-
- # save the dot representation into the mydot.dot file
- dot.save_dot(file_name='mydot.dot')
-
- # save dot file as gif image into the graph.gif file
- dot.save_img(file_name='graph', file_type='gif')
-
-Directed graph and non-directed graph
--------------------------------------
-
-The Dot class can be used for both directed and non-directed graphs
-by passing the ``graphtype`` parameter.
-
-Example::
-
- # create directed graph(default)
- dot = Dot.Dot(graph, graphtype="digraph")
-
- # create non-directed graph
- dot = Dot.Dot(graph, graphtype="graph")
-
-Customizing the output
-----------------------
-
-The graph drawing process may be customized by passing
-valid :command:`dot` parameters for the nodes and edges. For a list of all
-parameters see the `graphviz <http://www.research.att.com/sw/tools/graphviz/>`_
-documentation.
-
-Example::
-
- # customizing the way the overall graph is drawn
- dot.style(size='10,10', rankdir='RL', page='5, 5' , ranksep=0.75)
-
- # customizing node drawing
- dot.node_style(1, label='BASE_NODE',shape='box', color='blue' )
- dot.node_style(2, style='filled', fillcolor='red')
-
- # customizing edge drawing
- dot.edge_style(1, 2, style='dotted')
- dot.edge_style(3, 5, arrowhead='dot', label='binds', labelangle='90')
- dot.edge_style(4, 5, arrowsize=2, style='bold')
-
-
-.. note::
-
- dotty (invoked via :py:func:`~altgraph.Dot.display`) may not be able to
- display all graphics styles. To verify the output save it to an image file
- and look at it that way.
-
-Valid attributes
-----------------
-
- - dot styles, passed via the :py:meth:`Dot.style` method::
-
- rankdir = 'LR' (draws the graph horizontally, left to right)
- ranksep = number (rank separation in inches)
-
- - node attributes, passed via the :py:meth:`Dot.node_style` method::
-
- style = 'filled' | 'invisible' | 'diagonals' | 'rounded'
- shape = 'box' | 'ellipse' | 'circle' | 'point' | 'triangle'
-
- - edge attributes, passed via the :py:meth:`Dot.edge_style` method::
-
- style = 'dashed' | 'dotted' | 'solid' | 'invis' | 'bold'
- arrowhead = 'box' | 'crow' | 'diamond' | 'dot' | 'inv' | 'none'
- | 'tee' | 'vee'
- weight = number (the larger the number the closer the nodes will be)
-
- - valid `graphviz colors
- <http://www.research.att.com/~erg/graphviz/info/colors.html>`_
-
- - for more details on how to control the graph drawing process see the
- `graphviz reference
- <http://www.research.att.com/sw/tools/graphviz/refs.html>`_.
-"""
-import os
-import warnings
-
-from altgraph import GraphError
-
-
-class Dot(object):
- """
- A class providing a **graphviz** (dot language) representation
- allowing fine-grained control over how the graph is
- displayed.
-
- If the :command:`dot` and :command:`dotty` programs are not on the current
- system path, their location needs to be specified in the constructor.
- """
-
- def __init__(
- self,
- graph=None,
- nodes=None,
- edgefn=None,
- nodevisitor=None,
- edgevisitor=None,
- name="G",
- dot="dot",
- dotty="dotty",
- neato="neato",
- graphtype="digraph",
- ):
- """
- Initialization.
- """
- self.name, self.attr = name, {}
-
- assert graphtype in ["graph", "digraph"]
- self.type = graphtype
-
- self.temp_dot = "tmp_dot.dot"
- self.temp_neo = "tmp_neo.dot"
-
- self.dot, self.dotty, self.neato = dot, dotty, neato
-
- # self.nodes: node styles
- # self.edges: edge styles
- self.nodes, self.edges = {}, {}
-
- if graph is not None and nodes is None:
- nodes = graph
- if graph is not None and edgefn is None:
-
- def edgefn(node, graph=graph):
- return graph.out_nbrs(node)
-
- if nodes is None:
- nodes = ()
-
- seen = set()
- for node in nodes:
- if nodevisitor is None:
- style = {}
- else:
- style = nodevisitor(node)
- if style is not None:
- self.nodes[node] = {}
- self.node_style(node, **style)
- seen.add(node)
- if edgefn is not None:
- for head in seen:
- for tail in (n for n in edgefn(head) if n in seen):
- if edgevisitor is None:
- edgestyle = {}
- else:
- edgestyle = edgevisitor(head, tail)
- if edgestyle is not None:
- if head not in self.edges:
- self.edges[head] = {}
- self.edges[head][tail] = {}
- self.edge_style(head, tail, **edgestyle)
-
- def style(self, **attr):
- """
- Changes the overall style
- """
- self.attr = attr
-
- def display(self, mode="dot"):
- """
- Displays the current graph via dotty
- """
-
- if mode == "neato":
- self.save_dot(self.temp_neo)
- neato_cmd = "%s -o %s %s" % (self.neato, self.temp_dot, self.temp_neo)
- os.system(neato_cmd)
- else:
- self.save_dot(self.temp_dot)
-
- plot_cmd = "%s %s" % (self.dotty, self.temp_dot)
- os.system(plot_cmd)
-
- def node_style(self, node, **kwargs):
- """
- Modifies a node's style in the dot representation.
- """
- if node not in self.edges:
- self.edges[node] = {}
- self.nodes[node] = kwargs
-
- def all_node_style(self, **kwargs):
- """
- Modifies all node styles
- """
- for node in self.nodes:
- self.node_style(node, **kwargs)
-
- def edge_style(self, head, tail, **kwargs):
- """
- Modifies an edge's style in the dot representation.
- """
- if tail not in self.nodes:
- raise GraphError("invalid node %s" % (tail,))
-
- try:
- if tail not in self.edges[head]:
- self.edges[head][tail] = {}
- self.edges[head][tail] = kwargs
- except KeyError:
- raise GraphError("invalid edge %s -> %s " % (head, tail))
-
- def iterdot(self):
- # write graph title
- if self.type == "digraph":
- yield "digraph %s {\n" % (self.name,)
- elif self.type == "graph":
- yield "graph %s {\n" % (self.name,)
-
- else:
- raise GraphError("unsupported graphtype %s" % (self.type,))
-
- # write overall graph attributes
- for attr_name, attr_value in sorted(self.attr.items()):
- yield '%s="%s";' % (attr_name, attr_value)
- yield "\n"
-
- # some reusable patterns
- cpatt = '%s="%s",' # to separate attributes
- epatt = "];\n" # to end attributes
-
- # write node attributes
- for node_name, node_attr in sorted(self.nodes.items()):
- yield '\t"%s" [' % (node_name,)
- for attr_name, attr_value in sorted(node_attr.items()):
- yield cpatt % (attr_name, attr_value)
- yield epatt
-
- # write edge attributes
- for head in sorted(self.edges):
- for tail in sorted(self.edges[head]):
- if self.type == "digraph":
- yield '\t"%s" -> "%s" [' % (head, tail)
- else:
- yield '\t"%s" -- "%s" [' % (head, tail)
- for attr_name, attr_value in sorted(self.edges[head][tail].items()):
- yield cpatt % (attr_name, attr_value)
- yield epatt
-
- # finish file
- yield "}\n"
-
- def __iter__(self):
- return self.iterdot()
-
- def save_dot(self, file_name=None):
- """
- Saves the current graph representation into a file
- """
-
- if not file_name:
- warnings.warn("always pass a file_name", DeprecationWarning)
- file_name = self.temp_dot
-
- with open(file_name, "w") as fp:
- for chunk in self.iterdot():
- fp.write(chunk)
-
- def save_img(self, file_name=None, file_type="gif", mode="dot"):
- """
- Saves the dot file as an image file
- """
-
- if not file_name:
- warnings.warn("always pass a file_name", DeprecationWarning)
- file_name = "out"
-
- if mode == "neato":
- self.save_dot(self.temp_neo)
- neato_cmd = "%s -o %s %s" % (self.neato, self.temp_dot, self.temp_neo)
- os.system(neato_cmd)
- plot_cmd = self.dot
- else:
- self.save_dot(self.temp_dot)
- plot_cmd = self.dot
-
- file_name = "%s.%s" % (file_name, file_type)
- create_cmd = "%s -T%s %s -o %s" % (
- plot_cmd,
- file_type,
- self.temp_dot,
- file_name,
- )
- os.system(create_cmd)
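For context, a minimal sketch of how this deleted ``altgraph.Dot`` API was
driven, assembled from the module docstring above; it assumes the standalone
``altgraph`` package (still published on PyPI) and graphviz's :command:`dot`
on the PATH::

    from altgraph import Dot, Graph

    # build a small directed graph and wrap it in a Dot instance
    graph = Graph.Graph([(1, 2), (1, 3), (3, 4), (3, 5), (4, 5), (5, 4)])
    dot = Dot.Dot(graph, graphtype="digraph")

    # style the overall drawing, a node, and an edge
    dot.style(rankdir="LR")
    dot.node_style(1, label="BASE_NODE", shape="box")
    dot.edge_style(1, 2, style="dotted")

    # write the dot source, then render it to an image
    dot.save_dot(file_name="mydot.dot")
    dot.save_img(file_name="graph", file_type="gif")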
diff --git a/lib/spack/external/altgraph/Graph.py b/lib/spack/external/altgraph/Graph.py
deleted file mode 100644
index 8088007abd..0000000000
--- a/lib/spack/external/altgraph/Graph.py
+++ /dev/null
@@ -1,682 +0,0 @@
-"""
-altgraph.Graph - Base Graph class
-=================================
-
-..
- #--Version 2.1
- #--Bob Ippolito October, 2004
-
- #--Version 2.0
- #--Istvan Albert June, 2004
-
- #--Version 1.0
- #--Nathan Denny, May 27, 1999
-"""
-
-from collections import deque
-
-from altgraph import GraphError
-
-
-class Graph(object):
- """
- The Graph class represents a directed graph with *N* nodes and *E* edges.
-
- Naming conventions:
-
- - the prefixes such as *out*, *inc* and *all* will refer to methods
- that operate on the outgoing, incoming or all edges of that node.
-
- For example: :py:meth:`inc_degree` will refer to the degree of the node
- computed over the incoming edges (the number of neighbours linking to
- the node).
-
- - the prefixes such as *forw* and *back* will refer to the
- orientation of the edges used in the method with respect to the node.
-
- For example: :py:meth:`forw_bfs` will start at the node then use the
- outgoing edges to traverse the graph (goes forward).
- """
-
- def __init__(self, edges=None):
- """
- Initialization
- """
-
- self.next_edge = 0
- self.nodes, self.edges = {}, {}
- self.hidden_edges, self.hidden_nodes = {}, {}
-
- if edges is not None:
- for item in edges:
- if len(item) == 2:
- head, tail = item
- self.add_edge(head, tail)
- elif len(item) == 3:
- head, tail, data = item
- self.add_edge(head, tail, data)
- else:
- raise GraphError("Cannot create edge from %s" % (item,))
-
- def __repr__(self):
- return "<Graph: %d nodes, %d edges>" % (
- self.number_of_nodes(),
- self.number_of_edges(),
- )
-
- def add_node(self, node, node_data=None):
- """
- Adds a new node to the graph. Arbitrary data can be attached to the
- node via the node_data parameter. Adding the same node twice will be
- silently ignored.
-
- The node must be a hashable value.
- """
- #
- # the nodes will contain tuples that will store incoming edges,
- # outgoing edges and data
- #
- # index 0 -> incoming edges
- # index 1 -> outgoing edges
-
- if node in self.hidden_nodes:
- # Node is present, but hidden
- return
-
- if node not in self.nodes:
- self.nodes[node] = ([], [], node_data)
-
- def add_edge(self, head_id, tail_id, edge_data=1, create_nodes=True):
- """
- Adds a directed edge going from head_id to tail_id.
- Arbitrary data can be attached to the edge via edge_data.
- The nodes are created automatically if they do not exist yet (see create_nodes).
-
- :param head_id: head node
- :param tail_id: tail node
- :param edge_data: (optional) data attached to the edge
- :param create_nodes: (optional) creates the head_id or tail_id
- node in case they did not exist
- """
- # shortcut
- edge = self.next_edge
-
- # add nodes if on automatic node creation
- if create_nodes:
- self.add_node(head_id)
- self.add_node(tail_id)
-
- # update the corresponding incoming and outgoing lists in the nodes
- # index 0 -> incoming edges
- # index 1 -> outgoing edges
-
- try:
- self.nodes[tail_id][0].append(edge)
- self.nodes[head_id][1].append(edge)
- except KeyError:
- raise GraphError("Invalid nodes %s -> %s" % (head_id, tail_id))
-
- # store edge information
- self.edges[edge] = (head_id, tail_id, edge_data)
-
- self.next_edge += 1
-
- def hide_edge(self, edge):
- """
- Hides an edge from the graph. The edge may be unhidden at some later
- time.
- """
- try:
- head_id, tail_id, edge_data = self.hidden_edges[edge] = self.edges[edge]
- self.nodes[tail_id][0].remove(edge)
- self.nodes[head_id][1].remove(edge)
- del self.edges[edge]
- except KeyError:
- raise GraphError("Invalid edge %s" % edge)
-
- def hide_node(self, node):
- """
- Hides a node from the graph. The incoming and outgoing edges of the
- node will also be hidden. The node may be unhidden at some later time.
- """
- try:
- all_edges = self.all_edges(node)
- self.hidden_nodes[node] = (self.nodes[node], all_edges)
- for edge in all_edges:
- self.hide_edge(edge)
- del self.nodes[node]
- except KeyError:
- raise GraphError("Invalid node %s" % node)
-
- def restore_node(self, node):
- """
- Restores a previously hidden node back into the graph and restores
- all of its incoming and outgoing edges.
- """
- try:
- self.nodes[node], all_edges = self.hidden_nodes[node]
- for edge in all_edges:
- self.restore_edge(edge)
- del self.hidden_nodes[node]
- except KeyError:
- raise GraphError("Invalid node %s" % node)
-
- def restore_edge(self, edge):
- """
- Restores a previously hidden edge back into the graph.
- """
- try:
- head_id, tail_id, data = self.hidden_edges[edge]
- self.nodes[tail_id][0].append(edge)
- self.nodes[head_id][1].append(edge)
- self.edges[edge] = head_id, tail_id, data
- del self.hidden_edges[edge]
- except KeyError:
- raise GraphError("Invalid edge %s" % edge)
-
- def restore_all_edges(self):
- """
- Restores all hidden edges.
- """
- for edge in list(self.hidden_edges.keys()):
- try:
- self.restore_edge(edge)
- except GraphError:
- pass
-
- def restore_all_nodes(self):
- """
- Restores all hidden nodes.
- """
- for node in list(self.hidden_nodes.keys()):
- self.restore_node(node)
-
- def __contains__(self, node):
- """
- Test whether a node is in the graph
- """
- return node in self.nodes
-
- def edge_by_id(self, edge):
- """
- Returns the (head, tail) node pair for the given edge id
- """
- try:
- head, tail, data = self.edges[edge]
- except KeyError:
- head, tail = None, None
- raise GraphError("Invalid edge %s" % edge)
-
- return (head, tail)
-
- def edge_by_node(self, head, tail):
- """
- Returns the edge that connects the head_id and tail_id nodes
- """
- for edge in self.out_edges(head):
- if self.tail(edge) == tail:
- return edge
- return None
-
- def number_of_nodes(self):
- """
- Returns the number of nodes
- """
- return len(self.nodes)
-
- def number_of_edges(self):
- """
- Returns the number of edges
- """
- return len(self.edges)
-
- def __iter__(self):
- """
- Iterates over all nodes in the graph
- """
- return iter(self.nodes)
-
- def node_list(self):
- """
- Return a list of the node ids for all visible nodes in the graph.
- """
- return list(self.nodes.keys())
-
- def edge_list(self):
- """
- Returns a list of the edge ids for all visible edges in the graph.
- """
- return list(self.edges.keys())
-
- def number_of_hidden_edges(self):
- """
- Returns the number of hidden edges
- """
- return len(self.hidden_edges)
-
- def number_of_hidden_nodes(self):
- """
- Returns the number of hidden nodes
- """
- return len(self.hidden_nodes)
-
- def hidden_node_list(self):
- """
- Returns the list with the hidden nodes
- """
- return list(self.hidden_nodes.keys())
-
- def hidden_edge_list(self):
- """
- Returns a list with the hidden edges
- """
- return list(self.hidden_edges.keys())
-
- def describe_node(self, node):
- """
- return node, node data, outgoing edges, incoming edges for node
- """
- incoming, outgoing, data = self.nodes[node]
- return node, data, outgoing, incoming
-
- def describe_edge(self, edge):
- """
- return edge, edge data, head, tail for edge
- """
- head, tail, data = self.edges[edge]
- return edge, data, head, tail
-
- def node_data(self, node):
- """
- Returns the data associated with a node
- """
- return self.nodes[node][2]
-
- def edge_data(self, edge):
- """
- Returns the data associated with an edge
- """
- return self.edges[edge][2]
-
- def update_edge_data(self, edge, edge_data):
- """
- Replace the edge data for a specific edge
- """
- self.edges[edge] = self.edges[edge][0:2] + (edge_data,)
-
- def head(self, edge):
- """
- Returns the node of the head of the edge.
- """
- return self.edges[edge][0]
-
- def tail(self, edge):
- """
- Returns the node at the tail of the edge.
- """
- return self.edges[edge][1]
-
- def out_nbrs(self, node):
- """
- List of nodes connected by outgoing edges
- """
- return [self.tail(n) for n in self.out_edges(node)]
-
- def inc_nbrs(self, node):
- """
- List of nodes connected by incoming edges
- """
- return [self.head(n) for n in self.inc_edges(node)]
-
- def all_nbrs(self, node):
- """
- List of nodes connected by incoming and outgoing edges
- """
- return list(dict.fromkeys(self.inc_nbrs(node) + self.out_nbrs(node)))
-
- def out_edges(self, node):
- """
- Returns a list of the outgoing edges
- """
- try:
- return list(self.nodes[node][1])
- except KeyError:
- raise GraphError("Invalid node %s" % node)
-
- def inc_edges(self, node):
- """
- Returns a list of the incoming edges
- """
- try:
- return list(self.nodes[node][0])
- except KeyError:
- raise GraphError("Invalid node %s" % node)
-
- def all_edges(self, node):
- """
- Returns a set of the incoming and outgoing edges.
- """
- return set(self.inc_edges(node) + self.out_edges(node))
-
- def out_degree(self, node):
- """
- Returns the number of outgoing edges
- """
- return len(self.out_edges(node))
-
- def inc_degree(self, node):
- """
- Returns the number of incoming edges
- """
- return len(self.inc_edges(node))
-
- def all_degree(self, node):
- """
- The total degree of a node
- """
- return self.inc_degree(node) + self.out_degree(node)
-
- def _topo_sort(self, forward=True):
- """
- Topological sort.
-
- Returns a list of nodes where the successors (based on outgoing and
- incoming edges selected by the forward parameter) of any given node
- appear in the sequence after that node.
- """
- topo_list = []
- queue = deque()
- indeg = {}
-
- # select the operation that will be performed
- if forward:
- get_edges = self.out_edges
- get_degree = self.inc_degree
- get_next = self.tail
- else:
- get_edges = self.inc_edges
- get_degree = self.out_degree
- get_next = self.head
-
- for node in self.node_list():
- degree = get_degree(node)
- if degree:
- indeg[node] = degree
- else:
- queue.append(node)
-
- while queue:
- curr_node = queue.popleft()
- topo_list.append(curr_node)
- for edge in get_edges(curr_node):
- tail_id = get_next(edge)
- if tail_id in indeg:
- indeg[tail_id] -= 1
- if indeg[tail_id] == 0:
- queue.append(tail_id)
-
- if len(topo_list) == len(self.node_list()):
- valid = True
- else:
- # the graph has cycles, invalid topological sort
- valid = False
-
- return (valid, topo_list)
-
- def forw_topo_sort(self):
- """
- Topological sort.
-
- Returns a list of nodes where the successors (based on outgoing edges)
- of any given node appear in the sequence after that node.
- """
- return self._topo_sort(forward=True)
-
- def back_topo_sort(self):
- """
- Reverse topological sort.
-
- Returns a list of nodes where the successors (based on incoming edges)
- of any given node appear in the sequence after that node.
- """
- return self._topo_sort(forward=False)
-
- def _bfs_subgraph(self, start_id, forward=True):
- """
- Private method that creates a subgraph in BFS order.
-
- The forward parameter specifies whether it is a forward or backward
- traversal.
- """
- if forward:
- get_bfs = self.forw_bfs
- get_nbrs = self.out_nbrs
- else:
- get_bfs = self.back_bfs
- get_nbrs = self.inc_nbrs
-
- g = Graph()
- bfs_list = get_bfs(start_id)
- for node in bfs_list:
- g.add_node(node)
-
- for node in bfs_list:
- for nbr_id in get_nbrs(node):
- if forward:
- g.add_edge(node, nbr_id)
- else:
- g.add_edge(nbr_id, node)
-
- return g
-
- def forw_bfs_subgraph(self, start_id):
- """
- Creates and returns a subgraph consisting of the breadth first
- reachable nodes based on their outgoing edges.
- """
- return self._bfs_subgraph(start_id, forward=True)
-
- def back_bfs_subgraph(self, start_id):
- """
- Creates and returns a subgraph consisting of the breadth first
- reachable nodes based on the incoming edges.
- """
- return self._bfs_subgraph(start_id, forward=False)
-
- def iterdfs(self, start, end=None, forward=True):
- """
- Collects nodes in some depth-first traversal.
-
- The forward parameter specifies whether it is a forward or backward
- traversal.
- """
- visited, stack = {start}, deque([start])
-
- if forward:
- get_edges = self.out_edges
- get_next = self.tail
- else:
- get_edges = self.inc_edges
- get_next = self.head
-
- while stack:
- curr_node = stack.pop()
- yield curr_node
- if curr_node == end:
- break
- for edge in sorted(get_edges(curr_node)):
- tail = get_next(edge)
- if tail not in visited:
- visited.add(tail)
- stack.append(tail)
-
- def iterdata(self, start, end=None, forward=True, condition=None):
- """
- Perform a depth-first walk of the graph (as ``iterdfs``)
- and yield the item data of every node where condition matches. The
- condition callback is only called when node_data is not None.
- """
-
- visited, stack = {start}, deque([start])
-
- if forward:
- get_edges = self.out_edges
- get_next = self.tail
- else:
- get_edges = self.inc_edges
- get_next = self.head
-
- get_data = self.node_data
-
- while stack:
- curr_node = stack.pop()
- curr_data = get_data(curr_node)
- if curr_data is not None:
- if condition is not None and not condition(curr_data):
- continue
- yield curr_data
- if curr_node == end:
- break
- for edge in get_edges(curr_node):
- tail = get_next(edge)
- if tail not in visited:
- visited.add(tail)
- stack.append(tail)
-
- def _iterbfs(self, start, end=None, forward=True):
- """
- The forward parameter specifies whether it is a forward or backward
- traversal. Yields tuples where the first value is the node id and the
- second value is the hop count.
- """
- queue, visited = deque([(start, 0)]), {start}
-
- # the direction of the bfs depends on the edges that are sampled
- if forward:
- get_edges = self.out_edges
- get_next = self.tail
- else:
- get_edges = self.inc_edges
- get_next = self.head
-
- while queue:
- curr_node, curr_step = queue.popleft()
- yield (curr_node, curr_step)
- if curr_node == end:
- break
- for edge in get_edges(curr_node):
- tail = get_next(edge)
- if tail not in visited:
- visited.add(tail)
- queue.append((tail, curr_step + 1))
-
- def forw_bfs(self, start, end=None):
- """
- Returns a list of nodes in some forward BFS order.
-
- Starting from the start node the breadth first search proceeds along
- outgoing edges.
- """
- return [node for node, step in self._iterbfs(start, end, forward=True)]
-
- def back_bfs(self, start, end=None):
- """
- Returns a list of nodes in some backward BFS order.
-
- Starting from the start node the breadth first search proceeds along
- incoming edges.
- """
- return [node for node, _ in self._iterbfs(start, end, forward=False)]
-
- def forw_dfs(self, start, end=None):
- """
- Returns a list of nodes in some forward DFS order.
-
- Starting with the start node the depth first search proceeds along
- outgoing edges.
- """
- return list(self.iterdfs(start, end, forward=True))
-
- def back_dfs(self, start, end=None):
- """
- Returns a list of nodes in some backward DFS order.
-
- Starting from the start node the depth first search proceeds along
- incoming edges.
- """
- return list(self.iterdfs(start, end, forward=False))
-
- def connected(self):
- """
- Returns :py:data:`True` if every node in the graph can be reached from
- every other node.
- """
- node_list = self.node_list()
- for node in node_list:
- bfs_list = self.forw_bfs(node)
- if len(bfs_list) != len(node_list):
- return False
- return True
-
- def clust_coef(self, node):
- """
- Computes and returns the local clustering coefficient of node.
-
- The local clustering coefficient is the ratio of the actual number of
- edges between the neighbours of node to the maximum possible number of
- edges between those neighbours.
-
- See "Local Clustering Coefficient" on
- <http://en.wikipedia.org/wiki/Clustering_coefficient>
- for a formal definition.
- """
- num = 0
- nbr_set = set(self.out_nbrs(node))
-
- if node in nbr_set:
- nbr_set.remove(node) # loop defense
-
- for nbr in nbr_set:
- sec_set = set(self.out_nbrs(nbr))
- if nbr in sec_set:
- sec_set.remove(nbr) # loop defense
- num += len(nbr_set & sec_set)
-
- nbr_num = len(nbr_set)
- if nbr_num:
- clust_coef = float(num) / (nbr_num * (nbr_num - 1))
- else:
- clust_coef = 0.0
- return clust_coef
-
- def get_hops(self, start, end=None, forward=True):
- """
- Computes the hop distance to all nodes centered around a node.
-
- First order neighbours are at hop 1, their neighbours are at hop 2, etc.
- Uses :py:meth:`forw_bfs` or :py:meth:`back_bfs` depending on the value
- of the forward parameter. If the distance between all neighbouring
- nodes is 1 the hop number corresponds to the shortest distance between
- the nodes.
-
- :param start: the starting node
- :param end: ending node (optional). When not specified will search the
- whole graph.
- :param forward: directionality parameter (optional).
- If ``True`` (default) it uses :py:meth:`forw_bfs`, otherwise
- :py:meth:`back_bfs`.
- :return: returns a list of tuples where each tuple contains the
- node and the hop.
-
- Typical usage::
-
- >>> print (graph.get_hops(1, 8))
- [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)]
- # node 1 is at 0 hops
- # node 2 is at 1 hop
- # ...
- # node 8 is at 5 hops
- """
- if forward:
- return list(self._iterbfs(start=start, end=end, forward=True))
- else:
- return list(self._iterbfs(start=start, end=end, forward=False))
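A minimal sketch of the deleted ``Graph`` class in use, based only on the
methods shown above (it assumes the standalone ``altgraph`` package is
installed)::

    from altgraph.Graph import Graph

    g = Graph([(1, 2), (2, 3), (1, 3), (3, 4)])
    print(g.forw_bfs(1))           # breadth-first from node 1: [1, 2, 3, 4]
    valid, order = g.forw_topo_sort()
    print(valid, order)            # True and a topological order (the graph is acyclic)

    g.hide_node(3)                 # hides node 3 together with its edges
    print(g.node_list())           # [1, 2, 4]
    g.restore_node(3)              # node 3 and its edges are back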
diff --git a/lib/spack/external/altgraph/GraphAlgo.py b/lib/spack/external/altgraph/GraphAlgo.py
deleted file mode 100644
index f93e73dcda..0000000000
--- a/lib/spack/external/altgraph/GraphAlgo.py
+++ /dev/null
@@ -1,171 +0,0 @@
-"""
-altgraph.GraphAlgo - Graph algorithms
-=====================================
-"""
-from altgraph import GraphError
-
-
-def dijkstra(graph, start, end=None):
- """
- Dijkstra's algorithm for shortest paths
-
- `David Eppstein, UC Irvine, 4 April 2002
- <http://www.ics.uci.edu/~eppstein/161/python/>`_
-
- `Python Cookbook Recipe
- <http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/119466>`_
-
- Find shortest paths from the start node to all nodes nearer than or
- equal to the end node.
-
- Dijkstra's algorithm is only guaranteed to work correctly when all edge
- lengths are positive. This code does not verify this property for all
- edges (only the edges examined until the end vertex is reached), but will
- correctly compute shortest paths even for some graphs with negative edges,
- and will raise an exception if it discovers that a negative edge has
- caused it to make a mistake.
-
- Adapted to altgraph by Istvan Albert, Pennsylvania State University -
- June, 9 2004
- """
- D = {} # dictionary of final distances
- P = {} # dictionary of predecessors
- Q = _priorityDictionary() # estimated distances of non-final vertices
- Q[start] = 0
-
- for v in Q:
- D[v] = Q[v]
- if v == end:
- break
-
- for w in graph.out_nbrs(v):
- edge_id = graph.edge_by_node(v, w)
- vwLength = D[v] + graph.edge_data(edge_id)
- if w in D:
- if vwLength < D[w]:
- raise GraphError(
- "Dijkstra: found better path to already-final vertex"
- )
- elif w not in Q or vwLength < Q[w]:
- Q[w] = vwLength
- P[w] = v
-
- return (D, P)
-
-
-def shortest_path(graph, start, end):
- """
- Find a single shortest path from the *start* node to the *end* node.
- The input has the same conventions as dijkstra(). The output is a list of
- the nodes in order along the shortest path.
-
- **Note that the distances must be stored in the edge data as numeric data**
- """
-
- D, P = dijkstra(graph, start, end)
- Path = []
- while 1:
- Path.append(end)
- if end == start:
- break
- end = P[end]
- Path.reverse()
- return Path
-
-
-#
-# Utility classes and functions
-#
-class _priorityDictionary(dict):
- """
- Priority dictionary using binary heaps (internal use only)
-
- David Eppstein, UC Irvine, 8 Mar 2002
-
- Implements a data structure that acts almost like a dictionary, with
- two modifications:
-
- 1. D.smallest() returns the value x minimizing D[x]. For this to
- work correctly, all values D[x] stored in the dictionary must be
- comparable.
-
- 2. iterating "for x in D" finds and removes the items from D in sorted
- order. Each item is not removed until the next item is requested,
- so D[x] will still return a useful value until the next iteration
- of the for-loop. Each operation takes logarithmic amortized time.
- """
-
- def __init__(self):
- """
- Initialize priorityDictionary by creating binary heap of pairs
- (value,key). Note that changing or removing a dict entry will not
- remove the old pair from the heap until it is found by smallest()
- or until the heap is rebuilt.
- """
- self.__heap = []
- dict.__init__(self)
-
- def smallest(self):
- """
- Find smallest item after removing deleted items from front of heap.
- """
- if len(self) == 0:
- raise IndexError("smallest of empty priorityDictionary")
- heap = self.__heap
- while heap[0][1] not in self or self[heap[0][1]] != heap[0][0]:
- lastItem = heap.pop()
- insertionPoint = 0
- while 1:
- smallChild = 2 * insertionPoint + 1
- if (
- smallChild + 1 < len(heap)
- and heap[smallChild] > heap[smallChild + 1]
- ):
- smallChild += 1
- if smallChild >= len(heap) or lastItem <= heap[smallChild]:
- heap[insertionPoint] = lastItem
- break
- heap[insertionPoint] = heap[smallChild]
- insertionPoint = smallChild
- return heap[0][1]
-
- def __iter__(self):
- """
- Create destructive sorted iterator of priorityDictionary.
- """
-
- def iterfn():
- while len(self) > 0:
- x = self.smallest()
- yield x
- del self[x]
-
- return iterfn()
-
- def __setitem__(self, key, val):
- """
- Change value stored in dictionary and add corresponding pair to heap.
- Rebuilds the heap if the number of deleted items gets large, to avoid
- memory leakage.
- """
- dict.__setitem__(self, key, val)
- heap = self.__heap
- if len(heap) > 2 * len(self):
- self.__heap = [(v, k) for k, v in self.items()]
- self.__heap.sort()
- else:
- newPair = (val, key)
- insertionPoint = len(heap)
- heap.append(None)
- while insertionPoint > 0 and newPair < heap[(insertionPoint - 1) // 2]:
- heap[insertionPoint] = heap[(insertionPoint - 1) // 2]
- insertionPoint = (insertionPoint - 1) // 2
- heap[insertionPoint] = newPair
-
- def setdefault(self, key, val):
- """
- Reimplement setdefault to pass through our customized __setitem__.
- """
- if key not in self:
- self[key] = val
- return self[key]
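A sketch of the two public entry points above, with edge lengths stored as
numeric edge data as ``shortest_path`` requires (again assuming the
standalone ``altgraph`` package; the graph and lengths are arbitrary
example values)::

    from altgraph import Graph, GraphAlgo

    g = Graph.Graph()
    # edge_data holds the numeric edge length used by dijkstra()
    for head, tail, length in [(1, 2, 1), (2, 3, 1), (1, 3, 5), (3, 4, 1)]:
        g.add_edge(head, tail, edge_data=length)

    distances, predecessors = GraphAlgo.dijkstra(g, 1, 4)
    print(distances[4])                      # 3, via 1 -> 2 -> 3 -> 4
    print(GraphAlgo.shortest_path(g, 1, 4))  # [1, 2, 3, 4]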
diff --git a/lib/spack/external/altgraph/GraphStat.py b/lib/spack/external/altgraph/GraphStat.py
deleted file mode 100644
index 577464b41e..0000000000
--- a/lib/spack/external/altgraph/GraphStat.py
+++ /dev/null
@@ -1,73 +0,0 @@
-"""
-altgraph.GraphStat - Functions providing various graph statistics
-=================================================================
-"""
-
-
-def degree_dist(graph, limits=(0, 0), bin_num=10, mode="out"):
- """
- Computes the degree distribution for a graph.
-
- Returns a list of tuples where the first element of the tuple is the
- center of the bin representing a range of degrees and the second element
- of the tuple is the number of nodes whose degree falls in that range.
-
- Example::
-
- ....
- """
-
- deg = []
- if mode == "inc":
- get_deg = graph.inc_degree
- else:
- get_deg = graph.out_degree
-
- for node in graph:
- deg.append(get_deg(node))
-
- if not deg:
- return []
-
- results = _binning(values=deg, limits=limits, bin_num=bin_num)
-
- return results
-
-
-_EPS = 1.0 / (2.0 ** 32)
-
-
-def _binning(values, limits=(0, 0), bin_num=10):
- """
- Bins data that falls between certain limits; if the limits are (0, 0),
- the minimum and maximum values are used.
-
- Returns a list of tuples where the first element of the tuple is the
- center of the bin and the second element of the tuple is the count.
- """
- if limits == (0, 0):
- min_val, max_val = min(values) - _EPS, max(values) + _EPS
- else:
- min_val, max_val = limits
-
- # get bin size
- bin_size = (max_val - min_val) / float(bin_num)
- bins = [0] * (bin_num)
-
- # will ignore these outliers for now
- for value in values:
- try:
- if (value - min_val) >= 0:
- index = int((value - min_val) / float(bin_size))
- bins[index] += 1
- except IndexError:
- pass
-
- # make it ready for an x,y plot
- result = []
- center = (bin_size / 2) + min_val
- for i, y in enumerate(bins):
- x = center + bin_size * i
- result.append((x, y))
-
- return result
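A sketch of ``degree_dist`` on a small graph; it returns ``(bin_center,
node_count)`` tuples as described above (assumes the standalone ``altgraph``
package; the graph is an arbitrary example)::

    from altgraph import Graph, GraphStat

    g = Graph.Graph([(1, 2), (1, 3), (1, 4), (2, 3)])
    # histogram of out-degrees, binned into 5 buckets
    for center, count in GraphStat.degree_dist(g, bin_num=5):
        print(center, count)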
diff --git a/lib/spack/external/altgraph/GraphUtil.py b/lib/spack/external/altgraph/GraphUtil.py
deleted file mode 100644
index cfd6a34f3c..0000000000
--- a/lib/spack/external/altgraph/GraphUtil.py
+++ /dev/null
@@ -1,139 +0,0 @@
-"""
-altgraph.GraphUtil - Utility classes and functions
-==================================================
-"""
-
-import random
-from collections import deque
-
-from altgraph import Graph, GraphError
-
-
-def generate_random_graph(node_num, edge_num, self_loops=False, multi_edges=False):
- """
- Generates and returns a :py:class:`~altgraph.Graph.Graph` instance with
- *node_num* nodes randomly connected by *edge_num* edges.
- """
- g = Graph.Graph()
-
- if not multi_edges:
- if self_loops:
- max_edges = node_num * node_num
- else:
- max_edges = node_num * (node_num - 1)
-
- if edge_num > max_edges:
- raise GraphError("inconsistent arguments to 'generate_random_graph'")
-
- nodes = range(node_num)
-
- for node in nodes:
- g.add_node(node)
-
- while 1:
- head = random.choice(nodes)
- tail = random.choice(nodes)
-
- # loop defense
- if head == tail and not self_loops:
- continue
-
- # multiple edge defense
- if g.edge_by_node(head, tail) is not None and not multi_edges:
- continue
-
- # add the edge
- g.add_edge(head, tail)
- if g.number_of_edges() >= edge_num:
- break
-
- return g
-
-
-def generate_scale_free_graph(steps, growth_num, self_loops=False, multi_edges=False):
- """
- Generates and returns a :py:class:`~altgraph.Graph.Graph` instance that
- will have *steps* \\* *growth_num* nodes and a scale free (powerlaw)
- connectivity. Starting with a fully connected graph with *growth_num*
- nodes, at every step *growth_num* nodes are added to the graph and are
- connected to existing nodes with a probability proportional to the degree
- of these existing nodes.
- """
- # The code doesn't seem to do what the documentation claims.
- graph = Graph.Graph()
-
- # initialize the graph
- store = []
- for i in range(growth_num):
- for j in range(i + 1, growth_num):
- store.append(i)
- store.append(j)
- graph.add_edge(i, j)
-
- # generate
- for node in range(growth_num, steps * growth_num):
- graph.add_node(node)
- while graph.out_degree(node) < growth_num:
- nbr = random.choice(store)
-
- # loop defense
- if node == nbr and not self_loops:
- continue
-
- # multi edge defense
- if graph.edge_by_node(node, nbr) and not multi_edges:
- continue
-
- graph.add_edge(node, nbr)
-
- for nbr in graph.out_nbrs(node):
- store.append(node)
- store.append(nbr)
-
- return graph
-
-
-def filter_stack(graph, head, filters):
- """
- Perform a walk in a depth-first order starting
- at *head*.
-
- Returns (visited, removes, orphans).
-
- * visited: the set of visited nodes
- * removes: the set of nodes where the node
- data does not match all *filters*
- * orphans: tuples of (last_good, node),
- where node is not in removes, is directly
- reachable from a node in *removes* and
- *last_good* is the closest upstream node that is not
- in *removes*.
- """
-
- visited, removes, orphans = {head}, set(), set()
- stack = deque([(head, head)])
- get_data = graph.node_data
- get_edges = graph.out_edges
- get_tail = graph.tail
-
- while stack:
- last_good, node = stack.pop()
- data = get_data(node)
- if data is not None:
- for filtfunc in filters:
- if not filtfunc(data):
- removes.add(node)
- break
- else:
- last_good = node
- for edge in get_edges(node):
- tail = get_tail(edge)
- if last_good is not node:
- orphans.add((last_good, tail))
- if tail not in visited:
- visited.add(tail)
- stack.append((last_good, tail))
-
- orphans = [(lg, tl) for (lg, tl) in orphans if tl not in removes]
-
- return visited, removes, orphans
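A sketch of the random-graph generator above (assuming the standalone
``altgraph`` package; the node and edge counts are arbitrary example
values)::

    from altgraph import GraphUtil

    # 10 nodes connected by 20 random edges; self loops and
    # parallel edges are disallowed by default
    g = GraphUtil.generate_random_graph(10, 20)
    print(g.number_of_nodes(), g.number_of_edges())  # 10 20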
diff --git a/lib/spack/external/altgraph/ObjectGraph.py b/lib/spack/external/altgraph/ObjectGraph.py
deleted file mode 100644
index 379b05b129..0000000000
--- a/lib/spack/external/altgraph/ObjectGraph.py
+++ /dev/null
@@ -1,212 +0,0 @@
-"""
-altgraph.ObjectGraph - Graph of objects with an identifier
-==========================================================
-
-A graph of objects that have a "graphident" attribute.
-graphident is the key for the object in the graph
-"""
-
-from altgraph import GraphError
-from altgraph.Graph import Graph
-from altgraph.GraphUtil import filter_stack
-
-
-class ObjectGraph(object):
- """
- A graph of objects that have a "graphident" attribute.
- graphident is the key for the object in the graph
- """
-
- def __init__(self, graph=None, debug=0):
- if graph is None:
- graph = Graph()
- self.graphident = self
- self.graph = graph
- self.debug = debug
- self.indent = 0
- graph.add_node(self, None)
-
- def __repr__(self):
- return "<%s>" % (type(self).__name__,)
-
- def flatten(self, condition=None, start=None):
- """
- Iterate over the subgraph that is entirely reachable by condition
- starting from the given start node or the ObjectGraph root
- """
- if start is None:
- start = self
- start = self.getRawIdent(start)
- return self.graph.iterdata(start=start, condition=condition)
-
- def nodes(self):
- for ident in self.graph:
- node = self.graph.node_data(ident)
- if node is not None:
- yield self.graph.node_data(ident)
-
- def get_edges(self, node):
- if node is None:
- node = self
- start = self.getRawIdent(node)
- _, _, outraw, incraw = self.graph.describe_node(start)
-
- def iter_edges(lst, n):
- seen = set()
- for tpl in (self.graph.describe_edge(e) for e in lst):
- ident = tpl[n]
- if ident not in seen:
- yield self.findNode(ident)
- seen.add(ident)
-
- return iter_edges(outraw, 3), iter_edges(incraw, 2)
-
- def edgeData(self, fromNode, toNode):
- if fromNode is None:
- fromNode = self
- start = self.getRawIdent(fromNode)
- stop = self.getRawIdent(toNode)
- edge = self.graph.edge_by_node(start, stop)
- return self.graph.edge_data(edge)
-
- def updateEdgeData(self, fromNode, toNode, edgeData):
- if fromNode is None:
- fromNode = self
- start = self.getRawIdent(fromNode)
- stop = self.getRawIdent(toNode)
- edge = self.graph.edge_by_node(start, stop)
- self.graph.update_edge_data(edge, edgeData)
-
- def filterStack(self, filters):
- """
- Filter the ObjectGraph in-place by removing all edges to nodes that
- do not match every filter in the given filter list
-
- Returns a tuple containing the number of:
- (nodes_visited, nodes_removed, nodes_orphaned)
- """
- visited, removes, orphans = filter_stack(self.graph, self, filters)
-
- for last_good, tail in orphans:
- self.graph.add_edge(last_good, tail, edge_data="orphan")
-
- for node in removes:
- self.graph.hide_node(node)
-
- return len(visited) - 1, len(removes), len(orphans)
-
- def removeNode(self, node):
- """
- Remove the given node from the graph if it exists
- """
- ident = self.getIdent(node)
- if ident is not None:
- self.graph.hide_node(ident)
-
- def removeReference(self, fromnode, tonode):
- """
- Remove all edges from fromnode to tonode
- """
- if fromnode is None:
- fromnode = self
- fromident = self.getIdent(fromnode)
- toident = self.getIdent(tonode)
- if fromident is not None and toident is not None:
- while True:
- edge = self.graph.edge_by_node(fromident, toident)
- if edge is None:
- break
- self.graph.hide_edge(edge)
-
- def getIdent(self, node):
- """
- Get the graph identifier for a node
- """
- ident = self.getRawIdent(node)
- if ident is not None:
- return ident
- node = self.findNode(node)
- if node is None:
- return None
- return node.graphident
-
- def getRawIdent(self, node):
- """
- Get the identifier for a node object
- """
- if node is self:
- return node
- ident = getattr(node, "graphident", None)
- return ident
-
- def __contains__(self, node):
- return self.findNode(node) is not None
-
- def findNode(self, node):
- """
- Find the node on the graph
- """
- ident = self.getRawIdent(node)
- if ident is None:
- ident = node
- try:
- return self.graph.node_data(ident)
- except KeyError:
- return None
-
- def addNode(self, node):
- """
- Add a node to the graph referenced by the root
- """
- self.msg(4, "addNode", node)
-
- try:
- self.graph.restore_node(node.graphident)
- except GraphError:
- self.graph.add_node(node.graphident, node)
-
- def createReference(self, fromnode, tonode, edge_data=None):
- """
- Create a reference from fromnode to tonode
- """
- if fromnode is None:
- fromnode = self
- fromident, toident = self.getIdent(fromnode), self.getIdent(tonode)
- if fromident is None or toident is None:
- return
- self.msg(4, "createReference", fromnode, tonode, edge_data)
- self.graph.add_edge(fromident, toident, edge_data=edge_data)
-
- def createNode(self, cls, name, *args, **kw):
- """
- Add a node of type cls to the graph if it does not already exist
- by the given name
- """
- m = self.findNode(name)
- if m is None:
- m = cls(name, *args, **kw)
- self.addNode(m)
- return m
-
- def msg(self, level, s, *args):
- """
- Print a debug message with the given level
- """
- if s and level <= self.debug:
- print("%s%s %s" % (" " * self.indent, s, " ".join(map(repr, args))))
-
- def msgin(self, level, s, *args):
- """
- Print a debug message and indent
- """
- if level <= self.debug:
- self.msg(level, s, *args)
- self.indent = self.indent + 1
-
- def msgout(self, level, s, *args):
- """
- Dedent and print a debug message
- """
- if level <= self.debug:
- self.indent = self.indent - 1
- self.msg(level, s, *args)
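A sketch of the ``ObjectGraph`` protocol above: any object carrying a
``graphident`` attribute can be stored, and the ident doubles as the lookup
key. The ``Node`` class here is a hypothetical stand-in (assumes the
standalone ``altgraph`` package)::

    from altgraph.ObjectGraph import ObjectGraph

    class Node:
        def __init__(self, graphident):
            self.graphident = graphident  # key used by the ObjectGraph

    og = ObjectGraph()
    a = og.createNode(Node, "a")
    b = og.createNode(Node, "b")
    og.createReference(a, b, edge_data="depends-on")

    print(og.findNode("a") is a)                 # True
    print([n.graphident for n in og.nodes()])    # ['a', 'b']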
diff --git a/lib/spack/external/altgraph/__init__.py b/lib/spack/external/altgraph/__init__.py
deleted file mode 100644
index a56342438b..0000000000
--- a/lib/spack/external/altgraph/__init__.py
+++ /dev/null
@@ -1,148 +0,0 @@
-"""
-altgraph - a python graph library
-=================================
-
-altgraph is a fork of `graphlib <http://pygraphlib.sourceforge.net>`_ tailored
-to use newer Python 2.3+ features, including additional support used by the
-py2app suite (modulegraph and macholib, specifically).
-
-altgraph is a python based graph (network) representation and manipulation
-package. It started out as an extension to the
-`graph_lib module
-<http://www.ece.arizona.edu/~denny/python_nest/graph_lib_1.0.1.html>`_
-written by Nathan Denny; it has been significantly optimized and expanded.
-
-The :class:`altgraph.Graph.Graph` class is loosely modeled after the
-`LEDA <http://www.algorithmic-solutions.com/enleda.htm>`_
-(Library of Efficient Datatypes) representation. The library
-includes methods for constructing graphs, BFS and DFS traversals,
-topological sort, finding connected components, shortest paths as well as a
-number of graph statistics functions. The library can also visualize graphs
-via `graphviz <http://www.research.att.com/sw/tools/graphviz/>`_.
-
-The package contains the following modules:
-
- - the :py:mod:`altgraph.Graph` module contains the
- :class:`~altgraph.Graph.Graph` class that stores the graph data
-
- - the :py:mod:`altgraph.GraphAlgo` module implements graph algorithms
- operating on graphs (:py:class:`~altgraph.Graph.Graph` instances)
-
- - the :py:mod:`altgraph.GraphStat` module contains functions for
- computing statistical measures on graphs
-
- - the :py:mod:`altgraph.GraphUtil` module contains functions for
- generating, reading and saving graphs
-
- - the :py:mod:`altgraph.Dot` module contains functions for displaying
- graphs via `graphviz <http://www.research.att.com/sw/tools/graphviz/>`_
-
- - the :py:mod:`altgraph.ObjectGraph` module implements a graph of
- objects with a unique identifier
-
-Installation
-------------
-
-Download and unpack the archive then type::
-
- python setup.py install
-
-This will install the library in the default location. For instructions on
-how to customize the install procedure read the output of::
-
- python setup.py --help install
-
-To verify that the code works run the test suite::
-
- python setup.py test
-
-Example usage
--------------
-
-Let's assume that we want to analyze the graph below (links to the full picture)
-GRAPH_IMG. Our script might then look as follows::
-
- from altgraph import Graph, GraphAlgo, Dot
-
- # these are the edges
- edges = [ (1,2), (2,4), (1,3), (2,4), (3,4), (4,5), (6,5),
- (6,14), (14,15), (6, 15), (5,7), (7, 8), (7,13), (12,8),
- (8,13), (11,12), (11,9), (13,11), (9,13), (13,10) ]
-
- # creates the graph
- graph = Graph.Graph()
- for head, tail in edges:
- graph.add_edge(head, tail)
-
-    # do a forward BFS from node 1
- print(graph.forw_bfs(1))
-
-This will print the nodes in some breadth first order::
-
- [1, 2, 3, 4, 5, 7, 8, 13, 11, 10, 12, 9]
-
-If we wanted to get the hop-distance from node 1 to node 8
-we could write::
-
- print(graph.get_hops(1, 8))
-
-This will print the following::
-
- [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)]
-
-Node 1 is at 0 hops since it is the starting node, nodes 2,3 are 1 hop away ...
-node 8 is 5 hops away. To find the shortest distance between two nodes you
-can use::
-
- print(GraphAlgo.shortest_path(graph, 1, 12))
-
-It will print the nodes on one of the shortest paths (if there are several)::
-
- [1, 2, 4, 5, 7, 13, 11, 12]
-
-To display the graph we can use the GraphViz backend::
-
- dot = Dot.Dot(graph)
-
- # display the graph on the monitor
- dot.display()
-
- # save it in an image file
- dot.save_img(file_name='graph', file_type='gif')
-
-
-
-..
- @author: U{Istvan Albert<http://www.personal.psu.edu/staff/i/u/iua1/>}
-
- @license: MIT License
-
- Copyright (c) 2004 Istvan Albert unless otherwise noted.
-
- Permission is hereby granted, free of charge, to any person obtaining a copy
- of this software and associated documentation files (the "Software"), to
- deal in the Software without restriction, including without limitation the
- rights to use, copy, modify, merge, publish, distribute, sublicense,
- and/or sell copies of the Software, and to permit persons to whom the
- Software is furnished to do so.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
- IN THE SOFTWARE.
- @requires: Python 2.3 or higher
-
- @newfield contributor: Contributors:
- @contributor: U{Reka Albert <http://www.phys.psu.edu/~ralbert/>}
-
-"""
-import pkg_resources
-
-__version__ = pkg_resources.require("altgraph")[0].version
-
-
-class GraphError(ValueError):
- pass
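
Since ``GraphError`` subclasses ``ValueError``, callers with broad
``ValueError`` handlers keep working. A minimal sketch (illustration, not
part of the deleted file)::

    from altgraph import GraphError

    try:
        raise GraphError("invalid node")
    except ValueError:
        pass  # GraphError is a ValueError, so existing handlers catch it
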
diff --git a/lib/spack/external/attr/LICENSE b/lib/spack/external/attr/LICENSE
deleted file mode 100644
index 7ae3df9309..0000000000
--- a/lib/spack/external/attr/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2015 Hynek Schlawack
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/lib/spack/external/attr/__init__.py b/lib/spack/external/attr/__init__.py
deleted file mode 100644
index b1ce7fe248..0000000000
--- a/lib/spack/external/attr/__init__.py
+++ /dev/null
@@ -1,78 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-import sys
-
-from functools import partial
-
-from . import converters, exceptions, filters, setters, validators
-from ._cmp import cmp_using
-from ._config import get_run_validators, set_run_validators
-from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
-from ._make import (
- NOTHING,
- Attribute,
- Factory,
- attrib,
- attrs,
- fields,
- fields_dict,
- make_class,
- validate,
-)
-from ._version_info import VersionInfo
-
-
-__version__ = "21.2.0"
-__version_info__ = VersionInfo._from_version_string(__version__)
-
-__title__ = "attrs"
-__description__ = "Classes Without Boilerplate"
-__url__ = "https://www.attrs.org/"
-__uri__ = __url__
-__doc__ = __description__ + " <" + __uri__ + ">"
-
-__author__ = "Hynek Schlawack"
-__email__ = "hs@ox.cx"
-
-__license__ = "MIT"
-__copyright__ = "Copyright (c) 2015 Hynek Schlawack"
-
-
-s = attributes = attrs
-ib = attr = attrib
-dataclass = partial(attrs, auto_attribs=True) # happy Easter ;)
-
-__all__ = [
- "Attribute",
- "Factory",
- "NOTHING",
- "asdict",
- "assoc",
- "astuple",
- "attr",
- "attrib",
- "attributes",
- "attrs",
- "cmp_using",
- "converters",
- "evolve",
- "exceptions",
- "fields",
- "fields_dict",
- "filters",
- "get_run_validators",
- "has",
- "ib",
- "make_class",
- "resolve_types",
- "s",
- "set_run_validators",
- "setters",
- "validate",
- "validators",
-]
-
-if sys.version_info[:2] >= (3, 6):
- from ._next_gen import define, field, frozen, mutable
-
-    __all__.extend(("define", "field", "frozen", "mutable"))  # names, not objects
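
The aliases above make several spellings equivalent; notably,
``attr.dataclass`` is simply ``attr.s`` with ``auto_attribs=True``. A minimal
sketch for Python 3.6+ (illustration, not part of the deleted file)::

    import attr

    @attr.dataclass  # same as @attr.s(auto_attribs=True)
    class Point:
        x: int
        y: int = 0

    assert Point(1) == Point(x=1, y=0)
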
diff --git a/lib/spack/external/attr/_cmp.py b/lib/spack/external/attr/_cmp.py
deleted file mode 100644
index b747b603f1..0000000000
--- a/lib/spack/external/attr/_cmp.py
+++ /dev/null
@@ -1,152 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-import functools
-
-from ._compat import new_class
-from ._make import _make_ne
-
-
-_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}
-
-
-def cmp_using(
- eq=None,
- lt=None,
- le=None,
- gt=None,
- ge=None,
- require_same_type=True,
- class_name="Comparable",
-):
- """
- Create a class that can be passed into `attr.ib`'s ``eq``, ``order``, and
- ``cmp`` arguments to customize field comparison.
-
-    The resulting class will have a full set of ordering methods if ``eq``
-    and at least one of ``{lt, le, gt, ge}`` are provided.
-
- :param Optional[callable] eq: `callable` used to evaluate equality
- of two objects.
- :param Optional[callable] lt: `callable` used to evaluate whether
- one object is less than another object.
- :param Optional[callable] le: `callable` used to evaluate whether
- one object is less than or equal to another object.
- :param Optional[callable] gt: `callable` used to evaluate whether
- one object is greater than another object.
- :param Optional[callable] ge: `callable` used to evaluate whether
- one object is greater than or equal to another object.
-
- :param bool require_same_type: When `True`, equality and ordering methods
- will return `NotImplemented` if objects are not of the same type.
-
- :param Optional[str] class_name: Name of class. Defaults to 'Comparable'.
-
- See `comparison` for more details.
-
- .. versionadded:: 21.1.0
- """
-
- body = {
- "__slots__": ["value"],
- "__init__": _make_init(),
- "_requirements": [],
- "_is_comparable_to": _is_comparable_to,
- }
-
- # Add operations.
- num_order_functions = 0
- has_eq_function = False
-
- if eq is not None:
- has_eq_function = True
- body["__eq__"] = _make_operator("eq", eq)
- body["__ne__"] = _make_ne()
-
- if lt is not None:
- num_order_functions += 1
- body["__lt__"] = _make_operator("lt", lt)
-
- if le is not None:
- num_order_functions += 1
- body["__le__"] = _make_operator("le", le)
-
- if gt is not None:
- num_order_functions += 1
- body["__gt__"] = _make_operator("gt", gt)
-
- if ge is not None:
- num_order_functions += 1
- body["__ge__"] = _make_operator("ge", ge)
-
- type_ = new_class(class_name, (object,), {}, lambda ns: ns.update(body))
-
- # Add same type requirement.
- if require_same_type:
- type_._requirements.append(_check_same_type)
-
- # Add total ordering if at least one operation was defined.
- if 0 < num_order_functions < 4:
- if not has_eq_function:
- # functools.total_ordering requires __eq__ to be defined,
- # so raise early error here to keep a nice stack.
- raise ValueError(
- "eq must be define is order to complete ordering from "
- "lt, le, gt, ge."
- )
- type_ = functools.total_ordering(type_)
-
- return type_
-
-
-def _make_init():
- """
- Create __init__ method.
- """
-
- def __init__(self, value):
- """
- Initialize object with *value*.
- """
- self.value = value
-
- return __init__
-
-
-def _make_operator(name, func):
- """
- Create operator method.
- """
-
- def method(self, other):
- if not self._is_comparable_to(other):
- return NotImplemented
-
- result = func(self.value, other.value)
- if result is NotImplemented:
- return NotImplemented
-
- return result
-
- method.__name__ = "__%s__" % (name,)
- method.__doc__ = "Return a %s b. Computed by attrs." % (
- _operation_names[name],
- )
-
- return method
-
-
-def _is_comparable_to(self, other):
- """
- Check whether `other` is comparable to `self`.
- """
- for func in self._requirements:
- if not func(self, other):
- return False
- return True
-
-
-def _check_same_type(self, other):
- """
- Return True if *self* and *other* are of the same type, False otherwise.
- """
- return other.value.__class__ is self.value.__class__
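
To make the ``cmp_using`` machinery concrete, here is a minimal sketch of
both supported uses -- standalone wrapping and as a field comparator via
``attr.ib(eq=...)`` (illustration, not part of the deleted file)::

    import attr
    from attr import cmp_using

    # Standalone: wrapped values compare through the supplied callables.
    ByLength = cmp_using(
        eq=lambda a, b: len(a) == len(b),
        lt=lambda a, b: len(a) < len(b),
        class_name="ByLength",
    )
    assert ByLength("ab") < ByLength("abc")

    # As a field comparator: the class becomes the field's eq key.
    @attr.s
    class Tag(object):
        name = attr.ib(eq=cmp_using(eq=lambda a, b: a.lower() == b.lower()))

    assert Tag("HPC") == Tag("hpc")
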
diff --git a/lib/spack/external/attr/_compat.py b/lib/spack/external/attr/_compat.py
deleted file mode 100644
index 6939f338da..0000000000
--- a/lib/spack/external/attr/_compat.py
+++ /dev/null
@@ -1,242 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-import platform
-import sys
-import types
-import warnings
-
-
-PY2 = sys.version_info[0] == 2
-PYPY = platform.python_implementation() == "PyPy"
-
-
-if PYPY or sys.version_info[:2] >= (3, 6):
- ordered_dict = dict
-else:
- from collections import OrderedDict
-
- ordered_dict = OrderedDict
-
-
-if PY2:
- from collections import Mapping, Sequence
-
- from UserDict import IterableUserDict
-
- # We 'bundle' isclass instead of using inspect as importing inspect is
- # fairly expensive (order of 10-15 ms for a modern machine in 2016)
- def isclass(klass):
- return isinstance(klass, (type, types.ClassType))
-
- def new_class(name, bases, kwds, exec_body):
- """
- A minimal stub of types.new_class that we need for make_class.
- """
- ns = {}
- exec_body(ns)
-
- return type(name, bases, ns)
-
- # TYPE is used in exceptions, repr(int) is different on Python 2 and 3.
- TYPE = "type"
-
- def iteritems(d):
- return d.iteritems()
-
- # Python 2 is bereft of a read-only dict proxy, so we make one!
- class ReadOnlyDict(IterableUserDict):
- """
- Best-effort read-only dict wrapper.
- """
-
- def __setitem__(self, key, val):
- # We gently pretend we're a Python 3 mappingproxy.
- raise TypeError(
- "'mappingproxy' object does not support item assignment"
- )
-
- def update(self, _):
- # We gently pretend we're a Python 3 mappingproxy.
- raise AttributeError(
- "'mappingproxy' object has no attribute 'update'"
- )
-
- def __delitem__(self, _):
- # We gently pretend we're a Python 3 mappingproxy.
- raise TypeError(
- "'mappingproxy' object does not support item deletion"
- )
-
- def clear(self):
- # We gently pretend we're a Python 3 mappingproxy.
- raise AttributeError(
- "'mappingproxy' object has no attribute 'clear'"
- )
-
- def pop(self, key, default=None):
- # We gently pretend we're a Python 3 mappingproxy.
- raise AttributeError(
- "'mappingproxy' object has no attribute 'pop'"
- )
-
- def popitem(self):
- # We gently pretend we're a Python 3 mappingproxy.
- raise AttributeError(
- "'mappingproxy' object has no attribute 'popitem'"
- )
-
- def setdefault(self, key, default=None):
- # We gently pretend we're a Python 3 mappingproxy.
- raise AttributeError(
- "'mappingproxy' object has no attribute 'setdefault'"
- )
-
- def __repr__(self):
- # Override to be identical to the Python 3 version.
- return "mappingproxy(" + repr(self.data) + ")"
-
- def metadata_proxy(d):
- res = ReadOnlyDict()
- res.data.update(d) # We blocked update, so we have to do it like this.
- return res
-
- def just_warn(*args, **kw): # pragma: no cover
- """
- We only warn on Python 3 because we are not aware of any concrete
- consequences of not setting the cell on Python 2.
- """
-
-
-else: # Python 3 and later.
- from collections.abc import Mapping, Sequence # noqa
-
- def just_warn(*args, **kw):
- """
- We only warn on Python 3 because we are not aware of any concrete
- consequences of not setting the cell on Python 2.
- """
- warnings.warn(
- "Running interpreter doesn't sufficiently support code object "
- "introspection. Some features like bare super() or accessing "
- "__class__ will not work with slotted classes.",
- RuntimeWarning,
- stacklevel=2,
- )
-
- def isclass(klass):
- return isinstance(klass, type)
-
- TYPE = "class"
-
- def iteritems(d):
- return d.items()
-
- new_class = types.new_class
-
- def metadata_proxy(d):
- return types.MappingProxyType(dict(d))
-
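
Both branches give ``metadata_proxy`` the same contract: a read-only view
whose mutators raise. A minimal sketch of the Python 3 behavior, using
``types.MappingProxyType`` directly (illustration, not part of the deleted
file)::

    import types

    proxy = types.MappingProxyType({"key": "value"})  # what metadata_proxy returns
    assert proxy["key"] == "value"
    try:
        proxy["key"] = "other"
    except TypeError:
        pass  # item assignment is rejected, mirroring ReadOnlyDict above
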
-
-def make_set_closure_cell():
- """Return a function of two arguments (cell, value) which sets
- the value stored in the closure cell `cell` to `value`.
- """
- # pypy makes this easy. (It also supports the logic below, but
- # why not do the easy/fast thing?)
- if PYPY:
-
- def set_closure_cell(cell, value):
- cell.__setstate__((value,))
-
- return set_closure_cell
-
- # Otherwise gotta do it the hard way.
-
- # Create a function that will set its first cellvar to `value`.
- def set_first_cellvar_to(value):
- x = value
- return
-
- # This function will be eliminated as dead code, but
- # not before its reference to `x` forces `x` to be
- # represented as a closure cell rather than a local.
- def force_x_to_be_a_cell(): # pragma: no cover
- return x
-
- try:
- # Extract the code object and make sure our assumptions about
- # the closure behavior are correct.
- if PY2:
- co = set_first_cellvar_to.func_code
- else:
- co = set_first_cellvar_to.__code__
- if co.co_cellvars != ("x",) or co.co_freevars != ():
- raise AssertionError # pragma: no cover
-
- # Convert this code object to a code object that sets the
- # function's first _freevar_ (not cellvar) to the argument.
- if sys.version_info >= (3, 8):
- # CPython 3.8+ has an incompatible CodeType signature
- # (added a posonlyargcount argument) but also added
- # CodeType.replace() to do this without counting parameters.
- set_first_freevar_code = co.replace(
- co_cellvars=co.co_freevars, co_freevars=co.co_cellvars
- )
- else:
- args = [co.co_argcount]
- if not PY2:
- args.append(co.co_kwonlyargcount)
- args.extend(
- [
- co.co_nlocals,
- co.co_stacksize,
- co.co_flags,
- co.co_code,
- co.co_consts,
- co.co_names,
- co.co_varnames,
- co.co_filename,
- co.co_name,
- co.co_firstlineno,
- co.co_lnotab,
- # These two arguments are reversed:
- co.co_cellvars,
- co.co_freevars,
- ]
- )
- set_first_freevar_code = types.CodeType(*args)
-
- def set_closure_cell(cell, value):
- # Create a function using the set_first_freevar_code,
- # whose first closure cell is `cell`. Calling it will
- # change the value of that cell.
- setter = types.FunctionType(
- set_first_freevar_code, {}, "setter", (), (cell,)
- )
- # And call it to set the cell.
- setter(value)
-
- # Make sure it works on this interpreter:
- def make_func_with_cell():
- x = None
-
- def func():
- return x # pragma: no cover
-
- return func
-
- if PY2:
- cell = make_func_with_cell().func_closure[0]
- else:
- cell = make_func_with_cell().__closure__[0]
- set_closure_cell(cell, 100)
- if cell.cell_contents != 100:
- raise AssertionError # pragma: no cover
-
- except Exception:
- return just_warn
- else:
- return set_closure_cell
-
-
-set_closure_cell = make_set_closure_cell()
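
The net effect of all this machinery is a portable "write into a closure
cell" primitive. A minimal sketch using the ``set_closure_cell`` defined just
above (illustration, not part of the deleted file)::

    def make_getter():
        x = "old"

        def get():
            return x

        return get

    get = make_getter()
    cell = get.__closure__[0]      # the cell holding x
    set_closure_cell(cell, "new")
    assert get() == "new"          # the closure now sees the rewritten value
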
diff --git a/lib/spack/external/attr/_config.py b/lib/spack/external/attr/_config.py
deleted file mode 100644
index 8ec920962d..0000000000
--- a/lib/spack/external/attr/_config.py
+++ /dev/null
@@ -1,23 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-
-__all__ = ["set_run_validators", "get_run_validators"]
-
-_run_validators = True
-
-
-def set_run_validators(run):
- """
- Set whether or not validators are run. By default, they are run.
- """
- if not isinstance(run, bool):
- raise TypeError("'run' must be bool.")
- global _run_validators
- _run_validators = run
-
-
-def get_run_validators():
- """
- Return whether or not validators are run.
- """
- return _run_validators
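
These two switches gate every validator attrs runs, including those invoked
from generated ``__init__`` methods. A minimal sketch (illustration, not part
of the deleted file)::

    import attr

    @attr.s
    class C(object):
        x = attr.ib(validator=attr.validators.instance_of(int))

    attr.set_run_validators(False)
    C("not an int")  # validator is skipped, so no exception is raised
    attr.set_run_validators(True)
    assert attr.get_run_validators() is True
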
diff --git a/lib/spack/external/attr/_funcs.py b/lib/spack/external/attr/_funcs.py
deleted file mode 100644
index fda508c5c4..0000000000
--- a/lib/spack/external/attr/_funcs.py
+++ /dev/null
@@ -1,395 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-import copy
-
-from ._compat import iteritems
-from ._make import NOTHING, _obj_setattr, fields
-from .exceptions import AttrsAttributeNotFoundError
-
-
-def asdict(
- inst,
- recurse=True,
- filter=None,
- dict_factory=dict,
- retain_collection_types=False,
- value_serializer=None,
-):
- """
- Return the ``attrs`` attribute values of *inst* as a dict.
-
- Optionally recurse into other ``attrs``-decorated classes.
-
- :param inst: Instance of an ``attrs``-decorated class.
- :param bool recurse: Recurse into classes that are also
- ``attrs``-decorated.
-    :param callable filter: A callable whose return value determines whether an
- attribute or element is included (``True``) or dropped (``False``). Is
- called with the `attr.Attribute` as the first argument and the
- value as the second argument.
- :param callable dict_factory: A callable to produce dictionaries from. For
- example, to produce ordered dictionaries instead of normal Python
- dictionaries, pass in ``collections.OrderedDict``.
- :param bool retain_collection_types: Do not convert to ``list`` when
- encountering an attribute whose type is ``tuple`` or ``set``. Only
- meaningful if ``recurse`` is ``True``.
- :param Optional[callable] value_serializer: A hook that is called for every
- attribute or dict key/value. It receives the current instance, field
- and value and must return the (updated) value. The hook is run *after*
- the optional *filter* has been applied.
-
- :rtype: return type of *dict_factory*
-
- :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
- class.
-
- .. versionadded:: 16.0.0 *dict_factory*
- .. versionadded:: 16.1.0 *retain_collection_types*
- .. versionadded:: 20.3.0 *value_serializer*
- """
- attrs = fields(inst.__class__)
- rv = dict_factory()
- for a in attrs:
- v = getattr(inst, a.name)
- if filter is not None and not filter(a, v):
- continue
-
- if value_serializer is not None:
- v = value_serializer(inst, a, v)
-
- if recurse is True:
- if has(v.__class__):
- rv[a.name] = asdict(
- v,
- True,
- filter,
- dict_factory,
- retain_collection_types,
- value_serializer,
- )
- elif isinstance(v, (tuple, list, set, frozenset)):
- cf = v.__class__ if retain_collection_types is True else list
- rv[a.name] = cf(
- [
- _asdict_anything(
- i,
- filter,
- dict_factory,
- retain_collection_types,
- value_serializer,
- )
- for i in v
- ]
- )
- elif isinstance(v, dict):
- df = dict_factory
- rv[a.name] = df(
- (
- _asdict_anything(
- kk,
- filter,
- df,
- retain_collection_types,
- value_serializer,
- ),
- _asdict_anything(
- vv,
- filter,
- df,
- retain_collection_types,
- value_serializer,
- ),
- )
- for kk, vv in iteritems(v)
- )
- else:
- rv[a.name] = v
- else:
- rv[a.name] = v
- return rv
-
-
-def _asdict_anything(
- val,
- filter,
- dict_factory,
- retain_collection_types,
- value_serializer,
-):
- """
- ``asdict`` only works on attrs instances, this works on anything.
- """
- if getattr(val.__class__, "__attrs_attrs__", None) is not None:
- # Attrs class.
- rv = asdict(
- val,
- True,
- filter,
- dict_factory,
- retain_collection_types,
- value_serializer,
- )
- elif isinstance(val, (tuple, list, set, frozenset)):
- cf = val.__class__ if retain_collection_types is True else list
- rv = cf(
- [
- _asdict_anything(
- i,
- filter,
- dict_factory,
- retain_collection_types,
- value_serializer,
- )
- for i in val
- ]
- )
- elif isinstance(val, dict):
- df = dict_factory
- rv = df(
- (
- _asdict_anything(
- kk, filter, df, retain_collection_types, value_serializer
- ),
- _asdict_anything(
- vv, filter, df, retain_collection_types, value_serializer
- ),
- )
- for kk, vv in iteritems(val)
- )
- else:
- rv = val
- if value_serializer is not None:
- rv = value_serializer(None, None, rv)
-
- return rv
-
-
-def astuple(
- inst,
- recurse=True,
- filter=None,
- tuple_factory=tuple,
- retain_collection_types=False,
-):
- """
- Return the ``attrs`` attribute values of *inst* as a tuple.
-
- Optionally recurse into other ``attrs``-decorated classes.
-
- :param inst: Instance of an ``attrs``-decorated class.
- :param bool recurse: Recurse into classes that are also
- ``attrs``-decorated.
-    :param callable filter: A callable whose return value determines whether an
- attribute or element is included (``True``) or dropped (``False``). Is
- called with the `attr.Attribute` as the first argument and the
- value as the second argument.
-    :param callable tuple_factory: A callable to produce tuples from. For
-        example, pass ``list`` to produce lists instead of tuples.
-    :param bool retain_collection_types: Do not convert to ``list``
-        or ``dict`` when encountering an attribute whose type is
-        ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is
-        ``True``.
-
- :rtype: return type of *tuple_factory*
-
- :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
- class.
-
- .. versionadded:: 16.2.0
- """
- attrs = fields(inst.__class__)
- rv = []
- retain = retain_collection_types # Very long. :/
- for a in attrs:
- v = getattr(inst, a.name)
- if filter is not None and not filter(a, v):
- continue
- if recurse is True:
- if has(v.__class__):
- rv.append(
- astuple(
- v,
- recurse=True,
- filter=filter,
- tuple_factory=tuple_factory,
- retain_collection_types=retain,
- )
- )
- elif isinstance(v, (tuple, list, set, frozenset)):
- cf = v.__class__ if retain is True else list
- rv.append(
- cf(
- [
- astuple(
- j,
- recurse=True,
- filter=filter,
- tuple_factory=tuple_factory,
- retain_collection_types=retain,
- )
- if has(j.__class__)
- else j
- for j in v
- ]
- )
- )
- elif isinstance(v, dict):
- df = v.__class__ if retain is True else dict
- rv.append(
- df(
- (
- astuple(
- kk,
- tuple_factory=tuple_factory,
- retain_collection_types=retain,
- )
- if has(kk.__class__)
- else kk,
- astuple(
- vv,
- tuple_factory=tuple_factory,
- retain_collection_types=retain,
- )
- if has(vv.__class__)
- else vv,
- )
- for kk, vv in iteritems(v)
- )
- )
- else:
- rv.append(v)
- else:
- rv.append(v)
-
- return rv if tuple_factory is list else tuple_factory(rv)
-
-
-def has(cls):
- """
- Check whether *cls* is a class with ``attrs`` attributes.
-
- :param type cls: Class to introspect.
- :raise TypeError: If *cls* is not a class.
-
- :rtype: bool
- """
- return getattr(cls, "__attrs_attrs__", None) is not None
-
-
-def assoc(inst, **changes):
- """
- Copy *inst* and apply *changes*.
-
- :param inst: Instance of a class with ``attrs`` attributes.
- :param changes: Keyword changes in the new copy.
-
- :return: A copy of inst with *changes* incorporated.
-
- :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't
- be found on *cls*.
- :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
- class.
-
- .. deprecated:: 17.1.0
- Use `evolve` instead.
- """
- import warnings
-
- warnings.warn(
- "assoc is deprecated and will be removed after 2018/01.",
- DeprecationWarning,
- stacklevel=2,
- )
- new = copy.copy(inst)
- attrs = fields(inst.__class__)
- for k, v in iteritems(changes):
- a = getattr(attrs, k, NOTHING)
- if a is NOTHING:
- raise AttrsAttributeNotFoundError(
- "{k} is not an attrs attribute on {cl}.".format(
- k=k, cl=new.__class__
- )
- )
- _obj_setattr(new, k, v)
- return new
-
-
-def evolve(inst, **changes):
- """
- Create a new instance, based on *inst* with *changes* applied.
-
- :param inst: Instance of a class with ``attrs`` attributes.
- :param changes: Keyword changes in the new copy.
-
- :return: A copy of inst with *changes* incorporated.
-
- :raise TypeError: If *attr_name* couldn't be found in the class
- ``__init__``.
- :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
- class.
-
- .. versionadded:: 17.1.0
- """
- cls = inst.__class__
- attrs = fields(cls)
- for a in attrs:
- if not a.init:
- continue
- attr_name = a.name # To deal with private attributes.
- init_name = attr_name if attr_name[0] != "_" else attr_name[1:]
- if init_name not in changes:
- changes[init_name] = getattr(inst, attr_name)
-
- return cls(**changes)
-
-
-def resolve_types(cls, globalns=None, localns=None, attribs=None):
- """
- Resolve any strings and forward annotations in type annotations.
-
- This is only required if you need concrete types in `Attribute`'s *type*
- field. In other words, you don't need to resolve your types if you only
- use them for static type checking.
-
- With no arguments, names will be looked up in the module in which the class
- was created. If this is not what you want, e.g. if the name only exists
- inside a method, you may pass *globalns* or *localns* to specify other
- dictionaries in which to look up these names. See the docs of
- `typing.get_type_hints` for more details.
-
- :param type cls: Class to resolve.
- :param Optional[dict] globalns: Dictionary containing global variables.
- :param Optional[dict] localns: Dictionary containing local variables.
- :param Optional[list] attribs: List of attribs for the given class.
- This is necessary when calling from inside a ``field_transformer``
- since *cls* is not an ``attrs`` class yet.
-
- :raise TypeError: If *cls* is not a class.
- :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
- class and you didn't pass any attribs.
- :raise NameError: If types cannot be resolved because of missing variables.
-
- :returns: *cls* so you can use this function also as a class decorator.
- Please note that you have to apply it **after** `attr.s`. That means
- the decorator has to come in the line **before** `attr.s`.
-
- .. versionadded:: 20.1.0
- .. versionadded:: 21.1.0 *attribs*
-
- """
- try:
- # Since calling get_type_hints is expensive we cache whether we've
- # done it already.
- cls.__attrs_types_resolved__
- except AttributeError:
- import typing
-
- hints = typing.get_type_hints(cls, globalns=globalns, localns=localns)
- for field in fields(cls) if attribs is None else attribs:
- if field.name in hints:
- # Since fields have been frozen we must work around it.
- _obj_setattr(field, "type", hints[field.name])
- cls.__attrs_types_resolved__ = True
-
- # Return the class so you can use it as a decorator too.
- return cls
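
Taken together, the helpers in this module cover the usual
introspect/serialize/copy round trip. A minimal sketch (illustration, not
part of the deleted file)::

    import attr

    @attr.s
    class Point(object):
        x = attr.ib()
        y = attr.ib()

    p = Point(1, 2)
    assert attr.has(Point)
    assert attr.asdict(p) == {"x": 1, "y": 2}
    assert attr.astuple(p) == (1, 2)
    assert attr.evolve(p, y=3) == Point(1, 3)
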
diff --git a/lib/spack/external/attr/_make.py b/lib/spack/external/attr/_make.py
deleted file mode 100644
index a1912b1233..0000000000
--- a/lib/spack/external/attr/_make.py
+++ /dev/null
@@ -1,3052 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-import copy
-import inspect
-import linecache
-import sys
-import threading
-import uuid
-import warnings
-
-from operator import itemgetter
-
-from . import _config, setters
-from ._compat import (
- PY2,
- PYPY,
- isclass,
- iteritems,
- metadata_proxy,
- new_class,
- ordered_dict,
- set_closure_cell,
-)
-from .exceptions import (
- DefaultAlreadySetError,
- FrozenInstanceError,
- NotAnAttrsClassError,
- PythonTooOldError,
- UnannotatedAttributeError,
-)
-
-
-if not PY2:
- import typing
-
-
-# This is used at least twice, so cache it here.
-_obj_setattr = object.__setattr__
-_init_converter_pat = "__attr_converter_%s"
-_init_factory_pat = "__attr_factory_{}"
-_tuple_property_pat = (
- " {attr_name} = _attrs_property(_attrs_itemgetter({index}))"
-)
-_classvar_prefixes = (
- "typing.ClassVar",
- "t.ClassVar",
- "ClassVar",
- "typing_extensions.ClassVar",
-)
-# we don't use a double-underscore prefix because that triggers
-# name mangling when trying to create a slot for the field
-# (when slots=True)
-_hash_cache_field = "_attrs_cached_hash"
-
-_empty_metadata_singleton = metadata_proxy({})
-
-# Unique object for unequivocal getattr() defaults.
-_sentinel = object()
-
-
-class _Nothing(object):
- """
- Sentinel class to indicate the lack of a value when ``None`` is ambiguous.
-
- ``_Nothing`` is a singleton. There is only ever one of it.
-
- .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False.
- """
-
- _singleton = None
-
- def __new__(cls):
- if _Nothing._singleton is None:
- _Nothing._singleton = super(_Nothing, cls).__new__(cls)
- return _Nothing._singleton
-
- def __repr__(self):
- return "NOTHING"
-
- def __bool__(self):
- return False
-
- def __len__(self):
- return 0 # __bool__ for Python 2
-
-
-NOTHING = _Nothing()
-"""
-Sentinel to indicate the lack of a value when ``None`` is ambiguous.
-"""
-
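
Because ``_Nothing`` is a falsy singleton, sentinel checks stay cheap and
unambiguous. A minimal sketch (illustration, not part of the deleted file)::

    assert _Nothing() is NOTHING      # always the same object
    assert not NOTHING                # falsy, per the 21.1.0 change above
    assert repr(NOTHING) == "NOTHING"
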
-
-class _CacheHashWrapper(int):
- """
- An integer subclass that pickles / copies as None
-
- This is used for non-slots classes with ``cache_hash=True``, to avoid
- serializing a potentially (even likely) invalid hash value. Since ``None``
- is the default value for uncalculated hashes, whenever this is copied,
- the copy's value for the hash should automatically reset.
-
- See GH #613 for more details.
- """
-
- if PY2:
- # For some reason `type(None)` isn't callable in Python 2, but we don't
- # actually need a constructor for None objects, we just need any
- # available function that returns None.
- def __reduce__(self, _none_constructor=getattr, _args=(0, "", None)):
- return _none_constructor, _args
-
- else:
-
- def __reduce__(self, _none_constructor=type(None), _args=()):
- return _none_constructor, _args
-
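
The ``__reduce__`` trick means any pickle or copy of the wrapper collapses
back to ``None``, i.e. "hash not computed yet". A minimal sketch for
Python 3 (illustration, not part of the deleted file)::

    import pickle

    wrapper = _CacheHashWrapper(123)
    assert wrapper == 123                               # still acts like an int
    assert pickle.loads(pickle.dumps(wrapper)) is None  # resets on round trip
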
-
-def attrib(
- default=NOTHING,
- validator=None,
- repr=True,
- cmp=None,
- hash=None,
- init=True,
- metadata=None,
- type=None,
- converter=None,
- factory=None,
- kw_only=False,
- eq=None,
- order=None,
- on_setattr=None,
-):
- """
- Create a new attribute on a class.
-
- .. warning::
-
- Does *not* do anything unless the class is also decorated with
- `attr.s`!
-
- :param default: A value that is used if an ``attrs``-generated ``__init__``
- is used and no value is passed while instantiating or the attribute is
- excluded using ``init=False``.
-
- If the value is an instance of `Factory`, its callable will be
- used to construct a new value (useful for mutable data types like lists
- or dicts).
-
- If a default is not set (or set manually to `attr.NOTHING`), a value
- *must* be supplied when instantiating; otherwise a `TypeError`
- will be raised.
-
- The default can also be set using decorator notation as shown below.
-
- :type default: Any value
-
- :param callable factory: Syntactic sugar for
- ``default=attr.Factory(factory)``.
-
- :param validator: `callable` that is called by ``attrs``-generated
- ``__init__`` methods after the instance has been initialized. They
- receive the initialized instance, the `Attribute`, and the
- passed value.
-
- The return value is *not* inspected so the validator has to throw an
- exception itself.
-
- If a `list` is passed, its items are treated as validators and must
- all pass.
-
- Validators can be globally disabled and re-enabled using
- `get_run_validators`.
-
- The validator can also be set using decorator notation as shown below.
-
- :type validator: `callable` or a `list` of `callable`\\ s.
-
- :param repr: Include this attribute in the generated ``__repr__``
- method. If ``True``, include the attribute; if ``False``, omit it. By
- default, the built-in ``repr()`` function is used. To override how the
- attribute value is formatted, pass a ``callable`` that takes a single
- value and returns a string. Note that the resulting string is used
- as-is, i.e. it will be used directly *instead* of calling ``repr()``
- (the default).
- :type repr: a `bool` or a `callable` to use a custom function.
-
- :param eq: If ``True`` (default), include this attribute in the
- generated ``__eq__`` and ``__ne__`` methods that check two instances
- for equality. To override how the attribute value is compared,
- pass a ``callable`` that takes a single value and returns the value
- to be compared.
- :type eq: a `bool` or a `callable`.
-
-    :param order: If ``True`` (default), include this attribute in the
- generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods.
- To override how the attribute value is ordered,
- pass a ``callable`` that takes a single value and returns the value
- to be ordered.
- :type order: a `bool` or a `callable`.
-
- :param cmp: Setting *cmp* is equivalent to setting *eq* and *order* to the
- same value. Must not be mixed with *eq* or *order*.
- :type cmp: a `bool` or a `callable`.
-
- :param Optional[bool] hash: Include this attribute in the generated
-        ``__hash__`` method. If ``None`` (default), mirror *eq*'s value. This
-        is the correct behavior according to the Python spec. Setting this
-        value to anything other than ``None`` is *discouraged*.
- :param bool init: Include this attribute in the generated ``__init__``
- method. It is possible to set this to ``False`` and set a default
-        value. In that case this attribute is unconditionally initialized
- with the specified default value or factory.
- :param callable converter: `callable` that is called by
- ``attrs``-generated ``__init__`` methods to convert attribute's value
- to the desired format. It is given the passed-in value, and the
- returned value will be used as the new value of the attribute. The
- value is converted before being passed to the validator, if any.
- :param metadata: An arbitrary mapping, to be used by third-party
- components. See `extending_metadata`.
- :param type: The type of the attribute. In Python 3.6 or greater, the
- preferred method to specify the type is using a variable annotation
- (see `PEP 526 <https://www.python.org/dev/peps/pep-0526/>`_).
- This argument is provided for backward compatibility.
- Regardless of the approach used, the type will be stored on
- ``Attribute.type``.
-
- Please note that ``attrs`` doesn't do anything with this metadata by
- itself. You can use it as part of your own code or for
- `static type checking <types>`.
- :param kw_only: Make this attribute keyword-only (Python 3+)
- in the generated ``__init__`` (if ``init`` is ``False``, this
- parameter is ignored).
-    :param on_setattr: Allows overriding the *on_setattr* setting from
- `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used.
- Set to `attr.setters.NO_OP` to run **no** `setattr` hooks for this
- attribute -- regardless of the setting in `attr.s`.
- :type on_setattr: `callable`, or a list of callables, or `None`, or
- `attr.setters.NO_OP`
-
- .. versionadded:: 15.2.0 *convert*
- .. versionadded:: 16.3.0 *metadata*
- .. versionchanged:: 17.1.0 *validator* can be a ``list`` now.
- .. versionchanged:: 17.1.0
- *hash* is ``None`` and therefore mirrors *eq* by default.
- .. versionadded:: 17.3.0 *type*
- .. deprecated:: 17.4.0 *convert*
- .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated
- *convert* to achieve consistency with other noun-based arguments.
- .. versionadded:: 18.1.0
- ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``.
- .. versionadded:: 18.2.0 *kw_only*
- .. versionchanged:: 19.2.0 *convert* keyword argument removed.
- .. versionchanged:: 19.2.0 *repr* also accepts a custom callable.
- .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
- .. versionadded:: 19.2.0 *eq* and *order*
- .. versionadded:: 20.1.0 *on_setattr*
- .. versionchanged:: 20.3.0 *kw_only* backported to Python 2
- .. versionchanged:: 21.1.0
- *eq*, *order*, and *cmp* also accept a custom callable
- .. versionchanged:: 21.1.0 *cmp* undeprecated
- """
- eq, eq_key, order, order_key = _determine_attrib_eq_order(
- cmp, eq, order, True
- )
-
- if hash is not None and hash is not True and hash is not False:
- raise TypeError(
- "Invalid value for hash. Must be True, False, or None."
- )
-
- if factory is not None:
- if default is not NOTHING:
- raise ValueError(
- "The `default` and `factory` arguments are mutually "
- "exclusive."
- )
- if not callable(factory):
- raise ValueError("The `factory` argument must be a callable.")
- default = Factory(factory)
-
- if metadata is None:
- metadata = {}
-
- # Apply syntactic sugar by auto-wrapping.
- if isinstance(on_setattr, (list, tuple)):
- on_setattr = setters.pipe(*on_setattr)
-
- if validator and isinstance(validator, (list, tuple)):
- validator = and_(*validator)
-
- if converter and isinstance(converter, (list, tuple)):
- converter = pipe(*converter)
-
- return _CountingAttr(
- default=default,
- validator=validator,
- repr=repr,
- cmp=None,
- hash=hash,
- init=init,
- converter=converter,
- metadata=metadata,
- type=type,
- kw_only=kw_only,
- eq=eq,
- eq_key=eq_key,
- order=order,
- order_key=order_key,
- on_setattr=on_setattr,
- )
-
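
Since ``attrib`` is the workhorse of the package, a minimal usage sketch
helps anchor the parameters documented above (illustration, not part of the
deleted file)::

    import attr

    @attr.s
    class Connection(object):
        host = attr.ib()
        port = attr.ib(default=80, validator=attr.validators.instance_of(int))
        tags = attr.ib(factory=list)  # factory avoids a shared mutable default

    c = Connection("example.com")
    assert (c.host, c.port, c.tags) == ("example.com", 80, [])
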
-
-def _compile_and_eval(script, globs, locs=None, filename=""):
- """
- "Exec" the script with the given global (globs) and local (locs) variables.
- """
- bytecode = compile(script, filename, "exec")
- eval(bytecode, globs, locs)
-
-
-def _make_method(name, script, filename, globs=None):
- """
- Create the method with the script given and return the method object.
- """
- locs = {}
- if globs is None:
- globs = {}
-
- _compile_and_eval(script, globs, locs, filename)
-
-    # In order for debuggers like PDB to be able to step through the code,
-    # we add a fake linecache entry.
- linecache.cache[filename] = (
- len(script),
- None,
- script.splitlines(True),
- filename,
- )
-
- return locs[name]
-
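
In other words, ``_make_method`` turns a string of source code into a real
function object with a synthetic linecache entry behind it. A minimal sketch
(illustration, not part of the deleted file)::

    inc = _make_method(
        name="inc",
        script="def inc(self, n):\n    return n + 1",
        filename="<sketch>",
    )
    assert inc(None, 1) == 2
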
-
-def _make_attr_tuple_class(cls_name, attr_names):
- """
- Create a tuple subclass to hold `Attribute`s for an `attrs` class.
-
- The subclass is a bare tuple with properties for names.
-
- class MyClassAttributes(tuple):
- __slots__ = ()
- x = property(itemgetter(0))
- """
- attr_class_name = "{}Attributes".format(cls_name)
- attr_class_template = [
- "class {}(tuple):".format(attr_class_name),
- " __slots__ = ()",
- ]
- if attr_names:
- for i, attr_name in enumerate(attr_names):
- attr_class_template.append(
- _tuple_property_pat.format(index=i, attr_name=attr_name)
- )
- else:
- attr_class_template.append(" pass")
- globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property}
- _compile_and_eval("\n".join(attr_class_template), globs)
- return globs[attr_class_name]
-
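
The generated tuple subclass is exactly what the docstring above shows: a
bare tuple whose positions are also readable by name. A minimal sketch
(illustration, not part of the deleted file)::

    PointAttributes = _make_attr_tuple_class("Point", ["x", "y"])
    t = PointAttributes([1, 2])
    assert t.x == 1 and t.y == 2   # named access ...
    assert tuple(t) == (1, 2)      # ... over a plain tuple
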
-
-# Tuple class for extracted attributes from a class definition.
-# `base_attrs` is a subset of `attrs`.
-_Attributes = _make_attr_tuple_class(
- "_Attributes",
- [
- # all attributes to build dunder methods for
- "attrs",
- # attributes that have been inherited
- "base_attrs",
- # map inherited attributes to their originating classes
- "base_attrs_map",
- ],
-)
-
-
-def _is_class_var(annot):
- """
- Check whether *annot* is a typing.ClassVar.
-
- The string comparison hack is used to avoid evaluating all string
- annotations which would put attrs-based classes at a performance
- disadvantage compared to plain old classes.
- """
- annot = str(annot)
-
- # Annotation can be quoted.
- if annot.startswith(("'", '"')) and annot.endswith(("'", '"')):
- annot = annot[1:-1]
-
- return annot.startswith(_classvar_prefixes)
-
-
-def _has_own_attribute(cls, attrib_name):
- """
- Check whether *cls* defines *attrib_name* (and doesn't just inherit it).
-
- Requires Python 3.
- """
- attr = getattr(cls, attrib_name, _sentinel)
- if attr is _sentinel:
- return False
-
- for base_cls in cls.__mro__[1:]:
- a = getattr(base_cls, attrib_name, None)
- if attr is a:
- return False
-
- return True
-
-
-def _get_annotations(cls):
- """
- Get annotations for *cls*.
- """
- if _has_own_attribute(cls, "__annotations__"):
- return cls.__annotations__
-
- return {}
-
-
-def _counter_getter(e):
- """
- Key function for sorting to avoid re-creating a lambda for every class.
- """
- return e[1].counter
-
-
-def _collect_base_attrs(cls, taken_attr_names):
- """
- Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
- """
- base_attrs = []
- base_attr_map = {} # A dictionary of base attrs to their classes.
-
- # Traverse the MRO and collect attributes.
- for base_cls in reversed(cls.__mro__[1:-1]):
- for a in getattr(base_cls, "__attrs_attrs__", []):
- if a.inherited or a.name in taken_attr_names:
- continue
-
- a = a.evolve(inherited=True)
- base_attrs.append(a)
- base_attr_map[a.name] = base_cls
-
-    # For each name, only keep the freshest definition, i.e. the one
-    # furthest toward the back. base_attr_map is fine because it gets
-    # overwritten with every new instance.
- filtered = []
- seen = set()
- for a in reversed(base_attrs):
- if a.name in seen:
- continue
- filtered.insert(0, a)
- seen.add(a.name)
-
- return filtered, base_attr_map
-
-
-def _collect_base_attrs_broken(cls, taken_attr_names):
- """
- Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
-
- N.B. *taken_attr_names* will be mutated.
-
- Adhere to the old incorrect behavior.
-
-    Notably, it collects from the front and considers inherited attributes,
-    which leads to the buggy behavior reported in #428.
- """
- base_attrs = []
- base_attr_map = {} # A dictionary of base attrs to their classes.
-
- # Traverse the MRO and collect attributes.
- for base_cls in cls.__mro__[1:-1]:
- for a in getattr(base_cls, "__attrs_attrs__", []):
- if a.name in taken_attr_names:
- continue
-
- a = a.evolve(inherited=True)
- taken_attr_names.add(a.name)
- base_attrs.append(a)
- base_attr_map[a.name] = base_cls
-
- return base_attrs, base_attr_map
-
-
-def _transform_attrs(
- cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer
-):
- """
- Transform all `_CountingAttr`s on a class into `Attribute`s.
-
- If *these* is passed, use that and don't look for them on the class.
-
-    If *collect_by_mro* is True, collect them in the correct MRO order;
-    otherwise use the old -- incorrect -- order. See #428.
-
- Return an `_Attributes`.
- """
- cd = cls.__dict__
- anns = _get_annotations(cls)
-
- if these is not None:
- ca_list = [(name, ca) for name, ca in iteritems(these)]
-
- if not isinstance(these, ordered_dict):
- ca_list.sort(key=_counter_getter)
- elif auto_attribs is True:
- ca_names = {
- name
- for name, attr in cd.items()
- if isinstance(attr, _CountingAttr)
- }
- ca_list = []
- annot_names = set()
- for attr_name, type in anns.items():
- if _is_class_var(type):
- continue
- annot_names.add(attr_name)
- a = cd.get(attr_name, NOTHING)
-
- if not isinstance(a, _CountingAttr):
- if a is NOTHING:
- a = attrib()
- else:
- a = attrib(default=a)
- ca_list.append((attr_name, a))
-
- unannotated = ca_names - annot_names
- if len(unannotated) > 0:
- raise UnannotatedAttributeError(
- "The following `attr.ib`s lack a type annotation: "
- + ", ".join(
- sorted(unannotated, key=lambda n: cd.get(n).counter)
- )
- + "."
- )
- else:
- ca_list = sorted(
- (
- (name, attr)
- for name, attr in cd.items()
- if isinstance(attr, _CountingAttr)
- ),
- key=lambda e: e[1].counter,
- )
-
- own_attrs = [
- Attribute.from_counting_attr(
- name=attr_name, ca=ca, type=anns.get(attr_name)
- )
- for attr_name, ca in ca_list
- ]
-
- if collect_by_mro:
- base_attrs, base_attr_map = _collect_base_attrs(
- cls, {a.name for a in own_attrs}
- )
- else:
- base_attrs, base_attr_map = _collect_base_attrs_broken(
- cls, {a.name for a in own_attrs}
- )
-
- attr_names = [a.name for a in base_attrs + own_attrs]
-
- AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)
-
- if kw_only:
- own_attrs = [a.evolve(kw_only=True) for a in own_attrs]
- base_attrs = [a.evolve(kw_only=True) for a in base_attrs]
-
- attrs = AttrsClass(base_attrs + own_attrs)
-
- # Mandatory vs non-mandatory attr order only matters when they are part of
- # the __init__ signature and when they aren't kw_only (which are moved to
- # the end and can be mandatory or non-mandatory in any order, as they will
- # be specified as keyword args anyway). Check the order of those attrs:
- had_default = False
- for a in (a for a in attrs if a.init is not False and a.kw_only is False):
- if had_default is True and a.default is NOTHING:
- raise ValueError(
- "No mandatory attributes allowed after an attribute with a "
- "default value or factory. Attribute in question: %r" % (a,)
- )
-
- if had_default is False and a.default is not NOTHING:
- had_default = True
-
- if field_transformer is not None:
- attrs = field_transformer(cls, attrs)
- return _Attributes((attrs, base_attrs, base_attr_map))
-
-
-if PYPY:
-
- def _frozen_setattrs(self, name, value):
- """
- Attached to frozen classes as __setattr__.
- """
- if isinstance(self, BaseException) and name in (
- "__cause__",
- "__context__",
- ):
- BaseException.__setattr__(self, name, value)
- return
-
- raise FrozenInstanceError()
-
-
-else:
-
- def _frozen_setattrs(self, name, value):
- """
- Attached to frozen classes as __setattr__.
- """
- raise FrozenInstanceError()
-
-
-def _frozen_delattrs(self, name):
- """
- Attached to frozen classes as __delattr__.
- """
- raise FrozenInstanceError()
-
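
These two functions are what make ``frozen=True`` classes immutable; attrs
attaches them as ``__setattr__`` and ``__delattr__``. A minimal sketch
(illustration, not part of the deleted file)::

    import attr

    @attr.s(frozen=True)
    class Frozen(object):
        x = attr.ib()

    f = Frozen(1)
    try:
        f.x = 2
    except attr.exceptions.FrozenInstanceError:
        pass  # writes are rejected by _frozen_setattrs
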
-
-class _ClassBuilder(object):
- """
- Iteratively build *one* class.
- """
-
- __slots__ = (
- "_attr_names",
- "_attrs",
- "_base_attr_map",
- "_base_names",
- "_cache_hash",
- "_cls",
- "_cls_dict",
- "_delete_attribs",
- "_frozen",
- "_has_pre_init",
- "_has_post_init",
- "_is_exc",
- "_on_setattr",
- "_slots",
- "_weakref_slot",
- "_has_own_setattr",
- "_has_custom_setattr",
- )
-
- def __init__(
- self,
- cls,
- these,
- slots,
- frozen,
- weakref_slot,
- getstate_setstate,
- auto_attribs,
- kw_only,
- cache_hash,
- is_exc,
- collect_by_mro,
- on_setattr,
- has_custom_setattr,
- field_transformer,
- ):
- attrs, base_attrs, base_map = _transform_attrs(
- cls,
- these,
- auto_attribs,
- kw_only,
- collect_by_mro,
- field_transformer,
- )
-
- self._cls = cls
- self._cls_dict = dict(cls.__dict__) if slots else {}
- self._attrs = attrs
- self._base_names = set(a.name for a in base_attrs)
- self._base_attr_map = base_map
- self._attr_names = tuple(a.name for a in attrs)
- self._slots = slots
- self._frozen = frozen
- self._weakref_slot = weakref_slot
- self._cache_hash = cache_hash
- self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False))
- self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False))
- self._delete_attribs = not bool(these)
- self._is_exc = is_exc
- self._on_setattr = on_setattr
-
- self._has_custom_setattr = has_custom_setattr
- self._has_own_setattr = False
-
- self._cls_dict["__attrs_attrs__"] = self._attrs
-
- if frozen:
- self._cls_dict["__setattr__"] = _frozen_setattrs
- self._cls_dict["__delattr__"] = _frozen_delattrs
-
- self._has_own_setattr = True
-
- if getstate_setstate:
- (
- self._cls_dict["__getstate__"],
- self._cls_dict["__setstate__"],
- ) = self._make_getstate_setstate()
-
- def __repr__(self):
- return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__)
-
- def build_class(self):
- """
- Finalize class based on the accumulated configuration.
-
- Builder cannot be used after calling this method.
- """
- if self._slots is True:
- return self._create_slots_class()
- else:
- return self._patch_original_class()
-
- def _patch_original_class(self):
- """
- Apply accumulated methods and return the class.
- """
- cls = self._cls
- base_names = self._base_names
-
- # Clean class of attribute definitions (`attr.ib()`s).
- if self._delete_attribs:
- for name in self._attr_names:
- if (
- name not in base_names
- and getattr(cls, name, _sentinel) is not _sentinel
- ):
- try:
- delattr(cls, name)
- except AttributeError:
- # This can happen if a base class defines a class
- # variable and we want to set an attribute with the
- # same name by using only a type annotation.
- pass
-
- # Attach our dunder methods.
- for name, value in self._cls_dict.items():
- setattr(cls, name, value)
-
- # If we've inherited an attrs __setattr__ and don't write our own,
- # reset it to object's.
- if not self._has_own_setattr and getattr(
- cls, "__attrs_own_setattr__", False
- ):
- cls.__attrs_own_setattr__ = False
-
- if not self._has_custom_setattr:
- cls.__setattr__ = object.__setattr__
-
- return cls
-
- def _create_slots_class(self):
- """
- Build and return a new class with a `__slots__` attribute.
- """
- cd = {
- k: v
- for k, v in iteritems(self._cls_dict)
- if k not in tuple(self._attr_names) + ("__dict__", "__weakref__")
- }
-
- # If our class doesn't have its own implementation of __setattr__
- # (either from the user or by us), check the bases, if one of them has
- # an attrs-made __setattr__, that needs to be reset. We don't walk the
- # MRO because we only care about our immediate base classes.
-        # XXX: This can be confused by subclassing a slotted attrs class with
-        # XXX: a non-attrs class and then subclassing the resulting class with
-        # XXX: an attrs class. See `test_slotted_confused` for details. For
-        # XXX: now that's OK with us.
- if not self._has_own_setattr:
- cd["__attrs_own_setattr__"] = False
-
- if not self._has_custom_setattr:
- for base_cls in self._cls.__bases__:
- if base_cls.__dict__.get("__attrs_own_setattr__", False):
- cd["__setattr__"] = object.__setattr__
- break
-
- # Traverse the MRO to collect existing slots
- # and check for an existing __weakref__.
- existing_slots = dict()
- weakref_inherited = False
- for base_cls in self._cls.__mro__[1:-1]:
- if base_cls.__dict__.get("__weakref__", None) is not None:
- weakref_inherited = True
- existing_slots.update(
- {
- name: getattr(base_cls, name)
- for name in getattr(base_cls, "__slots__", [])
- }
- )
-
- base_names = set(self._base_names)
-
- names = self._attr_names
- if (
- self._weakref_slot
- and "__weakref__" not in getattr(self._cls, "__slots__", ())
- and "__weakref__" not in names
- and not weakref_inherited
- ):
- names += ("__weakref__",)
-
- # We only add the names of attributes that aren't inherited.
- # Setting __slots__ to inherited attributes wastes memory.
- slot_names = [name for name in names if name not in base_names]
-        # There are slots for attributes from the current class
-        # that are defined in parent classes.
-        # As their descriptors may be overridden by a child class,
-        # we collect them here and update the class dict.
- reused_slots = {
- slot: slot_descriptor
- for slot, slot_descriptor in iteritems(existing_slots)
- if slot in slot_names
- }
- slot_names = [name for name in slot_names if name not in reused_slots]
- cd.update(reused_slots)
- if self._cache_hash:
- slot_names.append(_hash_cache_field)
- cd["__slots__"] = tuple(slot_names)
-
- qualname = getattr(self._cls, "__qualname__", None)
- if qualname is not None:
- cd["__qualname__"] = qualname
-
- # Create new class based on old class and our methods.
- cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd)
-
- # The following is a fix for
- # https://github.com/python-attrs/attrs/issues/102. On Python 3,
- # if a method mentions `__class__` or uses the no-arg super(), the
- # compiler will bake a reference to the class in the method itself
- # as `method.__closure__`. Since we replace the class with a
- # clone, we rewrite these references so it keeps working.
- for item in cls.__dict__.values():
- if isinstance(item, (classmethod, staticmethod)):
- # Class- and staticmethods hide their functions inside.
- # These might need to be rewritten as well.
- closure_cells = getattr(item.__func__, "__closure__", None)
- elif isinstance(item, property):
- # Workaround for property `super()` shortcut (PY3-only).
- # There is no universal way for other descriptors.
- closure_cells = getattr(item.fget, "__closure__", None)
- else:
- closure_cells = getattr(item, "__closure__", None)
-
- if not closure_cells: # Catch None or the empty list.
- continue
- for cell in closure_cells:
- try:
- match = cell.cell_contents is self._cls
- except ValueError: # ValueError: Cell is empty
- pass
- else:
- if match:
- set_closure_cell(cell, cls)
-
- return cls
-
- def add_repr(self, ns):
- self._cls_dict["__repr__"] = self._add_method_dunders(
- _make_repr(self._attrs, ns=ns)
- )
- return self
-
- def add_str(self):
- repr = self._cls_dict.get("__repr__")
- if repr is None:
- raise ValueError(
- "__str__ can only be generated if a __repr__ exists."
- )
-
- def __str__(self):
- return self.__repr__()
-
- self._cls_dict["__str__"] = self._add_method_dunders(__str__)
- return self
-
- def _make_getstate_setstate(self):
- """
- Create custom __setstate__ and __getstate__ methods.
- """
- # __weakref__ is not writable.
- state_attr_names = tuple(
- an for an in self._attr_names if an != "__weakref__"
- )
-
- def slots_getstate(self):
- """
- Automatically created by attrs.
- """
- return tuple(getattr(self, name) for name in state_attr_names)
-
- hash_caching_enabled = self._cache_hash
-
- def slots_setstate(self, state):
- """
- Automatically created by attrs.
- """
- __bound_setattr = _obj_setattr.__get__(self, Attribute)
- for name, value in zip(state_attr_names, state):
- __bound_setattr(name, value)
-
- # The hash code cache is not included when the object is
- # serialized, but it still needs to be initialized to None to
- # indicate that the first call to __hash__ should be a cache
- # miss.
- if hash_caching_enabled:
- __bound_setattr(_hash_cache_field, None)
-
- return slots_getstate, slots_setstate
-
- def make_unhashable(self):
- self._cls_dict["__hash__"] = None
- return self
-
- def add_hash(self):
- self._cls_dict["__hash__"] = self._add_method_dunders(
- _make_hash(
- self._cls,
- self._attrs,
- frozen=self._frozen,
- cache_hash=self._cache_hash,
- )
- )
-
- return self
-
- def add_init(self):
- self._cls_dict["__init__"] = self._add_method_dunders(
- _make_init(
- self._cls,
- self._attrs,
- self._has_pre_init,
- self._has_post_init,
- self._frozen,
- self._slots,
- self._cache_hash,
- self._base_attr_map,
- self._is_exc,
- self._on_setattr is not None
- and self._on_setattr is not setters.NO_OP,
- attrs_init=False,
- )
- )
-
- return self
-
- def add_attrs_init(self):
- self._cls_dict["__attrs_init__"] = self._add_method_dunders(
- _make_init(
- self._cls,
- self._attrs,
- self._has_pre_init,
- self._has_post_init,
- self._frozen,
- self._slots,
- self._cache_hash,
- self._base_attr_map,
- self._is_exc,
- self._on_setattr is not None
- and self._on_setattr is not setters.NO_OP,
- attrs_init=True,
- )
- )
-
- return self
-
- def add_eq(self):
- cd = self._cls_dict
-
- cd["__eq__"] = self._add_method_dunders(
- _make_eq(self._cls, self._attrs)
- )
- cd["__ne__"] = self._add_method_dunders(_make_ne())
-
- return self
-
- def add_order(self):
- cd = self._cls_dict
-
- cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = (
- self._add_method_dunders(meth)
- for meth in _make_order(self._cls, self._attrs)
- )
-
- return self
-
- def add_setattr(self):
- if self._frozen:
- return self
-
- sa_attrs = {}
- for a in self._attrs:
- on_setattr = a.on_setattr or self._on_setattr
- if on_setattr and on_setattr is not setters.NO_OP:
- sa_attrs[a.name] = a, on_setattr
-
- if not sa_attrs:
- return self
-
- if self._has_custom_setattr:
- # We need to write a __setattr__ but there already is one!
- raise ValueError(
- "Can't combine custom __setattr__ with on_setattr hooks."
- )
-
- # docstring comes from _add_method_dunders
- def __setattr__(self, name, val):
- try:
- a, hook = sa_attrs[name]
- except KeyError:
- nval = val
- else:
- nval = hook(self, a, val)
-
- _obj_setattr(self, name, nval)
-
- self._cls_dict["__attrs_own_setattr__"] = True
- self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__)
- self._has_own_setattr = True
-
- return self
-
- def _add_method_dunders(self, method):
- """
- Add __module__ and __qualname__ to a *method* if possible.
- """
- try:
- method.__module__ = self._cls.__module__
- except AttributeError:
- pass
-
- try:
- method.__qualname__ = ".".join(
- (self._cls.__qualname__, method.__name__)
- )
- except AttributeError:
- pass
-
- try:
- method.__doc__ = "Method generated by attrs for class %s." % (
- self._cls.__qualname__,
- )
- except AttributeError:
- pass
-
- return method
-
-
-_CMP_DEPRECATION = (
- "The usage of `cmp` is deprecated and will be removed on or after "
- "2021-06-01. Please use `eq` and `order` instead."
-)
-
-
-def _determine_attrs_eq_order(cmp, eq, order, default_eq):
- """
- Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
- values of eq and order. If *eq* is None, set it to *default_eq*.
- """
- if cmp is not None and any((eq is not None, order is not None)):
- raise ValueError("Don't mix `cmp` with `eq' and `order`.")
-
- # cmp takes precedence due to bw-compatibility.
- if cmp is not None:
- return cmp, cmp
-
- # If left None, equality is set to the specified default and ordering
- # mirrors equality.
- if eq is None:
- eq = default_eq
-
- if order is None:
- order = eq
-
- if eq is False and order is True:
- raise ValueError("`order` can only be True if `eq` is True too.")
-
- return eq, order
-
-
-def _determine_attrib_eq_order(cmp, eq, order, default_eq):
- """
- Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
- values of eq and order. If *eq* is None, set it to *default_eq*.
- """
- if cmp is not None and any((eq is not None, order is not None)):
- raise ValueError("Don't mix `cmp` with `eq' and `order`.")
-
- def decide_callable_or_boolean(value):
- """
- Decide whether a key function is used.
- """
- if callable(value):
- value, key = True, value
- else:
- key = None
- return value, key
-
- # cmp takes precedence due to bw-compatibility.
- if cmp is not None:
- cmp, cmp_key = decide_callable_or_boolean(cmp)
- return cmp, cmp_key, cmp, cmp_key
-
- # If left None, equality is set to the specified default and ordering
- # mirrors equality.
- if eq is None:
- eq, eq_key = default_eq, None
- else:
- eq, eq_key = decide_callable_or_boolean(eq)
-
- if order is None:
- order, order_key = eq, eq_key
- else:
- order, order_key = decide_callable_or_boolean(order)
-
- if eq is False and order is True:
- raise ValueError("`order` can only be True if `eq` is True too.")
-
- return eq, eq_key, order, order_key
-
-
-def _determine_whether_to_implement(
- cls, flag, auto_detect, dunders, default=True
-):
- """
- Check whether we should implement a set of methods for *cls*.
-
- *flag* is the argument passed into @attr.s like 'init', *auto_detect* the
- same as passed into @attr.s and *dunders* is a tuple of attribute names
-    whose presence signals that the user has implemented it themselves.
-
-    Return *default* if no reason either for or against is found.
-
- auto_detect must be False on Python 2.
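-
-    An illustrative sketch: an explicit *flag* always wins, regardless of
-    *auto_detect*:
-
-    >>> _determine_whether_to_implement(object, True, False, ("__repr__",))
-    True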
- """
- if flag is True or flag is False:
- return flag
-
- if flag is None and auto_detect is False:
- return default
-
- # Logically, flag is None and auto_detect is True here.
- for dunder in dunders:
- if _has_own_attribute(cls, dunder):
- return False
-
- return default
-
-
-def attrs(
- maybe_cls=None,
- these=None,
- repr_ns=None,
- repr=None,
- cmp=None,
- hash=None,
- init=None,
- slots=False,
- frozen=False,
- weakref_slot=True,
- str=False,
- auto_attribs=False,
- kw_only=False,
- cache_hash=False,
- auto_exc=False,
- eq=None,
- order=None,
- auto_detect=False,
- collect_by_mro=False,
- getstate_setstate=None,
- on_setattr=None,
- field_transformer=None,
-):
- r"""
- A class decorator that adds `dunder
- <https://wiki.python.org/moin/DunderAlias>`_\ -methods according to the
- specified attributes using `attr.ib` or the *these* argument.
-
- :param these: A dictionary of name to `attr.ib` mappings. This is
- useful to avoid the definition of your attributes within the class body
- because you can't (e.g. if you want to add ``__repr__`` methods to
- Django models) or don't want to.
-
- If *these* is not ``None``, ``attrs`` will *not* search the class body
- for attributes and will *not* remove any attributes from it.
-
- If *these* is an ordered dict (`dict` on Python 3.6+,
- `collections.OrderedDict` otherwise), the order is deduced from
- the order of the attributes inside *these*. Otherwise the order
- of the definition of the attributes is used.
-
- :type these: `dict` of `str` to `attr.ib`
-
- :param str repr_ns: When using nested classes, there's no way in Python 2
- to automatically detect that. Therefore it's possible to set the
- namespace explicitly for a more meaningful ``repr`` output.
- :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*,
- *order*, and *hash* arguments explicitly, assume they are set to
- ``True`` **unless any** of the involved methods for one of the
- arguments is implemented in the *current* class (i.e. it is *not*
- inherited from some base class).
-
- So for example by implementing ``__eq__`` on a class yourself,
- ``attrs`` will deduce ``eq=False`` and will create *neither*
- ``__eq__`` *nor* ``__ne__`` (but Python classes come with a sensible
- ``__ne__`` by default, so it *should* be enough to only implement
- ``__eq__`` in most cases).
-
- .. warning::
-
- If you prevent ``attrs`` from creating the ordering methods for you
- (``order=False``, e.g. by implementing ``__le__``), it becomes
- *your* responsibility to make sure its ordering is sound. The best
- way is to use the `functools.total_ordering` decorator.
-
-
- Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*,
- *cmp*, or *hash* overrides whatever *auto_detect* would determine.
-
- *auto_detect* requires Python 3. Setting it ``True`` on Python 2 raises
- a `PythonTooOldError`.
-
-    :param bool repr: Create a ``__repr__`` method with a human-readable
-        representation of ``attrs`` attributes.
- :param bool str: Create a ``__str__`` method that is identical to
- ``__repr__``. This is usually not necessary except for
- `Exception`\ s.
- :param Optional[bool] eq: If ``True`` or ``None`` (default), add ``__eq__``
- and ``__ne__`` methods that check two instances for equality.
-
- They compare the instances as if they were tuples of their ``attrs``
- attributes if and only if the types of both classes are *identical*!
- :param Optional[bool] order: If ``True``, add ``__lt__``, ``__le__``,
- ``__gt__``, and ``__ge__`` methods that behave like *eq* above and
- allow instances to be ordered. If ``None`` (default) mirror value of
- *eq*.
- :param Optional[bool] cmp: Setting *cmp* is equivalent to setting *eq*
- and *order* to the same value. Must not be mixed with *eq* or *order*.
- :param Optional[bool] hash: If ``None`` (default), the ``__hash__`` method
-        is generated according to how *eq* and *frozen* are set.
-
- 1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you.
- 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to
- None, marking it unhashable (which it is).
- 3. If *eq* is False, ``__hash__`` will be left untouched meaning the
- ``__hash__`` method of the base class will be used (if base class is
-           ``object``, this means it will fall back to id-based hashing).
-
-        Although not recommended, you can decide for yourself and force
-        ``attrs`` to create one (e.g. if the class is immutable even though
-        you didn't freeze it programmatically) by passing ``True``, or
-        prevent one from being created by passing ``False``. Both of these
-        cases are rather special and should be used carefully.
-
- See our documentation on `hashing`, Python's documentation on
- `object.__hash__`, and the `GitHub issue that led to the default \
- behavior <https://github.com/python-attrs/attrs/issues/136>`_ for more
- details.
- :param bool init: Create a ``__init__`` method that initializes the
- ``attrs`` attributes. Leading underscores are stripped for the argument
- name. If a ``__attrs_pre_init__`` method exists on the class, it will
- be called before the class is initialized. If a ``__attrs_post_init__``
- method exists on the class, it will be called after the class is fully
- initialized.
-
- If ``init`` is ``False``, an ``__attrs_init__`` method will be
- injected instead. This allows you to define a custom ``__init__``
- method that can do pre-init work such as ``super().__init__()``,
- and then call ``__attrs_init__()`` and ``__attrs_post_init__()``.
- :param bool slots: Create a `slotted class <slotted classes>` that's more
- memory-efficient. Slotted classes are generally superior to the default
- dict classes, but have some gotchas you should know about, so we
- encourage you to read the `glossary entry <slotted classes>`.
- :param bool frozen: Make instances immutable after initialization. If
- someone attempts to modify a frozen instance,
- `attr.exceptions.FrozenInstanceError` is raised.
-
- .. note::
-
- 1. This is achieved by installing a custom ``__setattr__`` method
- on your class, so you can't implement your own.
-
- 2. True immutability is impossible in Python.
-
- 3. This *does* have a minor a runtime performance `impact
- <how-frozen>` when initializing new instances. In other words:
- ``__init__`` is slightly slower with ``frozen=True``.
-
- 4. If a class is frozen, you cannot modify ``self`` in
- ``__attrs_post_init__`` or a self-written ``__init__``. You can
- circumvent that limitation by using
- ``object.__setattr__(self, "attribute_name", value)``.
-
- 5. Subclasses of a frozen class are frozen too.
-
- :param bool weakref_slot: Make instances weak-referenceable. This has no
- effect unless ``slots`` is also enabled.
- :param bool auto_attribs: If ``True``, collect `PEP 526`_-annotated
- attributes (Python 3.6 and later only) from the class body.
-
- In this case, you **must** annotate every field. If ``attrs``
- encounters a field that is set to an `attr.ib` but lacks a type
- annotation, an `attr.exceptions.UnannotatedAttributeError` is
- raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't
- want to set a type.
-
- If you assign a value to those attributes (e.g. ``x: int = 42``), that
- value becomes the default value like if it were passed using
- ``attr.ib(default=42)``. Passing an instance of `Factory` also
- works as expected in most cases (see warning below).
-
- Attributes annotated as `typing.ClassVar`, and attributes that are
- neither annotated nor set to an `attr.ib` are **ignored**.
-
- .. warning::
- For features that use the attribute name to create decorators (e.g.
- `validators <validators>`), you still *must* assign `attr.ib` to
- them. Otherwise Python will either not find the name or try to use
- the default value to call e.g. ``validator`` on it.
-
- These errors can be quite confusing and probably the most common bug
- report on our bug tracker.
-
- .. _`PEP 526`: https://www.python.org/dev/peps/pep-0526/
- :param bool kw_only: Make all attributes keyword-only (Python 3+)
- in the generated ``__init__`` (if ``init`` is ``False``, this
- parameter is ignored).
- :param bool cache_hash: Ensure that the object's hash code is computed
- only once and stored on the object. If this is set to ``True``,
- hashing must be either explicitly or implicitly enabled for this
- class. If the hash code is cached, avoid any reassignments of
- fields involved in hash code computation or mutations of the objects
- those fields point to after object creation. If such changes occur,
- the behavior of the object's hash code is undefined.
- :param bool auto_exc: If the class subclasses `BaseException`
- (which implicitly includes any subclass of any exception), the
-        following happens so that it behaves like a well-behaved Python
-        exception class:
-
- - the values for *eq*, *order*, and *hash* are ignored and the
- instances compare and hash by the instance's ids (N.B. ``attrs`` will
- *not* remove existing implementations of ``__hash__`` or the equality
-          methods. It just won't add its own.),
- - all attributes that are either passed into ``__init__`` or have a
- default value are additionally available as a tuple in the ``args``
- attribute,
-        - the value of *str* is ignored, leaving ``__str__`` to base classes.
- :param bool collect_by_mro: Setting this to `True` fixes the way ``attrs``
- collects attributes from base classes. The default behavior is
- incorrect in certain cases of multiple inheritance. It should be on by
-        default but is kept off for backward-compatibility.
-
- See issue `#428 <https://github.com/python-attrs/attrs/issues/428>`_ for
- more details.
-
- :param Optional[bool] getstate_setstate:
- .. note::
- This is usually only interesting for slotted classes and you should
- probably just set *auto_detect* to `True`.
-
- If `True`, ``__getstate__`` and
- ``__setstate__`` are generated and attached to the class. This is
- necessary for slotted classes to be pickleable. If left `None`, it's
- `True` by default for slotted classes and ``False`` for dict classes.
-
- If *auto_detect* is `True`, and *getstate_setstate* is left `None`,
- and **either** ``__getstate__`` or ``__setstate__`` is detected directly
- on the class (i.e. not inherited), it is set to `False` (this is usually
- what you want).
-
- :param on_setattr: A callable that is run whenever the user attempts to set
- an attribute (either by assignment like ``i.x = 42`` or by using
- `setattr` like ``setattr(i, "x", 42)``). It receives the same arguments
- as validators: the instance, the attribute that is being modified, and
- the new value.
-
- If no exception is raised, the attribute is set to the return value of
- the callable.
-
- If a list of callables is passed, they're automatically wrapped in an
- `attr.setters.pipe`.
-
- :param Optional[callable] field_transformer:
- A function that is called with the original class object and all
- fields right before ``attrs`` finalizes the class. You can use
- this, e.g., to automatically add converters or validators to
- fields based on their types. See `transform-fields` for more details.
-
- .. versionadded:: 16.0.0 *slots*
- .. versionadded:: 16.1.0 *frozen*
- .. versionadded:: 16.3.0 *str*
- .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``.
- .. versionchanged:: 17.1.0
- *hash* supports ``None`` as value which is also the default now.
- .. versionadded:: 17.3.0 *auto_attribs*
- .. versionchanged:: 18.1.0
- If *these* is passed, no attributes are deleted from the class body.
- .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained.
- .. versionadded:: 18.2.0 *weakref_slot*
- .. deprecated:: 18.2.0
- ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a
- `DeprecationWarning` if the classes compared are subclasses of
-        each other. ``__eq__`` and ``__ne__`` never tried to compare subclasses
- to each other.
- .. versionchanged:: 19.2.0
- ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider
- subclasses comparable anymore.
- .. versionadded:: 18.2.0 *kw_only*
- .. versionadded:: 18.2.0 *cache_hash*
- .. versionadded:: 19.1.0 *auto_exc*
- .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
- .. versionadded:: 19.2.0 *eq* and *order*
- .. versionadded:: 20.1.0 *auto_detect*
- .. versionadded:: 20.1.0 *collect_by_mro*
- .. versionadded:: 20.1.0 *getstate_setstate*
- .. versionadded:: 20.1.0 *on_setattr*
- .. versionadded:: 20.3.0 *field_transformer*
- .. versionchanged:: 21.1.0
- ``init=False`` injects ``__attrs_init__``
- .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__``
- .. versionchanged:: 21.1.0 *cmp* undeprecated
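-
-    A minimal usage sketch (``Coordinates`` is an illustrative name, not part
-    of this module):
-
-    >>> import attr
-    >>> @attr.s(frozen=True)
-    ... class Coordinates(object):
-    ...     x = attr.ib()
-    ...     y = attr.ib()
-    >>> Coordinates(1, 2)
-    Coordinates(x=1, y=2)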
- """
- if auto_detect and PY2:
- raise PythonTooOldError(
- "auto_detect only works on Python 3 and later."
- )
-
- eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None)
- hash_ = hash # work around the lack of nonlocal
-
- if isinstance(on_setattr, (list, tuple)):
- on_setattr = setters.pipe(*on_setattr)
-
- def wrap(cls):
-
- if getattr(cls, "__class__", None) is None:
- raise TypeError("attrs only works with new-style classes.")
-
- is_frozen = frozen or _has_frozen_base_class(cls)
- is_exc = auto_exc is True and issubclass(cls, BaseException)
- has_own_setattr = auto_detect and _has_own_attribute(
- cls, "__setattr__"
- )
-
- if has_own_setattr and is_frozen:
- raise ValueError("Can't freeze a class with a custom __setattr__.")
-
- builder = _ClassBuilder(
- cls,
- these,
- slots,
- is_frozen,
- weakref_slot,
- _determine_whether_to_implement(
- cls,
- getstate_setstate,
- auto_detect,
- ("__getstate__", "__setstate__"),
- default=slots,
- ),
- auto_attribs,
- kw_only,
- cache_hash,
- is_exc,
- collect_by_mro,
- on_setattr,
- has_own_setattr,
- field_transformer,
- )
- if _determine_whether_to_implement(
- cls, repr, auto_detect, ("__repr__",)
- ):
- builder.add_repr(repr_ns)
- if str is True:
- builder.add_str()
-
- eq = _determine_whether_to_implement(
- cls, eq_, auto_detect, ("__eq__", "__ne__")
- )
- if not is_exc and eq is True:
- builder.add_eq()
- if not is_exc and _determine_whether_to_implement(
- cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__")
- ):
- builder.add_order()
-
- builder.add_setattr()
-
- if (
- hash_ is None
- and auto_detect is True
- and _has_own_attribute(cls, "__hash__")
- ):
- hash = False
- else:
- hash = hash_
- if hash is not True and hash is not False and hash is not None:
-            # Can't use `hash in (True, False, None)` because e.g. 1 == True.
- raise TypeError(
- "Invalid value for hash. Must be True, False, or None."
- )
- elif hash is False or (hash is None and eq is False) or is_exc:
- # Don't do anything. Should fall back to __object__'s __hash__
- # which is by id.
- if cache_hash:
- raise TypeError(
- "Invalid value for cache_hash. To use hash caching,"
- " hashing must be either explicitly or implicitly "
- "enabled."
- )
- elif hash is True or (
- hash is None and eq is True and is_frozen is True
- ):
- # Build a __hash__ if told so, or if it's safe.
- builder.add_hash()
- else:
- # Raise TypeError on attempts to hash.
- if cache_hash:
- raise TypeError(
- "Invalid value for cache_hash. To use hash caching,"
- " hashing must be either explicitly or implicitly "
- "enabled."
- )
- builder.make_unhashable()
-
- if _determine_whether_to_implement(
- cls, init, auto_detect, ("__init__",)
- ):
- builder.add_init()
- else:
- builder.add_attrs_init()
- if cache_hash:
- raise TypeError(
- "Invalid value for cache_hash. To use hash caching,"
- " init must be True."
- )
-
- return builder.build_class()
-
- # maybe_cls's type depends on the usage of the decorator. It's a class
- # if it's used as `@attrs` but ``None`` if used as `@attrs()`.
- if maybe_cls is None:
- return wrap
- else:
- return wrap(maybe_cls)
-
-
-_attrs = attrs
-"""
-Internal alias so we can use it in functions that take an argument called
-*attrs*.
-"""
-
-
-if PY2:
-
- def _has_frozen_base_class(cls):
- """
- Check whether *cls* has a frozen ancestor by looking at its
- __setattr__.
- """
- return (
- getattr(cls.__setattr__, "__module__", None)
- == _frozen_setattrs.__module__
- and cls.__setattr__.__name__ == _frozen_setattrs.__name__
- )
-
-
-else:
-
- def _has_frozen_base_class(cls):
- """
- Check whether *cls* has a frozen ancestor by looking at its
- __setattr__.
- """
- return cls.__setattr__ == _frozen_setattrs
-
-
-def _generate_unique_filename(cls, func_name):
- """
- Create a "filename" suitable for a function being generated.
- """
- unique_id = uuid.uuid4()
- extra = ""
- count = 1
-
- while True:
- unique_filename = "<attrs generated {0} {1}.{2}{3}>".format(
- func_name,
- cls.__module__,
- getattr(cls, "__qualname__", cls.__name__),
- extra,
- )
- # To handle concurrency we essentially "reserve" our spot in
- # the linecache with a dummy line. The caller can then
- # set this value correctly.
- cache_line = (1, None, (str(unique_id),), unique_filename)
- if (
- linecache.cache.setdefault(unique_filename, cache_line)
- == cache_line
- ):
- return unique_filename
-
- # Looks like this spot is taken. Try again.
- count += 1
- extra = "-{0}".format(count)
-
-
-def _make_hash(cls, attrs, frozen, cache_hash):
- attrs = tuple(
- a for a in attrs if a.hash is True or (a.hash is None and a.eq is True)
- )
-
- tab = " "
-
- unique_filename = _generate_unique_filename(cls, "hash")
- type_hash = hash(unique_filename)
-
- hash_def = "def __hash__(self"
- hash_func = "hash(("
- closing_braces = "))"
- if not cache_hash:
- hash_def += "):"
- else:
- if not PY2:
- hash_def += ", *"
-
- hash_def += (
- ", _cache_wrapper="
- + "__import__('attr._make')._make._CacheHashWrapper):"
- )
- hash_func = "_cache_wrapper(" + hash_func
- closing_braces += ")"
-
- method_lines = [hash_def]
-
- def append_hash_computation_lines(prefix, indent):
- """
-        Generate the code for actually computing the hash code.
-        Below, this will either be returned directly or used to compute
-        a value which is then cached, depending on the value of cache_hash.
- """
-
- method_lines.extend(
- [
- indent + prefix + hash_func,
- indent + " %d," % (type_hash,),
- ]
- )
-
- for a in attrs:
- method_lines.append(indent + " self.%s," % a.name)
-
- method_lines.append(indent + " " + closing_braces)
-
- if cache_hash:
- method_lines.append(tab + "if self.%s is None:" % _hash_cache_field)
- if frozen:
- append_hash_computation_lines(
- "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2
- )
- method_lines.append(tab * 2 + ")") # close __setattr__
- else:
- append_hash_computation_lines(
- "self.%s = " % _hash_cache_field, tab * 2
- )
- method_lines.append(tab + "return self.%s" % _hash_cache_field)
- else:
- append_hash_computation_lines("return ", tab)
-
- script = "\n".join(method_lines)
- return _make_method("__hash__", script, unique_filename)
-
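-# For a plain class with attributes ``x`` and ``y`` and cache_hash=False, the
-# script assembled above comes out roughly as follows (an illustrative
-# sketch, not literal output; <type_hash> stands for the computed integer):
-#
-#     def __hash__(self):
-#         return hash((
-#             <type_hash>,
-#             self.x,
-#             self.y,
-#         ))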
-
-def _add_hash(cls, attrs):
- """
- Add a hash method to *cls*.
- """
- cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False)
- return cls
-
-
-def _make_ne():
- """
- Create __ne__ method.
- """
-
- def __ne__(self, other):
- """
- Check equality and either forward a NotImplemented or
- return the result negated.
- """
- result = self.__eq__(other)
- if result is NotImplemented:
- return NotImplemented
-
- return not result
-
- return __ne__
-
-
-def _make_eq(cls, attrs):
- """
- Create __eq__ method for *cls* with *attrs*.
- """
- attrs = [a for a in attrs if a.eq]
-
- unique_filename = _generate_unique_filename(cls, "eq")
- lines = [
- "def __eq__(self, other):",
- " if other.__class__ is not self.__class__:",
- " return NotImplemented",
- ]
-
-    # We can't just do a big self.x == other.x and ... clause due to
- # irregularities like nan == nan is false but (nan,) == (nan,) is true.
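-    # For example (the identity shortcut in tuple comparison is what saves
-    # us here):
-    #
-    #     nan = float("nan")
-    #     nan == nan        # False
-    #     (nan,) == (nan,)  # True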
- globs = {}
- if attrs:
- lines.append(" return (")
- others = [" ) == ("]
- for a in attrs:
- if a.eq_key:
- cmp_name = "_%s_key" % (a.name,)
- # Add the key function to the global namespace
- # of the evaluated function.
- globs[cmp_name] = a.eq_key
- lines.append(
- " %s(self.%s),"
- % (
- cmp_name,
- a.name,
- )
- )
- others.append(
- " %s(other.%s),"
- % (
- cmp_name,
- a.name,
- )
- )
- else:
- lines.append(" self.%s," % (a.name,))
- others.append(" other.%s," % (a.name,))
-
- lines += others + [" )"]
- else:
- lines.append(" return True")
-
- script = "\n".join(lines)
-
- return _make_method("__eq__", script, unique_filename, globs)
-
-
-def _make_order(cls, attrs):
- """
- Create ordering methods for *cls* with *attrs*.
- """
- attrs = [a for a in attrs if a.order]
-
- def attrs_to_tuple(obj):
- """
- Save us some typing.
- """
- return tuple(
- key(value) if key else value
- for value, key in (
- (getattr(obj, a.name), a.order_key) for a in attrs
- )
- )
-
- def __lt__(self, other):
- """
- Automatically created by attrs.
- """
- if other.__class__ is self.__class__:
- return attrs_to_tuple(self) < attrs_to_tuple(other)
-
- return NotImplemented
-
- def __le__(self, other):
- """
- Automatically created by attrs.
- """
- if other.__class__ is self.__class__:
- return attrs_to_tuple(self) <= attrs_to_tuple(other)
-
- return NotImplemented
-
- def __gt__(self, other):
- """
- Automatically created by attrs.
- """
- if other.__class__ is self.__class__:
- return attrs_to_tuple(self) > attrs_to_tuple(other)
-
- return NotImplemented
-
- def __ge__(self, other):
- """
- Automatically created by attrs.
- """
- if other.__class__ is self.__class__:
- return attrs_to_tuple(self) >= attrs_to_tuple(other)
-
- return NotImplemented
-
- return __lt__, __le__, __gt__, __ge__
-
-
-def _add_eq(cls, attrs=None):
- """
- Add equality methods to *cls* with *attrs*.
- """
- if attrs is None:
- attrs = cls.__attrs_attrs__
-
- cls.__eq__ = _make_eq(cls, attrs)
- cls.__ne__ = _make_ne()
-
- return cls
-
-
-_already_repring = threading.local()
-
-
-def _make_repr(attrs, ns):
- """
- Make a repr method that includes relevant *attrs*, adding *ns* to the full
- name.
- """
-
- # Figure out which attributes to include, and which function to use to
- # format them. The a.repr value can be either bool or a custom callable.
- attr_names_with_reprs = tuple(
- (a.name, repr if a.repr is True else a.repr)
- for a in attrs
- if a.repr is not False
- )
-
- def __repr__(self):
- """
- Automatically created by attrs.
- """
- try:
- working_set = _already_repring.working_set
- except AttributeError:
- working_set = set()
- _already_repring.working_set = working_set
-
- if id(self) in working_set:
- return "..."
- real_cls = self.__class__
- if ns is None:
- qualname = getattr(real_cls, "__qualname__", None)
- if qualname is not None:
- class_name = qualname.rsplit(">.", 1)[-1]
- else:
- class_name = real_cls.__name__
- else:
- class_name = ns + "." + real_cls.__name__
-
-        # Since 'self' remains on the stack (i.e. strongly referenced) for the
-        # duration of this call, it's safe to depend on id(...) stability, so
-        # we don't need to track the instance and therefore don't have to
-        # worry about properties like weakref- or hash-ability.
- working_set.add(id(self))
- try:
- result = [class_name, "("]
- first = True
- for name, attr_repr in attr_names_with_reprs:
- if first:
- first = False
- else:
- result.append(", ")
- result.extend(
- (name, "=", attr_repr(getattr(self, name, NOTHING)))
- )
- return "".join(result) + ")"
- finally:
- working_set.remove(id(self))
-
- return __repr__
-
-
-def _add_repr(cls, ns=None, attrs=None):
- """
- Add a repr method to *cls*.
- """
- if attrs is None:
- attrs = cls.__attrs_attrs__
-
- cls.__repr__ = _make_repr(attrs, ns)
- return cls
-
-
-def fields(cls):
- """
- Return the tuple of ``attrs`` attributes for a class.
-
- The tuple also allows accessing the fields by their names (see below for
- examples).
-
- :param type cls: Class to introspect.
-
- :raise TypeError: If *cls* is not a class.
- :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
- class.
-
- :rtype: tuple (with name accessors) of `attr.Attribute`
-
- .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields
- by name.
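-
-    A minimal usage sketch (``C`` is an illustrative name):
-
-    >>> import attr
-    >>> @attr.s
-    ... class C(object):
-    ...     x = attr.ib()
-    >>> fields(C).x.name
-    'x'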
- """
- if not isclass(cls):
- raise TypeError("Passed object must be a class.")
- attrs = getattr(cls, "__attrs_attrs__", None)
- if attrs is None:
- raise NotAnAttrsClassError(
- "{cls!r} is not an attrs-decorated class.".format(cls=cls)
- )
- return attrs
-
-
-def fields_dict(cls):
- """
- Return an ordered dictionary of ``attrs`` attributes for a class, whose
- keys are the attribute names.
-
- :param type cls: Class to introspect.
-
- :raise TypeError: If *cls* is not a class.
- :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
- class.
-
- :rtype: an ordered dict where keys are attribute names and values are
- `attr.Attribute`\\ s. This will be a `dict` if it's
- naturally ordered like on Python 3.6+ or an
- :class:`~collections.OrderedDict` otherwise.
-
- .. versionadded:: 18.1.0
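-
-    A minimal usage sketch (``C`` is an illustrative name):
-
-    >>> import attr
-    >>> @attr.s
-    ... class C(object):
-    ...     x = attr.ib()
-    >>> list(fields_dict(C))
-    ['x']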
- """
- if not isclass(cls):
- raise TypeError("Passed object must be a class.")
- attrs = getattr(cls, "__attrs_attrs__", None)
- if attrs is None:
- raise NotAnAttrsClassError(
- "{cls!r} is not an attrs-decorated class.".format(cls=cls)
- )
- return ordered_dict(((a.name, a) for a in attrs))
-
-
-def validate(inst):
- """
- Validate all attributes on *inst* that have a validator.
-
-    Lets all exceptions propagate.
-
- :param inst: Instance of a class with ``attrs`` attributes.
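-
-    A minimal usage sketch (``C`` is an illustrative name); a passing
-    validator produces no output:
-
-    >>> import attr
-    >>> @attr.s
-    ... class C(object):
-    ...     x = attr.ib(validator=attr.validators.instance_of(int))
-    >>> validate(C(1))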
- """
- if _config._run_validators is False:
- return
-
- for a in fields(inst.__class__):
- v = a.validator
- if v is not None:
- v(inst, a, getattr(inst, a.name))
-
-
-def _is_slot_cls(cls):
- return "__slots__" in cls.__dict__
-
-
-def _is_slot_attr(a_name, base_attr_map):
- """
- Check if the attribute name comes from a slot class.
- """
- return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name])
-
-
-def _make_init(
- cls,
- attrs,
- pre_init,
- post_init,
- frozen,
- slots,
- cache_hash,
- base_attr_map,
- is_exc,
- has_global_on_setattr,
- attrs_init,
-):
- if frozen and has_global_on_setattr:
- raise ValueError("Frozen classes can't use on_setattr.")
-
- needs_cached_setattr = cache_hash or frozen
- filtered_attrs = []
- attr_dict = {}
- for a in attrs:
- if not a.init and a.default is NOTHING:
- continue
-
- filtered_attrs.append(a)
- attr_dict[a.name] = a
-
- if a.on_setattr is not None:
- if frozen is True:
- raise ValueError("Frozen classes can't use on_setattr.")
-
- needs_cached_setattr = True
- elif (
- has_global_on_setattr and a.on_setattr is not setters.NO_OP
- ) or _is_slot_attr(a.name, base_attr_map):
- needs_cached_setattr = True
-
- unique_filename = _generate_unique_filename(cls, "init")
-
- script, globs, annotations = _attrs_to_init_script(
- filtered_attrs,
- frozen,
- slots,
- pre_init,
- post_init,
- cache_hash,
- base_attr_map,
- is_exc,
- needs_cached_setattr,
- has_global_on_setattr,
- attrs_init,
- )
- if cls.__module__ in sys.modules:
- # This makes typing.get_type_hints(CLS.__init__) resolve string types.
- globs.update(sys.modules[cls.__module__].__dict__)
-
- globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict})
-
- if needs_cached_setattr:
- # Save the lookup overhead in __init__ if we need to circumvent
- # setattr hooks.
- globs["_cached_setattr"] = _obj_setattr
-
- init = _make_method(
- "__attrs_init__" if attrs_init else "__init__",
- script,
- unique_filename,
- globs,
- )
- init.__annotations__ = annotations
-
- return init
-
-
-def _setattr(attr_name, value_var, has_on_setattr):
- """
-    Use the cached ``object.__setattr__`` to set *attr_name* to *value_var*.
- """
- return "_setattr('%s', %s)" % (attr_name, value_var)
-
-
-def _setattr_with_converter(attr_name, value_var, has_on_setattr):
- """
-    Use the cached ``object.__setattr__`` to set *attr_name* to *value_var*,
-    but run its converter first.
- """
- return "_setattr('%s', %s(%s))" % (
- attr_name,
- _init_converter_pat % (attr_name,),
- value_var,
- )
-
-
-def _assign(attr_name, value, has_on_setattr):
- """
-    Unless *attr_name* has an on_setattr hook, use normal assignment.
-    Otherwise delegate to _setattr.
- """
- if has_on_setattr:
- return _setattr(attr_name, value, True)
-
- return "self.%s = %s" % (attr_name, value)
-
-
-def _assign_with_converter(attr_name, value_var, has_on_setattr):
- """
- Unless *attr_name* has an on_setattr hook, use normal assignment after
-    conversion. Otherwise delegate to _setattr_with_converter.
- """
- if has_on_setattr:
- return _setattr_with_converter(attr_name, value_var, True)
-
- return "self.%s = %s(%s)" % (
- attr_name,
- _init_converter_pat % (attr_name,),
- value_var,
- )
-
-
-if PY2:
-
- def _unpack_kw_only_py2(attr_name, default=None):
- """
- Unpack *attr_name* from _kw_only dict.
- """
- if default is not None:
- arg_default = ", %s" % default
- else:
- arg_default = ""
- return "%s = _kw_only.pop('%s'%s)" % (
- attr_name,
- attr_name,
- arg_default,
- )
-
- def _unpack_kw_only_lines_py2(kw_only_args):
- """
- Unpack all *kw_only_args* from _kw_only dict and handle errors.
-
-        Given a list of strings "{attr_name}" and "{attr_name}={default}",
-        generate a list of lines of code that pop attrs from the _kw_only
-        dict and raise a TypeError similar to the builtin one if a required
-        attr is missing or an extra key is passed.
-
- >>> print("\n".join(_unpack_kw_only_lines_py2(["a", "b=42"])))
- try:
- a = _kw_only.pop('a')
- b = _kw_only.pop('b', 42)
- except KeyError as _key_error:
- raise TypeError(
- ...
- if _kw_only:
- raise TypeError(
- ...
- """
- lines = ["try:"]
- lines.extend(
- " " + _unpack_kw_only_py2(*arg.split("="))
- for arg in kw_only_args
- )
- lines += """\
-except KeyError as _key_error:
- raise TypeError(
- '__init__() missing required keyword-only argument: %s' % _key_error
- )
-if _kw_only:
- raise TypeError(
- '__init__() got an unexpected keyword argument %r'
- % next(iter(_kw_only))
- )
-""".split(
- "\n"
- )
- return lines
-
-
-def _attrs_to_init_script(
- attrs,
- frozen,
- slots,
- pre_init,
- post_init,
- cache_hash,
- base_attr_map,
- is_exc,
- needs_cached_setattr,
- has_global_on_setattr,
- attrs_init,
-):
- """
- Return a script of an initializer for *attrs* and a dict of globals.
-
- The globals are expected by the generated script.
-
- If *frozen* is True, we cannot set the attributes directly so we use
- a cached ``object.__setattr__``.
- """
- lines = []
- if pre_init:
- lines.append("self.__attrs_pre_init__()")
-
- if needs_cached_setattr:
- lines.append(
- # Circumvent the __setattr__ descriptor to save one lookup per
- # assignment.
- # Note _setattr will be used again below if cache_hash is True
- "_setattr = _cached_setattr.__get__(self, self.__class__)"
- )
-
- if frozen is True:
- if slots is True:
- fmt_setter = _setattr
- fmt_setter_with_converter = _setattr_with_converter
- else:
- # Dict frozen classes assign directly to __dict__.
- # But only if the attribute doesn't come from an ancestor slot
- # class.
- # Note _inst_dict will be used again below if cache_hash is True
- lines.append("_inst_dict = self.__dict__")
-
- def fmt_setter(attr_name, value_var, has_on_setattr):
- if _is_slot_attr(attr_name, base_attr_map):
- return _setattr(attr_name, value_var, has_on_setattr)
-
- return "_inst_dict['%s'] = %s" % (attr_name, value_var)
-
- def fmt_setter_with_converter(
- attr_name, value_var, has_on_setattr
- ):
- if has_on_setattr or _is_slot_attr(attr_name, base_attr_map):
- return _setattr_with_converter(
- attr_name, value_var, has_on_setattr
- )
-
- return "_inst_dict['%s'] = %s(%s)" % (
- attr_name,
- _init_converter_pat % (attr_name,),
- value_var,
- )
-
- else:
- # Not frozen.
- fmt_setter = _assign
- fmt_setter_with_converter = _assign_with_converter
-
- args = []
- kw_only_args = []
- attrs_to_validate = []
-
- # This is a dictionary of names to validator and converter callables.
- # Injecting this into __init__ globals lets us avoid lookups.
- names_for_globals = {}
- annotations = {"return": None}
-
- for a in attrs:
- if a.validator:
- attrs_to_validate.append(a)
-
- attr_name = a.name
- has_on_setattr = a.on_setattr is not None or (
- a.on_setattr is not setters.NO_OP and has_global_on_setattr
- )
- arg_name = a.name.lstrip("_")
-
- has_factory = isinstance(a.default, Factory)
- if has_factory and a.default.takes_self:
- maybe_self = "self"
- else:
- maybe_self = ""
-
- if a.init is False:
- if has_factory:
- init_factory_name = _init_factory_pat.format(a.name)
- if a.converter is not None:
- lines.append(
- fmt_setter_with_converter(
- attr_name,
- init_factory_name + "(%s)" % (maybe_self,),
- has_on_setattr,
- )
- )
- conv_name = _init_converter_pat % (a.name,)
- names_for_globals[conv_name] = a.converter
- else:
- lines.append(
- fmt_setter(
- attr_name,
- init_factory_name + "(%s)" % (maybe_self,),
- has_on_setattr,
- )
- )
- names_for_globals[init_factory_name] = a.default.factory
- else:
- if a.converter is not None:
- lines.append(
- fmt_setter_with_converter(
- attr_name,
- "attr_dict['%s'].default" % (attr_name,),
- has_on_setattr,
- )
- )
- conv_name = _init_converter_pat % (a.name,)
- names_for_globals[conv_name] = a.converter
- else:
- lines.append(
- fmt_setter(
- attr_name,
- "attr_dict['%s'].default" % (attr_name,),
- has_on_setattr,
- )
- )
- elif a.default is not NOTHING and not has_factory:
- arg = "%s=attr_dict['%s'].default" % (arg_name, attr_name)
- if a.kw_only:
- kw_only_args.append(arg)
- else:
- args.append(arg)
-
- if a.converter is not None:
- lines.append(
- fmt_setter_with_converter(
- attr_name, arg_name, has_on_setattr
- )
- )
- names_for_globals[
- _init_converter_pat % (a.name,)
- ] = a.converter
- else:
- lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))
-
- elif has_factory:
- arg = "%s=NOTHING" % (arg_name,)
- if a.kw_only:
- kw_only_args.append(arg)
- else:
- args.append(arg)
- lines.append("if %s is not NOTHING:" % (arg_name,))
-
- init_factory_name = _init_factory_pat.format(a.name)
- if a.converter is not None:
- lines.append(
- " "
- + fmt_setter_with_converter(
- attr_name, arg_name, has_on_setattr
- )
- )
- lines.append("else:")
- lines.append(
- " "
- + fmt_setter_with_converter(
- attr_name,
- init_factory_name + "(" + maybe_self + ")",
- has_on_setattr,
- )
- )
- names_for_globals[
- _init_converter_pat % (a.name,)
- ] = a.converter
- else:
- lines.append(
- " " + fmt_setter(attr_name, arg_name, has_on_setattr)
- )
- lines.append("else:")
- lines.append(
- " "
- + fmt_setter(
- attr_name,
- init_factory_name + "(" + maybe_self + ")",
- has_on_setattr,
- )
- )
- names_for_globals[init_factory_name] = a.default.factory
- else:
- if a.kw_only:
- kw_only_args.append(arg_name)
- else:
- args.append(arg_name)
-
- if a.converter is not None:
- lines.append(
- fmt_setter_with_converter(
- attr_name, arg_name, has_on_setattr
- )
- )
- names_for_globals[
- _init_converter_pat % (a.name,)
- ] = a.converter
- else:
- lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))
-
- if a.init is True:
- if a.type is not None and a.converter is None:
- annotations[arg_name] = a.type
- elif a.converter is not None and not PY2:
- # Try to get the type from the converter.
- sig = None
- try:
- sig = inspect.signature(a.converter)
- except (ValueError, TypeError): # inspect failed
- pass
- if sig:
- sig_params = list(sig.parameters.values())
- if (
- sig_params
- and sig_params[0].annotation
- is not inspect.Parameter.empty
- ):
- annotations[arg_name] = sig_params[0].annotation
-
- if attrs_to_validate: # we can skip this if there are no validators.
- names_for_globals["_config"] = _config
- lines.append("if _config._run_validators is True:")
- for a in attrs_to_validate:
- val_name = "__attr_validator_" + a.name
- attr_name = "__attr_" + a.name
- lines.append(
- " %s(self, %s, self.%s)" % (val_name, attr_name, a.name)
- )
- names_for_globals[val_name] = a.validator
- names_for_globals[attr_name] = a
-
- if post_init:
- lines.append("self.__attrs_post_init__()")
-
-    # Because this is set only after __attrs_post_init__ is called, a crash
- # will result if post-init tries to access the hash code. This seemed
- # preferable to setting this beforehand, in which case alteration to
- # field values during post-init combined with post-init accessing the
- # hash code would result in silent bugs.
- if cache_hash:
- if frozen:
- if slots:
- # if frozen and slots, then _setattr defined above
- init_hash_cache = "_setattr('%s', %s)"
- else:
- # if frozen and not slots, then _inst_dict defined above
- init_hash_cache = "_inst_dict['%s'] = %s"
- else:
- init_hash_cache = "self.%s = %s"
- lines.append(init_hash_cache % (_hash_cache_field, "None"))
-
- # For exceptions we rely on BaseException.__init__ for proper
- # initialization.
- if is_exc:
- vals = ",".join("self." + a.name for a in attrs if a.init)
-
- lines.append("BaseException.__init__(self, %s)" % (vals,))
-
- args = ", ".join(args)
- if kw_only_args:
- if PY2:
- lines = _unpack_kw_only_lines_py2(kw_only_args) + lines
-
- args += "%s**_kw_only" % (", " if args else "",) # leading comma
- else:
- args += "%s*, %s" % (
- ", " if args else "", # leading comma
- ", ".join(kw_only_args), # kw_only args
- )
- return (
- """\
-def {init_name}(self, {args}):
- {lines}
-""".format(
- init_name=("__attrs_init__" if attrs_init else "__init__"),
- args=args,
- lines="\n ".join(lines) if lines else "pass",
- ),
- names_for_globals,
- annotations,
- )
-
-
-class Attribute(object):
- """
- *Read-only* representation of an attribute.
-
- Instances of this class are frequently used for introspection purposes
- like:
-
- - `fields` returns a tuple of them.
- - Validators get them passed as the first argument.
- - The *field transformer* hook receives a list of them.
-
- :attribute name: The name of the attribute.
- :attribute inherited: Whether or not that attribute has been inherited from
- a base class.
-
- Plus *all* arguments of `attr.ib` (except for ``factory``
-    which is only syntactic sugar for ``default=Factory(...)``).
-
- .. versionadded:: 20.1.0 *inherited*
- .. versionadded:: 20.1.0 *on_setattr*
- .. versionchanged:: 20.2.0 *inherited* is not taken into account for
- equality checks and hashing anymore.
- .. versionadded:: 21.1.0 *eq_key* and *order_key*
-
- For the full version history of the fields, see `attr.ib`.
- """
-
- __slots__ = (
- "name",
- "default",
- "validator",
- "repr",
- "eq",
- "eq_key",
- "order",
- "order_key",
- "hash",
- "init",
- "metadata",
- "type",
- "converter",
- "kw_only",
- "inherited",
- "on_setattr",
- )
-
- def __init__(
- self,
- name,
- default,
- validator,
- repr,
- cmp, # XXX: unused, remove along with other cmp code.
- hash,
- init,
- inherited,
- metadata=None,
- type=None,
- converter=None,
- kw_only=False,
- eq=None,
- eq_key=None,
- order=None,
- order_key=None,
- on_setattr=None,
- ):
- eq, eq_key, order, order_key = _determine_attrib_eq_order(
- cmp, eq_key or eq, order_key or order, True
- )
-
- # Cache this descriptor here to speed things up later.
- bound_setattr = _obj_setattr.__get__(self, Attribute)
-
- # Despite the big red warning, people *do* instantiate `Attribute`
- # themselves.
- bound_setattr("name", name)
- bound_setattr("default", default)
- bound_setattr("validator", validator)
- bound_setattr("repr", repr)
- bound_setattr("eq", eq)
- bound_setattr("eq_key", eq_key)
- bound_setattr("order", order)
- bound_setattr("order_key", order_key)
- bound_setattr("hash", hash)
- bound_setattr("init", init)
- bound_setattr("converter", converter)
- bound_setattr(
- "metadata",
- (
- metadata_proxy(metadata)
- if metadata
- else _empty_metadata_singleton
- ),
- )
- bound_setattr("type", type)
- bound_setattr("kw_only", kw_only)
- bound_setattr("inherited", inherited)
- bound_setattr("on_setattr", on_setattr)
-
- def __setattr__(self, name, value):
- raise FrozenInstanceError()
-
- @classmethod
- def from_counting_attr(cls, name, ca, type=None):
-        # *type* holds the annotated value. Deal with conflicts:
- if type is None:
- type = ca.type
- elif ca.type is not None:
- raise ValueError(
- "Type annotation and type argument cannot both be present"
- )
- inst_dict = {
- k: getattr(ca, k)
- for k in Attribute.__slots__
- if k
- not in (
- "name",
- "validator",
- "default",
- "type",
- "inherited",
- ) # exclude methods and deprecated alias
- }
- return cls(
- name=name,
- validator=ca._validator,
- default=ca._default,
- type=type,
- cmp=None,
- inherited=False,
- **inst_dict
- )
-
- @property
- def cmp(self):
- """
- Simulate the presence of a cmp attribute and warn.
- """
- warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=2)
-
- return self.eq and self.order
-
- # Don't use attr.evolve since fields(Attribute) doesn't work
- def evolve(self, **changes):
- """
- Copy *self* and apply *changes*.
-
- This works similarly to `attr.evolve` but that function does not work
- with ``Attribute``.
-
- It is mainly meant to be used for `transform-fields`.
-
- .. versionadded:: 20.3.0
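-
-        A minimal usage sketch (``C`` is an illustrative name):
-
-        >>> import attr
-        >>> @attr.s
-        ... class C(object):
-        ...     x = attr.ib()
-        >>> attr.fields(C).x.evolve(repr=False).repr
-        False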
- """
- new = copy.copy(self)
-
- new._setattrs(changes.items())
-
- return new
-
- # Don't use _add_pickle since fields(Attribute) doesn't work
- def __getstate__(self):
- """
- Play nice with pickle.
- """
- return tuple(
- getattr(self, name) if name != "metadata" else dict(self.metadata)
- for name in self.__slots__
- )
-
- def __setstate__(self, state):
- """
- Play nice with pickle.
- """
- self._setattrs(zip(self.__slots__, state))
-
- def _setattrs(self, name_values_pairs):
- bound_setattr = _obj_setattr.__get__(self, Attribute)
- for name, value in name_values_pairs:
- if name != "metadata":
- bound_setattr(name, value)
- else:
- bound_setattr(
- name,
- metadata_proxy(value)
- if value
- else _empty_metadata_singleton,
- )
-
-
-_a = [
- Attribute(
- name=name,
- default=NOTHING,
- validator=None,
- repr=True,
- cmp=None,
- eq=True,
- order=False,
- hash=(name != "metadata"),
- init=True,
- inherited=False,
- )
- for name in Attribute.__slots__
-]
-
-Attribute = _add_hash(
- _add_eq(
- _add_repr(Attribute, attrs=_a),
- attrs=[a for a in _a if a.name != "inherited"],
- ),
- attrs=[a for a in _a if a.hash and a.name != "inherited"],
-)
-
-
-class _CountingAttr(object):
- """
- Intermediate representation of attributes that uses a counter to preserve
- the order in which the attributes have been defined.
-
-    *Internal* data structure of the attrs library. Running into it is most
- likely the result of a bug like a forgotten `@attr.s` decorator.
- """
-
- __slots__ = (
- "counter",
- "_default",
- "repr",
- "eq",
- "eq_key",
- "order",
- "order_key",
- "hash",
- "init",
- "metadata",
- "_validator",
- "converter",
- "type",
- "kw_only",
- "on_setattr",
- )
- __attrs_attrs__ = tuple(
- Attribute(
- name=name,
- default=NOTHING,
- validator=None,
- repr=True,
- cmp=None,
- hash=True,
- init=True,
- kw_only=False,
- eq=True,
- eq_key=None,
- order=False,
- order_key=None,
- inherited=False,
- on_setattr=None,
- )
- for name in (
- "counter",
- "_default",
- "repr",
- "eq",
- "order",
- "hash",
- "init",
- "on_setattr",
- )
- ) + (
- Attribute(
- name="metadata",
- default=None,
- validator=None,
- repr=True,
- cmp=None,
- hash=False,
- init=True,
- kw_only=False,
- eq=True,
- eq_key=None,
- order=False,
- order_key=None,
- inherited=False,
- on_setattr=None,
- ),
- )
- cls_counter = 0
-
- def __init__(
- self,
- default,
- validator,
- repr,
- cmp,
- hash,
- init,
- converter,
- metadata,
- type,
- kw_only,
- eq,
- eq_key,
- order,
- order_key,
- on_setattr,
- ):
- _CountingAttr.cls_counter += 1
- self.counter = _CountingAttr.cls_counter
- self._default = default
- self._validator = validator
- self.converter = converter
- self.repr = repr
- self.eq = eq
- self.eq_key = eq_key
- self.order = order
- self.order_key = order_key
- self.hash = hash
- self.init = init
- self.metadata = metadata
- self.type = type
- self.kw_only = kw_only
- self.on_setattr = on_setattr
-
- def validator(self, meth):
- """
- Decorator that adds *meth* to the list of validators.
-
- Returns *meth* unchanged.
-
- .. versionadded:: 17.1.0
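-
-        A minimal usage sketch (``C`` is an illustrative name):
-
-        >>> import attr
-        >>> @attr.s
-        ... class C(object):
-        ...     x = attr.ib()
-        ...     @x.validator
-        ...     def _check_x(self, attribute, value):
-        ...         if value < 0:
-        ...             raise ValueError("x must be non-negative")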
- """
- if self._validator is None:
- self._validator = meth
- else:
- self._validator = and_(self._validator, meth)
- return meth
-
- def default(self, meth):
- """
-        Decorator that allows setting the default for an attribute.
-
- Returns *meth* unchanged.
-
- :raises DefaultAlreadySetError: If default has been set before.
-
- .. versionadded:: 17.1.0
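-
-        A minimal usage sketch (``C`` is an illustrative name); the decorated
-        method receives the partially initialized instance:
-
-        >>> import attr
-        >>> @attr.s
-        ... class C(object):
-        ...     x = attr.ib()
-        ...     @x.default
-        ...     def _x_default(self):
-        ...         return 42
-        >>> C().x
-        42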
- """
- if self._default is not NOTHING:
- raise DefaultAlreadySetError()
-
- self._default = Factory(meth, takes_self=True)
-
- return meth
-
-
-_CountingAttr = _add_eq(_add_repr(_CountingAttr))
-
-
-class Factory(object):
- """
- Stores a factory callable.
-
- If passed as the default value to `attr.ib`, the factory is used to
- generate a new value.
-
- :param callable factory: A callable that takes either none or exactly one
- mandatory positional argument depending on *takes_self*.
- :param bool takes_self: Pass the partially initialized instance that is
- being initialized as a positional argument.
-
- .. versionadded:: 17.1.0 *takes_self*
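-
-    A minimal usage sketch (``C`` is an illustrative name):
-
-    >>> import attr
-    >>> @attr.s
-    ... class C(object):
-    ...     x = attr.ib(default=attr.Factory(list))
-    >>> C().x
-    []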
- """
-
- __slots__ = ("factory", "takes_self")
-
- def __init__(self, factory, takes_self=False):
- """
- `Factory` is part of the default machinery so if we want a default
- value here, we have to implement it ourselves.
- """
- self.factory = factory
- self.takes_self = takes_self
-
- def __getstate__(self):
- """
- Play nice with pickle.
- """
- return tuple(getattr(self, name) for name in self.__slots__)
-
- def __setstate__(self, state):
- """
- Play nice with pickle.
- """
- for name, value in zip(self.__slots__, state):
- setattr(self, name, value)
-
-
-_f = [
- Attribute(
- name=name,
- default=NOTHING,
- validator=None,
- repr=True,
- cmp=None,
- eq=True,
- order=False,
- hash=True,
- init=True,
- inherited=False,
- )
- for name in Factory.__slots__
-]
-
-Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f)
-
-
-def make_class(name, attrs, bases=(object,), **attributes_arguments):
- """
- A quick way to create a new class called *name* with *attrs*.
-
- :param str name: The name for the new class.
-
- :param attrs: A list of names or a dictionary of mappings of names to
- attributes.
-
- If *attrs* is a list or an ordered dict (`dict` on Python 3.6+,
- `collections.OrderedDict` otherwise), the order is deduced from
- the order of the names or attributes inside *attrs*. Otherwise the
- order of the definition of the attributes is used.
- :type attrs: `list` or `dict`
-
- :param tuple bases: Classes that the new class will subclass.
-
- :param attributes_arguments: Passed unmodified to `attr.s`.
-
- :return: A new class with *attrs*.
- :rtype: type
-
- .. versionadded:: 17.1.0 *bases*
- .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained.
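-
-    A minimal usage sketch:
-
-    >>> import attr
-    >>> C = attr.make_class("C", ["x", "y"])
-    >>> C(1, 2)
-    C(x=1, y=2)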
- """
- if isinstance(attrs, dict):
- cls_dict = attrs
- elif isinstance(attrs, (list, tuple)):
- cls_dict = dict((a, attrib()) for a in attrs)
- else:
- raise TypeError("attrs argument must be a dict or a list.")
-
- pre_init = cls_dict.pop("__attrs_pre_init__", None)
- post_init = cls_dict.pop("__attrs_post_init__", None)
- user_init = cls_dict.pop("__init__", None)
-
- body = {}
- if pre_init is not None:
- body["__attrs_pre_init__"] = pre_init
- if post_init is not None:
- body["__attrs_post_init__"] = post_init
- if user_init is not None:
- body["__init__"] = user_init
-
- type_ = new_class(name, bases, {}, lambda ns: ns.update(body))
-
- # For pickling to work, the __module__ variable needs to be set to the
- # frame where the class is created. Bypass this step in environments where
- # sys._getframe is not defined (Jython for example) or sys._getframe is not
- # defined for arguments greater than 0 (IronPython).
- try:
- type_.__module__ = sys._getframe(1).f_globals.get(
- "__name__", "__main__"
- )
- except (AttributeError, ValueError):
- pass
-
- # We do it here for proper warnings with meaningful stacklevel.
- cmp = attributes_arguments.pop("cmp", None)
- (
- attributes_arguments["eq"],
- attributes_arguments["order"],
- ) = _determine_attrs_eq_order(
- cmp,
- attributes_arguments.get("eq"),
- attributes_arguments.get("order"),
- True,
- )
-
- return _attrs(these=cls_dict, **attributes_arguments)(type_)
-
-
-# These are required within this module, so we define them here and merely
-# import them into .validators / .converters.
-
-
-@attrs(slots=True, hash=True)
-class _AndValidator(object):
- """
- Compose many validators to a single one.
- """
-
- _validators = attrib()
-
- def __call__(self, inst, attr, value):
- for v in self._validators:
- v(inst, attr, value)
-
-
-def and_(*validators):
- """
- A validator that composes multiple validators into one.
-
- When called on a value, it runs all wrapped validators.
-
- :param callables validators: Arbitrary number of validators.
-
- .. versionadded:: 17.1.0
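-
-    A minimal usage sketch composing two validators shipped with attrs:
-
-    >>> import attr
-    >>> v = and_(attr.validators.instance_of(int),
-    ...          attr.validators.in_([1, 2, 3]))
-    >>> @attr.s
-    ... class C(object):
-    ...     x = attr.ib(validator=v)
-    >>> C(2).x
-    2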
- """
- vals = []
- for validator in validators:
- vals.extend(
- validator._validators
- if isinstance(validator, _AndValidator)
- else [validator]
- )
-
- return _AndValidator(tuple(vals))
-
-
-def pipe(*converters):
- """
- A converter that composes multiple converters into one.
-
- When called on a value, it runs all wrapped converters, returning the
- *last* value.
-
-    Type annotations will be inferred from the wrapped converters'
-    annotations, if they have any.
-
- :param callables converters: Arbitrary number of converters.
-
- .. versionadded:: 20.1.0
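-
-    A minimal usage sketch: strip whitespace, then convert to an int:
-
-    >>> pipe(str.strip, int)("  42  ")
-    42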
- """
-
- def pipe_converter(val):
- for converter in converters:
- val = converter(val)
-
- return val
-
- if not PY2:
- if not converters:
- # If the converter list is empty, pipe_converter is the identity.
- A = typing.TypeVar("A")
- pipe_converter.__annotations__ = {"val": A, "return": A}
- else:
- # Get parameter type.
- sig = None
- try:
- sig = inspect.signature(converters[0])
- except (ValueError, TypeError): # inspect failed
- pass
- if sig:
- params = list(sig.parameters.values())
- if (
- params
- and params[0].annotation is not inspect.Parameter.empty
- ):
- pipe_converter.__annotations__["val"] = params[
- 0
- ].annotation
- # Get return type.
- sig = None
- try:
- sig = inspect.signature(converters[-1])
- except (ValueError, TypeError): # inspect failed
- pass
- if sig and sig.return_annotation is not inspect.Signature().empty:
- pipe_converter.__annotations__[
- "return"
- ] = sig.return_annotation
-
- return pipe_converter
diff --git a/lib/spack/external/attr/_next_gen.py b/lib/spack/external/attr/_next_gen.py
deleted file mode 100644
index fab0af966a..0000000000
--- a/lib/spack/external/attr/_next_gen.py
+++ /dev/null
@@ -1,158 +0,0 @@
-"""
-These are Python 3.6+-only and keyword-only APIs that call `attr.s` and
-`attr.ib` with different default values.
-"""
-
-from functools import partial
-
-from attr.exceptions import UnannotatedAttributeError
-
-from . import setters
-from ._make import NOTHING, _frozen_setattrs, attrib, attrs
-
-
-def define(
- maybe_cls=None,
- *,
- these=None,
- repr=None,
- hash=None,
- init=None,
- slots=True,
- frozen=False,
- weakref_slot=True,
- str=False,
- auto_attribs=None,
- kw_only=False,
- cache_hash=False,
- auto_exc=True,
- eq=None,
- order=False,
- auto_detect=True,
- getstate_setstate=None,
- on_setattr=None,
- field_transformer=None,
-):
- r"""
- The only behavioral differences are the handling of the *auto_attribs*
- option:
-
- :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves
- exactly like `attr.s`. If left `None`, `attr.s` will try to guess:
-
- 1. If any attributes are annotated and no unannotated `attr.ib`\ s
- are found, it assumes *auto_attribs=True*.
- 2. Otherwise it assumes *auto_attribs=False* and tries to collect
- `attr.ib`\ s.
-
- and that mutable classes (``frozen=False``) validate on ``__setattr__``.
-
- .. versionadded:: 20.1.0
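-
-    A minimal usage sketch (Python 3.6+ annotations; ``Point`` is an
-    illustrative name):
-
-    >>> import attr
-    >>> @attr.define
-    ... class Point:
-    ...     x: int
-    ...     y: int = 0
-    >>> Point(1)
-    Point(x=1, y=0)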
- """
-
- def do_it(cls, auto_attribs):
- return attrs(
- maybe_cls=cls,
- these=these,
- repr=repr,
- hash=hash,
- init=init,
- slots=slots,
- frozen=frozen,
- weakref_slot=weakref_slot,
- str=str,
- auto_attribs=auto_attribs,
- kw_only=kw_only,
- cache_hash=cache_hash,
- auto_exc=auto_exc,
- eq=eq,
- order=order,
- auto_detect=auto_detect,
- collect_by_mro=True,
- getstate_setstate=getstate_setstate,
- on_setattr=on_setattr,
- field_transformer=field_transformer,
- )
-
- def wrap(cls):
- """
- Making this a wrapper ensures this code runs during class creation.
-
- We also ensure that frozen-ness of classes is inherited.
- """
- nonlocal frozen, on_setattr
-
- had_on_setattr = on_setattr not in (None, setters.NO_OP)
-
- # By default, mutable classes validate on setattr.
- if frozen is False and on_setattr is None:
- on_setattr = setters.validate
-
- # However, if we subclass a frozen class, we inherit the immutability
- # and disable on_setattr.
- for base_cls in cls.__bases__:
- if base_cls.__setattr__ is _frozen_setattrs:
- if had_on_setattr:
- raise ValueError(
- "Frozen classes can't use on_setattr "
- "(frozen-ness was inherited)."
- )
-
- on_setattr = setters.NO_OP
- break
-
- if auto_attribs is not None:
- return do_it(cls, auto_attribs)
-
- try:
- return do_it(cls, True)
- except UnannotatedAttributeError:
- return do_it(cls, False)
-
- # maybe_cls's type depends on the usage of the decorator. It's a class
- # if it's used as `@attrs` but ``None`` if used as `@attrs()`.
- if maybe_cls is None:
- return wrap
- else:
- return wrap(maybe_cls)
-
-
-mutable = define
-frozen = partial(define, frozen=True, on_setattr=None)
-
-
-def field(
- *,
- default=NOTHING,
- validator=None,
- repr=True,
- hash=None,
- init=True,
- metadata=None,
- converter=None,
- factory=None,
- kw_only=False,
- eq=None,
- order=None,
- on_setattr=None,
-):
- """
- Identical to `attr.ib`, except keyword-only and with some arguments
- removed.
-
- .. versionadded:: 20.1.0
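-
-    A minimal usage sketch (``C`` is an illustrative name):
-
-    >>> import attr
-    >>> @attr.define
-    ... class C:
-    ...     x: int = attr.field(default=0)
-    >>> C().x
-    0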
- """
- return attrib(
- default=default,
- validator=validator,
- repr=repr,
- hash=hash,
- init=init,
- metadata=metadata,
- converter=converter,
- factory=factory,
- kw_only=kw_only,
- eq=eq,
- order=order,
- on_setattr=on_setattr,
- )
diff --git a/lib/spack/external/attr/_version_info.py b/lib/spack/external/attr/_version_info.py
deleted file mode 100644
index 014e78a1b4..0000000000
--- a/lib/spack/external/attr/_version_info.py
+++ /dev/null
@@ -1,85 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-from functools import total_ordering
-
-from ._funcs import astuple
-from ._make import attrib, attrs
-
-
-@total_ordering
-@attrs(eq=False, order=False, slots=True, frozen=True)
-class VersionInfo(object):
- """
-    A version object that can be compared to a tuple of length 1--4:
-
- >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)
- True
- >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
- True
- >>> vi = attr.VersionInfo(19, 2, 0, "final")
- >>> vi < (19, 1, 1)
- False
- >>> vi < (19,)
- False
- >>> vi == (19, 2,)
- True
- >>> vi == (19, 2, 1)
- False
-
- .. versionadded:: 19.2
- """
-
- year = attrib(type=int)
- minor = attrib(type=int)
- micro = attrib(type=int)
- releaselevel = attrib(type=str)
-
- @classmethod
- def _from_version_string(cls, s):
- """
- Parse *s* and return a `VersionInfo`.
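-
- For example, a three-part version string defaults to the "final"
- release level:
-
- >>> VersionInfo._from_version_string("19.2.0")
- VersionInfo(year=19, minor=2, micro=0, releaselevel='final')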
- """
- v = s.split(".")
- if len(v) == 3:
- v.append("final")
-
- return cls(
- year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
- )
-
- def _ensure_tuple(self, other):
- """
- Ensure *other* is a tuple of a valid length.
-
- Returns a possibly transformed *other* and ourselves as a tuple of
- the same length as *other*.
- """
-
- if self.__class__ is other.__class__:
- other = astuple(other)
-
- if not isinstance(other, tuple):
- raise NotImplementedError
-
- if not (1 <= len(other) <= 4):
- raise NotImplementedError
-
- return astuple(self)[: len(other)], other
-
- def __eq__(self, other):
- try:
- us, them = self._ensure_tuple(other)
- except NotImplementedError:
- return NotImplemented
-
- return us == them
-
- def __lt__(self, other):
- try:
- us, them = self._ensure_tuple(other)
- except NotImplementedError:
- return NotImplemented
-
- # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
- # have to do anything special with releaselevel for now.
- return us < them
diff --git a/lib/spack/external/attr/converters.py b/lib/spack/external/attr/converters.py
deleted file mode 100644
index 2777db6d0a..0000000000
--- a/lib/spack/external/attr/converters.py
+++ /dev/null
@@ -1,111 +0,0 @@
-"""
-Commonly useful converters.
-"""
-
-from __future__ import absolute_import, division, print_function
-
-from ._compat import PY2
-from ._make import NOTHING, Factory, pipe
-
-
-if not PY2:
- import inspect
- import typing
-
-
-__all__ = [
- "pipe",
- "optional",
- "default_if_none",
-]
-
-
-def optional(converter):
- """
- A converter that allows an attribute to be optional. An optional attribute
- is one which can be set to ``None``.
-
- Type annotations will be inferred from those of the wrapped converter,
- if it has any.
-
- :param callable converter: the converter that is used for non-``None``
- values.
-
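- A minimal sketch:
-
- >>> c = optional(int)
- >>> c("42")
- 42
- >>> c(None) is None
- True
-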
- .. versionadded:: 17.1.0
- """
-
- def optional_converter(val):
- if val is None:
- return None
- return converter(val)
-
- if not PY2:
- sig = None
- try:
- sig = inspect.signature(converter)
- except (ValueError, TypeError): # inspect failed
- pass
- if sig:
- params = list(sig.parameters.values())
- if params and params[0].annotation is not inspect.Parameter.empty:
- optional_converter.__annotations__["val"] = typing.Optional[
- params[0].annotation
- ]
- if sig.return_annotation is not inspect.Signature.empty:
- optional_converter.__annotations__["return"] = typing.Optional[
- sig.return_annotation
- ]
-
- return optional_converter
-
-
-def default_if_none(default=NOTHING, factory=None):
- """
- A converter that replaces ``None`` values with *default* or the result
- of *factory*.
-
- :param default: Value to be used if ``None`` is passed. Passing an instance
- of `attr.Factory` is supported, however the ``takes_self`` option
- is *not*.
- :param callable factory: A callable that takes no parameters whose result
- is used if ``None`` is passed.
-
- :raises TypeError: If **neither** *default* **nor** *factory* is passed.
- :raises TypeError: If **both** *default* and *factory* are passed.
- :raises ValueError: If an instance of `attr.Factory` is passed with
- ``takes_self=True``.
-
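- A minimal sketch:
-
- >>> c = default_if_none(factory=list)
- >>> c(None)
- []
- >>> c([1, 2])
- [1, 2]
-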
- .. versionadded:: 18.2.0
- """
- if default is NOTHING and factory is None:
- raise TypeError("Must pass either `default` or `factory`.")
-
- if default is not NOTHING and factory is not None:
- raise TypeError(
- "Must pass either `default` or `factory` but not both."
- )
-
- if factory is not None:
- default = Factory(factory)
-
- if isinstance(default, Factory):
- if default.takes_self:
- raise ValueError(
- "`takes_self` is not supported by default_if_none."
- )
-
- def default_if_none_converter(val):
- if val is not None:
- return val
-
- return default.factory()
-
- else:
-
- def default_if_none_converter(val):
- if val is not None:
- return val
-
- return default
-
- return default_if_none_converter
diff --git a/lib/spack/external/attr/exceptions.py b/lib/spack/external/attr/exceptions.py
deleted file mode 100644
index f6f9861bea..0000000000
--- a/lib/spack/external/attr/exceptions.py
+++ /dev/null
@@ -1,92 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-
-class FrozenError(AttributeError):
- """
- An attempt has been made to modify a frozen/immutable instance or
- attribute.
-
- It mirrors the behavior of ``namedtuples`` by using the same error message
- and subclassing `AttributeError`.
-
- .. versionadded:: 20.1.0
- """
-
- msg = "can't set attribute"
- args = [msg]
-
-
-class FrozenInstanceError(FrozenError):
- """
- An attempt has been made to modify a frozen instance.
-
- .. versionadded:: 16.1.0
- """
-
-
-class FrozenAttributeError(FrozenError):
- """
- An attempt has been made to modify a frozen attribute.
-
- .. versionadded:: 20.1.0
- """
-
-
-class AttrsAttributeNotFoundError(ValueError):
- """
- An ``attrs`` function couldn't find an attribute that the user asked for.
-
- .. versionadded:: 16.2.0
- """
-
-
-class NotAnAttrsClassError(ValueError):
- """
- A non-``attrs`` class has been passed into an ``attrs`` function.
-
- .. versionadded:: 16.2.0
- """
-
-
-class DefaultAlreadySetError(RuntimeError):
- """
- A default has been set using ``attr.ib()`` and an attempt has been made
- to reset it using the decorator.
-
- .. versionadded:: 17.1.0
- """
-
-
-class UnannotatedAttributeError(RuntimeError):
- """
- A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type
- annotation.
-
- .. versionadded:: 17.3.0
- """
-
-
-class PythonTooOldError(RuntimeError):
- """
- An attempt was made to use an ``attrs`` feature that requires a newer
- Python version.
-
- .. versionadded:: 18.2.0
- """
-
-
-class NotCallableError(TypeError):
- """
- An ``attr.ib()`` requiring a callable has been set with a value
- that is not callable.
-
- .. versionadded:: 19.2.0
- """
-
- def __init__(self, msg, value):
- super(TypeError, self).__init__(msg, value)
- self.msg = msg
- self.value = value
-
- def __str__(self):
- return str(self.msg)
diff --git a/lib/spack/external/attr/filters.py b/lib/spack/external/attr/filters.py
deleted file mode 100644
index dc47e8fa38..0000000000
--- a/lib/spack/external/attr/filters.py
+++ /dev/null
@@ -1,52 +0,0 @@
-"""
-Commonly useful filters for `attr.asdict`.
-"""
-
-from __future__ import absolute_import, division, print_function
-
-from ._compat import isclass
-from ._make import Attribute
-
-
-def _split_what(what):
- """
- Returns a tuple of `frozenset`s of classes and attributes.
- """
- return (
- frozenset(cls for cls in what if isclass(cls)),
- frozenset(cls for cls in what if isinstance(cls, Attribute)),
- )
-
-
-def include(*what):
- """
- Whitelist *what*.
-
- :param what: What to whitelist.
- :type what: `list` of `type` or `attr.Attribute`\\ s
-
- :rtype: `callable`
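-
- A minimal sketch (``C`` is a hypothetical class), keeping only `int`
- fields in `attr.asdict`:
-
- >>> @attr.s
- ... class C(object):
- ...     x = attr.ib()
- ...     y = attr.ib()
- >>> attr.asdict(C(1, "2"), filter=include(int))
- {'x': 1}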
- """
- cls, attrs = _split_what(what)
-
- def include_(attribute, value):
- return value.__class__ in cls or attribute in attrs
-
- return include_
-
-
-def exclude(*what):
- """
- Blacklist *what*.
-
- :param what: What to blacklist.
- :type what: `list` of classes or `attr.Attribute`\\ s.
-
- :rtype: `callable`
- """
- cls, attrs = _split_what(what)
-
- def exclude_(attribute, value):
- return value.__class__ not in cls and attribute not in attrs
-
- return exclude_
diff --git a/lib/spack/external/attr/setters.py b/lib/spack/external/attr/setters.py
deleted file mode 100644
index 240014b3c1..0000000000
--- a/lib/spack/external/attr/setters.py
+++ /dev/null
@@ -1,77 +0,0 @@
-"""
-Commonly used hooks for on_setattr.
-"""
-
-from __future__ import absolute_import, division, print_function
-
-from . import _config
-from .exceptions import FrozenAttributeError
-
-
-def pipe(*setters):
- """
- Run all *setters* and return the return value of the last one.
-
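- A minimal sketch with plain callables standing in for setter hooks:
-
- >>> double = lambda instance, attrib, value: value * 2
- >>> add_one = lambda instance, attrib, value: value + 1
- >>> pipe(double, add_one)(None, None, 3)
- 7
-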
- .. versionadded:: 20.1.0
- """
-
- def wrapped_pipe(instance, attrib, new_value):
- rv = new_value
-
- for setter in setters:
- rv = setter(instance, attrib, rv)
-
- return rv
-
- return wrapped_pipe
-
-
-def frozen(_, __, ___):
- """
- Prevent an attribute from being modified.
-
- .. versionadded:: 20.1.0
- """
- raise FrozenAttributeError()
-
-
-def validate(instance, attrib, new_value):
- """
- Run *attrib*'s validator on *new_value* if it has one.
-
- .. versionadded:: 20.1.0
- """
- if _config._run_validators is False:
- return new_value
-
- v = attrib.validator
- if not v:
- return new_value
-
- v(instance, attrib, new_value)
-
- return new_value
-
-
-def convert(instance, attrib, new_value):
- """
- Run *attrib*'s converter -- if it has one -- on *new_value* and return the
- result.
-
- .. versionadded:: 20.1.0
- """
- c = attrib.converter
- if c:
- return c(new_value)
-
- return new_value
-
-
-NO_OP = object()
-"""
-Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
-
-Does not work in `pipe` or within lists.
-
-.. versionadded:: 20.1.0
-"""
diff --git a/lib/spack/external/attr/validators.py b/lib/spack/external/attr/validators.py
deleted file mode 100644
index b9a73054e9..0000000000
--- a/lib/spack/external/attr/validators.py
+++ /dev/null
@@ -1,379 +0,0 @@
-"""
-Commonly useful validators.
-"""
-
-from __future__ import absolute_import, division, print_function
-
-import re
-
-from ._make import _AndValidator, and_, attrib, attrs
-from .exceptions import NotCallableError
-
-
-__all__ = [
- "and_",
- "deep_iterable",
- "deep_mapping",
- "in_",
- "instance_of",
- "is_callable",
- "matches_re",
- "optional",
- "provides",
-]
-
-
-@attrs(repr=False, slots=True, hash=True)
-class _InstanceOfValidator(object):
- type = attrib()
-
- def __call__(self, inst, attr, value):
- """
- We use a callable class to be able to change the ``__repr__``.
- """
- if not isinstance(value, self.type):
- raise TypeError(
- "'{name}' must be {type!r} (got {value!r} that is a "
- "{actual!r}).".format(
- name=attr.name,
- type=self.type,
- actual=value.__class__,
- value=value,
- ),
- attr,
- self.type,
- value,
- )
-
- def __repr__(self):
- return "<instance_of validator for type {type!r}>".format(
- type=self.type
- )
-
-
-def instance_of(type):
- """
- A validator that raises a `TypeError` if the initializer is called
- with a wrong type for this particular attribute (checks are performed
- using `isinstance`, so it is also valid to pass a tuple of types).
-
- :param type: The type to check for.
- :type type: type or tuple of types
-
- :raises TypeError: With a human readable error message, the attribute
- (of type `attr.Attribute`), the expected type, and the value it
- got.
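-
- A minimal sketch (``C`` is a hypothetical class):
-
- >>> @attrs
- ... class C(object):
- ...     x = attrib(validator=instance_of(int))
- >>> C(42)
- C(x=42)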
- """
- return _InstanceOfValidator(type)
-
-
-@attrs(repr=False, frozen=True, slots=True)
-class _MatchesReValidator(object):
- regex = attrib()
- flags = attrib()
- match_func = attrib()
-
- def __call__(self, inst, attr, value):
- """
- We use a callable class to be able to change the ``__repr__``.
- """
- if not self.match_func(value):
- raise ValueError(
- "'{name}' must match regex {regex!r}"
- " ({value!r} doesn't)".format(
- name=attr.name, regex=self.regex.pattern, value=value
- ),
- attr,
- self.regex,
- value,
- )
-
- def __repr__(self):
- return "<matches_re validator for pattern {regex!r}>".format(
- regex=self.regex
- )
-
-
-def matches_re(regex, flags=0, func=None):
- r"""
- A validator that raises `ValueError` if the initializer is called
- with a string that doesn't match *regex*.
-
- :param str regex: a regex string to match against
- :param int flags: flags that will be passed to the underlying re function
- (default 0)
- :param callable func: which underlying `re` function to call (options
- are `re.fullmatch`, `re.search`, `re.match`, default
- is ``None`` which means either `re.fullmatch` or an emulation of
- it on Python 2). For performance reasons, they won't be used directly
- but on a pre-`re.compile`\ ed pattern.
-
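- A minimal sketch (``User`` is a hypothetical class):
-
- >>> @attrs
- ... class User(object):
- ...     email = attrib(validator=matches_re(r"[^@]+@[^@]+"))
- >>> User("jane@example.com")
- User(email='jane@example.com')
-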
- .. versionadded:: 19.2.0
- """
- fullmatch = getattr(re, "fullmatch", None)
- valid_funcs = (fullmatch, None, re.search, re.match)
- if func not in valid_funcs:
- raise ValueError(
- "'func' must be one of %s."
- % (
- ", ".join(
- sorted(
- e and e.__name__ or "None" for e in set(valid_funcs)
- )
- ),
- )
- )
-
- pattern = re.compile(regex, flags)
- if func is re.match:
- match_func = pattern.match
- elif func is re.search:
- match_func = pattern.search
- else:
- if fullmatch:
- match_func = pattern.fullmatch
- else:
- pattern = re.compile(r"(?:{})\Z".format(regex), flags)
- match_func = pattern.match
-
- return _MatchesReValidator(pattern, flags, match_func)
-
-
-@attrs(repr=False, slots=True, hash=True)
-class _ProvidesValidator(object):
- interface = attrib()
-
- def __call__(self, inst, attr, value):
- """
- We use a callable class to be able to change the ``__repr__``.
- """
- if not self.interface.providedBy(value):
- raise TypeError(
- "'{name}' must provide {interface!r} which {value!r} "
- "doesn't.".format(
- name=attr.name, interface=self.interface, value=value
- ),
- attr,
- self.interface,
- value,
- )
-
- def __repr__(self):
- return "<provides validator for interface {interface!r}>".format(
- interface=self.interface
- )
-
-
-def provides(interface):
- """
- A validator that raises a `TypeError` if the initializer is called
- with an object that does not provide the requested *interface* (checks
- are performed using ``interface.providedBy(value)``; see `zope.interface
- <https://zopeinterface.readthedocs.io/en/latest/>`_).
-
- :param interface: The interface to check for.
- :type interface: ``zope.interface.Interface``
-
- :raises TypeError: With a human readable error message, the attribute
- (of type `attr.Attribute`), the expected interface, and the
- value it got.
- """
- return _ProvidesValidator(interface)
-
-
-@attrs(repr=False, slots=True, hash=True)
-class _OptionalValidator(object):
- validator = attrib()
-
- def __call__(self, inst, attr, value):
- if value is None:
- return
-
- self.validator(inst, attr, value)
-
- def __repr__(self):
- return "<optional validator for {what} or None>".format(
- what=repr(self.validator)
- )
-
-
-def optional(validator):
- """
- A validator that makes an attribute optional. An optional attribute is one
- which can be set to ``None`` in addition to satisfying the requirements of
- the sub-validator.
-
- :param validator: A validator (or a list of validators) that is used for
- non-``None`` values.
- :type validator: callable or `list` of callables.
-
- .. versionadded:: 15.1.0
- .. versionchanged:: 17.1.0 *validator* can be a list of validators.
- """
- if isinstance(validator, list):
- return _OptionalValidator(_AndValidator(validator))
- return _OptionalValidator(validator)
-
-
-@attrs(repr=False, slots=True, hash=True)
-class _InValidator(object):
- options = attrib()
-
- def __call__(self, inst, attr, value):
- try:
- in_options = value in self.options
- except TypeError: # e.g. `1 in "abc"`
- in_options = False
-
- if not in_options:
- raise ValueError(
- "'{name}' must be in {options!r} (got {value!r})".format(
- name=attr.name, options=self.options, value=value
- )
- )
-
- def __repr__(self):
- return "<in_ validator with options {options!r}>".format(
- options=self.options
- )
-
-
-def in_(options):
- """
- A validator that raises a `ValueError` if the initializer is called
- with a value that does not belong in the options provided. The check is
- performed using ``value in options``.
-
- :param options: Allowed options.
- :type options: list, tuple, `enum.Enum`, ...
-
- :raises ValueError: With a human readable error message, the attribute (of
- type `attr.Attribute`), the expected options, and the value it
- got.
-
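- A minimal sketch (``C`` is a hypothetical class):
-
- >>> @attrs
- ... class C(object):
- ...     x = attrib(validator=in_([1, 2, 3]))
- >>> C(2)
- C(x=2)
-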
- .. versionadded:: 17.1.0
- """
- return _InValidator(options)
-
-
-@attrs(repr=False, slots=False, hash=True)
-class _IsCallableValidator(object):
- def __call__(self, inst, attr, value):
- """
- We use a callable class to be able to change the ``__repr__``.
- """
- if not callable(value):
- message = (
- "'{name}' must be callable "
- "(got {value!r} that is a {actual!r})."
- )
- raise NotCallableError(
- msg=message.format(
- name=attr.name, value=value, actual=value.__class__
- ),
- value=value,
- )
-
- def __repr__(self):
- return "<is_callable validator>"
-
-
-def is_callable():
- """
- A validator that raises an `attr.exceptions.NotCallableError` if the
- initializer is called with a value for this particular attribute
- that is not callable.
-
- .. versionadded:: 19.1.0
-
- :raises `attr.exceptions.NotCallableError`: With a human readable error
- message containing the attribute (`attr.Attribute`) name,
- and the value it got.
- """
- return _IsCallableValidator()
-
-
-@attrs(repr=False, slots=True, hash=True)
-class _DeepIterable(object):
- member_validator = attrib(validator=is_callable())
- iterable_validator = attrib(
- default=None, validator=optional(is_callable())
- )
-
- def __call__(self, inst, attr, value):
- """
- We use a callable class to be able to change the ``__repr__``.
- """
- if self.iterable_validator is not None:
- self.iterable_validator(inst, attr, value)
-
- for member in value:
- self.member_validator(inst, attr, member)
-
- def __repr__(self):
- iterable_identifier = (
- ""
- if self.iterable_validator is None
- else " {iterable!r}".format(iterable=self.iterable_validator)
- )
- return (
- "<deep_iterable validator for{iterable_identifier}"
- " iterables of {member!r}>"
- ).format(
- iterable_identifier=iterable_identifier,
- member=self.member_validator,
- )
-
-
-def deep_iterable(member_validator, iterable_validator=None):
- """
- A validator that performs deep validation of an iterable.
-
- :param member_validator: Validator to apply to iterable members
- :param iterable_validator: Validator to apply to iterable itself
- (optional)
-
- .. versionadded:: 19.1.0
-
- :raises TypeError: if any sub-validators fail
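-
- A minimal sketch (``C`` is a hypothetical class):
-
- >>> @attrs
- ... class C(object):
- ...     xs = attrib(
- ...         validator=deep_iterable(
- ...             member_validator=instance_of(int),
- ...             iterable_validator=instance_of(list),
- ...         )
- ...     )
- >>> C([1, 2, 3])
- C(xs=[1, 2, 3])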
- """
- return _DeepIterable(member_validator, iterable_validator)
-
-
-@attrs(repr=False, slots=True, hash=True)
-class _DeepMapping(object):
- key_validator = attrib(validator=is_callable())
- value_validator = attrib(validator=is_callable())
- mapping_validator = attrib(default=None, validator=optional(is_callable()))
-
- def __call__(self, inst, attr, value):
- """
- We use a callable class to be able to change the ``__repr__``.
- """
- if self.mapping_validator is not None:
- self.mapping_validator(inst, attr, value)
-
- for key in value:
- self.key_validator(inst, attr, key)
- self.value_validator(inst, attr, value[key])
-
- def __repr__(self):
- return (
- "<deep_mapping validator for objects mapping {key!r} to {value!r}>"
- ).format(key=self.key_validator, value=self.value_validator)
-
-
-def deep_mapping(key_validator, value_validator, mapping_validator=None):
- """
- A validator that performs deep validation of a dictionary.
-
- :param key_validator: Validator to apply to dictionary keys
- :param value_validator: Validator to apply to dictionary values
- :param mapping_validator: Validator to apply to top-level mapping
- attribute (optional)
-
- .. versionadded:: 19.1.0
-
- :raises TypeError: if any sub-validators fail
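-
- A minimal sketch (``C`` is a hypothetical class):
-
- >>> @attrs
- ... class C(object):
- ...     env = attrib(
- ...         validator=deep_mapping(
- ...             key_validator=instance_of(str),
- ...             value_validator=instance_of(str),
- ...         )
- ...     )
- >>> C({"PATH": "/usr/bin"})
- C(env={'PATH': '/usr/bin'})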
- """
- return _DeepMapping(key_validator, value_validator, mapping_validator)
diff --git a/lib/spack/external/distro.py b/lib/spack/external/distro.py
deleted file mode 100644
index 7892741347..0000000000
--- a/lib/spack/external/distro.py
+++ /dev/null
@@ -1,1386 +0,0 @@
-# Copyright 2015,2016,2017 Nir Cohen
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-The ``distro`` package (``distro`` stands for Linux Distribution) provides
-information about the Linux distribution it runs on, such as a reliable
-machine-readable distro ID, or version information.
-
-It is the recommended replacement for Python's original
-:py:func:`platform.linux_distribution` function, but it provides much more
-functionality. An alternative implementation became necessary because Python
-3.5 deprecated this function, and Python 3.8 removed it altogether. Its
-predecessor function :py:func:`platform.dist` had been deprecated since
-Python 2.6 and was removed in Python 3.8. Still, there are many cases in which
-access to OS distribution information is needed. See `Python issue 1322
-<https://bugs.python.org/issue1322>`_ for more information.
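-
-A minimal usage sketch (the output shown is for a hypothetical Ubuntu host,
-hence the doctest skips):
-
->>> import distro
->>> distro.id()  # doctest: +SKIP
-'ubuntu'
->>> distro.version(pretty=True, best=True)  # doctest: +SKIP
-'16.04.1 LTS (Xenial Xerus)'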
-"""
-
-import argparse
-import json
-import logging
-import os
-import re
-import shlex
-import subprocess
-import sys
-import warnings
-
-__version__ = "1.6.0"
-
-# Use `if False` to avoid an ImportError on Python 2. After dropping Python 2
-# support, can use typing.TYPE_CHECKING instead. See:
-# https://docs.python.org/3/library/typing.html#typing.TYPE_CHECKING
-if False: # pragma: nocover
- from typing import (
- Any,
- Callable,
- Dict,
- Iterable,
- Optional,
- Sequence,
- TextIO,
- Tuple,
- Type,
- TypedDict,
- Union,
- )
-
- VersionDict = TypedDict(
- "VersionDict", {"major": str, "minor": str, "build_number": str}
- )
- InfoDict = TypedDict(
- "InfoDict",
- {
- "id": str,
- "version": str,
- "version_parts": VersionDict,
- "like": str,
- "codename": str,
- },
- )
-
-
-_UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc")
-_UNIXUSRLIBDIR = os.environ.get("UNIXUSRLIBDIR", "/usr/lib")
-_OS_RELEASE_BASENAME = "os-release"
-
-#: Translation table for normalizing the "ID" attribute defined in os-release
-#: files, for use by the :func:`distro.id` method.
-#:
-#: * Key: Value as defined in the os-release file, translated to lower case,
-#: with blanks translated to underscores.
-#:
-#: * Value: Normalized value.
-NORMALIZED_OS_ID = {
- "ol": "oracle", # Oracle Linux
-}
-
-#: Translation table for normalizing the "Distributor ID" attribute returned by
-#: the lsb_release command, for use by the :func:`distro.id` method.
-#:
-#: * Key: Value as returned by the lsb_release command, translated to lower
-#: case, with blanks translated to underscores.
-#:
-#: * Value: Normalized value.
-NORMALIZED_LSB_ID = {
- "enterpriseenterpriseas": "oracle", # Oracle Enterprise Linux 4
- "enterpriseenterpriseserver": "oracle", # Oracle Linux 5
- "redhatenterpriseworkstation": "rhel", # RHEL 6, 7 Workstation
- "redhatenterpriseserver": "rhel", # RHEL 6, 7 Server
- "redhatenterprisecomputenode": "rhel", # RHEL 6 ComputeNode
-}
-
-#: Translation table for normalizing the distro ID derived from the file name
-#: of distro release files, for use by the :func:`distro.id` method.
-#:
-#: * Key: Value as derived from the file name of a distro release file,
-#: translated to lower case, with blanks translated to underscores.
-#:
-#: * Value: Normalized value.
-NORMALIZED_DISTRO_ID = {
- "redhat": "rhel", # RHEL 6.x, 7.x
-}
-
-# Pattern for content of distro release file (reversed)
-_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile(
- r"(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)"
-)
-
-# Pattern for base file name of distro release file
-_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$")
-
-# Base file names to be ignored when searching for distro release file
-_DISTRO_RELEASE_IGNORE_BASENAMES = (
- "debian_version",
- "lsb-release",
- "oem-release",
- _OS_RELEASE_BASENAME,
- "system-release",
- "plesk-release",
- "iredmail-release",
-)
-
-
-def linux_distribution(full_distribution_name=True):
- # type: (bool) -> Tuple[str, str, str]
- """
- .. deprecated:: 1.6.0
-
- :func:`distro.linux_distribution()` is deprecated. It should only be
- used as a compatibility shim with Python's
- :py:func:`platform.linux_distribution()`. Please use :func:`distro.id`,
- :func:`distro.version` and :func:`distro.name` instead.
-
- Return information about the current OS distribution as a tuple
- ``(id_name, version, codename)`` with items as follows:
-
- * ``id_name``: If *full_distribution_name* is false, the result of
- :func:`distro.id`. Otherwise, the result of :func:`distro.name`.
-
- * ``version``: The result of :func:`distro.version`.
-
- * ``codename``: The result of :func:`distro.codename`.
-
- The interface of this function is compatible with the original
- :py:func:`platform.linux_distribution` function, supporting a subset of
- its parameters.
-
- The data it returns may not be exactly the same, because it uses more
- data sources than the original function, which can lead to different
- results if the OS distribution reports inconsistent information across
- those sources (such distributions do exist).
-
- Another reason for differences is the fact that the :func:`distro.id`
- method normalizes the distro ID string to a reliable machine-readable value
- for a number of popular OS distributions.
- """
- warnings.warn(
- "distro.linux_distribution() is deprecated. It should only be used as a "
- "compatibility shim with Python's platform.linux_distribution(). Please use "
- "distro.id(), distro.version() and distro.name() instead.",
- DeprecationWarning,
- stacklevel=2,
- )
- return _distro.linux_distribution(full_distribution_name)
-
-
-def id():
- # type: () -> str
- """
- Return the distro ID of the current distribution, as a
- machine-readable string.
-
- For a number of OS distributions, the returned distro ID value is
- *reliable*, in the sense that it is documented and that it does not change
- across releases of the distribution.
-
- This package maintains the following reliable distro ID values:
-
- ============== =========================================
- Distro ID Distribution
- ============== =========================================
- "ubuntu" Ubuntu
- "debian" Debian
- "rhel" RedHat Enterprise Linux
- "centos" CentOS
- "fedora" Fedora
- "sles" SUSE Linux Enterprise Server
- "opensuse" openSUSE
- "amazon" Amazon Linux
- "arch" Arch Linux
- "cloudlinux" CloudLinux OS
- "exherbo" Exherbo Linux
- "gentoo" GenToo Linux
- "ibm_powerkvm" IBM PowerKVM
- "kvmibm" KVM for IBM z Systems
- "linuxmint" Linux Mint
- "mageia" Mageia
- "mandriva" Mandriva Linux
- "parallels" Parallels
- "pidora" Pidora
- "raspbian" Raspbian
- "oracle" Oracle Linux (and Oracle Enterprise Linux)
- "scientific" Scientific Linux
- "slackware" Slackware
- "xenserver" XenServer
- "openbsd" OpenBSD
- "netbsd" NetBSD
- "freebsd" FreeBSD
- "midnightbsd" MidnightBSD
- ============== =========================================
-
- If you need a distro added to this set of reliable IDs, or if you find
- that the :func:`distro.id` function returns a different distro ID for
- one of the listed distros, please create an issue in the
- `distro issue tracker`_.
-
- **Lookup hierarchy and transformations:**
-
- First, the ID is obtained from the following sources, in the specified
- order. The first available and non-empty value is used:
-
- * the value of the "ID" attribute of the os-release file,
-
- * the value of the "Distributor ID" attribute returned by the lsb_release
- command,
-
- * the first part of the file name of the distro release file,
-
- The ID value determined this way then undergoes the following
- transformations before it is returned by this method:
-
- * it is translated to lower case,
-
- * blanks (which should not be there anyway) are translated to underscores,
-
- * a normalization of the ID is performed, based upon
- `normalization tables`_. The purpose of this normalization is to ensure
- that the ID is as reliable as possible, even across incompatible changes
- in the OS distributions. A common reason for an incompatible change is
- the addition of an os-release file, or the addition of the lsb_release
- command, with ID values that differ from what was previously determined
- from the distro release file name.
- """
- return _distro.id()
-
-
-def name(pretty=False):
- # type: (bool) -> str
- """
- Return the name of the current OS distribution, as a human-readable
- string.
-
- If *pretty* is false, the name is returned without version or codename.
- (e.g. "CentOS Linux")
-
- If *pretty* is true, the version and codename are appended.
- (e.g. "CentOS Linux 7.1.1503 (Core)")
-
- **Lookup hierarchy:**
-
- The name is obtained from the following sources, in the specified order.
- The first available and non-empty value is used:
-
- * If *pretty* is false:
-
- - the value of the "NAME" attribute of the os-release file,
-
- - the value of the "Distributor ID" attribute returned by the lsb_release
- command,
-
- - the value of the "<name>" field of the distro release file.
-
- * If *pretty* is true:
-
- - the value of the "PRETTY_NAME" attribute of the os-release file,
-
- - the value of the "Description" attribute returned by the lsb_release
- command,
-
- - the value of the "<name>" field of the distro release file, appended
- with the value of the pretty version ("<version_id>" and "<codename>"
- fields) of the distro release file, if available.
- """
- return _distro.name(pretty)
-
-
-def version(pretty=False, best=False):
- # type: (bool, bool) -> str
- """
- Return the version of the current OS distribution, as a human-readable
- string.
-
- If *pretty* is false, the version is returned without codename (e.g.
- "7.0").
-
- If *pretty* is true, the codename in parentheses is appended, if the
- codename is non-empty (e.g. "7.0 (Maipo)").
-
- Some distributions provide version numbers with different precisions in
- the different sources of distribution information. Examining the different
- sources in a fixed priority order does not always yield the most precise
- version (e.g. for Debian 8.2, or CentOS 7.1).
-
- The *best* parameter can be used to control the approach for the returned
- version:
-
- If *best* is false, the first non-empty version number in priority order of
- the examined sources is returned.
-
- If *best* is true, the most precise version number out of all examined
- sources is returned.
-
- **Lookup hierarchy:**
-
- In all cases, the version number is obtained from the following sources.
- If *best* is false, this order represents the priority order:
-
- * the value of the "VERSION_ID" attribute of the os-release file,
- * the value of the "Release" attribute returned by the lsb_release
- command,
- * the version number parsed from the "<version_id>" field of the first line
- of the distro release file,
- * the version number parsed from the "PRETTY_NAME" attribute of the
- os-release file, if it follows the format of the distro release files.
- * the version number parsed from the "Description" attribute returned by
- the lsb_release command, if it follows the format of the distro release
- files.
- """
- return _distro.version(pretty, best)
-
-
-def version_parts(best=False):
- # type: (bool) -> Tuple[str, str, str]
- """
- Return the version of the current OS distribution as a tuple
- ``(major, minor, build_number)`` with items as follows:
-
- * ``major``: The result of :func:`distro.major_version`.
-
- * ``minor``: The result of :func:`distro.minor_version`.
-
- * ``build_number``: The result of :func:`distro.build_number`.
-
- For a description of the *best* parameter, see the :func:`distro.version`
- method.
- """
- return _distro.version_parts(best)
-
-
-def major_version(best=False):
- # type: (bool) -> str
- """
- Return the major version of the current OS distribution, as a string,
- if provided.
- Otherwise, the empty string is returned. The major version is the first
- part of the dot-separated version string.
-
- For a description of the *best* parameter, see the :func:`distro.version`
- method.
- """
- return _distro.major_version(best)
-
-
-def minor_version(best=False):
- # type: (bool) -> str
- """
- Return the minor version of the current OS distribution, as a string,
- if provided.
- Otherwise, the empty string is returned. The minor version is the second
- part of the dot-separated version string.
-
- For a description of the *best* parameter, see the :func:`distro.version`
- method.
- """
- return _distro.minor_version(best)
-
-
-def build_number(best=False):
- # type: (bool) -> str
- """
- Return the build number of the current OS distribution, as a string,
- if provided.
- Otherwise, the empty string is returned. The build number is the third part
- of the dot-separated version string.
-
- For a description of the *best* parameter, see the :func:`distro.version`
- method.
- """
- return _distro.build_number(best)
-
-
-def like():
- # type: () -> str
- """
- Return a space-separated list of distro IDs of distributions that are
- closely related to the current OS distribution with regard to packaging
- and programming interfaces, for example distributions the current
- distribution is a derivative of.
-
- **Lookup hierarchy:**
-
- This information item is only provided by the os-release file.
- For details, see the description of the "ID_LIKE" attribute in the
- `os-release man page
- <http://www.freedesktop.org/software/systemd/man/os-release.html>`_.
- """
- return _distro.like()
-
-
-def codename():
- # type: () -> str
- """
- Return the codename for the release of the current OS distribution,
- as a string.
-
- If the distribution does not have a codename, an empty string is returned.
-
- Note that the returned codename is not always really a codename. For
- example, openSUSE returns "x86_64". This function does not handle such
- cases in any special way and just returns the string it finds, if any.
-
- **Lookup hierarchy:**
-
- * the codename within the "VERSION" attribute of the os-release file, if
- provided,
-
- * the value of the "Codename" attribute returned by the lsb_release
- command,
-
- * the value of the "<codename>" field of the distro release file.
- """
- return _distro.codename()
-
-
-def info(pretty=False, best=False):
- # type: (bool, bool) -> InfoDict
- """
- Return certain machine-readable information items about the current OS
- distribution in a dictionary, as shown in the following example:
-
- .. sourcecode:: python
-
- {
- 'id': 'rhel',
- 'version': '7.0',
- 'version_parts': {
- 'major': '7',
- 'minor': '0',
- 'build_number': ''
- },
- 'like': 'fedora',
- 'codename': 'Maipo'
- }
-
- The dictionary structure and keys are always the same, regardless of which
- information items are available in the underlying data sources. The values
- for the various keys are as follows:
-
- * ``id``: The result of :func:`distro.id`.
-
- * ``version``: The result of :func:`distro.version`.
-
- * ``version_parts -> major``: The result of :func:`distro.major_version`.
-
- * ``version_parts -> minor``: The result of :func:`distro.minor_version`.
-
- * ``version_parts -> build_number``: The result of
- :func:`distro.build_number`.
-
- * ``like``: The result of :func:`distro.like`.
-
- * ``codename``: The result of :func:`distro.codename`.
-
- For a description of the *pretty* and *best* parameters, see the
- :func:`distro.version` method.
- """
- return _distro.info(pretty, best)
-
-
-def os_release_info():
- # type: () -> Dict[str, str]
- """
- Return a dictionary containing key-value pairs for the information items
- from the os-release file data source of the current OS distribution.
-
- See `os-release file`_ for details about these information items.
- """
- return _distro.os_release_info()
-
-
-def lsb_release_info():
- # type: () -> Dict[str, str]
- """
- Return a dictionary containing key-value pairs for the information items
- from the lsb_release command data source of the current OS distribution.
-
- See `lsb_release command output`_ for details about these information
- items.
- """
- return _distro.lsb_release_info()
-
-
-def distro_release_info():
- # type: () -> Dict[str, str]
- """
- Return a dictionary containing key-value pairs for the information items
- from the distro release file data source of the current OS distribution.
-
- See `distro release file`_ for details about these information items.
- """
- return _distro.distro_release_info()
-
-
-def uname_info():
- # type: () -> Dict[str, str]
- """
- Return a dictionary containing key-value pairs for the information items
- from the uname command data source of the current OS distribution.
- """
- return _distro.uname_info()
-
-
-def os_release_attr(attribute):
- # type: (str) -> str
- """
- Return a single named information item from the os-release file data source
- of the current OS distribution.
-
- Parameters:
-
- * ``attribute`` (string): Key of the information item.
-
- Returns:
-
- * (string): Value of the information item, if the item exists.
- The empty string, if the item does not exist.
-
- See `os-release file`_ for details about these information items.
- """
- return _distro.os_release_attr(attribute)
-
-
-def lsb_release_attr(attribute):
- # type: (str) -> str
- """
- Return a single named information item from the lsb_release command output
- data source of the current OS distribution.
-
- Parameters:
-
- * ``attribute`` (string): Key of the information item.
-
- Returns:
-
- * (string): Value of the information item, if the item exists.
- The empty string, if the item does not exist.
-
- See `lsb_release command output`_ for details about these information
- items.
- """
- return _distro.lsb_release_attr(attribute)
-
-
-def distro_release_attr(attribute):
- # type: (str) -> str
- """
- Return a single named information item from the distro release file
- data source of the current OS distribution.
-
- Parameters:
-
- * ``attribute`` (string): Key of the information item.
-
- Returns:
-
- * (string): Value of the information item, if the item exists.
- The empty string, if the item does not exist.
-
- See `distro release file`_ for details about these information items.
- """
- return _distro.distro_release_attr(attribute)
-
-
-def uname_attr(attribute):
- # type: (str) -> str
- """
- Return a single named information item from the uname command output
- data source of the current OS distribution.
-
- Parameters:
-
- * ``attribute`` (string): Key of the information item.
-
- Returns:
-
- * (string): Value of the information item, if the item exists.
- The empty string, if the item does not exist.
- """
- return _distro.uname_attr(attribute)
-
-
-try:
- from functools import cached_property
-except ImportError:
- # Python < 3.8
- class cached_property(object): # type: ignore
- """A version of @property which caches the value. On access, it calls the
- underlying function and sets the value in `__dict__` so future accesses
- will not re-call the property.
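-
- A minimal sketch (``C`` is a hypothetical class): the underlying
- function runs only on first access.
-
- >>> class C(object):
- ...     @cached_property
- ...     def x(self):
- ...         print("computed")
- ...         return 1
- >>> c = C()
- >>> c.x
- computed
- 1
- >>> c.x
- 1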
- """
-
- def __init__(self, f):
- # type: (Callable[[Any], Any]) -> None
- self._fname = f.__name__
- self._f = f
-
- def __get__(self, obj, owner):
- # type: (Any, Type[Any]) -> Any
- assert obj is not None, "call {} on an instance".format(self._fname)
- ret = obj.__dict__[self._fname] = self._f(obj)
- return ret
-
-
-class LinuxDistribution(object):
- """
- Provides information about an OS distribution.
-
- This package creates a private module-global instance of this class with
- default initialization arguments, which is used by the
- `consolidated accessor functions`_ and `single source accessor functions`_.
- By using default initialization arguments, that module-global instance
- returns data about the current OS distribution (i.e. the distro this
- package runs on).
-
- Normally, it is not necessary to create additional instances of this class.
- However, in situations where control is needed over the exact data sources
- that are used, instances of this class can be created with a specific
- distro release file, or a specific os-release file, or without invoking the
- lsb_release command.
- """
-
- def __init__(
- self,
- include_lsb=True,
- os_release_file="",
- distro_release_file="",
- include_uname=True,
- root_dir=None,
- ):
- # type: (bool, str, str, bool, Optional[str]) -> None
- """
- The initialization method of this class gathers information from the
- available data sources, and stores that in private instance attributes.
- Subsequent access to the information items uses these private instance
- attributes, so that the data sources are read only once.
-
- Parameters:
-
- * ``include_lsb`` (bool): Controls whether the
- `lsb_release command output`_ is included as a data source.
-
- If the lsb_release command is not available in the program execution
- path, the data source for the lsb_release command will be empty.
-
- * ``os_release_file`` (string): The path name of the
- `os-release file`_ that is to be used as a data source.
-
- An empty string (the default) will cause the default path name to
- be used (see `os-release file`_ for details).
-
- If the specified or defaulted os-release file does not exist, the
- data source for the os-release file will be empty.
-
- * ``distro_release_file`` (string): The path name of the
- `distro release file`_ that is to be used as a data source.
-
- An empty string (the default) will cause a default search algorithm
- to be used (see `distro release file`_ for details).
-
- If the specified distro release file does not exist, or if no default
- distro release file can be found, the data source for the distro
- release file will be empty.
-
- * ``include_uname`` (bool): Controls whether uname command output is
- included as a data source. If the uname command is not available in
- the program execution path the data source for the uname command will
- be empty.
-
- * ``root_dir`` (string): The absolute path to the root directory to use
- to find distro-related information files.
-
- Public instance attributes:
-
- * ``os_release_file`` (string): The path name of the
- `os-release file`_ that is actually used as a data source. The
- empty string if no distro release file is used as a data source.
-
- * ``distro_release_file`` (string): The path name of the
- `distro release file`_ that is actually used as a data source. The
- empty string if no distro release file is used as a data source.
-
- * ``include_lsb`` (bool): The result of the ``include_lsb`` parameter.
- This controls whether the lsb information will be loaded.
-
- * ``include_uname`` (bool): The result of the ``include_uname``
- parameter. This controls whether the uname information will
- be loaded.
-
- Raises:
-
- * :py:exc:`IOError`: Some I/O issue with an os-release file or distro
- release file.
-
- * :py:exc:`subprocess.CalledProcessError`: The lsb_release command had
- some issue (other than not being available in the program execution
- path).
-
- * :py:exc:`UnicodeError`: A data source has unexpected characters or
- uses an unexpected encoding.
- """
- self.root_dir = root_dir
- self.etc_dir = os.path.join(root_dir, "etc") if root_dir else _UNIXCONFDIR
- self.usr_lib_dir = (
- os.path.join(root_dir, "usr/lib") if root_dir else _UNIXUSRLIBDIR
- )
-
- if os_release_file:
- self.os_release_file = os_release_file
- else:
- etc_dir_os_release_file = os.path.join(self.etc_dir, _OS_RELEASE_BASENAME)
- usr_lib_os_release_file = os.path.join(
- self.usr_lib_dir, _OS_RELEASE_BASENAME
- )
-
- # NOTE: The idea is to respect order **and** have it set
- # at all times for API backwards compatibility.
- if os.path.isfile(etc_dir_os_release_file) or not os.path.isfile(
- usr_lib_os_release_file
- ):
- self.os_release_file = etc_dir_os_release_file
- else:
- self.os_release_file = usr_lib_os_release_file
-
- self.distro_release_file = distro_release_file or "" # updated later
- self.include_lsb = include_lsb
- self.include_uname = include_uname
-
- def __repr__(self):
- # type: () -> str
- """Return repr of all info"""
- return (
- "LinuxDistribution("
- "os_release_file={self.os_release_file!r}, "
- "distro_release_file={self.distro_release_file!r}, "
- "include_lsb={self.include_lsb!r}, "
- "include_uname={self.include_uname!r}, "
- "_os_release_info={self._os_release_info!r}, "
- "_lsb_release_info={self._lsb_release_info!r}, "
- "_distro_release_info={self._distro_release_info!r}, "
- "_uname_info={self._uname_info!r})".format(self=self)
- )
-
- def linux_distribution(self, full_distribution_name=True):
- # type: (bool) -> Tuple[str, str, str]
- """
- Return information about the OS distribution that is compatible
- with Python's :func:`platform.linux_distribution`, supporting a subset
- of its parameters.
-
- For details, see :func:`distro.linux_distribution`.
- """
- return (
- self.name() if full_distribution_name else self.id(),
- self.version(),
- self.codename(),
- )
-
- def id(self):
- # type: () -> str
- """Return the distro ID of the OS distribution, as a string.
-
- For details, see :func:`distro.id`.
- """
-
- def normalize(distro_id, table):
- # type: (str, Dict[str, str]) -> str
- distro_id = distro_id.lower().replace(" ", "_")
- return table.get(distro_id, distro_id)
-
- distro_id = self.os_release_attr("id")
- if distro_id:
- return normalize(distro_id, NORMALIZED_OS_ID)
-
- distro_id = self.lsb_release_attr("distributor_id")
- if distro_id:
- return normalize(distro_id, NORMALIZED_LSB_ID)
-
- distro_id = self.distro_release_attr("id")
- if distro_id:
- return normalize(distro_id, NORMALIZED_DISTRO_ID)
-
- distro_id = self.uname_attr("id")
- if distro_id:
- return normalize(distro_id, NORMALIZED_DISTRO_ID)
-
- return ""
-
- def name(self, pretty=False):
- # type: (bool) -> str
- """
- Return the name of the OS distribution, as a string.
-
- For details, see :func:`distro.name`.
- """
- name = (
- self.os_release_attr("name")
- or self.lsb_release_attr("distributor_id")
- or self.distro_release_attr("name")
- or self.uname_attr("name")
- )
- if pretty:
- name = self.os_release_attr("pretty_name") or self.lsb_release_attr(
- "description"
- )
- if not name:
- name = self.distro_release_attr("name") or self.uname_attr("name")
- version = self.version(pretty=True)
- if version:
- name = name + " " + version
- return name or ""
-
- def version(self, pretty=False, best=False):
- # type: (bool, bool) -> str
- """
- Return the version of the OS distribution, as a string.
-
- For details, see :func:`distro.version`.
- """
- versions = [
- self.os_release_attr("version_id"),
- self.lsb_release_attr("release"),
- self.distro_release_attr("version_id"),
- self._parse_distro_release_content(self.os_release_attr("pretty_name")).get(
- "version_id", ""
- ),
- self._parse_distro_release_content(
- self.lsb_release_attr("description")
- ).get("version_id", ""),
- self.uname_attr("release"),
- ]
- version = ""
- if best:
- # This algorithm uses the last version in priority order that has
- # the best precision. If the versions are not in conflict, that
- # does not matter; otherwise, using the last one instead of the
- # first one might be considered a surprise.
- for v in versions:
- if v.count(".") > version.count(".") or version == "":
- version = v
- else:
- for v in versions:
- if v != "":
- version = v
- break
- if pretty and version and self.codename():
- version = "{0} ({1})".format(version, self.codename())
- return version
-
- def version_parts(self, best=False):
- # type: (bool) -> Tuple[str, str, str]
- """
- Return the version of the OS distribution, as a tuple of version
- numbers.
-
- For details, see :func:`distro.version_parts`.
- """
- version_str = self.version(best=best)
- if version_str:
- version_regex = re.compile(r"(\d+)\.?(\d+)?\.?(\d+)?")
- matches = version_regex.match(version_str)
- if matches:
- major, minor, build_number = matches.groups()
- return major, minor or "", build_number or ""
- return "", "", ""
-
- def major_version(self, best=False):
- # type: (bool) -> str
- """
- Return the major version number of the current distribution.
-
- For details, see :func:`distro.major_version`.
- """
- return self.version_parts(best)[0]
-
- def minor_version(self, best=False):
- # type: (bool) -> str
- """
- Return the minor version number of the current distribution.
-
- For details, see :func:`distro.minor_version`.
- """
- return self.version_parts(best)[1]
-
- def build_number(self, best=False):
- # type: (bool) -> str
- """
- Return the build number of the current distribution.
-
- For details, see :func:`distro.build_number`.
- """
- return self.version_parts(best)[2]
-
- def like(self):
- # type: () -> str
- """
- Return the IDs of distributions that are like the OS distribution.
-
- For details, see :func:`distro.like`.
- """
- return self.os_release_attr("id_like") or ""
-
- def codename(self):
- # type: () -> str
- """
- Return the codename of the OS distribution.
-
- For details, see :func:`distro.codename`.
- """
- try:
- # Handle os_release specially since distros might purposefully set
- # this to empty string to have no codename
- return self._os_release_info["codename"]
- except KeyError:
- return (
- self.lsb_release_attr("codename")
- or self.distro_release_attr("codename")
- or ""
- )
-
- def info(self, pretty=False, best=False):
- # type: (bool, bool) -> InfoDict
- """
- Return certain machine-readable information about the OS
- distribution.
-
- For details, see :func:`distro.info`.
- """
- return dict(
- id=self.id(),
- version=self.version(pretty, best),
- version_parts=dict(
- major=self.major_version(best),
- minor=self.minor_version(best),
- build_number=self.build_number(best),
- ),
- like=self.like(),
- codename=self.codename(),
- )
-
- def os_release_info(self):
- # type: () -> Dict[str, str]
- """
- Return a dictionary containing key-value pairs for the information
- items from the os-release file data source of the OS distribution.
-
- For details, see :func:`distro.os_release_info`.
- """
- return self._os_release_info
-
- def lsb_release_info(self):
- # type: () -> Dict[str, str]
- """
- Return a dictionary containing key-value pairs for the information
- items from the lsb_release command data source of the OS
- distribution.
-
- For details, see :func:`distro.lsb_release_info`.
- """
- return self._lsb_release_info
-
- def distro_release_info(self):
- # type: () -> Dict[str, str]
- """
- Return a dictionary containing key-value pairs for the information
- items from the distro release file data source of the OS
- distribution.
-
- For details, see :func:`distro.distro_release_info`.
- """
- return self._distro_release_info
-
- def uname_info(self):
- # type: () -> Dict[str, str]
- """
- Return a dictionary containing key-value pairs for the information
- items from the uname command data source of the OS distribution.
-
- For details, see :func:`distro.uname_info`.
- """
- return self._uname_info
-
- def os_release_attr(self, attribute):
- # type: (str) -> str
- """
- Return a single named information item from the os-release file data
- source of the OS distribution.
-
- For details, see :func:`distro.os_release_attr`.
- """
- return self._os_release_info.get(attribute, "")
-
- def lsb_release_attr(self, attribute):
- # type: (str) -> str
- """
- Return a single named information item from the lsb_release command
- output data source of the OS distribution.
-
- For details, see :func:`distro.lsb_release_attr`.
- """
- return self._lsb_release_info.get(attribute, "")
-
- def distro_release_attr(self, attribute):
- # type: (str) -> str
- """
- Return a single named information item from the distro release file
- data source of the OS distribution.
-
- For details, see :func:`distro.distro_release_attr`.
- """
- return self._distro_release_info.get(attribute, "")
-
- def uname_attr(self, attribute):
- # type: (str) -> str
- """
- Return a single named information item from the uname command
- output data source of the OS distribution.
-
- For details, see :func:`distro.uname_attr`.
- """
- return self._uname_info.get(attribute, "")
-
- @cached_property
- def _os_release_info(self):
- # type: () -> Dict[str, str]
- """
- Get the information items from the specified os-release file.
-
- Returns:
- A dictionary containing all information items.
- """
- if os.path.isfile(self.os_release_file):
- with open(self.os_release_file) as release_file:
- return self._parse_os_release_content(release_file)
- return {}
-
- @staticmethod
- def _parse_os_release_content(lines):
- # type: (TextIO) -> Dict[str, str]
- """
- Parse the lines of an os-release file.
-
- Parameters:
-
- * lines: Iterable through the lines in the os-release file.
- Each line must be a unicode string or a UTF-8 encoded byte
- string.
-
- Returns:
- A dictionary containing all information items.
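-
- For example (a minimal sketch, passing the file content directly):
-
- >>> LinuxDistribution._parse_os_release_content(
- ...     'NAME="Ubuntu"\nVERSION="16.04 LTS (Xenial Xerus)"\n'
- ... )
- {'name': 'Ubuntu', 'version': '16.04 LTS (Xenial Xerus)', 'codename': 'Xenial Xerus'}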
- """
- props = {}
- lexer = shlex.shlex(lines, posix=True)
- lexer.whitespace_split = True
-
- # The shlex module defines its `wordchars` variable using literals,
- # making it dependent on the encoding of the Python source file.
- # In Python 2.6 and 2.7, the shlex source file is encoded in
- # 'iso-8859-1', and the `wordchars` variable is defined as a byte
- # string. This causes a UnicodeDecodeError to be raised when the
- # parsed content is a unicode object. The following fix resolves that
- # (... but it should be fixed in shlex...):
- if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes):
- lexer.wordchars = lexer.wordchars.decode("iso-8859-1")
-
- tokens = list(lexer)
- for token in tokens:
- # At this point, all shell-like parsing has been done (i.e.
- # comments processed, quotes and backslash escape sequences
- # processed, multi-line values assembled, trailing newlines
- # stripped, etc.), so the tokens are now either:
- # * variable assignments: var=value
- # * commands or their arguments (not allowed in os-release)
- if "=" in token:
- k, v = token.split("=", 1)
- props[k.lower()] = v
- else:
- # Ignore any tokens that are not variable assignments
- pass
-
- if "version_codename" in props:
- # os-release added a version_codename field. Use that in
- # preference to anything else. Note that some distros purposefully
- # do not have code names. They should be setting
- # version_codename=""
- props["codename"] = props["version_codename"]
- elif "ubuntu_codename" in props:
- # Same as above but a non-standard field name used on older Ubuntus
- props["codename"] = props["ubuntu_codename"]
- elif "version" in props:
- # If there is no version_codename, parse it from the version
- match = re.search(r"(\(\D+\))|,(\s+)?\D+", props["version"])
- if match:
- codename = match.group()
- codename = codename.strip("()")
- codename = codename.strip(",")
- codename = codename.strip()
- # The codename appears within parentheses.
- props["codename"] = codename
-
- return props
-
- @cached_property
- def _lsb_release_info(self):
- # type: () -> Dict[str, str]
- """
- Get the information items from the lsb_release command output.
-
- Returns:
- A dictionary containing all information items.
- """
- if not self.include_lsb:
- return {}
- with open(os.devnull, "wb") as devnull:
- try:
- cmd = ("lsb_release", "-a")
- stdout = subprocess.check_output(cmd, stderr=devnull)
- # Command not found or lsb_release returned error
- except (OSError, subprocess.CalledProcessError):
- return {}
- content = self._to_str(stdout).splitlines()
- return self._parse_lsb_release_content(content)
-
- @staticmethod
- def _parse_lsb_release_content(lines):
- # type: (Iterable[str]) -> Dict[str, str]
- """
- Parse the output of the lsb_release command.
-
- Parameters:
-
- * lines: Iterable through the lines of the lsb_release output.
- Each line must be a unicode string or a UTF-8 encoded byte
- string.
-
- Returns:
- A dictionary containing all information items.
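-
- For example (a minimal sketch):
-
- >>> LinuxDistribution._parse_lsb_release_content(
- ...     ["Distributor ID:\tUbuntu", "Release:\t16.04"]
- ... )
- {'distributor_id': 'Ubuntu', 'release': '16.04'}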
- """
- props = {}
- for line in lines:
- kv = line.strip("\n").split(":", 1)
- if len(kv) != 2:
- # Ignore lines without colon.
- continue
- k, v = kv
- props.update({k.replace(" ", "_").lower(): v.strip()})
- return props
-
- @cached_property
- def _uname_info(self):
- # type: () -> Dict[str, str]
- with open(os.devnull, "wb") as devnull:
- try:
- cmd = ("uname", "-rs")
- stdout = subprocess.check_output(cmd, stderr=devnull)
- except OSError:
- return {}
- content = self._to_str(stdout).splitlines()
- return self._parse_uname_content(content)
-
- @staticmethod
- def _parse_uname_content(lines):
- # type: (Sequence[str]) -> Dict[str, str]
- props = {}
- match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip())
- if match:
- name, version = match.groups()
-
- # This is to prevent the Linux kernel version from
- # appearing as the 'best' version on otherwise
- # identifiable distributions.
- if name == "Linux":
- return {}
- props["id"] = name.lower()
- props["name"] = name
- props["release"] = version
- return props
-
- @staticmethod
- def _to_str(text):
- # type: (Union[bytes, str]) -> str
- encoding = sys.getfilesystemencoding()
- encoding = "utf-8" if encoding == "ascii" else encoding
-
- if sys.version_info[0] >= 3:
- if isinstance(text, bytes):
- return text.decode(encoding)
- else:
- if isinstance(text, unicode): # noqa
- return text.encode(encoding)
-
- return text
-
- @cached_property
- def _distro_release_info(self):
- # type: () -> Dict[str, str]
- """
- Get the information items from the specified distro release file.
-
- Returns:
- A dictionary containing all information items.
- """
- if self.distro_release_file:
- # If it was specified, we use it and parse what we can, even if
- # its file name or content does not match the expected pattern.
- distro_info = self._parse_distro_release_file(self.distro_release_file)
- basename = os.path.basename(self.distro_release_file)
- # The file name pattern for user-specified distro release files
- # is somewhat more tolerant (compared to when searching for the
- # file), because we want to use what was specified as best as
- # possible.
- match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
- if "name" in distro_info and "cloudlinux" in distro_info["name"].lower():
- distro_info["id"] = "cloudlinux"
- elif match:
- distro_info["id"] = match.group(1)
- return distro_info
- else:
- try:
- basenames = os.listdir(self.etc_dir)
- # We sort for repeatability in cases where there are multiple
- # distro specific files; e.g. CentOS, Oracle, Enterprise all
- # containing `redhat-release` on top of their own.
- basenames.sort()
- except OSError:
-            # This may occur when /etc is not readable, but we can't rule
-            # out the *-release files entirely. Check common entries of
-            # /etc for information. If they turn out not to be there, the
-            # error is handled in `_parse_distro_release_file()`.
- basenames = [
- "SuSE-release",
- "arch-release",
- "base-release",
- "centos-release",
- "fedora-release",
- "gentoo-release",
- "mageia-release",
- "mandrake-release",
- "mandriva-release",
- "mandrivalinux-release",
- "manjaro-release",
- "oracle-release",
- "redhat-release",
- "sl-release",
- "slackware-version",
- ]
- for basename in basenames:
- if basename in _DISTRO_RELEASE_IGNORE_BASENAMES:
- continue
- match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
- if match:
- filepath = os.path.join(self.etc_dir, basename)
- distro_info = self._parse_distro_release_file(filepath)
- if "name" in distro_info:
- # The name is always present if the pattern matches
- self.distro_release_file = filepath
- distro_info["id"] = match.group(1)
- if "cloudlinux" in distro_info["name"].lower():
- distro_info["id"] = "cloudlinux"
- return distro_info
- return {}
-
- def _parse_distro_release_file(self, filepath):
- # type: (str) -> Dict[str, str]
- """
- Parse a distro release file.
-
- Parameters:
-
- * filepath: Path name of the distro release file.
-
- Returns:
- A dictionary containing all information items.
- """
- try:
- with open(filepath) as fp:
- # Only parse the first line. For instance, on SLES there
- # are multiple lines. We don't want them...
- return self._parse_distro_release_content(fp.readline())
- except (OSError, IOError):
- # Ignore not being able to read a specific, seemingly version
- # related file.
- # See https://github.com/python-distro/distro/issues/162
- return {}
-
- @staticmethod
- def _parse_distro_release_content(line):
- # type: (str) -> Dict[str, str]
- """
- Parse a line from a distro release file.
-
- Parameters:
- * line: Line from the distro release file. Must be a unicode string
- or a UTF-8 encoded byte string.
-
- Returns:
- A dictionary containing all information items.
- """
- matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(line.strip()[::-1])
- distro_info = {}
- if matches:
- # regexp ensures non-None
- distro_info["name"] = matches.group(3)[::-1]
- if matches.group(2):
- distro_info["version_id"] = matches.group(2)[::-1]
- if matches.group(1):
- distro_info["codename"] = matches.group(1)[::-1]
- elif line:
- distro_info["name"] = line.strip()
- return distro_info
-
-
-_distro = LinuxDistribution()
-
-
-def main():
- # type: () -> None
- logger = logging.getLogger(__name__)
- logger.setLevel(logging.DEBUG)
- logger.addHandler(logging.StreamHandler(sys.stdout))
-
- parser = argparse.ArgumentParser(description="OS distro info tool")
- parser.add_argument(
- "--json", "-j", help="Output in machine readable format", action="store_true"
- )
-
- parser.add_argument(
- "--root-dir",
- "-r",
- type=str,
- dest="root_dir",
- help="Path to the root filesystem directory (defaults to /)",
- )
-
- args = parser.parse_args()
-
- if args.root_dir:
- dist = LinuxDistribution(
- include_lsb=False, include_uname=False, root_dir=args.root_dir
- )
- else:
- dist = _distro
-
- if args.json:
- logger.info(json.dumps(dist.info(), indent=4, sort_keys=True))
- else:
- logger.info("Name: %s", dist.name(pretty=True))
- distribution_version = dist.version(pretty=True)
- logger.info("Version: %s", distribution_version)
- distribution_codename = dist.codename()
- logger.info("Codename: %s", distribution_codename)
-
-
-if __name__ == "__main__":
- main()
diff --git a/lib/spack/external/jinja2/LICENSE.rst b/lib/spack/external/jinja2/LICENSE.rst
deleted file mode 100644
index c37cae49ec..0000000000
--- a/lib/spack/external/jinja2/LICENSE.rst
+++ /dev/null
@@ -1,28 +0,0 @@
-Copyright 2007 Pallets
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-1. Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
-
-3. Neither the name of the copyright holder nor the names of its
- contributors may be used to endorse or promote products derived from
- this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
-TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
-PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
-LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
-NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/lib/spack/external/jinja2/__init__.py b/lib/spack/external/jinja2/__init__.py
deleted file mode 100644
index f17866f6c4..0000000000
--- a/lib/spack/external/jinja2/__init__.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Jinja is a template engine written in pure Python. It provides a
-non-XML syntax that supports inline expressions and an optional
-sandboxed environment.
-"""
-from markupsafe import escape
-from markupsafe import Markup
-
-from .bccache import BytecodeCache
-from .bccache import FileSystemBytecodeCache
-from .bccache import MemcachedBytecodeCache
-from .environment import Environment
-from .environment import Template
-from .exceptions import TemplateAssertionError
-from .exceptions import TemplateError
-from .exceptions import TemplateNotFound
-from .exceptions import TemplateRuntimeError
-from .exceptions import TemplatesNotFound
-from .exceptions import TemplateSyntaxError
-from .exceptions import UndefinedError
-from .filters import contextfilter
-from .filters import environmentfilter
-from .filters import evalcontextfilter
-from .loaders import BaseLoader
-from .loaders import ChoiceLoader
-from .loaders import DictLoader
-from .loaders import FileSystemLoader
-from .loaders import FunctionLoader
-from .loaders import ModuleLoader
-from .loaders import PackageLoader
-from .loaders import PrefixLoader
-from .runtime import ChainableUndefined
-from .runtime import DebugUndefined
-from .runtime import make_logging_undefined
-from .runtime import StrictUndefined
-from .runtime import Undefined
-from .utils import clear_caches
-from .utils import contextfunction
-from .utils import environmentfunction
-from .utils import evalcontextfunction
-from .utils import is_undefined
-from .utils import select_autoescape
-
-__version__ = "2.11.3"
diff --git a/lib/spack/external/jinja2/_compat.py b/lib/spack/external/jinja2/_compat.py
deleted file mode 100644
index 1f044954a0..0000000000
--- a/lib/spack/external/jinja2/_compat.py
+++ /dev/null
@@ -1,132 +0,0 @@
-# -*- coding: utf-8 -*-
-# flake8: noqa
-import marshal
-import sys
-
-PY2 = sys.version_info[0] == 2
-PYPY = hasattr(sys, "pypy_translation_info")
-_identity = lambda x: x
-
-if not PY2:
- unichr = chr
- range_type = range
- text_type = str
- string_types = (str,)
- integer_types = (int,)
-
- iterkeys = lambda d: iter(d.keys())
- itervalues = lambda d: iter(d.values())
- iteritems = lambda d: iter(d.items())
-
- import pickle
- from io import BytesIO, StringIO
-
- NativeStringIO = StringIO
-
- def reraise(tp, value, tb=None):
- if value.__traceback__ is not tb:
- raise value.with_traceback(tb)
- raise value
-
- ifilter = filter
- imap = map
- izip = zip
- intern = sys.intern
-
- implements_iterator = _identity
- implements_to_string = _identity
- encode_filename = _identity
-
- marshal_dump = marshal.dump
- marshal_load = marshal.load
-
-else:
- unichr = unichr
- text_type = unicode
- range_type = xrange
- string_types = (str, unicode)
- integer_types = (int, long)
-
- iterkeys = lambda d: d.iterkeys()
- itervalues = lambda d: d.itervalues()
- iteritems = lambda d: d.iteritems()
-
- import cPickle as pickle
- from cStringIO import StringIO as BytesIO, StringIO
-
- NativeStringIO = BytesIO
-
- exec("def reraise(tp, value, tb=None):\n raise tp, value, tb")
-
- from itertools import imap, izip, ifilter
-
- intern = intern
-
- def implements_iterator(cls):
- cls.next = cls.__next__
- del cls.__next__
- return cls
-
- def implements_to_string(cls):
- cls.__unicode__ = cls.__str__
- cls.__str__ = lambda x: x.__unicode__().encode("utf-8")
- return cls
-
- def encode_filename(filename):
- if isinstance(filename, unicode):
- return filename.encode("utf-8")
- return filename
-
- def marshal_dump(code, f):
- if isinstance(f, file):
- marshal.dump(code, f)
- else:
- f.write(marshal.dumps(code))
-
- def marshal_load(f):
- if isinstance(f, file):
- return marshal.load(f)
- return marshal.loads(f.read())
-
-
-def with_metaclass(meta, *bases):
- """Create a base class with a metaclass."""
- # This requires a bit of explanation: the basic idea is to make a
- # dummy metaclass for one level of class instantiation that replaces
- # itself with the actual metaclass.
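-    # Illustrative usage (hypothetical names):
-    #   class Foo(with_metaclass(Meta, Base)): ...
-    # gives Foo the metaclass Meta on both Python 2 and 3.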
- class metaclass(type):
- def __new__(cls, name, this_bases, d):
- return meta(name, bases, d)
-
- return type.__new__(metaclass, "temporary_class", (), {})
-
-
-try:
- from urllib.parse import quote_from_bytes as url_quote
-except ImportError:
- from urllib import quote as url_quote
-
-
-try:
- from collections import abc
-except ImportError:
- import collections as abc
-
-
-try:
- from os import fspath
-except ImportError:
- try:
- from pathlib import PurePath
- except ImportError:
- PurePath = None
-
- def fspath(path):
- if hasattr(path, "__fspath__"):
- return path.__fspath__()
-
- # Python 3.5 doesn't have __fspath__ yet, use str.
- if PurePath is not None and isinstance(path, PurePath):
- return str(path)
-
- return path
diff --git a/lib/spack/external/jinja2/_identifier.py b/lib/spack/external/jinja2/_identifier.py
deleted file mode 100644
index 224d5449d1..0000000000
--- a/lib/spack/external/jinja2/_identifier.py
+++ /dev/null
@@ -1,6 +0,0 @@
-import re
-
-# generated by scripts/generate_identifier_pattern.py
-pattern = re.compile(
- r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛ࣔ-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఃా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഁ-ഃാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳸᳹᷀-᷵᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑅳𑄴𑆀-𑆂𑆳-𑇊𑇀-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+" # noqa: B950
-)
diff --git a/lib/spack/external/jinja2/asyncfilters.py b/lib/spack/external/jinja2/asyncfilters.py
deleted file mode 100644
index 3d98dbcc00..0000000000
--- a/lib/spack/external/jinja2/asyncfilters.py
+++ /dev/null
@@ -1,158 +0,0 @@
-from functools import wraps
-
-from . import filters
-from .asyncsupport import auto_aiter
-from .asyncsupport import auto_await
-
-
-async def auto_to_seq(value):
- seq = []
- if hasattr(value, "__aiter__"):
- async for item in value:
- seq.append(item)
- else:
- for item in value:
- seq.append(item)
- return seq
-
-
-async def async_select_or_reject(args, kwargs, modfunc, lookup_attr):
- seq, func = filters.prepare_select_or_reject(args, kwargs, modfunc, lookup_attr)
- if seq:
- async for item in auto_aiter(seq):
- if func(item):
- yield item
-
-
-def dualfilter(normal_filter, async_filter):
- wrap_evalctx = False
- if getattr(normal_filter, "environmentfilter", False) is True:
-
- def is_async(args):
- return args[0].is_async
-
- wrap_evalctx = False
- else:
- has_evalctxfilter = getattr(normal_filter, "evalcontextfilter", False) is True
- has_ctxfilter = getattr(normal_filter, "contextfilter", False) is True
- wrap_evalctx = not has_evalctxfilter and not has_ctxfilter
-
- def is_async(args):
- return args[0].environment.is_async
-
- @wraps(normal_filter)
- def wrapper(*args, **kwargs):
- b = is_async(args)
- if wrap_evalctx:
- args = args[1:]
- if b:
- return async_filter(*args, **kwargs)
- return normal_filter(*args, **kwargs)
-
- if wrap_evalctx:
- wrapper.evalcontextfilter = True
-
- wrapper.asyncfiltervariant = True
-
- return wrapper
-
-
-def asyncfiltervariant(original):
- def decorator(f):
- return dualfilter(original, f)
-
- return decorator
-
-
-@asyncfiltervariant(filters.do_first)
-async def do_first(environment, seq):
- try:
- return await auto_aiter(seq).__anext__()
- except StopAsyncIteration:
- return environment.undefined("No first item, sequence was empty.")
-
-
-@asyncfiltervariant(filters.do_groupby)
-async def do_groupby(environment, value, attribute):
- expr = filters.make_attrgetter(environment, attribute)
- return [
- filters._GroupTuple(key, await auto_to_seq(values))
- for key, values in filters.groupby(
- sorted(await auto_to_seq(value), key=expr), expr
- )
- ]
-
-
-@asyncfiltervariant(filters.do_join)
-async def do_join(eval_ctx, value, d=u"", attribute=None):
- return filters.do_join(eval_ctx, await auto_to_seq(value), d, attribute)
-
-
-@asyncfiltervariant(filters.do_list)
-async def do_list(value):
- return await auto_to_seq(value)
-
-
-@asyncfiltervariant(filters.do_reject)
-async def do_reject(*args, **kwargs):
- return async_select_or_reject(args, kwargs, lambda x: not x, False)
-
-
-@asyncfiltervariant(filters.do_rejectattr)
-async def do_rejectattr(*args, **kwargs):
- return async_select_or_reject(args, kwargs, lambda x: not x, True)
-
-
-@asyncfiltervariant(filters.do_select)
-async def do_select(*args, **kwargs):
- return async_select_or_reject(args, kwargs, lambda x: x, False)
-
-
-@asyncfiltervariant(filters.do_selectattr)
-async def do_selectattr(*args, **kwargs):
- return async_select_or_reject(args, kwargs, lambda x: x, True)
-
-
-@asyncfiltervariant(filters.do_map)
-async def do_map(*args, **kwargs):
- seq, func = filters.prepare_map(args, kwargs)
- if seq:
- async for item in auto_aiter(seq):
- yield await auto_await(func(item))
-
-
-@asyncfiltervariant(filters.do_sum)
-async def do_sum(environment, iterable, attribute=None, start=0):
- rv = start
- if attribute is not None:
- func = filters.make_attrgetter(environment, attribute)
- else:
-
- def func(x):
- return x
-
- async for item in auto_aiter(iterable):
- rv += func(item)
- return rv
-
-
-@asyncfiltervariant(filters.do_slice)
-async def do_slice(value, slices, fill_with=None):
- return filters.do_slice(await auto_to_seq(value), slices, fill_with)
-
-
-ASYNC_FILTERS = {
- "first": do_first,
- "groupby": do_groupby,
- "join": do_join,
- "list": do_list,
- # we intentionally do not support do_last because that would be
- # ridiculous
- "reject": do_reject,
- "rejectattr": do_rejectattr,
- "map": do_map,
- "select": do_select,
- "selectattr": do_selectattr,
- "sum": do_sum,
- "slice": do_slice,
-}
diff --git a/lib/spack/external/jinja2/asyncsupport.py b/lib/spack/external/jinja2/asyncsupport.py
deleted file mode 100644
index 78ba3739d8..0000000000
--- a/lib/spack/external/jinja2/asyncsupport.py
+++ /dev/null
@@ -1,264 +0,0 @@
-# -*- coding: utf-8 -*-
-"""The code for async support. Importing this patches Jinja on supported
-Python versions.
-"""
-import asyncio
-import inspect
-from functools import update_wrapper
-
-from markupsafe import Markup
-
-from .environment import TemplateModule
-from .runtime import LoopContext
-from .utils import concat
-from .utils import internalcode
-from .utils import missing
-
-
-async def concat_async(async_gen):
- rv = []
-
- async def collect():
- async for event in async_gen:
- rv.append(event)
-
- await collect()
- return concat(rv)
-
-
-async def generate_async(self, *args, **kwargs):
- vars = dict(*args, **kwargs)
- try:
- async for event in self.root_render_func(self.new_context(vars)):
- yield event
- except Exception:
- yield self.environment.handle_exception()
-
-
-def wrap_generate_func(original_generate):
- def _convert_generator(self, loop, args, kwargs):
- async_gen = self.generate_async(*args, **kwargs)
- try:
- while 1:
- yield loop.run_until_complete(async_gen.__anext__())
- except StopAsyncIteration:
- pass
-
- def generate(self, *args, **kwargs):
- if not self.environment.is_async:
- return original_generate(self, *args, **kwargs)
- return _convert_generator(self, asyncio.get_event_loop(), args, kwargs)
-
- return update_wrapper(generate, original_generate)
-
-
-async def render_async(self, *args, **kwargs):
- if not self.environment.is_async:
- raise RuntimeError("The environment was not created with async mode enabled.")
-
- vars = dict(*args, **kwargs)
- ctx = self.new_context(vars)
-
- try:
- return await concat_async(self.root_render_func(ctx))
- except Exception:
- return self.environment.handle_exception()
-
-
-def wrap_render_func(original_render):
- def render(self, *args, **kwargs):
- if not self.environment.is_async:
- return original_render(self, *args, **kwargs)
- loop = asyncio.get_event_loop()
- return loop.run_until_complete(self.render_async(*args, **kwargs))
-
- return update_wrapper(render, original_render)
-
-
-def wrap_block_reference_call(original_call):
- @internalcode
- async def async_call(self):
- rv = await concat_async(self._stack[self._depth](self._context))
- if self._context.eval_ctx.autoescape:
- rv = Markup(rv)
- return rv
-
- @internalcode
- def __call__(self):
- if not self._context.environment.is_async:
- return original_call(self)
- return async_call(self)
-
- return update_wrapper(__call__, original_call)
-
-
-def wrap_macro_invoke(original_invoke):
- @internalcode
- async def async_invoke(self, arguments, autoescape):
- rv = await self._func(*arguments)
- if autoescape:
- rv = Markup(rv)
- return rv
-
- @internalcode
- def _invoke(self, arguments, autoescape):
- if not self._environment.is_async:
- return original_invoke(self, arguments, autoescape)
- return async_invoke(self, arguments, autoescape)
-
- return update_wrapper(_invoke, original_invoke)
-
-
-@internalcode
-async def get_default_module_async(self):
- if self._module is not None:
- return self._module
- self._module = rv = await self.make_module_async()
- return rv
-
-
-def wrap_default_module(original_default_module):
- @internalcode
- def _get_default_module(self):
- if self.environment.is_async:
- raise RuntimeError("Template module attribute is unavailable in async mode")
- return original_default_module(self)
-
- return _get_default_module
-
-
-async def make_module_async(self, vars=None, shared=False, locals=None):
- context = self.new_context(vars, shared, locals)
- body_stream = []
- async for item in self.root_render_func(context):
- body_stream.append(item)
- return TemplateModule(self, context, body_stream)
-
-
-def patch_template():
- from . import Template
-
- Template.generate = wrap_generate_func(Template.generate)
- Template.generate_async = update_wrapper(generate_async, Template.generate_async)
- Template.render_async = update_wrapper(render_async, Template.render_async)
- Template.render = wrap_render_func(Template.render)
- Template._get_default_module = wrap_default_module(Template._get_default_module)
- Template._get_default_module_async = get_default_module_async
- Template.make_module_async = update_wrapper(
- make_module_async, Template.make_module_async
- )
-
-
-def patch_runtime():
- from .runtime import BlockReference, Macro
-
- BlockReference.__call__ = wrap_block_reference_call(BlockReference.__call__)
- Macro._invoke = wrap_macro_invoke(Macro._invoke)
-
-
-def patch_filters():
- from .filters import FILTERS
- from .asyncfilters import ASYNC_FILTERS
-
- FILTERS.update(ASYNC_FILTERS)
-
-
-def patch_all():
- patch_template()
- patch_runtime()
- patch_filters()
-
-
-async def auto_await(value):
- if inspect.isawaitable(value):
- return await value
- return value
-
-
-async def auto_aiter(iterable):
- if hasattr(iterable, "__aiter__"):
- async for item in iterable:
- yield item
- return
- for item in iterable:
- yield item
-
-
-class AsyncLoopContext(LoopContext):
- _to_iterator = staticmethod(auto_aiter)
-
- @property
- async def length(self):
- if self._length is not None:
- return self._length
-
- try:
- self._length = len(self._iterable)
- except TypeError:
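-            # not sized: drain the remaining items to count them, then
-            # rebuild the iterator so iteration can continue afterwards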
- iterable = [x async for x in self._iterator]
- self._iterator = self._to_iterator(iterable)
- self._length = len(iterable) + self.index + (self._after is not missing)
-
- return self._length
-
- @property
- async def revindex0(self):
- return await self.length - self.index
-
- @property
- async def revindex(self):
- return await self.length - self.index0
-
- async def _peek_next(self):
- if self._after is not missing:
- return self._after
-
- try:
- self._after = await self._iterator.__anext__()
- except StopAsyncIteration:
- self._after = missing
-
- return self._after
-
- @property
- async def last(self):
- return await self._peek_next() is missing
-
- @property
- async def nextitem(self):
- rv = await self._peek_next()
-
- if rv is missing:
- return self._undefined("there is no next item")
-
- return rv
-
- def __aiter__(self):
- return self
-
- async def __anext__(self):
- if self._after is not missing:
- rv = self._after
- self._after = missing
- else:
- rv = await self._iterator.__anext__()
-
- self.index0 += 1
- self._before = self._current
- self._current = rv
- return rv, self
-
-
-async def make_async_loop_context(iterable, undefined, recurse=None, depth0=0):
- import warnings
-
- warnings.warn(
- "This template must be recompiled with at least Jinja 2.11, or"
- " it will fail in 3.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- return AsyncLoopContext(iterable, undefined, recurse, depth0)
-
-
-patch_all()
diff --git a/lib/spack/external/jinja2/bccache.py b/lib/spack/external/jinja2/bccache.py
deleted file mode 100644
index 9c0661030f..0000000000
--- a/lib/spack/external/jinja2/bccache.py
+++ /dev/null
@@ -1,350 +0,0 @@
-# -*- coding: utf-8 -*-
-"""The optional bytecode cache system. This is useful if you have very
-complex template situations and the compilation of all those templates
-slows down your application too much.
-
-Situations where this is useful are often forking web applications that
-are initialized on the first request.
-"""
-import errno
-import fnmatch
-import os
-import stat
-import sys
-import tempfile
-from hashlib import sha1
-from os import listdir
-from os import path
-
-from ._compat import BytesIO
-from ._compat import marshal_dump
-from ._compat import marshal_load
-from ._compat import pickle
-from ._compat import text_type
-from .utils import open_if_exists
-
-bc_version = 4
-# Magic bytes to identify Jinja bytecode cache files. Contains the
-# Python major and minor version to avoid loading incompatible bytecode
-# if a project upgrades its Python version.
-bc_magic = (
- b"j2"
- + pickle.dumps(bc_version, 2)
- + pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1], 2)
-)
-
-
-class Bucket(object):
- """Buckets are used to store the bytecode for one template. It's created
- and initialized by the bytecode cache and passed to the loading functions.
-
-    The buckets get an internal checksum assigned by the cache and use it
-    to automatically reject outdated cache material. Individual bytecode
- cache subclasses don't have to care about cache invalidation.
- """
-
- def __init__(self, environment, key, checksum):
- self.environment = environment
- self.key = key
- self.checksum = checksum
- self.reset()
-
- def reset(self):
- """Resets the bucket (unloads the bytecode)."""
- self.code = None
-
- def load_bytecode(self, f):
- """Loads bytecode from a file or file like object."""
- # make sure the magic header is correct
- magic = f.read(len(bc_magic))
- if magic != bc_magic:
- self.reset()
- return
- # the source code of the file changed, we need to reload
- checksum = pickle.load(f)
- if self.checksum != checksum:
- self.reset()
- return
- # if marshal_load fails then we need to reload
- try:
- self.code = marshal_load(f)
- except (EOFError, ValueError, TypeError):
- self.reset()
- return
-
- def write_bytecode(self, f):
- """Dump the bytecode into the file or file like object passed."""
- if self.code is None:
- raise TypeError("can't write empty bucket")
- f.write(bc_magic)
- pickle.dump(self.checksum, f, 2)
- marshal_dump(self.code, f)
-
- def bytecode_from_string(self, string):
- """Load bytecode from a string."""
- self.load_bytecode(BytesIO(string))
-
- def bytecode_to_string(self):
- """Return the bytecode as string."""
- out = BytesIO()
- self.write_bytecode(out)
- return out.getvalue()
-
-
-class BytecodeCache(object):
- """To implement your own bytecode cache you have to subclass this class
- and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of
- these methods are passed a :class:`~jinja2.bccache.Bucket`.
-
- A very basic bytecode cache that saves the bytecode on the file system::
-
- from os import path
-
- class MyCache(BytecodeCache):
-
- def __init__(self, directory):
- self.directory = directory
-
- def load_bytecode(self, bucket):
- filename = path.join(self.directory, bucket.key)
- if path.exists(filename):
- with open(filename, 'rb') as f:
- bucket.load_bytecode(f)
-
- def dump_bytecode(self, bucket):
- filename = path.join(self.directory, bucket.key)
- with open(filename, 'wb') as f:
- bucket.write_bytecode(f)
-
- A more advanced version of a filesystem based bytecode cache is part of
- Jinja.
- """
-
- def load_bytecode(self, bucket):
- """Subclasses have to override this method to load bytecode into a
-        bucket. If it is not able to find code in the cache for the
-        bucket, it must not do anything.
- """
- raise NotImplementedError()
-
- def dump_bytecode(self, bucket):
- """Subclasses have to override this method to write the bytecode
-        from a bucket back to the cache. If it is unable to do so it must
-        not fail silently but raise an exception.
- """
- raise NotImplementedError()
-
- def clear(self):
- """Clears the cache. This method is not used by Jinja but should be
- implemented to allow applications to clear the bytecode cache used
- by a particular environment.
- """
-
- def get_cache_key(self, name, filename=None):
- """Returns the unique hash key for this template name."""
- hash = sha1(name.encode("utf-8"))
- if filename is not None:
- filename = "|" + filename
- if isinstance(filename, text_type):
- filename = filename.encode("utf-8")
- hash.update(filename)
- return hash.hexdigest()
-
- def get_source_checksum(self, source):
- """Returns a checksum for the source."""
- return sha1(source.encode("utf-8")).hexdigest()
-
- def get_bucket(self, environment, name, filename, source):
- """Return a cache bucket for the given template. All arguments are
- mandatory but filename may be `None`.
- """
- key = self.get_cache_key(name, filename)
- checksum = self.get_source_checksum(source)
- bucket = Bucket(environment, key, checksum)
- self.load_bytecode(bucket)
- return bucket
-
- def set_bucket(self, bucket):
- """Put the bucket into the cache."""
- self.dump_bytecode(bucket)
-
-
-class FileSystemBytecodeCache(BytecodeCache):
- """A bytecode cache that stores bytecode on the filesystem. It accepts
- two arguments: The directory where the cache items are stored and a
- pattern string that is used to build the filename.
-
- If no directory is specified a default cache directory is selected. On
- Windows the user's temp directory is used, on UNIX systems a directory
- is created for the user in the system temp directory.
-
- The pattern can be used to have multiple separate caches operate on the
- same directory. The default pattern is ``'__jinja2_%s.cache'``. ``%s``
- is replaced with the cache key.
-
- >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache')
-
- This bytecode cache supports clearing of the cache using the clear method.
- """
-
- def __init__(self, directory=None, pattern="__jinja2_%s.cache"):
- if directory is None:
- directory = self._get_default_cache_dir()
- self.directory = directory
- self.pattern = pattern
-
- def _get_default_cache_dir(self):
- def _unsafe_dir():
- raise RuntimeError(
- "Cannot determine safe temp directory. You "
- "need to explicitly provide one."
- )
-
- tmpdir = tempfile.gettempdir()
-
-        # On Windows the user-specific temporary directory is used unless
-        # explicitly forced otherwise. We can just use that.
- if os.name == "nt":
- return tmpdir
- if not hasattr(os, "getuid"):
- _unsafe_dir()
-
- dirname = "_jinja2-cache-%d" % os.getuid()
- actual_dir = os.path.join(tmpdir, dirname)
-
- try:
- os.mkdir(actual_dir, stat.S_IRWXU)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
- try:
- os.chmod(actual_dir, stat.S_IRWXU)
- actual_dir_stat = os.lstat(actual_dir)
- if (
- actual_dir_stat.st_uid != os.getuid()
- or not stat.S_ISDIR(actual_dir_stat.st_mode)
- or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU
- ):
- _unsafe_dir()
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
-
- actual_dir_stat = os.lstat(actual_dir)
- if (
- actual_dir_stat.st_uid != os.getuid()
- or not stat.S_ISDIR(actual_dir_stat.st_mode)
- or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU
- ):
- _unsafe_dir()
-
- return actual_dir
-
- def _get_cache_filename(self, bucket):
- return path.join(self.directory, self.pattern % bucket.key)
-
- def load_bytecode(self, bucket):
- f = open_if_exists(self._get_cache_filename(bucket), "rb")
- if f is not None:
- try:
- bucket.load_bytecode(f)
- finally:
- f.close()
-
- def dump_bytecode(self, bucket):
- f = open(self._get_cache_filename(bucket), "wb")
- try:
- bucket.write_bytecode(f)
- finally:
- f.close()
-
- def clear(self):
-        # imported lazily here because Google App Engine doesn't support
- # write access on the file system and the function does not exist
- # normally.
- from os import remove
-
- files = fnmatch.filter(listdir(self.directory), self.pattern % "*")
- for filename in files:
- try:
- remove(path.join(self.directory, filename))
- except OSError:
- pass
-
-
-class MemcachedBytecodeCache(BytecodeCache):
- """This class implements a bytecode cache that uses a memcache cache for
- storing the information. It does not enforce a specific memcache library
- (tummy's memcache or cmemcache) but will accept any class that provides
- the minimal interface required.
-
- Libraries compatible with this class:
-
- - `cachelib <https://github.com/pallets/cachelib>`_
- - `python-memcached <https://pypi.org/project/python-memcached/>`_
-
- (Unfortunately the django cache interface is not compatible because it
- does not support storing binary data, only unicode. You can however pass
- the underlying cache client to the bytecode cache which is available
- as `django.core.cache.cache._client`.)
-
- The minimal interface for the client passed to the constructor is this:
-
- .. class:: MinimalClientInterface
-
- .. method:: set(key, value[, timeout])
-
-            Stores the bytecode in the cache. `value` is a string and
-            `timeout` is the timeout of the key. If no timeout is
-            provided, a default timeout or no timeout should be assumed;
-            if one is provided, it's an integer with the number of
-            seconds the cache item should exist.
-
- .. method:: get(key)
-
- Returns the value for the cache key. If the item does not
- exist in the cache the return value must be `None`.
-
- The other arguments to the constructor are the prefix for all keys that
- is added before the actual cache key and the timeout for the bytecode in
- the cache system. We recommend a high (or no) timeout.
-
- This bytecode cache does not support clearing of used items in the cache.
- The clear method is a no-operation function.
-
- .. versionadded:: 2.7
- Added support for ignoring memcache errors through the
- `ignore_memcache_errors` parameter.
- """
-
- def __init__(
- self,
- client,
- prefix="jinja2/bytecode/",
- timeout=None,
- ignore_memcache_errors=True,
- ):
- self.client = client
- self.prefix = prefix
- self.timeout = timeout
- self.ignore_memcache_errors = ignore_memcache_errors
-
- def load_bytecode(self, bucket):
- try:
- code = self.client.get(self.prefix + bucket.key)
- except Exception:
- if not self.ignore_memcache_errors:
- raise
- code = None
- if code is not None:
- bucket.bytecode_from_string(code)
-
- def dump_bytecode(self, bucket):
- args = (self.prefix + bucket.key, bucket.bytecode_to_string())
- if self.timeout is not None:
- args += (self.timeout,)
- try:
- self.client.set(*args)
- except Exception:
- if not self.ignore_memcache_errors:
- raise
diff --git a/lib/spack/external/jinja2/compiler.py b/lib/spack/external/jinja2/compiler.py
deleted file mode 100644
index 63297b42c3..0000000000
--- a/lib/spack/external/jinja2/compiler.py
+++ /dev/null
@@ -1,1843 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Compiles nodes from the parser into Python code."""
-from collections import namedtuple
-from functools import update_wrapper
-from itertools import chain
-from keyword import iskeyword as is_python_keyword
-
-from markupsafe import escape
-from markupsafe import Markup
-
-from . import nodes
-from ._compat import imap
-from ._compat import iteritems
-from ._compat import izip
-from ._compat import NativeStringIO
-from ._compat import range_type
-from ._compat import string_types
-from ._compat import text_type
-from .exceptions import TemplateAssertionError
-from .idtracking import Symbols
-from .idtracking import VAR_LOAD_ALIAS
-from .idtracking import VAR_LOAD_PARAMETER
-from .idtracking import VAR_LOAD_RESOLVE
-from .idtracking import VAR_LOAD_UNDEFINED
-from .nodes import EvalContext
-from .optimizer import Optimizer
-from .utils import concat
-from .visitor import NodeVisitor
-
-operators = {
- "eq": "==",
- "ne": "!=",
- "gt": ">",
- "gteq": ">=",
- "lt": "<",
- "lteq": "<=",
- "in": "in",
- "notin": "not in",
-}
-
-# which method do we want to use for dict iteration in generated code?
-# on 2.x let's go with iteritems, on 3.x with items
-if hasattr(dict, "iteritems"):
- dict_item_iter = "iteritems"
-else:
- dict_item_iter = "items"
-
-code_features = ["division"]
-
-# does this Python version support generator_stop? (PEP 479)
-try:
- exec("from __future__ import generator_stop")
- code_features.append("generator_stop")
-except SyntaxError:
- pass
-
-# does this python version support yield from?
-try:
- exec("def f(): yield from x()")
-except SyntaxError:
- supports_yield_from = False
-else:
- supports_yield_from = True
-
-
-def optimizeconst(f):
- def new_func(self, node, frame, **kwargs):
- # Only optimize if the frame is not volatile
- if self.optimized and not frame.eval_ctx.volatile:
- new_node = self.optimizer.visit(node, frame.eval_ctx)
- if new_node != node:
- return self.visit(new_node, frame)
- return f(self, node, frame, **kwargs)
-
- return update_wrapper(new_func, f)
-
-
-def generate(
- node, environment, name, filename, stream=None, defer_init=False, optimized=True
-):
- """Generate the python source for a node tree."""
- if not isinstance(node, nodes.Template):
- raise TypeError("Can't compile non template nodes")
- generator = environment.code_generator_class(
- environment, name, filename, stream, defer_init, optimized
- )
- generator.visit(node)
- if stream is None:
- return generator.stream.getvalue()
-
-
-def has_safe_repr(value):
- """Does the node have a safe representation?"""
- if value is None or value is NotImplemented or value is Ellipsis:
- return True
- if type(value) in (bool, int, float, complex, range_type, Markup) + string_types:
- return True
- if type(value) in (tuple, list, set, frozenset):
- for item in value:
- if not has_safe_repr(item):
- return False
- return True
- elif type(value) is dict:
- for key, value in iteritems(value):
- if not has_safe_repr(key):
- return False
- if not has_safe_repr(value):
- return False
- return True
- return False
-
-
-def find_undeclared(nodes, names):
- """Check if the names passed are accessed undeclared. The return value
- is a set of all the undeclared names from the sequence of names found.
- """
- visitor = UndeclaredNameVisitor(names)
- try:
- for node in nodes:
- visitor.visit(node)
- except VisitorExit:
- pass
- return visitor.undeclared
-
-
-class MacroRef(object):
- def __init__(self, node):
- self.node = node
- self.accesses_caller = False
- self.accesses_kwargs = False
- self.accesses_varargs = False
-
-
-class Frame(object):
- """Holds compile time information for us."""
-
- def __init__(self, eval_ctx, parent=None, level=None):
- self.eval_ctx = eval_ctx
- self.symbols = Symbols(parent and parent.symbols or None, level=level)
-
- # a toplevel frame is the root + soft frames such as if conditions.
- self.toplevel = False
-
- # the root frame is basically just the outermost frame, so no if
- # conditions. This information is used to optimize inheritance
- # situations.
- self.rootlevel = False
-
- # in some dynamic inheritance situations the compiler needs to add
- # write tests around output statements.
- self.require_output_check = parent and parent.require_output_check
-
- # inside some tags we are using a buffer rather than yield statements.
- # this for example affects {% filter %} or {% macro %}. If a frame
- # is buffered this variable points to the name of the list used as
- # buffer.
- self.buffer = None
-
- # the name of the block we're in, otherwise None.
- self.block = parent and parent.block or None
-
- # the parent of this frame
- self.parent = parent
-
- if parent is not None:
- self.buffer = parent.buffer
-
- def copy(self):
- """Create a copy of the current one."""
- rv = object.__new__(self.__class__)
- rv.__dict__.update(self.__dict__)
- rv.symbols = self.symbols.copy()
- return rv
-
- def inner(self, isolated=False):
- """Return an inner frame."""
- if isolated:
- return Frame(self.eval_ctx, level=self.symbols.level + 1)
- return Frame(self.eval_ctx, self)
-
- def soft(self):
- """Return a soft frame. A soft frame may not be modified as
- standalone thing as it shares the resources with the frame it
- was created of, but it's not a rootlevel frame any longer.
-
- This is only used to implement if-statements.
- """
- rv = self.copy()
- rv.rootlevel = False
- return rv
-
- __copy__ = copy
-
-
-class VisitorExit(RuntimeError):
- """Exception used by the `UndeclaredNameVisitor` to signal a stop."""
-
-
-class DependencyFinderVisitor(NodeVisitor):
- """A visitor that collects filter and test calls."""
-
- def __init__(self):
- self.filters = set()
- self.tests = set()
-
- def visit_Filter(self, node):
- self.generic_visit(node)
- self.filters.add(node.name)
-
- def visit_Test(self, node):
- self.generic_visit(node)
- self.tests.add(node.name)
-
- def visit_Block(self, node):
- """Stop visiting at blocks."""
-
-
-class UndeclaredNameVisitor(NodeVisitor):
- """A visitor that checks if a name is accessed without being
- declared. This is different from the frame visitor as it will
- not stop at closure frames.
- """
-
- def __init__(self, names):
- self.names = set(names)
- self.undeclared = set()
-
- def visit_Name(self, node):
- if node.ctx == "load" and node.name in self.names:
- self.undeclared.add(node.name)
- if self.undeclared == self.names:
- raise VisitorExit()
- else:
- self.names.discard(node.name)
-
- def visit_Block(self, node):
- """Stop visiting a blocks."""
-
-
-class CompilerExit(Exception):
- """Raised if the compiler encountered a situation where it just
- doesn't make sense to further process the code. Any block that
- raises such an exception is not further processed.
- """
-
-
-class CodeGenerator(NodeVisitor):
- def __init__(
- self, environment, name, filename, stream=None, defer_init=False, optimized=True
- ):
- if stream is None:
- stream = NativeStringIO()
- self.environment = environment
- self.name = name
- self.filename = filename
- self.stream = stream
- self.created_block_context = False
- self.defer_init = defer_init
- self.optimized = optimized
- if optimized:
- self.optimizer = Optimizer(environment)
-
- # aliases for imports
- self.import_aliases = {}
-
- # a registry for all blocks. Because blocks are moved out
- # into the global python scope they are registered here
- self.blocks = {}
-
- # the number of extends statements so far
- self.extends_so_far = 0
-
- # some templates have a rootlevel extends. In this case we
- # can safely assume that we're a child template and do some
- # more optimizations.
- self.has_known_extends = False
-
- # the current line number
- self.code_lineno = 1
-
- # registry of all filters and tests (global, not block local)
- self.tests = {}
- self.filters = {}
-
- # the debug information
- self.debug_info = []
- self._write_debug_info = None
-
- # the number of new lines before the next write()
- self._new_lines = 0
-
- # the line number of the last written statement
- self._last_line = 0
-
- # true if nothing was written so far.
- self._first_write = True
-
- # used by the `temporary_identifier` method to get new
- # unique, temporary identifier
- self._last_identifier = 0
-
- # the current indentation
- self._indentation = 0
-
- # Tracks toplevel assignments
- self._assign_stack = []
-
- # Tracks parameter definition blocks
- self._param_def_block = []
-
- # Tracks the current context.
- self._context_reference_stack = ["context"]
-
- # -- Various compilation helpers
-
- def fail(self, msg, lineno):
- """Fail with a :exc:`TemplateAssertionError`."""
- raise TemplateAssertionError(msg, lineno, self.name, self.filename)
-
- def temporary_identifier(self):
- """Get a new unique identifier."""
- self._last_identifier += 1
- return "t_%d" % self._last_identifier
-
- def buffer(self, frame):
- """Enable buffering for the frame from that point onwards."""
- frame.buffer = self.temporary_identifier()
- self.writeline("%s = []" % frame.buffer)
-
- def return_buffer_contents(self, frame, force_unescaped=False):
- """Return the buffer contents of the frame."""
- if not force_unescaped:
- if frame.eval_ctx.volatile:
- self.writeline("if context.eval_ctx.autoescape:")
- self.indent()
- self.writeline("return Markup(concat(%s))" % frame.buffer)
- self.outdent()
- self.writeline("else:")
- self.indent()
- self.writeline("return concat(%s)" % frame.buffer)
- self.outdent()
- return
- elif frame.eval_ctx.autoescape:
- self.writeline("return Markup(concat(%s))" % frame.buffer)
- return
- self.writeline("return concat(%s)" % frame.buffer)
-
- def indent(self):
- """Indent by one."""
- self._indentation += 1
-
- def outdent(self, step=1):
- """Outdent by step."""
- self._indentation -= step
-
- def start_write(self, frame, node=None):
- """Yield or write into the frame buffer."""
- if frame.buffer is None:
- self.writeline("yield ", node)
- else:
- self.writeline("%s.append(" % frame.buffer, node)
-
- def end_write(self, frame):
- """End the writing process started by `start_write`."""
- if frame.buffer is not None:
- self.write(")")
-
- def simple_write(self, s, frame, node=None):
- """Simple shortcut for start_write + write + end_write."""
- self.start_write(frame, node)
- self.write(s)
- self.end_write(frame)
-
- def blockvisit(self, nodes, frame):
- """Visit a list of nodes as block in a frame. If the current frame
- is no buffer a dummy ``if 0: yield None`` is written automatically.
- """
- try:
- self.writeline("pass")
- for node in nodes:
- self.visit(node, frame)
- except CompilerExit:
- pass
-
- def write(self, x):
- """Write a string into the output stream."""
- if self._new_lines:
- if not self._first_write:
- self.stream.write("\n" * self._new_lines)
- self.code_lineno += self._new_lines
- if self._write_debug_info is not None:
- self.debug_info.append((self._write_debug_info, self.code_lineno))
- self._write_debug_info = None
- self._first_write = False
- self.stream.write(" " * self._indentation)
- self._new_lines = 0
- self.stream.write(x)
-
- def writeline(self, x, node=None, extra=0):
- """Combination of newline and write."""
- self.newline(node, extra)
- self.write(x)
-
- def newline(self, node=None, extra=0):
- """Add one or more newlines before the next write."""
- self._new_lines = max(self._new_lines, 1 + extra)
- if node is not None and node.lineno != self._last_line:
- self._write_debug_info = node.lineno
- self._last_line = node.lineno
-
- def signature(self, node, frame, extra_kwargs=None):
- """Writes a function call to the stream for the current node.
- A leading comma is added automatically. The extra keyword
- arguments may not include python keywords otherwise a syntax
- error could occur. The extra keyword arguments should be given
- as python dict.
- """
- # if any of the given keyword arguments is a python keyword
- # we have to make sure that no invalid call is created.
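-        # e.g. a template call like f(class='x') (hypothetical) cannot be
-        # emitted literally; it is written as f(**{'class': 'x'}) instead.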
- kwarg_workaround = False
- for kwarg in chain((x.key for x in node.kwargs), extra_kwargs or ()):
- if is_python_keyword(kwarg):
- kwarg_workaround = True
- break
-
- for arg in node.args:
- self.write(", ")
- self.visit(arg, frame)
-
- if not kwarg_workaround:
- for kwarg in node.kwargs:
- self.write(", ")
- self.visit(kwarg, frame)
- if extra_kwargs is not None:
- for key, value in iteritems(extra_kwargs):
- self.write(", %s=%s" % (key, value))
- if node.dyn_args:
- self.write(", *")
- self.visit(node.dyn_args, frame)
-
- if kwarg_workaround:
- if node.dyn_kwargs is not None:
- self.write(", **dict({")
- else:
- self.write(", **{")
- for kwarg in node.kwargs:
- self.write("%r: " % kwarg.key)
- self.visit(kwarg.value, frame)
- self.write(", ")
- if extra_kwargs is not None:
- for key, value in iteritems(extra_kwargs):
- self.write("%r: %s, " % (key, value))
- if node.dyn_kwargs is not None:
- self.write("}, **")
- self.visit(node.dyn_kwargs, frame)
- self.write(")")
- else:
- self.write("}")
-
- elif node.dyn_kwargs is not None:
- self.write(", **")
- self.visit(node.dyn_kwargs, frame)
-
- def pull_dependencies(self, nodes):
- """Pull all the dependencies."""
- visitor = DependencyFinderVisitor()
- for node in nodes:
- visitor.visit(node)
- for dependency in "filters", "tests":
- mapping = getattr(self, dependency)
- for name in getattr(visitor, dependency):
- if name not in mapping:
- mapping[name] = self.temporary_identifier()
- self.writeline(
- "%s = environment.%s[%r]" % (mapping[name], dependency, name)
- )
-
- def enter_frame(self, frame):
- undefs = []
- for target, (action, param) in iteritems(frame.symbols.loads):
- if action == VAR_LOAD_PARAMETER:
- pass
- elif action == VAR_LOAD_RESOLVE:
- self.writeline("%s = %s(%r)" % (target, self.get_resolve_func(), param))
- elif action == VAR_LOAD_ALIAS:
- self.writeline("%s = %s" % (target, param))
- elif action == VAR_LOAD_UNDEFINED:
- undefs.append(target)
- else:
- raise NotImplementedError("unknown load instruction")
- if undefs:
- self.writeline("%s = missing" % " = ".join(undefs))
-
- def leave_frame(self, frame, with_python_scope=False):
- if not with_python_scope:
- undefs = []
- for target, _ in iteritems(frame.symbols.loads):
- undefs.append(target)
- if undefs:
- self.writeline("%s = missing" % " = ".join(undefs))
-
- def func(self, name):
- if self.environment.is_async:
- return "async def %s" % name
- return "def %s" % name
-
- def macro_body(self, node, frame):
- """Dump the function def of a macro or call block."""
- frame = frame.inner()
- frame.symbols.analyze_node(node)
- macro_ref = MacroRef(node)
-
- explicit_caller = None
- skip_special_params = set()
- args = []
- for idx, arg in enumerate(node.args):
- if arg.name == "caller":
- explicit_caller = idx
- if arg.name in ("kwargs", "varargs"):
- skip_special_params.add(arg.name)
- args.append(frame.symbols.ref(arg.name))
-
- undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs"))
-
- if "caller" in undeclared:
- # In older Jinja versions there was a bug that allowed caller
- # to retain the special behavior even if it was mentioned in
-            # the argument list. Thankfully, however, this only really
-            # worked if it was the last argument. So we are explicitly
- # checking this now and error out if it is anywhere else in
- # the argument list.
- if explicit_caller is not None:
- try:
- node.defaults[explicit_caller - len(node.args)]
- except IndexError:
- self.fail(
- "When defining macros or call blocks the "
- 'special "caller" argument must be omitted '
- "or be given a default.",
- node.lineno,
- )
- else:
- args.append(frame.symbols.declare_parameter("caller"))
- macro_ref.accesses_caller = True
- if "kwargs" in undeclared and "kwargs" not in skip_special_params:
- args.append(frame.symbols.declare_parameter("kwargs"))
- macro_ref.accesses_kwargs = True
- if "varargs" in undeclared and "varargs" not in skip_special_params:
- args.append(frame.symbols.declare_parameter("varargs"))
- macro_ref.accesses_varargs = True
-
- # macros are delayed, they never require output checks
- frame.require_output_check = False
- frame.symbols.analyze_node(node)
- self.writeline("%s(%s):" % (self.func("macro"), ", ".join(args)), node)
- self.indent()
-
- self.buffer(frame)
- self.enter_frame(frame)
-
- self.push_parameter_definitions(frame)
- for idx, arg in enumerate(node.args):
- ref = frame.symbols.ref(arg.name)
- self.writeline("if %s is missing:" % ref)
- self.indent()
- try:
- default = node.defaults[idx - len(node.args)]
- except IndexError:
- self.writeline(
- "%s = undefined(%r, name=%r)"
- % (ref, "parameter %r was not provided" % arg.name, arg.name)
- )
- else:
- self.writeline("%s = " % ref)
- self.visit(default, frame)
- self.mark_parameter_stored(ref)
- self.outdent()
- self.pop_parameter_definitions()
-
- self.blockvisit(node.body, frame)
- self.return_buffer_contents(frame, force_unescaped=True)
- self.leave_frame(frame, with_python_scope=True)
- self.outdent()
-
- return frame, macro_ref
-
- def macro_def(self, macro_ref, frame):
- """Dump the macro definition for the def created by macro_body."""
- arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args)
- name = getattr(macro_ref.node, "name", None)
- if len(macro_ref.node.args) == 1:
- arg_tuple += ","
- self.write(
- "Macro(environment, macro, %r, (%s), %r, %r, %r, "
- "context.eval_ctx.autoescape)"
- % (
- name,
- arg_tuple,
- macro_ref.accesses_kwargs,
- macro_ref.accesses_varargs,
- macro_ref.accesses_caller,
- )
- )
-
- def position(self, node):
- """Return a human readable position for the node."""
- rv = "line %d" % node.lineno
- if self.name is not None:
- rv += " in " + repr(self.name)
- return rv
-
- def dump_local_context(self, frame):
- return "{%s}" % ", ".join(
- "%r: %s" % (name, target)
- for name, target in iteritems(frame.symbols.dump_stores())
- )
-
- def write_commons(self):
- """Writes a common preamble that is used by root and block functions.
- Primarily this sets up common local helpers and enforces a generator
- through a dead branch.
- """
- self.writeline("resolve = context.resolve_or_missing")
- self.writeline("undefined = environment.undefined")
- # always use the standard Undefined class for the implicit else of
- # conditional expressions
- self.writeline("cond_expr_undefined = Undefined")
- self.writeline("if 0: yield None")
-
- def push_parameter_definitions(self, frame):
- """Pushes all parameter targets from the given frame into a local
- stack that permits tracking of yet to be assigned parameters. In
- particular this enables the optimization from `visit_Name` to skip
- undefined expressions for parameters in macros as macros can reference
- otherwise unbound parameters.
- """
- self._param_def_block.append(frame.symbols.dump_param_targets())
-
- def pop_parameter_definitions(self):
- """Pops the current parameter definitions set."""
- self._param_def_block.pop()
-
- def mark_parameter_stored(self, target):
- """Marks a parameter in the current parameter definitions as stored.
- This will skip the enforced undefined checks.
- """
- if self._param_def_block:
- self._param_def_block[-1].discard(target)
-
- def push_context_reference(self, target):
- self._context_reference_stack.append(target)
-
- def pop_context_reference(self):
- self._context_reference_stack.pop()
-
- def get_context_ref(self):
- return self._context_reference_stack[-1]
-
- def get_resolve_func(self):
- target = self._context_reference_stack[-1]
- if target == "context":
- return "resolve"
- return "%s.resolve" % target
-
- def derive_context(self, frame):
- return "%s.derived(%s)" % (
- self.get_context_ref(),
- self.dump_local_context(frame),
- )
-
- def parameter_is_undeclared(self, target):
- """Checks if a given target is an undeclared parameter."""
- if not self._param_def_block:
- return False
- return target in self._param_def_block[-1]
-
- def push_assign_tracking(self):
- """Pushes a new layer for assignment tracking."""
- self._assign_stack.append(set())
-
- def pop_assign_tracking(self, frame):
- """Pops the topmost level for assignment tracking and updates the
- context variables if necessary.
- """
- vars = self._assign_stack.pop()
- if not frame.toplevel or not vars:
- return
- public_names = [x for x in vars if x[:1] != "_"]
- if len(vars) == 1:
- name = next(iter(vars))
- ref = frame.symbols.ref(name)
- self.writeline("context.vars[%r] = %s" % (name, ref))
- else:
- self.writeline("context.vars.update({")
- for idx, name in enumerate(vars):
- if idx:
- self.write(", ")
- ref = frame.symbols.ref(name)
- self.write("%r: %s" % (name, ref))
- self.write("})")
- if public_names:
- if len(public_names) == 1:
- self.writeline("context.exported_vars.add(%r)" % public_names[0])
- else:
- self.writeline(
- "context.exported_vars.update((%s))"
- % ", ".join(imap(repr, public_names))
- )
-
- # -- Statement Visitors
-
- def visit_Template(self, node, frame=None):
- assert frame is None, "no root frame allowed"
- eval_ctx = EvalContext(self.environment, self.name)
-
- from .runtime import exported
-
- self.writeline("from __future__ import %s" % ", ".join(code_features))
- self.writeline("from jinja2.runtime import " + ", ".join(exported))
-
- if self.environment.is_async:
- self.writeline(
- "from jinja2.asyncsupport import auto_await, "
- "auto_aiter, AsyncLoopContext"
- )
-
- # if we want a deferred initialization we cannot move the
- # environment into a local name
- envenv = not self.defer_init and ", environment=environment" or ""
-
- # do we have an extends tag at all? If not, we can save some
- # overhead by just not processing any inheritance code.
- have_extends = node.find(nodes.Extends) is not None
-
- # find all blocks
- for block in node.find_all(nodes.Block):
- if block.name in self.blocks:
- self.fail("block %r defined twice" % block.name, block.lineno)
- self.blocks[block.name] = block
-
- # find all imports and import them
- for import_ in node.find_all(nodes.ImportedName):
- if import_.importname not in self.import_aliases:
- imp = import_.importname
- self.import_aliases[imp] = alias = self.temporary_identifier()
- if "." in imp:
- module, obj = imp.rsplit(".", 1)
- self.writeline("from %s import %s as %s" % (module, obj, alias))
- else:
- self.writeline("import %s as %s" % (imp, alias))
-
- # add the load name
- self.writeline("name = %r" % self.name)
-
- # generate the root render function.
- self.writeline(
- "%s(context, missing=missing%s):" % (self.func("root"), envenv), extra=1
- )
- self.indent()
- self.write_commons()
-
- # process the root
- frame = Frame(eval_ctx)
- if "self" in find_undeclared(node.body, ("self",)):
- ref = frame.symbols.declare_parameter("self")
- self.writeline("%s = TemplateReference(context)" % ref)
- frame.symbols.analyze_node(node)
- frame.toplevel = frame.rootlevel = True
- frame.require_output_check = have_extends and not self.has_known_extends
- if have_extends:
- self.writeline("parent_template = None")
- self.enter_frame(frame)
- self.pull_dependencies(node.body)
- self.blockvisit(node.body, frame)
- self.leave_frame(frame, with_python_scope=True)
- self.outdent()
-
- # make sure that the parent root is called.
- if have_extends:
- if not self.has_known_extends:
- self.indent()
- self.writeline("if parent_template is not None:")
- self.indent()
- if supports_yield_from and not self.environment.is_async:
- self.writeline("yield from parent_template.root_render_func(context)")
- else:
- self.writeline(
- "%sfor event in parent_template."
- "root_render_func(context):"
- % (self.environment.is_async and "async " or "")
- )
- self.indent()
- self.writeline("yield event")
- self.outdent()
- self.outdent(1 + (not self.has_known_extends))
-
- # at this point we now have the blocks collected and can visit them too.
- for name, block in iteritems(self.blocks):
- self.writeline(
- "%s(context, missing=missing%s):"
- % (self.func("block_" + name), envenv),
- block,
- 1,
- )
- self.indent()
- self.write_commons()
- # It's important that we do not make this frame a child of the
- # toplevel template. This would cause a variety of
- # interesting issues with identifier tracking.
- block_frame = Frame(eval_ctx)
- undeclared = find_undeclared(block.body, ("self", "super"))
- if "self" in undeclared:
- ref = block_frame.symbols.declare_parameter("self")
- self.writeline("%s = TemplateReference(context)" % ref)
- if "super" in undeclared:
- ref = block_frame.symbols.declare_parameter("super")
- self.writeline("%s = context.super(%r, block_%s)" % (ref, name, name))
- block_frame.symbols.analyze_node(block)
- block_frame.block = name
- self.enter_frame(block_frame)
- self.pull_dependencies(block.body)
- self.blockvisit(block.body, block_frame)
- self.leave_frame(block_frame, with_python_scope=True)
- self.outdent()
-
- self.writeline(
- "blocks = {%s}" % ", ".join("%r: block_%s" % (x, x) for x in self.blocks),
- extra=1,
- )
-
- # add a function that returns the debug info
- self.writeline(
- "debug_info = %r" % "&".join("%s=%s" % x for x in self.debug_info)
- )
-
- def visit_Block(self, node, frame):
- """Call a block and register it for the template."""
- level = 0
- if frame.toplevel:
- # if we know that we are a child template, there is no need to
- # check if we are one
- if self.has_known_extends:
- return
- if self.extends_so_far > 0:
- self.writeline("if parent_template is None:")
- self.indent()
- level += 1
-
- if node.scoped:
- context = self.derive_context(frame)
- else:
- context = self.get_context_ref()
-
- if (
- supports_yield_from
- and not self.environment.is_async
- and frame.buffer is None
- ):
- self.writeline(
- "yield from context.blocks[%r][0](%s)" % (node.name, context), node
- )
- else:
- loop = self.environment.is_async and "async for" or "for"
- self.writeline(
- "%s event in context.blocks[%r][0](%s):" % (loop, node.name, context),
- node,
- )
- self.indent()
- self.simple_write("event", frame)
- self.outdent()
-
- self.outdent(level)
-
- def visit_Extends(self, node, frame):
- """Calls the extender."""
- if not frame.toplevel:
- self.fail("cannot use extend from a non top-level scope", node.lineno)
-
- # if the number of extends statements in general is zero so
- # far, we don't have to add a check if something extended
- # the template before this one.
- if self.extends_so_far > 0:
-
- # if we have a known extends we just add a template runtime
- # error into the generated code. We could catch that at compile
- # time too, but it is better not to confuse users by throwing the
- # same error at different times just "because we can".
- if not self.has_known_extends:
- self.writeline("if parent_template is not None:")
- self.indent()
- self.writeline("raise TemplateRuntimeError(%r)" % "extended multiple times")
-
- # if we have a known extends already we don't need that code here
- # as we know that the template execution will end here.
- if self.has_known_extends:
- raise CompilerExit()
- else:
- self.outdent()
-
- self.writeline("parent_template = environment.get_template(", node)
- self.visit(node.template, frame)
- self.write(", %r)" % self.name)
- self.writeline(
- "for name, parent_block in parent_template.blocks.%s():" % dict_item_iter
- )
- self.indent()
- self.writeline("context.blocks.setdefault(name, []).append(parent_block)")
- self.outdent()
-
- # if this extends statement was in the root level we can take
- # advantage of that information and simplify the generated code
- # in the top level from this point onwards
- if frame.rootlevel:
- self.has_known_extends = True
-
- # and now we have one more
- self.extends_so_far += 1
-
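
The parent_template machinery generated above is what drives ordinary
template inheritance; a minimal sketch, assuming a standalone jinja2 2.x:

    from jinja2 import DictLoader, Environment

    env = Environment(loader=DictLoader({
        "base.html": "<title>{% block title %}default{% endblock %}</title>",
        "child.html": "{% extends 'base.html' %}{% block title %}custom{% endblock %}",
    }))
    # The child's blocks are appended to context.blocks before control
    # is handed to the parent's root render function.
    print(env.get_template("child.html").render())  # <title>custom</title>
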
- def visit_Include(self, node, frame):
- """Handles includes."""
- if node.ignore_missing:
- self.writeline("try:")
- self.indent()
-
- func_name = "get_or_select_template"
- if isinstance(node.template, nodes.Const):
- if isinstance(node.template.value, string_types):
- func_name = "get_template"
- elif isinstance(node.template.value, (tuple, list)):
- func_name = "select_template"
- elif isinstance(node.template, (nodes.Tuple, nodes.List)):
- func_name = "select_template"
-
- self.writeline("template = environment.%s(" % func_name, node)
- self.visit(node.template, frame)
- self.write(", %r)" % self.name)
- if node.ignore_missing:
- self.outdent()
- self.writeline("except TemplateNotFound:")
- self.indent()
- self.writeline("pass")
- self.outdent()
- self.writeline("else:")
- self.indent()
-
- skip_event_yield = False
- if node.with_context:
- loop = self.environment.is_async and "async for" or "for"
- self.writeline(
- "%s event in template.root_render_func("
- "template.new_context(context.get_all(), True, "
- "%s)):" % (loop, self.dump_local_context(frame))
- )
- elif self.environment.is_async:
- self.writeline(
- "for event in (await "
- "template._get_default_module_async())"
- "._body_stream:"
- )
- else:
- if supports_yield_from:
- self.writeline("yield from template._get_default_module()._body_stream")
- skip_event_yield = True
- else:
- self.writeline(
- "for event in template._get_default_module()._body_stream:"
- )
-
- if not skip_event_yield:
- self.indent()
- self.simple_write("event", frame)
- self.outdent()
-
- if node.ignore_missing:
- self.outdent()
-
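
The ignore_missing branch corresponds to the `ignore missing` modifier on
{% include %}; a sketch of the observable behavior, assuming a standalone
jinja2 2.x:

    from jinja2 import DictLoader, Environment

    env = Environment(loader=DictLoader({
        "page.html": "{% include 'nonexistent.html' ignore missing %}ok",
    }))
    # The generated try/except TemplateNotFound swallows the failed
    # lookup, so rendering still succeeds.
    print(env.get_template("page.html").render())  # ok
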
- def visit_Import(self, node, frame):
- """Visit regular imports."""
- self.writeline("%s = " % frame.symbols.ref(node.target), node)
- if frame.toplevel:
- self.write("context.vars[%r] = " % node.target)
- if self.environment.is_async:
- self.write("await ")
- self.write("environment.get_template(")
- self.visit(node.template, frame)
- self.write(", %r)." % self.name)
- if node.with_context:
- self.write(
- "make_module%s(context.get_all(), True, %s)"
- % (
- self.environment.is_async and "_async" or "",
- self.dump_local_context(frame),
- )
- )
- elif self.environment.is_async:
- self.write("_get_default_module_async()")
- else:
- self.write("_get_default_module()")
- if frame.toplevel and not node.target.startswith("_"):
- self.writeline("context.exported_vars.discard(%r)" % node.target)
-
- def visit_FromImport(self, node, frame):
- """Visit named imports."""
- self.newline(node)
- self.write(
- "included_template = %senvironment.get_template("
- % (self.environment.is_async and "await " or "")
- )
- self.visit(node.template, frame)
- self.write(", %r)." % self.name)
- if node.with_context:
- self.write(
- "make_module%s(context.get_all(), True, %s)"
- % (
- self.environment.is_async and "_async" or "",
- self.dump_local_context(frame),
- )
- )
- elif self.environment.is_async:
- self.write("_get_default_module_async()")
- else:
- self.write("_get_default_module()")
-
- var_names = []
- discarded_names = []
- for name in node.names:
- if isinstance(name, tuple):
- name, alias = name
- else:
- alias = name
- self.writeline(
- "%s = getattr(included_template, "
- "%r, missing)" % (frame.symbols.ref(alias), name)
- )
- self.writeline("if %s is missing:" % frame.symbols.ref(alias))
- self.indent()
- self.writeline(
- "%s = undefined(%r %% "
- "included_template.__name__, "
- "name=%r)"
- % (
- frame.symbols.ref(alias),
- "the template %%r (imported on %s) does "
- "not export the requested name %s"
- % (self.position(node), repr(name)),
- name,
- )
- )
- self.outdent()
- if frame.toplevel:
- var_names.append(alias)
- if not alias.startswith("_"):
- discarded_names.append(alias)
-
- if var_names:
- if len(var_names) == 1:
- name = var_names[0]
- self.writeline(
- "context.vars[%r] = %s" % (name, frame.symbols.ref(name))
- )
- else:
- self.writeline(
- "context.vars.update({%s})"
- % ", ".join(
- "%r: %s" % (name, frame.symbols.ref(name)) for name in var_names
- )
- )
- if discarded_names:
- if len(discarded_names) == 1:
- self.writeline("context.exported_vars.discard(%r)" % discarded_names[0])
- else:
- self.writeline(
- "context.exported_vars.difference_"
- "update((%s))" % ", ".join(imap(repr, discarded_names))
- )
-
- def visit_For(self, node, frame):
- loop_frame = frame.inner()
- test_frame = frame.inner()
- else_frame = frame.inner()
-
- # try to figure out if we have an extended loop. An extended loop
- # is necessary if the loop is in recursive mode or if the special loop
- # variable is accessed in the body.
- extended_loop = node.recursive or "loop" in find_undeclared(
- node.iter_child_nodes(only=("body",)), ("loop",)
- )
-
- loop_ref = None
- if extended_loop:
- loop_ref = loop_frame.symbols.declare_parameter("loop")
-
- loop_frame.symbols.analyze_node(node, for_branch="body")
- if node.else_:
- else_frame.symbols.analyze_node(node, for_branch="else")
-
- if node.test:
- loop_filter_func = self.temporary_identifier()
- test_frame.symbols.analyze_node(node, for_branch="test")
- self.writeline("%s(fiter):" % self.func(loop_filter_func), node.test)
- self.indent()
- self.enter_frame(test_frame)
- self.writeline(self.environment.is_async and "async for " or "for ")
- self.visit(node.target, loop_frame)
- self.write(" in ")
- self.write(self.environment.is_async and "auto_aiter(fiter)" or "fiter")
- self.write(":")
- self.indent()
- self.writeline("if ", node.test)
- self.visit(node.test, test_frame)
- self.write(":")
- self.indent()
- self.writeline("yield ")
- self.visit(node.target, loop_frame)
- self.outdent(3)
- self.leave_frame(test_frame, with_python_scope=True)
-
- # if we don't have a recursive loop we have to find the shadowed
- # variables at that point. Because loops can be nested, and the loop
- # variable is a special one, we have to enforce aliasing for it.
- if node.recursive:
- self.writeline(
- "%s(reciter, loop_render_func, depth=0):" % self.func("loop"), node
- )
- self.indent()
- self.buffer(loop_frame)
-
- # Use the same buffer for the else frame
- else_frame.buffer = loop_frame.buffer
-
- # make sure the loop variable is a special one and raise a template
- # assertion error if a loop tries to write to loop
- if extended_loop:
- self.writeline("%s = missing" % loop_ref)
-
- for name in node.find_all(nodes.Name):
- if name.ctx == "store" and name.name == "loop":
- self.fail(
- "Can't assign to special loop variable in for-loop target",
- name.lineno,
- )
-
- if node.else_:
- iteration_indicator = self.temporary_identifier()
- self.writeline("%s = 1" % iteration_indicator)
-
- self.writeline(self.environment.is_async and "async for " or "for ", node)
- self.visit(node.target, loop_frame)
- if extended_loop:
- if self.environment.is_async:
- self.write(", %s in AsyncLoopContext(" % loop_ref)
- else:
- self.write(", %s in LoopContext(" % loop_ref)
- else:
- self.write(" in ")
-
- if node.test:
- self.write("%s(" % loop_filter_func)
- if node.recursive:
- self.write("reciter")
- else:
- if self.environment.is_async and not extended_loop:
- self.write("auto_aiter(")
- self.visit(node.iter, frame)
- if self.environment.is_async and not extended_loop:
- self.write(")")
- if node.test:
- self.write(")")
-
- if node.recursive:
- self.write(", undefined, loop_render_func, depth):")
- else:
- self.write(extended_loop and ", undefined):" or ":")
-
- self.indent()
- self.enter_frame(loop_frame)
-
- self.blockvisit(node.body, loop_frame)
- if node.else_:
- self.writeline("%s = 0" % iteration_indicator)
- self.outdent()
- self.leave_frame(
- loop_frame, with_python_scope=node.recursive and not node.else_
- )
-
- if node.else_:
- self.writeline("if %s:" % iteration_indicator)
- self.indent()
- self.enter_frame(else_frame)
- self.blockvisit(node.else_, else_frame)
- self.leave_frame(else_frame)
- self.outdent()
-
- # if the node was recursive we have to return the buffer contents
- # and start the iteration code
- if node.recursive:
- self.return_buffer_contents(loop_frame)
- self.outdent()
- self.start_write(frame, node)
- if self.environment.is_async:
- self.write("await ")
- self.write("loop(")
- if self.environment.is_async:
- self.write("auto_aiter(")
- self.visit(node.iter, frame)
- if self.environment.is_async:
- self.write(")")
- self.write(", loop)")
- self.end_write(frame)
-
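
The recursive branch wraps the body in a loop(reciter, loop_render_func,
depth) function so the template can re-enter it through the special
`loop` callable; a sketch, assuming a standalone jinja2 2.x:

    from jinja2 import Template

    tmpl = Template(
        "{% for item in tree recursive %}"
        "{{ item.name }}"
        "{% if item.children %}({{ loop(item.children) }}){% endif %}"
        "{% endfor %}"
    )
    tree = [{"name": "a", "children": [{"name": "b", "children": []}]}]
    print(tmpl.render(tree=tree))  # a(b)
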
- def visit_If(self, node, frame):
- if_frame = frame.soft()
- self.writeline("if ", node)
- self.visit(node.test, if_frame)
- self.write(":")
- self.indent()
- self.blockvisit(node.body, if_frame)
- self.outdent()
- for elif_ in node.elif_:
- self.writeline("elif ", elif_)
- self.visit(elif_.test, if_frame)
- self.write(":")
- self.indent()
- self.blockvisit(elif_.body, if_frame)
- self.outdent()
- if node.else_:
- self.writeline("else:")
- self.indent()
- self.blockvisit(node.else_, if_frame)
- self.outdent()
-
- def visit_Macro(self, node, frame):
- macro_frame, macro_ref = self.macro_body(node, frame)
- self.newline()
- if frame.toplevel:
- if not node.name.startswith("_"):
- self.write("context.exported_vars.add(%r)" % node.name)
- self.writeline("context.vars[%r] = " % node.name)
- self.write("%s = " % frame.symbols.ref(node.name))
- self.macro_def(macro_ref, macro_frame)
-
- def visit_CallBlock(self, node, frame):
- call_frame, macro_ref = self.macro_body(node, frame)
- self.writeline("caller = ")
- self.macro_def(macro_ref, call_frame)
- self.start_write(frame, node)
- self.visit_Call(node.call, frame, forward_caller=True)
- self.end_write(frame)
-
- def visit_FilterBlock(self, node, frame):
- filter_frame = frame.inner()
- filter_frame.symbols.analyze_node(node)
- self.enter_frame(filter_frame)
- self.buffer(filter_frame)
- self.blockvisit(node.body, filter_frame)
- self.start_write(frame, node)
- self.visit_Filter(node.filter, filter_frame)
- self.end_write(frame)
- self.leave_frame(filter_frame)
-
- def visit_With(self, node, frame):
- with_frame = frame.inner()
- with_frame.symbols.analyze_node(node)
- self.enter_frame(with_frame)
- for target, expr in izip(node.targets, node.values):
- self.newline()
- self.visit(target, with_frame)
- self.write(" = ")
- self.visit(expr, frame)
- self.blockvisit(node.body, with_frame)
- self.leave_frame(with_frame)
-
- def visit_ExprStmt(self, node, frame):
- self.newline(node)
- self.visit(node.node, frame)
-
- _FinalizeInfo = namedtuple("_FinalizeInfo", ("const", "src"))
- #: The default finalize function if the environment isn't configured
- #: with one. Or if the environment has one, this is called on that
- #: function's output for constants.
- _default_finalize = text_type
- _finalize = None
-
- def _make_finalize(self):
- """Build the finalize function to be used on constants and at
- runtime. Cached so it's only created once for all output nodes.
-
- Returns a ``namedtuple`` with the following attributes:
-
- ``const``
- A function to finalize constant data at compile time.
-
- ``src``
- Source code to output around nodes to be evaluated at
- runtime.
- """
- if self._finalize is not None:
- return self._finalize
-
- finalize = default = self._default_finalize
- src = None
-
- if self.environment.finalize:
- src = "environment.finalize("
- env_finalize = self.environment.finalize
-
- def finalize(value):
- return default(env_finalize(value))
-
- if getattr(env_finalize, "contextfunction", False) is True:
- src += "context, "
- finalize = None # noqa: F811
- elif getattr(env_finalize, "evalcontextfunction", False) is True:
- src += "context.eval_ctx, "
- finalize = None
- elif getattr(env_finalize, "environmentfunction", False) is True:
- src += "environment, "
-
- def finalize(value):
- return default(env_finalize(self.environment, value))
-
- self._finalize = self._FinalizeInfo(finalize, src)
- return self._finalize
-
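
From the outside, finalize is simply applied to every output expression
before it is written; a sketch, assuming a standalone jinja2 2.x:

    from jinja2 import Environment

    # Render None as an empty string instead of the literal "None".
    env = Environment(finalize=lambda value: "" if value is None else value)
    print(env.from_string("[{{ value }}]").render(value=None))  # []
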
- def _output_const_repr(self, group):
- """Given a group of constant values converted from ``Output``
- child nodes, produce a string to write to the template module
- source.
- """
- return repr(concat(group))
-
- def _output_child_to_const(self, node, frame, finalize):
- """Try to optimize a child of an ``Output`` node by trying to
- convert it to constant, finalized data at compile time.
-
- If :exc:`Impossible` is raised, the node is not constant and
- will be evaluated at runtime. Any other exception will also be
- evaluated at runtime for easier debugging.
- """
- const = node.as_const(frame.eval_ctx)
-
- if frame.eval_ctx.autoescape:
- const = escape(const)
-
- # Template data doesn't go through finalize.
- if isinstance(node, nodes.TemplateData):
- return text_type(const)
-
- return finalize.const(const)
-
- def _output_child_pre(self, node, frame, finalize):
- """Output extra source code before visiting a child of an
- ``Output`` node.
- """
- if frame.eval_ctx.volatile:
- self.write("(escape if context.eval_ctx.autoescape else to_string)(")
- elif frame.eval_ctx.autoescape:
- self.write("escape(")
- else:
- self.write("to_string(")
-
- if finalize.src is not None:
- self.write(finalize.src)
-
- def _output_child_post(self, node, frame, finalize):
- """Output extra source code after visiting a child of an
- ``Output`` node.
- """
- self.write(")")
-
- if finalize.src is not None:
- self.write(")")
-
- def visit_Output(self, node, frame):
- # If an extends is active, don't render outside a block.
- if frame.require_output_check:
- # A top-level extends is known to exist at compile time.
- if self.has_known_extends:
- return
-
- self.writeline("if parent_template is None:")
- self.indent()
-
- finalize = self._make_finalize()
- body = []
-
- # Evaluate constants at compile time if possible. Each item in
- # body will be either a list of static data or a node to be
- # evaluated at runtime.
- for child in node.nodes:
- try:
- if not (
- # If the finalize function requires runtime context,
- # constants can't be evaluated at compile time.
- finalize.const
- # Unless it's basic template data that won't be
- # finalized anyway.
- or isinstance(child, nodes.TemplateData)
- ):
- raise nodes.Impossible()
-
- const = self._output_child_to_const(child, frame, finalize)
- except (nodes.Impossible, Exception):
- # The node was not constant and needs to be evaluated at
- # runtime. Or another error was raised, which is easier
- # to debug at runtime.
- body.append(child)
- continue
-
- if body and isinstance(body[-1], list):
- body[-1].append(const)
- else:
- body.append([const])
-
- if frame.buffer is not None:
- if len(body) == 1:
- self.writeline("%s.append(" % frame.buffer)
- else:
- self.writeline("%s.extend((" % frame.buffer)
-
- self.indent()
-
- for item in body:
- if isinstance(item, list):
- # A group of constant data to join and output.
- val = self._output_const_repr(item)
-
- if frame.buffer is None:
- self.writeline("yield " + val)
- else:
- self.writeline(val + ",")
- else:
- if frame.buffer is None:
- self.writeline("yield ", item)
- else:
- self.newline(item)
-
- # A node to be evaluated at runtime.
- self._output_child_pre(item, frame, finalize)
- self.visit(item, frame)
- self._output_child_post(item, frame, finalize)
-
- if frame.buffer is not None:
- self.write(",")
-
- if frame.buffer is not None:
- self.outdent()
- self.writeline(")" if len(body) == 1 else "))")
-
- if frame.require_output_check:
- self.outdent()
-
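
Because constant children are folded at compile time, a purely constant
expression disappears into the surrounding template data; this is visible
in the raw compiled source (a sketch, assuming a standalone jinja2 2.x):

    from jinja2 import Environment

    # The generated source yields one pre-joined string 'a2b' instead of
    # evaluating 1 + 1 at render time.
    print(Environment().compile("a{{ 1 + 1 }}b", raw=True))
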
- def visit_Assign(self, node, frame):
- self.push_assign_tracking()
- self.newline(node)
- self.visit(node.target, frame)
- self.write(" = ")
- self.visit(node.node, frame)
- self.pop_assign_tracking(frame)
-
- def visit_AssignBlock(self, node, frame):
- self.push_assign_tracking()
- block_frame = frame.inner()
- # This is a special case. Since a set block always captures, we
- # disable output checks. This way one can use set blocks at the
- # top level even in extended templates.
- block_frame.require_output_check = False
- block_frame.symbols.analyze_node(node)
- self.enter_frame(block_frame)
- self.buffer(block_frame)
- self.blockvisit(node.body, block_frame)
- self.newline(node)
- self.visit(node.target, frame)
- self.write(" = (Markup if context.eval_ctx.autoescape else identity)(")
- if node.filter is not None:
- self.visit_Filter(node.filter, block_frame)
- else:
- self.write("concat(%s)" % block_frame.buffer)
- self.write(")")
- self.pop_assign_tracking(frame)
- self.leave_frame(block_frame)
-
- # -- Expression Visitors
-
- def visit_Name(self, node, frame):
- if node.ctx == "store" and frame.toplevel:
- if self._assign_stack:
- self._assign_stack[-1].add(node.name)
- ref = frame.symbols.ref(node.name)
-
- # If we are looking up a variable we might have to deal with the
- # case where it's undefined. We can skip that case if the load
- # instruction indicates a parameter, which is always defined.
- if node.ctx == "load":
- load = frame.symbols.find_load(ref)
- if not (
- load is not None
- and load[0] == VAR_LOAD_PARAMETER
- and not self.parameter_is_undeclared(ref)
- ):
- self.write(
- "(undefined(name=%r) if %s is missing else %s)"
- % (node.name, ref, ref)
- )
- return
-
- self.write(ref)
-
- def visit_NSRef(self, node, frame):
- # NSRefs can only be used to store values; since they use the normal
- # `foo.bar` notation they will be parsed as a normal attribute access
- # when used anywhere but in a `set` context
- ref = frame.symbols.ref(node.name)
- self.writeline("if not isinstance(%s, Namespace):" % ref)
- self.indent()
- self.writeline(
- "raise TemplateRuntimeError(%r)"
- % "cannot assign attribute on non-namespace object"
- )
- self.outdent()
- self.writeline("%s[%r]" % (ref, node.attr))
-
- def visit_Const(self, node, frame):
- val = node.as_const(frame.eval_ctx)
- if isinstance(val, float):
- self.write(str(val))
- else:
- self.write(repr(val))
-
- def visit_TemplateData(self, node, frame):
- try:
- self.write(repr(node.as_const(frame.eval_ctx)))
- except nodes.Impossible:
- self.write(
- "(Markup if context.eval_ctx.autoescape else identity)(%r)" % node.data
- )
-
- def visit_Tuple(self, node, frame):
- self.write("(")
- idx = -1
- for idx, item in enumerate(node.items):
- if idx:
- self.write(", ")
- self.visit(item, frame)
- self.write(idx == 0 and ",)" or ")")
-
- def visit_List(self, node, frame):
- self.write("[")
- for idx, item in enumerate(node.items):
- if idx:
- self.write(", ")
- self.visit(item, frame)
- self.write("]")
-
- def visit_Dict(self, node, frame):
- self.write("{")
- for idx, item in enumerate(node.items):
- if idx:
- self.write(", ")
- self.visit(item.key, frame)
- self.write(": ")
- self.visit(item.value, frame)
- self.write("}")
-
- def binop(operator, interceptable=True): # noqa: B902
- @optimizeconst
- def visitor(self, node, frame):
- if (
- self.environment.sandboxed
- and operator in self.environment.intercepted_binops
- ):
- self.write("environment.call_binop(context, %r, " % operator)
- self.visit(node.left, frame)
- self.write(", ")
- self.visit(node.right, frame)
- else:
- self.write("(")
- self.visit(node.left, frame)
- self.write(" %s " % operator)
- self.visit(node.right, frame)
- self.write(")")
-
- return visitor
-
- def uaop(operator, interceptable=True): # noqa: B902
- @optimizeconst
- def visitor(self, node, frame):
- if (
- self.environment.sandboxed
- and operator in self.environment.intercepted_unops
- ):
- self.write("environment.call_unop(context, %r, " % operator)
- self.visit(node.node, frame)
- else:
- self.write("(" + operator)
- self.visit(node.node, frame)
- self.write(")")
-
- return visitor
-
- visit_Add = binop("+")
- visit_Sub = binop("-")
- visit_Mul = binop("*")
- visit_Div = binop("/")
- visit_FloorDiv = binop("//")
- visit_Pow = binop("**")
- visit_Mod = binop("%")
- visit_And = binop("and", interceptable=False)
- visit_Or = binop("or", interceptable=False)
- visit_Pos = uaop("+")
- visit_Neg = uaop("-")
- visit_Not = uaop("not ", interceptable=False)
- del binop, uaop
-
- @optimizeconst
- def visit_Concat(self, node, frame):
- if frame.eval_ctx.volatile:
- func_name = "(context.eval_ctx.volatile and markup_join or unicode_join)"
- elif frame.eval_ctx.autoescape:
- func_name = "markup_join"
- else:
- func_name = "unicode_join"
- self.write("%s((" % func_name)
- for arg in node.nodes:
- self.visit(arg, frame)
- self.write(", ")
- self.write("))")
-
- @optimizeconst
- def visit_Compare(self, node, frame):
- self.write("(")
- self.visit(node.expr, frame)
- for op in node.ops:
- self.visit(op, frame)
- self.write(")")
-
- def visit_Operand(self, node, frame):
- self.write(" %s " % operators[node.op])
- self.visit(node.expr, frame)
-
- @optimizeconst
- def visit_Getattr(self, node, frame):
- if self.environment.is_async:
- self.write("(await auto_await(")
-
- self.write("environment.getattr(")
- self.visit(node.node, frame)
- self.write(", %r)" % node.attr)
-
- if self.environment.is_async:
- self.write("))")
-
- @optimizeconst
- def visit_Getitem(self, node, frame):
- # slices bypass the environment getitem method.
- if isinstance(node.arg, nodes.Slice):
- self.visit(node.node, frame)
- self.write("[")
- self.visit(node.arg, frame)
- self.write("]")
- else:
- if self.environment.is_async:
- self.write("(await auto_await(")
-
- self.write("environment.getitem(")
- self.visit(node.node, frame)
- self.write(", ")
- self.visit(node.arg, frame)
- self.write(")")
-
- if self.environment.is_async:
- self.write("))")
-
- def visit_Slice(self, node, frame):
- if node.start is not None:
- self.visit(node.start, frame)
- self.write(":")
- if node.stop is not None:
- self.visit(node.stop, frame)
- if node.step is not None:
- self.write(":")
- self.visit(node.step, frame)
-
- @optimizeconst
- def visit_Filter(self, node, frame):
- if self.environment.is_async:
- self.write("await auto_await(")
- self.write(self.filters[node.name] + "(")
- func = self.environment.filters.get(node.name)
- if func is None:
- self.fail("no filter named %r" % node.name, node.lineno)
- if getattr(func, "contextfilter", False) is True:
- self.write("context, ")
- elif getattr(func, "evalcontextfilter", False) is True:
- self.write("context.eval_ctx, ")
- elif getattr(func, "environmentfilter", False) is True:
- self.write("environment, ")
-
- # if the filter node is None we are inside a filter block
- # and want to write to the current buffer
- if node.node is not None:
- self.visit(node.node, frame)
- elif frame.eval_ctx.volatile:
- self.write(
- "(context.eval_ctx.autoescape and"
- " Markup(concat(%s)) or concat(%s))" % (frame.buffer, frame.buffer)
- )
- elif frame.eval_ctx.autoescape:
- self.write("Markup(concat(%s))" % frame.buffer)
- else:
- self.write("concat(%s)" % frame.buffer)
- self.signature(node, frame)
- self.write(")")
- if self.environment.is_async:
- self.write(")")
-
- @optimizeconst
- def visit_Test(self, node, frame):
- self.write(self.tests[node.name] + "(")
- if node.name not in self.environment.tests:
- self.fail("no test named %r" % node.name, node.lineno)
- self.visit(node.node, frame)
- self.signature(node, frame)
- self.write(")")
-
- @optimizeconst
- def visit_CondExpr(self, node, frame):
- def write_expr2():
- if node.expr2 is not None:
- return self.visit(node.expr2, frame)
- self.write(
- "cond_expr_undefined(%r)"
- % (
- "the inline if-"
- "expression on %s evaluated to false and "
- "no else section was defined." % self.position(node)
- )
- )
-
- self.write("(")
- self.visit(node.expr1, frame)
- self.write(" if ")
- self.visit(node.test, frame)
- self.write(" else ")
- write_expr2()
- self.write(")")
-
- @optimizeconst
- def visit_Call(self, node, frame, forward_caller=False):
- if self.environment.is_async:
- self.write("await auto_await(")
- if self.environment.sandboxed:
- self.write("environment.call(context, ")
- else:
- self.write("context.call(")
- self.visit(node.node, frame)
- extra_kwargs = forward_caller and {"caller": "caller"} or None
- self.signature(node, frame, extra_kwargs)
- self.write(")")
- if self.environment.is_async:
- self.write(")")
-
- def visit_Keyword(self, node, frame):
- self.write(node.key + "=")
- self.visit(node.value, frame)
-
- # -- Unused nodes for extensions
-
- def visit_MarkSafe(self, node, frame):
- self.write("Markup(")
- self.visit(node.expr, frame)
- self.write(")")
-
- def visit_MarkSafeIfAutoescape(self, node, frame):
- self.write("(context.eval_ctx.autoescape and Markup or identity)(")
- self.visit(node.expr, frame)
- self.write(")")
-
- def visit_EnvironmentAttribute(self, node, frame):
- self.write("environment." + node.name)
-
- def visit_ExtensionAttribute(self, node, frame):
- self.write("environment.extensions[%r].%s" % (node.identifier, node.name))
-
- def visit_ImportedName(self, node, frame):
- self.write(self.import_aliases[node.importname])
-
- def visit_InternalName(self, node, frame):
- self.write(node.name)
-
- def visit_ContextReference(self, node, frame):
- self.write("context")
-
- def visit_DerivedContextReference(self, node, frame):
- self.write(self.derive_context(frame))
-
- def visit_Continue(self, node, frame):
- self.writeline("continue", node)
-
- def visit_Break(self, node, frame):
- self.writeline("break", node)
-
- def visit_Scope(self, node, frame):
- scope_frame = frame.inner()
- scope_frame.symbols.analyze_node(node)
- self.enter_frame(scope_frame)
- self.blockvisit(node.body, scope_frame)
- self.leave_frame(scope_frame)
-
- def visit_OverlayScope(self, node, frame):
- ctx = self.temporary_identifier()
- self.writeline("%s = %s" % (ctx, self.derive_context(frame)))
- self.writeline("%s.vars = " % ctx)
- self.visit(node.context, frame)
- self.push_context_reference(ctx)
-
- scope_frame = frame.inner(isolated=True)
- scope_frame.symbols.analyze_node(node)
- self.enter_frame(scope_frame)
- self.blockvisit(node.body, scope_frame)
- self.leave_frame(scope_frame)
- self.pop_context_reference()
-
- def visit_EvalContextModifier(self, node, frame):
- for keyword in node.options:
- self.writeline("context.eval_ctx.%s = " % keyword.key)
- self.visit(keyword.value, frame)
- try:
- val = keyword.value.as_const(frame.eval_ctx)
- except nodes.Impossible:
- frame.eval_ctx.volatile = True
- else:
- setattr(frame.eval_ctx, keyword.key, val)
-
- def visit_ScopedEvalContextModifier(self, node, frame):
- old_ctx_name = self.temporary_identifier()
- saved_ctx = frame.eval_ctx.save()
- self.writeline("%s = context.eval_ctx.save()" % old_ctx_name)
- self.visit_EvalContextModifier(node, frame)
- for child in node.body:
- self.visit(child, frame)
- frame.eval_ctx.revert(saved_ctx)
- self.writeline("context.eval_ctx.revert(%s)" % old_ctx_name)
diff --git a/lib/spack/external/jinja2/constants.py b/lib/spack/external/jinja2/constants.py
deleted file mode 100644
index bf7f2ca721..0000000000
--- a/lib/spack/external/jinja2/constants.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# -*- coding: utf-8 -*-
-#: list of lorem ipsum words used by the lipsum() helper function
-LOREM_IPSUM_WORDS = u"""\
-a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at
-auctor augue bibendum blandit class commodo condimentum congue consectetuer
-consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus
-diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend
-elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames
-faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac
-hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum
-justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem
-luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie
-mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non
-nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque
-penatibus per pharetra phasellus placerat platea porta porttitor posuere
-potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus
-ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit
-sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor
-tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices
-ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus
-viverra volutpat vulputate"""
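
This word list feeds the default `lipsum` global (backed by
jinja2.utils.generate_lorem_ipsum); a sketch, assuming a standalone
jinja2 2.x:

    from jinja2 import Environment

    # One plain-text paragraph assembled from LOREM_IPSUM_WORDS.
    print(Environment().from_string("{{ lipsum(n=1, html=False) }}").render())
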
diff --git a/lib/spack/external/jinja2/debug.py b/lib/spack/external/jinja2/debug.py
deleted file mode 100644
index 5d8aec31d0..0000000000
--- a/lib/spack/external/jinja2/debug.py
+++ /dev/null
@@ -1,268 +0,0 @@
-import sys
-from types import CodeType
-
-from . import TemplateSyntaxError
-from ._compat import PYPY
-from .utils import internal_code
-from .utils import missing
-
-
-def rewrite_traceback_stack(source=None):
- """Rewrite the current exception to replace any tracebacks from
- within compiled template code with tracebacks that look like they
- came from the template source.
-
- This must be called within an ``except`` block.
-
- :param source: For ``TemplateSyntaxError``, the original source if
- known.
- :return: A :meth:`sys.exc_info` tuple that can be re-raised.
- """
- exc_type, exc_value, tb = sys.exc_info()
-
- if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated:
- exc_value.translated = True
- exc_value.source = source
-
- try:
- # Remove the old traceback on Python 3, otherwise the frames
- # from the compiler still show up.
- exc_value.with_traceback(None)
- except AttributeError:
- pass
-
- # Outside of runtime, so the frame isn't executing template
- # code, but it still needs to point at the template.
- tb = fake_traceback(
- exc_value, None, exc_value.filename or "<unknown>", exc_value.lineno
- )
- else:
- # Skip the frame for the render function.
- tb = tb.tb_next
-
- stack = []
-
- # Build the stack of traceback objects, replacing any in template
- # code with the source file and line information.
- while tb is not None:
- # Skip frames decorated with @internalcode. These are internal
- # calls that aren't useful in template debugging output.
- if tb.tb_frame.f_code in internal_code:
- tb = tb.tb_next
- continue
-
- template = tb.tb_frame.f_globals.get("__jinja_template__")
-
- if template is not None:
- lineno = template.get_corresponding_lineno(tb.tb_lineno)
- fake_tb = fake_traceback(exc_value, tb, template.filename, lineno)
- stack.append(fake_tb)
- else:
- stack.append(tb)
-
- tb = tb.tb_next
-
- tb_next = None
-
- # Assign tb_next in reverse to avoid circular references.
- for tb in reversed(stack):
- tb_next = tb_set_next(tb, tb_next)
-
- return exc_type, exc_value, tb_next
-
-
-def fake_traceback(exc_value, tb, filename, lineno):
- """Produce a new traceback object that looks like it came from the
- template source instead of the compiled code. The filename, line
- number, and location name will point to the template, and the local
- variables will be the current template context.
-
- :param exc_value: The original exception to be re-raised to create
- the new traceback.
- :param tb: The original traceback to get the local variables and
- code info from.
- :param filename: The template filename.
- :param lineno: The line number in the template source.
- """
- if tb is not None:
- # Replace the real locals with the context that would be
- # available at that point in the template.
- locals = get_template_locals(tb.tb_frame.f_locals)
- locals.pop("__jinja_exception__", None)
- else:
- locals = {}
-
- globals = {
- "__name__": filename,
- "__file__": filename,
- "__jinja_exception__": exc_value,
- }
- # Raise an exception at the correct line number.
- code = compile("\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec")
-
- # Build a new code object that points to the template file and
- # replaces the location with a block name.
- try:
- location = "template"
-
- if tb is not None:
- function = tb.tb_frame.f_code.co_name
-
- if function == "root":
- location = "top-level template code"
- elif function.startswith("block_"):
- location = 'block "%s"' % function[6:]
-
- # Collect arguments for the new code object. CodeType only
- # accepts positional arguments, and arguments were inserted in
- # new Python versions.
- code_args = []
-
- for attr in (
- "argcount",
- "posonlyargcount", # Python 3.8
- "kwonlyargcount", # Python 3
- "nlocals",
- "stacksize",
- "flags",
- "code", # codestring
- "consts", # constants
- "names",
- "varnames",
- ("filename", filename),
- ("name", location),
- "firstlineno",
- "lnotab",
- "freevars",
- "cellvars",
- ):
- if isinstance(attr, tuple):
- # Replace with given value.
- code_args.append(attr[1])
- continue
-
- try:
- # Copy original value if it exists.
- code_args.append(getattr(code, "co_" + attr))
- except AttributeError:
- # Some arguments were added later.
- continue
-
- code = CodeType(*code_args)
- except Exception:
- # Some environments such as Google App Engine don't support
- # modifying code objects.
- pass
-
- # Execute the new code, which is guaranteed to raise, and return
- # the new traceback without this frame.
- try:
- exec(code, globals, locals)
- except BaseException:
- return sys.exc_info()[2].tb_next
-
-
-def get_template_locals(real_locals):
- """Based on the runtime locals, get the context that would be
- available at that point in the template.
- """
- # Start with the current template context.
- ctx = real_locals.get("context")
-
- if ctx:
- data = ctx.get_all().copy()
- else:
- data = {}
-
- # Might be in a derived context that only sets local variables
- # rather than pushing a context. Local variables follow the scheme
- # l_depth_name. Find the highest-depth local that has a value for
- # each name.
- local_overrides = {}
-
- for name, value in real_locals.items():
- if not name.startswith("l_") or value is missing:
- # Not a template variable, or no longer relevant.
- continue
-
- try:
- _, depth, name = name.split("_", 2)
- depth = int(depth)
- except ValueError:
- continue
-
- cur_depth = local_overrides.get(name, (-1,))[0]
-
- if cur_depth < depth:
- local_overrides[name] = (depth, value)
-
- # Modify the context with any derived context.
- for name, (_, value) in local_overrides.items():
- if value is missing:
- data.pop(name, None)
- else:
- data[name] = value
-
- return data
-
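
A small illustration of the l_<depth>_<name> scheme handled above; the
input dict here is hypothetical sample data, not something jinja2 emits
verbatim (a sketch, assuming a standalone jinja2 2.11 where
jinja2.debug.get_template_locals exists):

    from jinja2.debug import get_template_locals

    # Hypothetical compiled-frame locals: the deepest binding of "item"
    # wins, and names without the l_ prefix are ignored.
    frame_locals = {"l_0_item": "outer", "l_1_item": "inner", "unrelated": 1}
    print(get_template_locals(frame_locals))  # {'item': 'inner'}
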
-
-if sys.version_info >= (3, 7):
- # tb_next is directly assignable as of Python 3.7
- def tb_set_next(tb, tb_next):
- tb.tb_next = tb_next
- return tb
-
-
-elif PYPY:
- # PyPy might have special support, and won't work with ctypes.
- try:
- import tputil
- except ImportError:
- # Without tproxy support, use the original traceback.
- def tb_set_next(tb, tb_next):
- return tb
-
- else:
- # With tproxy support, create a proxy around the traceback that
- # returns the new tb_next.
- def tb_set_next(tb, tb_next):
- def controller(op):
- if op.opname == "__getattribute__" and op.args[0] == "tb_next":
- return tb_next
-
- return op.delegate()
-
- return tputil.make_proxy(controller, obj=tb)
-
-
-else:
- # Use ctypes to assign tb_next at the C level since it's read-only
- # from Python.
- import ctypes
-
- class _CTraceback(ctypes.Structure):
- _fields_ = [
- # Extra PyObject slots when compiled with Py_TRACE_REFS.
- ("PyObject_HEAD", ctypes.c_byte * object().__sizeof__()),
- # Only care about tb_next as an object, not a traceback.
- ("tb_next", ctypes.py_object),
- ]
-
- def tb_set_next(tb, tb_next):
- c_tb = _CTraceback.from_address(id(tb))
-
- # Clear out the old tb_next.
- if tb.tb_next is not None:
- c_tb_next = ctypes.py_object(tb.tb_next)
- c_tb.tb_next = ctypes.py_object()
- ctypes.pythonapi.Py_DecRef(c_tb_next)
-
- # Assign the new tb_next.
- if tb_next is not None:
- c_tb_next = ctypes.py_object(tb_next)
- ctypes.pythonapi.Py_IncRef(c_tb_next)
- c_tb.tb_next = c_tb_next
-
- return tb
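
The net effect of this module is that a runtime error inside a template
yields a traceback frame pointing at the template source rather than at
the compiled module; a sketch, assuming a standalone jinja2 2.x:

    import traceback

    from jinja2 import Environment

    try:
        Environment().from_string("{{ 1 // 0 }}").render()
    except ZeroDivisionError:
        # The printed stack includes a frame such as:
        #   File "<template>", line 1, in top-level template code
        traceback.print_exc()
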
diff --git a/lib/spack/external/jinja2/defaults.py b/lib/spack/external/jinja2/defaults.py
deleted file mode 100644
index 8e0e7d7710..0000000000
--- a/lib/spack/external/jinja2/defaults.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# -*- coding: utf-8 -*-
-from ._compat import range_type
-from .filters import FILTERS as DEFAULT_FILTERS # noqa: F401
-from .tests import TESTS as DEFAULT_TESTS # noqa: F401
-from .utils import Cycler
-from .utils import generate_lorem_ipsum
-from .utils import Joiner
-from .utils import Namespace
-
-# defaults for the parser / lexer
-BLOCK_START_STRING = "{%"
-BLOCK_END_STRING = "%}"
-VARIABLE_START_STRING = "{{"
-VARIABLE_END_STRING = "}}"
-COMMENT_START_STRING = "{#"
-COMMENT_END_STRING = "#}"
-LINE_STATEMENT_PREFIX = None
-LINE_COMMENT_PREFIX = None
-TRIM_BLOCKS = False
-LSTRIP_BLOCKS = False
-NEWLINE_SEQUENCE = "\n"
-KEEP_TRAILING_NEWLINE = False
-
-# default filters, tests and namespace
-
-DEFAULT_NAMESPACE = {
- "range": range_type,
- "dict": dict,
- "lipsum": generate_lorem_ipsum,
- "cycler": Cycler,
- "joiner": Joiner,
- "namespace": Namespace,
-}
-
-# default policies
-DEFAULT_POLICIES = {
- "compiler.ascii_str": True,
- "urlize.rel": "noopener",
- "urlize.target": None,
- "truncate.leeway": 5,
- "json.dumps_function": None,
- "json.dumps_kwargs": {"sort_keys": True},
- "ext.i18n.trimmed": False,
-}
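
Policies are consulted by filters and extensions at render time and can
be adjusted per environment by mutating this dict; a sketch, assuming a
standalone jinja2 2.x:

    from jinja2 import Environment

    env = Environment()
    # Make the |tojson filter emit compact output.
    env.policies["json.dumps_kwargs"] = {"sort_keys": True, "separators": (",", ":")}
    print(env.from_string("{{ data|tojson }}").render(data={"b": 1, "a": 2}))
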
diff --git a/lib/spack/external/jinja2/environment.py b/lib/spack/external/jinja2/environment.py
deleted file mode 100644
index 8430390eea..0000000000
--- a/lib/spack/external/jinja2/environment.py
+++ /dev/null
@@ -1,1362 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Classes for managing templates and their runtime and compile time
-options.
-"""
-import os
-import sys
-import weakref
-from functools import partial
-from functools import reduce
-
-from markupsafe import Markup
-
-from . import nodes
-from ._compat import encode_filename
-from ._compat import implements_iterator
-from ._compat import implements_to_string
-from ._compat import iteritems
-from ._compat import PY2
-from ._compat import PYPY
-from ._compat import reraise
-from ._compat import string_types
-from ._compat import text_type
-from .compiler import CodeGenerator
-from .compiler import generate
-from .defaults import BLOCK_END_STRING
-from .defaults import BLOCK_START_STRING
-from .defaults import COMMENT_END_STRING
-from .defaults import COMMENT_START_STRING
-from .defaults import DEFAULT_FILTERS
-from .defaults import DEFAULT_NAMESPACE
-from .defaults import DEFAULT_POLICIES
-from .defaults import DEFAULT_TESTS
-from .defaults import KEEP_TRAILING_NEWLINE
-from .defaults import LINE_COMMENT_PREFIX
-from .defaults import LINE_STATEMENT_PREFIX
-from .defaults import LSTRIP_BLOCKS
-from .defaults import NEWLINE_SEQUENCE
-from .defaults import TRIM_BLOCKS
-from .defaults import VARIABLE_END_STRING
-from .defaults import VARIABLE_START_STRING
-from .exceptions import TemplateNotFound
-from .exceptions import TemplateRuntimeError
-from .exceptions import TemplatesNotFound
-from .exceptions import TemplateSyntaxError
-from .exceptions import UndefinedError
-from .lexer import get_lexer
-from .lexer import TokenStream
-from .nodes import EvalContext
-from .parser import Parser
-from .runtime import Context
-from .runtime import new_context
-from .runtime import Undefined
-from .utils import concat
-from .utils import consume
-from .utils import have_async_gen
-from .utils import import_string
-from .utils import internalcode
-from .utils import LRUCache
-from .utils import missing
-
-# for direct template usage we have up to ten living environments
-_spontaneous_environments = LRUCache(10)
-
-
-def get_spontaneous_environment(cls, *args):
- """Return a new spontaneous environment. A spontaneous environment
- is used for templates created directly rather than through an
- existing environment.
-
- :param cls: Environment class to create.
- :param args: Positional arguments passed to environment.
- """
- key = (cls, args)
-
- try:
- return _spontaneous_environments[key]
- except KeyError:
- _spontaneous_environments[key] = env = cls(*args)
- env.shared = True
- return env
-
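
In practice this means Template objects created directly with identical
options share one cached environment; a sketch, assuming a standalone
jinja2 2.x:

    from jinja2 import Template

    a = Template("hello {{ name }}")
    b = Template("goodbye {{ name }}")
    # Same options, same entry in the spontaneous-environment LRU cache.
    print(a.environment is b.environment)  # True
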
-
-def create_cache(size):
- """Return the cache class for the given size."""
- if size == 0:
- return None
- if size < 0:
- return {}
- return LRUCache(size)
-
-
-def copy_cache(cache):
- """Create an empty copy of the given cache."""
- if cache is None:
- return None
- elif type(cache) is dict:
- return {}
- return LRUCache(cache.capacity)
-
-
-def load_extensions(environment, extensions):
- """Load the extensions from the list and bind it to the environment.
- Returns a dict of instantiated environments.
- """
- result = {}
- for extension in extensions:
- if isinstance(extension, string_types):
- extension = import_string(extension)
- result[extension.identifier] = extension(environment)
- return result
-
-
-def fail_for_missing_callable(string, name):
- msg = string % name
- if isinstance(name, Undefined):
- try:
- name._fail_with_undefined_error()
- except Exception as e:
- msg = "%s (%s; did you forget to quote the callable name?)" % (msg, e)
- raise TemplateRuntimeError(msg)
-
-
-def _environment_sanity_check(environment):
- """Perform a sanity check on the environment."""
- assert issubclass(
- environment.undefined, Undefined
- ), "undefined must be a subclass of undefined because filters depend on it."
- assert (
- environment.block_start_string
- != environment.variable_start_string
- != environment.comment_start_string
- ), "block, variable and comment start strings must be different"
- assert environment.newline_sequence in (
- "\r",
- "\r\n",
- "\n",
- ), "newline_sequence set to unknown line ending string."
- return environment
-
-
-class Environment(object):
- r"""The core component of Jinja is the `Environment`. It contains
- important shared variables like configuration, filters, tests,
- globals and others. Instances of this class may be modified if
- they are not shared and if no template was loaded so far.
- Modifications on environments after the first template was loaded
- will lead to surprising effects and undefined behavior.
-
- Here are the possible initialization parameters:
-
- `block_start_string`
- The string marking the beginning of a block. Defaults to ``'{%'``.
-
- `block_end_string`
- The string marking the end of a block. Defaults to ``'%}'``.
-
- `variable_start_string`
- The string marking the beginning of a print statement.
- Defaults to ``'{{'``.
-
- `variable_end_string`
- The string marking the end of a print statement. Defaults to
- ``'}}'``.
-
- `comment_start_string`
- The string marking the beginning of a comment. Defaults to ``'{#'``.
-
- `comment_end_string`
- The string marking the end of a comment. Defaults to ``'#}'``.
-
- `line_statement_prefix`
- If given and a string, this will be used as prefix for line based
- statements. See also :ref:`line-statements`.
-
- `line_comment_prefix`
- If given and a string, this will be used as prefix for line based
- comments. See also :ref:`line-statements`.
-
- .. versionadded:: 2.2
-
- `trim_blocks`
- If this is set to ``True`` the first newline after a block is
- removed (block, not variable tag!). Defaults to `False`.
-
- `lstrip_blocks`
- If this is set to ``True`` leading spaces and tabs are stripped
- from the start of a line to a block. Defaults to `False`.
-
- `newline_sequence`
- The sequence that starts a newline. Must be one of ``'\r'``,
- ``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a
- useful default for Linux and OS X systems as well as web
- applications.
-
- `keep_trailing_newline`
- Preserve the trailing newline when rendering templates.
- The default is ``False``, which causes a single newline,
- if present, to be stripped from the end of the template.
-
- .. versionadded:: 2.7
-
- `extensions`
- List of Jinja extensions to use. This can either be import paths
- as strings or extension classes. For more information have a
- look at :ref:`the extensions documentation <jinja-extensions>`.
-
- `optimized`
- Should the optimizer be enabled? Defaults to ``True``.
-
- `undefined`
- :class:`Undefined` or a subclass of it that is used to represent
- undefined values in the template.
-
- `finalize`
- A callable that can be used to process the result of a variable
- expression before it is output. For example one can convert
- ``None`` implicitly into an empty string here.
-
- `autoescape`
- If set to ``True`` the XML/HTML autoescaping feature is enabled by
- default. For more details about autoescaping see
- :class:`~markupsafe.Markup`. As of Jinja 2.4 this can also
- be a callable that is passed the template name and has to
- return ``True`` or ``False`` depending on whether autoescape should
- be enabled by default.
-
- .. versionchanged:: 2.4
- `autoescape` can now be a function
-
- `loader`
- The template loader for this environment.
-
- `cache_size`
- The size of the cache. By default this is ``400``, which means
- that if more than 400 templates are loaded the loader will clean
- out the least recently used template. If the cache size is set to
- ``0`` templates are recompiled all the time, if the cache size is
- ``-1`` the cache will not be cleaned.
-
- .. versionchanged:: 2.8
- The cache size was increased to 400 from a low 50.
-
- `auto_reload`
- Some loaders load templates from locations where the template
- sources may change (i.e. file system or database). If
- ``auto_reload`` is set to ``True`` (default) every time a template is
- requested the loader checks if the source changed and if yes, it
- will reload the template. For higher performance it's possible to
- disable that.
-
- `bytecode_cache`
- If set to a bytecode cache object, this object will provide a
- cache for the internal Jinja bytecode so that templates don't
- have to be parsed if they were not changed.
-
- See :ref:`bytecode-cache` for more information.
-
- `enable_async`
- If set to true this enables async template execution which allows
- you to take advantage of newer Python features. This requires
- Python 3.6 or later.
- """
-
- #: if this environment is sandboxed. Modifying this variable won't make
- #: the environment sandboxed though. For a real sandboxed environment
- #: have a look at jinja2.sandbox. This flag alone controls the code
- #: generation by the compiler.
- sandboxed = False
-
- #: True if the environment is just an overlay
- overlayed = False
-
- #: the environment this environment is linked to if it is an overlay
- linked_to = None
-
- #: shared environments have this set to `True`. A shared environment
- #: must not be modified
- shared = False
-
- #: the class that is used for code generation. See
- #: :class:`~jinja2.compiler.CodeGenerator` for more information.
- code_generator_class = CodeGenerator
-
- #: the context class that is used for templates. See
- #: :class:`~jinja2.runtime.Context` for more information.
- context_class = Context
-
- def __init__(
- self,
- block_start_string=BLOCK_START_STRING,
- block_end_string=BLOCK_END_STRING,
- variable_start_string=VARIABLE_START_STRING,
- variable_end_string=VARIABLE_END_STRING,
- comment_start_string=COMMENT_START_STRING,
- comment_end_string=COMMENT_END_STRING,
- line_statement_prefix=LINE_STATEMENT_PREFIX,
- line_comment_prefix=LINE_COMMENT_PREFIX,
- trim_blocks=TRIM_BLOCKS,
- lstrip_blocks=LSTRIP_BLOCKS,
- newline_sequence=NEWLINE_SEQUENCE,
- keep_trailing_newline=KEEP_TRAILING_NEWLINE,
- extensions=(),
- optimized=True,
- undefined=Undefined,
- finalize=None,
- autoescape=False,
- loader=None,
- cache_size=400,
- auto_reload=True,
- bytecode_cache=None,
- enable_async=False,
- ):
- # !!Important notice!!
- # The constructor accepts quite a few arguments that should be
- # passed by keyword rather than position. However it's important to
- # not change the order of arguments because it's used at least
- # internally in those cases:
- # - spontaneous environments (i18n extension and Template)
- # - unittests
- # If parameter changes are required only add parameters at the end
- # and don't change the arguments (or the defaults!) of the arguments
- # existing already.
-
- # lexer / parser information
- self.block_start_string = block_start_string
- self.block_end_string = block_end_string
- self.variable_start_string = variable_start_string
- self.variable_end_string = variable_end_string
- self.comment_start_string = comment_start_string
- self.comment_end_string = comment_end_string
- self.line_statement_prefix = line_statement_prefix
- self.line_comment_prefix = line_comment_prefix
- self.trim_blocks = trim_blocks
- self.lstrip_blocks = lstrip_blocks
- self.newline_sequence = newline_sequence
- self.keep_trailing_newline = keep_trailing_newline
-
- # runtime information
- self.undefined = undefined
- self.optimized = optimized
- self.finalize = finalize
- self.autoescape = autoescape
-
- # defaults
- self.filters = DEFAULT_FILTERS.copy()
- self.tests = DEFAULT_TESTS.copy()
- self.globals = DEFAULT_NAMESPACE.copy()
-
- # set the loader provided
- self.loader = loader
- self.cache = create_cache(cache_size)
- self.bytecode_cache = bytecode_cache
- self.auto_reload = auto_reload
-
- # configurable policies
- self.policies = DEFAULT_POLICIES.copy()
-
- # load extensions
- self.extensions = load_extensions(self, extensions)
-
- self.enable_async = enable_async
- self.is_async = self.enable_async and have_async_gen
- if self.is_async:
- # runs patch_all() to enable async support
- from . import asyncsupport # noqa: F401
-
- _environment_sanity_check(self)
-
- def add_extension(self, extension):
- """Adds an extension after the environment was created.
-
- .. versionadded:: 2.5
- """
- self.extensions.update(load_extensions(self, [extension]))
-
- def extend(self, **attributes):
- """Add the items to the instance of the environment if they do not exist
- yet. This is used by :ref:`extensions <writing-extensions>` to register
- callbacks and configuration values without breaking inheritance.
- """
- for key, value in iteritems(attributes):
- if not hasattr(self, key):
- setattr(self, key, value)
-
- def overlay(
- self,
- block_start_string=missing,
- block_end_string=missing,
- variable_start_string=missing,
- variable_end_string=missing,
- comment_start_string=missing,
- comment_end_string=missing,
- line_statement_prefix=missing,
- line_comment_prefix=missing,
- trim_blocks=missing,
- lstrip_blocks=missing,
- extensions=missing,
- optimized=missing,
- undefined=missing,
- finalize=missing,
- autoescape=missing,
- loader=missing,
- cache_size=missing,
- auto_reload=missing,
- bytecode_cache=missing,
- ):
- """Create a new overlay environment that shares all the data with the
- current environment except for cache and the overridden attributes.
- Extensions cannot be removed for an overlayed environment. An overlayed
- environment automatically gets all the extensions of the environment it
- is linked to plus optional extra extensions.
-
- Creating overlays should happen after the initial environment was set
- up completely. Not all attributes are truly linked; some are just
- copied over, so modifications on the original environment may not
- be reflected in the overlay.
- """
- args = dict(locals())
- del args["self"], args["cache_size"], args["extensions"]
-
- rv = object.__new__(self.__class__)
- rv.__dict__.update(self.__dict__)
- rv.overlayed = True
- rv.linked_to = self
-
- for key, value in iteritems(args):
- if value is not missing:
- setattr(rv, key, value)
-
- if cache_size is not missing:
- rv.cache = create_cache(cache_size)
- else:
- rv.cache = copy_cache(self.cache)
-
- rv.extensions = {}
- for key, value in iteritems(self.extensions):
- rv.extensions[key] = value.bind(rv)
- if extensions is not missing:
- rv.extensions.update(load_extensions(rv, extensions))
-
- return _environment_sanity_check(rv)
-
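
A typical overlay derives an autoescaping variant without touching the
base environment; a sketch, assuming a standalone jinja2 2.x:

    from jinja2 import Environment

    base = Environment()
    html_env = base.overlay(autoescape=True)
    print(html_env.from_string("{{ x }}").render(x="<b>"))  # &lt;b&gt;
    print(base.from_string("{{ x }}").render(x="<b>"))      # <b>
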
- lexer = property(get_lexer, doc="The lexer for this environment.")
-
- def iter_extensions(self):
- """Iterates over the extensions by priority."""
- return iter(sorted(self.extensions.values(), key=lambda x: x.priority))
-
- def getitem(self, obj, argument):
- """Get an item or attribute of an object but prefer the item."""
- try:
- return obj[argument]
- except (AttributeError, TypeError, LookupError):
- if isinstance(argument, string_types):
- try:
- attr = str(argument)
- except Exception:
- pass
- else:
- try:
- return getattr(obj, attr)
- except AttributeError:
- pass
- return self.undefined(obj=obj, name=argument)
-
- def getattr(self, obj, attribute):
- """Get an item or attribute of an object but prefer the attribute.
- Unlike :meth:`getitem` the attribute *must* be a string.
- """
- try:
- return getattr(obj, attribute)
- except AttributeError:
- pass
- try:
- return obj[attribute]
- except (TypeError, LookupError, AttributeError):
- return self.undefined(obj=obj, name=attribute)
-
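The asymmetry between the two lookups is easiest to see with a dict whose key shadows a method of the same name (a sketch, not part of the original file):

from jinja2 import Environment

env = Environment()
data = {"items": "value stored under the key"}
print(env.getitem(data, "items"))  # subscript wins: 'value stored under the key'
print(env.getattr(data, "items"))  # attribute wins: <built-in method items ...>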
- def call_filter(
- self, name, value, args=None, kwargs=None, context=None, eval_ctx=None
- ):
- """Invokes a filter on a value the same way the compiler does it.
-
- Note that on Python 3 this might return a coroutine in case the
- filter is running from an environment in async mode and the filter
- supports async execution. It's your responsibility to await this
- if needed.
-
- .. versionadded:: 2.7
- """
- func = self.filters.get(name)
- if func is None:
- fail_for_missing_callable("no filter named %r", name)
- args = [value] + list(args or ())
- if getattr(func, "contextfilter", False) is True:
- if context is None:
- raise TemplateRuntimeError(
- "Attempted to invoke context filter without context"
- )
- args.insert(0, context)
- elif getattr(func, "evalcontextfilter", False) is True:
- if eval_ctx is None:
- if context is not None:
- eval_ctx = context.eval_ctx
- else:
- eval_ctx = EvalContext(self)
- args.insert(0, eval_ctx)
- elif getattr(func, "environmentfilter", False) is True:
- args.insert(0, self)
- return func(*args, **(kwargs or {}))
-
- def call_test(self, name, value, args=None, kwargs=None):
- """Invokes a test on a value the same way the compiler does it.
-
- .. versionadded:: 2.7
- """
- func = self.tests.get(name)
- if func is None:
- fail_for_missing_callable("no test named %r", name)
- return func(value, *(args or ()), **(kwargs or {}))
-
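Both helpers mirror what the compiler emits for ``|`` and ``is`` expressions; a quick illustrative sketch:

from jinja2 import Environment

env = Environment()
# equivalent to {{ "hello"|upper }} and {{ 3 is odd }} inside a template
print(env.call_filter("upper", "hello"))  # 'HELLO'
print(env.call_test("odd", 3))            # True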
- @internalcode
- def parse(self, source, name=None, filename=None):
- """Parse the sourcecode and return the abstract syntax tree. This
- tree of nodes is used by the compiler to convert the template into
- executable source- or bytecode. This is useful for debugging or to
- extract information from templates.
-
- If you are :ref:`developing Jinja extensions <writing-extensions>`
- this gives you a good overview of the node tree generated.
- """
- try:
- return self._parse(source, name, filename)
- except TemplateSyntaxError:
- self.handle_exception(source=source)
-
- def _parse(self, source, name, filename):
- """Internal parsing function used by `parse` and `compile`."""
- return Parser(self, source, name, encode_filename(filename)).parse()
-
- def lex(self, source, name=None, filename=None):
- """Lex the given sourcecode and return a generator that yields
- tokens as tuples in the form ``(lineno, token_type, value)``.
- This can be useful for :ref:`extension development <writing-extensions>`
- and debugging templates.
-
- This does not perform preprocessing. If you want the preprocessing
- of the extensions to be applied you have to filter source through
- the :meth:`preprocess` method.
- """
- source = text_type(source)
- try:
- return self.lexer.tokeniter(source, name, filename)
- except TemplateSyntaxError:
- self.handle_exception(source=source)
-
- def preprocess(self, source, name=None, filename=None):
- """Preprocesses the source with all extensions. This is automatically
- called for all parsing and compiling methods but *not* for :meth:`lex`
- because there you usually only want the actual source tokenized.
- """
- return reduce(
- lambda s, e: e.preprocess(s, name, filename),
- self.iter_extensions(),
- text_type(source),
- )
-
- def _tokenize(self, source, name, filename=None, state=None):
- """Called by the parser to do the preprocessing and filtering
- for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`.
- """
- source = self.preprocess(source, name, filename)
- stream = self.lexer.tokenize(source, name, filename, state)
- for ext in self.iter_extensions():
- stream = ext.filter_stream(stream)
- if not isinstance(stream, TokenStream):
- stream = TokenStream(stream, name, filename)
- return stream
-
- def _generate(self, source, name, filename, defer_init=False):
- """Internal hook that can be overridden to hook a different generate
- method in.
-
- .. versionadded:: 2.5
- """
- return generate(
- source,
- self,
- name,
- filename,
- defer_init=defer_init,
- optimized=self.optimized,
- )
-
- def _compile(self, source, filename):
- """Internal hook that can be overridden to hook a different compile
- method in.
-
- .. versionadded:: 2.5
- """
- return compile(source, filename, "exec")
-
- @internalcode
- def compile(self, source, name=None, filename=None, raw=False, defer_init=False):
- """Compile a node or template source code. The `name` parameter is
- the load name of the template after it was joined using
- :meth:`join_path` if necessary, not the filename on the file system.
- the `filename` parameter is the estimated filename of the template on
- the file system. If the template came from a database or memory this
- can be omitted.
-
- The return value of this method is a python code object. If the `raw`
- parameter is `True` the return value will be a string with python
- code equivalent to the bytecode returned otherwise. This method is
- mainly used internally.
-
- `defer_init` is used internally to aid the module code generator. It
- allows the generated code to be imported without the global environment
- variable having to be set.
-
- .. versionadded:: 2.4
- `defer_init` parameter added.
- """
- source_hint = None
- try:
- if isinstance(source, string_types):
- source_hint = source
- source = self._parse(source, name, filename)
- source = self._generate(source, name, filename, defer_init=defer_init)
- if raw:
- return source
- if filename is None:
- filename = "<template>"
- else:
- filename = encode_filename(filename)
- return self._compile(source, filename)
- except TemplateSyntaxError:
- self.handle_exception(source=source_hint)
-
- def compile_expression(self, source, undefined_to_none=True):
- """A handy helper method that returns a callable that accepts keyword
- arguments that appear as variables in the expression. If called it
- returns the result of the expression.
-
- This is useful if applications want to use the same rules as Jinja
- in template "configuration files" or similar situations.
-
- Example usage:
-
- >>> env = Environment()
- >>> expr = env.compile_expression('foo == 42')
- >>> expr(foo=23)
- False
- >>> expr(foo=42)
- True
-
- Per default the return value is converted to `None` if the
- expression returns an undefined value. This can be changed
- by setting `undefined_to_none` to `False`.
-
- >>> env.compile_expression('var')() is None
- True
- >>> env.compile_expression('var', undefined_to_none=False)()
- Undefined
-
- .. versionadded:: 2.1
- """
- parser = Parser(self, source, state="variable")
- try:
- expr = parser.parse_expression()
- if not parser.stream.eos:
- raise TemplateSyntaxError(
- "chunk after expression", parser.stream.current.lineno, None, None
- )
- expr.set_environment(self)
- except TemplateSyntaxError:
- if sys.exc_info() is not None:
- self.handle_exception(source=source)
-
- body = [nodes.Assign(nodes.Name("result", "store"), expr, lineno=1)]
- template = self.from_string(nodes.Template(body, lineno=1))
- return TemplateExpression(template, undefined_to_none)
-
- def compile_templates(
- self,
- target,
- extensions=None,
- filter_func=None,
- zip="deflated",
- log_function=None,
- ignore_errors=True,
- py_compile=False,
- ):
- """Finds all the templates the loader can find, compiles them
- and stores them in `target`. If `zip` is `None`, instead of in a
- zipfile, the templates will be stored in a directory.
- By default a deflate zip algorithm is used. To switch to
- the stored algorithm, `zip` can be set to ``'stored'``.
-
- `extensions` and `filter_func` are passed to :meth:`list_templates`.
- Each template returned will be compiled to the target folder or
- zipfile.
-
- By default template compilation errors are ignored. In case a
- log function is provided, errors are logged. If you want template
- syntax errors to abort the compilation you can set `ignore_errors`
- to `False` and you will get an exception on syntax errors.
-
- If `py_compile` is set to `True` .pyc files will be written to the
- target instead of standard .py files. This flag has no effect on PyPy
- and Python 3, where .pyc files are not picked up automatically and
- provide little benefit.
-
- .. versionadded:: 2.4
- """
- from .loaders import ModuleLoader
-
- if log_function is None:
-
- def log_function(x):
- pass
-
- if py_compile:
- if not PY2 or PYPY:
- import warnings
-
- warnings.warn(
- "'py_compile=True' has no effect on PyPy or Python"
- " 3 and will be removed in version 3.0",
- DeprecationWarning,
- stacklevel=2,
- )
- py_compile = False
- else:
- import imp
- import marshal
-
- py_header = imp.get_magic() + u"\xff\xff\xff\xff".encode("iso-8859-15")
-
- # Python 3.3 added a source filesize to the header
- if sys.version_info >= (3, 3):
- py_header += u"\x00\x00\x00\x00".encode("iso-8859-15")
-
- def write_file(filename, data):
- if zip:
- info = ZipInfo(filename)
- info.external_attr = 0o755 << 16
- zip_file.writestr(info, data)
- else:
- if isinstance(data, text_type):
- data = data.encode("utf8")
-
- with open(os.path.join(target, filename), "wb") as f:
- f.write(data)
-
- if zip is not None:
- from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED, ZIP_STORED
-
- zip_file = ZipFile(
- target, "w", dict(deflated=ZIP_DEFLATED, stored=ZIP_STORED)[zip]
- )
- log_function('Compiling into Zip archive "%s"' % target)
- else:
- if not os.path.isdir(target):
- os.makedirs(target)
- log_function('Compiling into folder "%s"' % target)
-
- try:
- for name in self.list_templates(extensions, filter_func):
- source, filename, _ = self.loader.get_source(self, name)
- try:
- code = self.compile(source, name, filename, True, True)
- except TemplateSyntaxError as e:
- if not ignore_errors:
- raise
- log_function('Could not compile "%s": %s' % (name, e))
- continue
-
- filename = ModuleLoader.get_module_filename(name)
-
- if py_compile:
- c = self._compile(code, encode_filename(filename))
- write_file(filename + "c", py_header + marshal.dumps(c))
- log_function('Byte-compiled "%s" as %s' % (name, filename + "c"))
- else:
- write_file(filename, code)
- log_function('Compiled "%s" as %s' % (name, filename))
- finally:
- if zip:
- zip_file.close()
-
- log_function("Finished compiling templates")
-
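A sketch of ahead-of-time compilation with this method; the ``templates`` and ``compiled`` paths are placeholders:

from jinja2 import Environment, FileSystemLoader, ModuleLoader

env = Environment(loader=FileSystemLoader("templates"))
# zip=None writes plain modules into a directory instead of a zipfile
env.compile_templates("compiled", zip=None, log_function=print)
# the compiled modules can later be loaded without reparsing the sources
fast_env = Environment(loader=ModuleLoader("compiled"))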
- def list_templates(self, extensions=None, filter_func=None):
- """Returns a list of templates for this environment. This requires
- that the loader supports the loader's
- :meth:`~BaseLoader.list_templates` method.
-
- If there are other files in the template folder besides the
- actual templates, the returned list can be filtered. There are two
- ways: either `extensions` is set to a list of file extensions for
- templates, or a `filter_func` can be provided which is a callable that
- is passed a template name and should return `True` if it should end up
- in the result list.
-
- If the loader does not support that, a :exc:`TypeError` is raised.
-
- .. versionadded:: 2.4
- """
- names = self.loader.list_templates()
-
- if extensions is not None:
- if filter_func is not None:
- raise TypeError(
- "either extensions or filter_func can be passed, but not both"
- )
-
- def filter_func(x):
- return "." in x and x.rsplit(".", 1)[1] in extensions
-
- if filter_func is not None:
- names = [name for name in names if filter_func(name)]
-
- return names
-
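A short sketch of both filtering styles (the directory name is a placeholder); passing both arguments at once raises the :exc:`TypeError` mentioned above:

from jinja2 import Environment, FileSystemLoader

env = Environment(loader=FileSystemLoader("templates"))
html_and_txt = env.list_templates(extensions=["html", "txt"])
partials = env.list_templates(filter_func=lambda name: name.startswith("_"))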
- def handle_exception(self, source=None):
- """Exception handling helper. This is used internally to either raise
- rewritten exceptions or return a rendered traceback for the template.
- """
- from .debug import rewrite_traceback_stack
-
- reraise(*rewrite_traceback_stack(source=source))
-
- def join_path(self, template, parent):
- """Join a template with the parent. By default all the lookups are
- relative to the loader root so this method returns the `template`
- parameter unchanged, but if the paths should be relative to the
- parent template, this function can be used to calculate the real
- template name.
-
- Subclasses may override this method and implement template path
- joining here.
- """
- return template
-
- @internalcode
- def _load_template(self, name, globals):
- if self.loader is None:
- raise TypeError("no loader for this environment specified")
- cache_key = (weakref.ref(self.loader), name)
- if self.cache is not None:
- template = self.cache.get(cache_key)
- if template is not None and (
- not self.auto_reload or template.is_up_to_date
- ):
- return template
- template = self.loader.load(self, name, globals)
- if self.cache is not None:
- self.cache[cache_key] = template
- return template
-
- @internalcode
- def get_template(self, name, parent=None, globals=None):
- """Load a template from the loader. If a loader is configured this
- method asks the loader for the template and returns a :class:`Template`.
- If the `parent` parameter is not `None`, :meth:`join_path` is called
- to get the real template name before loading.
-
- The `globals` parameter can be used to provide template wide globals.
- These variables are available in the context at render time.
-
- If the template does not exist a :exc:`TemplateNotFound` exception is
- raised.
-
- .. versionchanged:: 2.4
- If `name` is a :class:`Template` object it is returned from the
- function unchanged.
- """
- if isinstance(name, Template):
- return name
- if parent is not None:
- name = self.join_path(name, parent)
- return self._load_template(name, self.make_globals(globals))
-
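A self-contained sketch using an in-memory loader; the names are illustrative:

from jinja2 import DictLoader, Environment

env = Environment(loader=DictLoader({"hello.txt": "Hello {{ name }}!"}))
# `globals` become template-wide variables available at render time
template = env.get_template("hello.txt", globals={"name": "world"})
print(template.render())  # 'Hello world!'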
- @internalcode
- def select_template(self, names, parent=None, globals=None):
- """Works like :meth:`get_template` but tries a number of templates
- before it fails. If it cannot find any of the templates, it will
- raise a :exc:`TemplatesNotFound` exception.
-
- .. versionchanged:: 2.11
- If names is :class:`Undefined`, an :exc:`UndefinedError` is
- raised instead. If no templates were found and names
- contains :class:`Undefined`, the message is more helpful.
-
- .. versionchanged:: 2.4
- If `names` contains a :class:`Template` object it is returned
- from the function unchanged.
-
- .. versionadded:: 2.3
- """
- if isinstance(names, Undefined):
- names._fail_with_undefined_error()
-
- if not names:
- raise TemplatesNotFound(
- message=u"Tried to select from an empty list " u"of templates."
- )
- globals = self.make_globals(globals)
- for name in names:
- if isinstance(name, Template):
- return name
- if parent is not None:
- name = self.join_path(name, parent)
- try:
- return self._load_template(name, globals)
- except (TemplateNotFound, UndefinedError):
- pass
- raise TemplatesNotFound(names)
-
- @internalcode
- def get_or_select_template(self, template_name_or_list, parent=None, globals=None):
- """Does a typecheck and dispatches to :meth:`select_template`
- if an iterable of template names is given, otherwise to
- :meth:`get_template`.
-
- .. versionadded:: 2.3
- """
- if isinstance(template_name_or_list, (string_types, Undefined)):
- return self.get_template(template_name_or_list, parent, globals)
- elif isinstance(template_name_or_list, Template):
- return template_name_or_list
- return self.select_template(template_name_or_list, parent, globals)
-
- def from_string(self, source, globals=None, template_class=None):
- """Load a template from a string. This parses the source given and
- returns a :class:`Template` object.
- """
- globals = self.make_globals(globals)
- cls = template_class or self.template_class
- return cls.from_code(self, self.compile(source), globals, None)
-
- def make_globals(self, d):
- """Return a dict for the globals."""
- if not d:
- return self.globals
- return dict(self.globals, **d)
-
-
-class Template(object):
- """The central template object. This class represents a compiled template
- and is used to evaluate it.
-
- Normally the template object is generated from an :class:`Environment` but
- it also has a constructor that makes it possible to create a template
- instance directly using the constructor. It takes the same arguments as
- the environment constructor but it's not possible to specify a loader.
-
- Every template object has a few methods and members that are guaranteed
- to exist. However it's important that a template object should be
- considered immutable. Modifications on the object are not supported.
-
- Template objects created from the constructor rather than an environment
- do have an `environment` attribute that points to a temporary environment
- that is probably shared with other templates created with the constructor
- and compatible settings.
-
- >>> template = Template('Hello {{ name }}!')
- >>> template.render(name='John Doe') == u'Hello John Doe!'
- True
- >>> stream = template.stream(name='John Doe')
- >>> next(stream) == u'Hello John Doe!'
- True
- >>> next(stream)
- Traceback (most recent call last):
- ...
- StopIteration
- """
-
- #: Type of environment to create when creating a template directly
- #: rather than through an existing environment.
- environment_class = Environment
-
- def __new__(
- cls,
- source,
- block_start_string=BLOCK_START_STRING,
- block_end_string=BLOCK_END_STRING,
- variable_start_string=VARIABLE_START_STRING,
- variable_end_string=VARIABLE_END_STRING,
- comment_start_string=COMMENT_START_STRING,
- comment_end_string=COMMENT_END_STRING,
- line_statement_prefix=LINE_STATEMENT_PREFIX,
- line_comment_prefix=LINE_COMMENT_PREFIX,
- trim_blocks=TRIM_BLOCKS,
- lstrip_blocks=LSTRIP_BLOCKS,
- newline_sequence=NEWLINE_SEQUENCE,
- keep_trailing_newline=KEEP_TRAILING_NEWLINE,
- extensions=(),
- optimized=True,
- undefined=Undefined,
- finalize=None,
- autoescape=False,
- enable_async=False,
- ):
- env = get_spontaneous_environment(
- cls.environment_class,
- block_start_string,
- block_end_string,
- variable_start_string,
- variable_end_string,
- comment_start_string,
- comment_end_string,
- line_statement_prefix,
- line_comment_prefix,
- trim_blocks,
- lstrip_blocks,
- newline_sequence,
- keep_trailing_newline,
- frozenset(extensions),
- optimized,
- undefined,
- finalize,
- autoescape,
- None,
- 0,
- False,
- None,
- enable_async,
- )
- return env.from_string(source, template_class=cls)
-
- @classmethod
- def from_code(cls, environment, code, globals, uptodate=None):
- """Creates a template object from compiled code and the globals. This
- is used by the loaders and environment to create a template object.
- """
- namespace = {"environment": environment, "__file__": code.co_filename}
- exec(code, namespace)
- rv = cls._from_namespace(environment, namespace, globals)
- rv._uptodate = uptodate
- return rv
-
- @classmethod
- def from_module_dict(cls, environment, module_dict, globals):
- """Creates a template object from a module. This is used by the
- module loader to create a template object.
-
- .. versionadded:: 2.4
- """
- return cls._from_namespace(environment, module_dict, globals)
-
- @classmethod
- def _from_namespace(cls, environment, namespace, globals):
- t = object.__new__(cls)
- t.environment = environment
- t.globals = globals
- t.name = namespace["name"]
- t.filename = namespace["__file__"]
- t.blocks = namespace["blocks"]
-
- # render function and module
- t.root_render_func = namespace["root"]
- t._module = None
-
- # debug and loader helpers
- t._debug_info = namespace["debug_info"]
- t._uptodate = None
-
- # store the reference
- namespace["environment"] = environment
- namespace["__jinja_template__"] = t
-
- return t
-
- def render(self, *args, **kwargs):
- """This method accepts the same arguments as the `dict` constructor:
- A dict, a dict subclass or some keyword arguments. If no arguments
- are given the context will be empty. These two calls do the same::
-
- template.render(knights='that say nih')
- template.render({'knights': 'that say nih'})
-
- This will return the rendered template as a unicode string.
- """
- vars = dict(*args, **kwargs)
- try:
- return concat(self.root_render_func(self.new_context(vars)))
- except Exception:
- self.environment.handle_exception()
-
- def render_async(self, *args, **kwargs):
- """This works similar to :meth:`render` but returns a coroutine
- that when awaited returns the entire rendered template string. This
- requires the async feature to be enabled.
-
- Example usage::
-
- await template.render_async(knights='that say nih; asynchronously')
- """
- # see asyncsupport for the actual implementation
- raise NotImplementedError(
- "This feature is not available for this version of Python"
- )
-
- def stream(self, *args, **kwargs):
- """Works exactly like :meth:`generate` but returns a
- :class:`TemplateStream`.
- """
- return TemplateStream(self.generate(*args, **kwargs))
-
- def generate(self, *args, **kwargs):
- """For very large templates it can be useful to not render the whole
- template at once but to evaluate it statement by statement and yield
- the output piece by piece. This method does exactly that and returns
- a generator that yields one item after another as unicode strings.
-
- It accepts the same arguments as :meth:`render`.
- """
- vars = dict(*args, **kwargs)
- try:
- for event in self.root_render_func(self.new_context(vars)):
- yield event
- except Exception:
- yield self.environment.handle_exception()
-
- def generate_async(self, *args, **kwargs):
- """An async version of :meth:`generate`. Works very similarly but
- returns an async iterator instead.
- """
- # see asyncsupport for the actual implementation
- raise NotImplementedError(
- "This feature is not available for this version of Python"
- )
-
- def new_context(self, vars=None, shared=False, locals=None):
- """Create a new :class:`Context` for this template. The vars
- provided will be passed to the template. Per default the globals
- are added to the context. If shared is set to `True` the data
- is passed as is to the context without adding the globals.
-
- `locals` can be a dict of local variables for internal usage.
- """
- return new_context(
- self.environment, self.name, self.blocks, vars, shared, self.globals, locals
- )
-
- def make_module(self, vars=None, shared=False, locals=None):
- """This method works like the :attr:`module` attribute when called
- without arguments but it will evaluate the template on every call
- rather than caching it. It's also possible to provide
- a dict which is then used as context. The arguments are the same
- as for the :meth:`new_context` method.
- """
- return TemplateModule(self, self.new_context(vars, shared, locals))
-
- def make_module_async(self, vars=None, shared=False, locals=None):
- """As template module creation can invoke template code for
- asynchronous executions this method must be used instead of the
- normal :meth:`make_module` one. Likewise the module attribute
- becomes unavailable in async mode.
- """
- # see asyncsupport for the actual implementation
- raise NotImplementedError(
- "This feature is not available for this version of Python"
- )
-
- @internalcode
- def _get_default_module(self):
- if self._module is not None:
- return self._module
- self._module = rv = self.make_module()
- return rv
-
- @property
- def module(self):
- """The template as module. This is used for imports in the
- template runtime but is also useful if one wants to access
- exported template variables from the Python layer:
-
- >>> t = Template('{% macro foo() %}42{% endmacro %}23')
- >>> str(t.module)
- '23'
- >>> t.module.foo() == u'42'
- True
-
- This attribute is not available if async mode is enabled.
- """
- return self._get_default_module()
-
- def get_corresponding_lineno(self, lineno):
- """Return the source line number of a line number in the
- generated bytecode as they are not in sync.
- """
- for template_line, code_line in reversed(self.debug_info):
- if code_line <= lineno:
- return template_line
- return 1
-
- @property
- def is_up_to_date(self):
- """If this variable is `False` there is a newer version available."""
- if self._uptodate is None:
- return True
- return self._uptodate()
-
- @property
- def debug_info(self):
- """The debug info mapping."""
- if self._debug_info:
- return [tuple(map(int, x.split("="))) for x in self._debug_info.split("&")]
- return []
-
- def __repr__(self):
- if self.name is None:
- name = "memory:%x" % id(self)
- else:
- name = repr(self.name)
- return "<%s %s>" % (self.__class__.__name__, name)
-
-
-@implements_to_string
-class TemplateModule(object):
- """Represents an imported template. All the exported names of the
- template are available as attributes on this object. Additionally,
- converting it into a unicode or byte string renders the contents.
- """
-
- def __init__(self, template, context, body_stream=None):
- if body_stream is None:
- if context.environment.is_async:
- raise RuntimeError(
- "Async mode requires a body stream "
- "to be passed to a template module. Use "
- "the async methods of the API you are "
- "using."
- )
- body_stream = list(template.root_render_func(context))
- self._body_stream = body_stream
- self.__dict__.update(context.get_exported())
- self.__name__ = template.name
-
- def __html__(self):
- return Markup(concat(self._body_stream))
-
- def __str__(self):
- return concat(self._body_stream)
-
- def __repr__(self):
- if self.__name__ is None:
- name = "memory:%x" % id(self)
- else:
- name = repr(self.__name__)
- return "<%s %s>" % (self.__class__.__name__, name)
-
-
-class TemplateExpression(object):
- """The :meth:`jinja2.Environment.compile_expression` method returns an
- instance of this object. It encapsulates the expression-like access
- to the template with an expression it wraps.
- """
-
- def __init__(self, template, undefined_to_none):
- self._template = template
- self._undefined_to_none = undefined_to_none
-
- def __call__(self, *args, **kwargs):
- context = self._template.new_context(dict(*args, **kwargs))
- consume(self._template.root_render_func(context))
- rv = context.vars["result"]
- if self._undefined_to_none and isinstance(rv, Undefined):
- rv = None
- return rv
-
-
-@implements_iterator
-class TemplateStream(object):
- """A template stream works pretty much like an ordinary python generator
- but it can buffer multiple items to reduce the number of total iterations.
- Per default the output is unbuffered which means that for every unbuffered
- instruction in the template one unicode string is yielded.
-
- If buffering is enabled with a buffer size of 5, five items are combined
- into a new unicode string. This is mainly useful if you are streaming
- big templates to a client via WSGI which flushes after each iteration.
- """
-
- def __init__(self, gen):
- self._gen = gen
- self.disable_buffering()
-
- def dump(self, fp, encoding=None, errors="strict"):
- """Dump the complete stream into a file or file-like object.
- Per default unicode strings are written, if you want to encode
- before writing specify an `encoding`.
-
- Example usage::
-
- Template('Hello {{ name }}!').stream(name='foo').dump('hello.html')
- """
- close = False
- if isinstance(fp, string_types):
- if encoding is None:
- encoding = "utf-8"
- fp = open(fp, "wb")
- close = True
- try:
- if encoding is not None:
- iterable = (x.encode(encoding, errors) for x in self)
- else:
- iterable = self
- if hasattr(fp, "writelines"):
- fp.writelines(iterable)
- else:
- for item in iterable:
- fp.write(item)
- finally:
- if close:
- fp.close()
-
- def disable_buffering(self):
- """Disable the output buffering."""
- self._next = partial(next, self._gen)
- self.buffered = False
-
- def _buffered_generator(self, size):
- buf = []
- c_size = 0
- push = buf.append
-
- while 1:
- try:
- while c_size < size:
- c = next(self._gen)
- push(c)
- if c:
- c_size += 1
- except StopIteration:
- if not c_size:
- return
- yield concat(buf)
- del buf[:]
- c_size = 0
-
- def enable_buffering(self, size=5):
- """Enable buffering. Buffer `size` items before yielding them."""
- if size <= 1:
- raise ValueError("buffer size too small")
-
- self.buffered = True
- self._next = partial(next, self._buffered_generator(size))
-
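A sketch of streaming with buffering enabled; ``out.txt`` is a placeholder path:

from jinja2 import Template

stream = Template("{% for i in items %}{{ i }}\n{% endfor %}").stream(items=range(100))
stream.enable_buffering(size=5)  # concatenate five events per yielded string
stream.dump("out.txt")           # or pass any file-like object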
- def __iter__(self):
- return self
-
- def __next__(self):
- return self._next()
-
-
-# hook in default template class. if anyone reads this comment: ignore that
-# it's possible to use custom templates ;-)
-Environment.template_class = Template
diff --git a/lib/spack/external/jinja2/exceptions.py b/lib/spack/external/jinja2/exceptions.py
deleted file mode 100644
index 0bf2003e30..0000000000
--- a/lib/spack/external/jinja2/exceptions.py
+++ /dev/null
@@ -1,177 +0,0 @@
-# -*- coding: utf-8 -*-
-from ._compat import imap
-from ._compat import implements_to_string
-from ._compat import PY2
-from ._compat import text_type
-
-
-class TemplateError(Exception):
- """Baseclass for all template errors."""
-
- if PY2:
-
- def __init__(self, message=None):
- if message is not None:
- message = text_type(message).encode("utf-8")
- Exception.__init__(self, message)
-
- @property
- def message(self):
- if self.args:
- message = self.args[0]
- if message is not None:
- return message.decode("utf-8", "replace")
-
- def __unicode__(self):
- return self.message or u""
-
- else:
-
- def __init__(self, message=None):
- Exception.__init__(self, message)
-
- @property
- def message(self):
- if self.args:
- message = self.args[0]
- if message is not None:
- return message
-
-
-@implements_to_string
-class TemplateNotFound(IOError, LookupError, TemplateError):
- """Raised if a template does not exist.
-
- .. versionchanged:: 2.11
- If the given name is :class:`Undefined` and no message was
- provided, an :exc:`UndefinedError` is raised.
- """
-
- # looks weird, but removes the warning descriptor that just
- # bogusly warns us about message being deprecated
- message = None
-
- def __init__(self, name, message=None):
- IOError.__init__(self, name)
-
- if message is None:
- from .runtime import Undefined
-
- if isinstance(name, Undefined):
- name._fail_with_undefined_error()
-
- message = name
-
- self.message = message
- self.name = name
- self.templates = [name]
-
- def __str__(self):
- return self.message
-
-
-class TemplatesNotFound(TemplateNotFound):
- """Like :class:`TemplateNotFound` but raised if multiple templates
- are selected. This is a subclass of :class:`TemplateNotFound`
- exception, so just catching the base exception will catch both.
-
- .. versionchanged:: 2.11
- If a name in the list of names is :class:`Undefined`, a message
- about it being undefined is shown rather than the empty string.
-
- .. versionadded:: 2.2
- """
-
- def __init__(self, names=(), message=None):
- if message is None:
- from .runtime import Undefined
-
- parts = []
-
- for name in names:
- if isinstance(name, Undefined):
- parts.append(name._undefined_message)
- else:
- parts.append(name)
-
- message = u"none of the templates given were found: " + u", ".join(
- imap(text_type, parts)
- )
- TemplateNotFound.__init__(self, names and names[-1] or None, message)
- self.templates = list(names)
-
-
-@implements_to_string
-class TemplateSyntaxError(TemplateError):
- """Raised to tell the user that there is a problem with the template."""
-
- def __init__(self, message, lineno, name=None, filename=None):
- TemplateError.__init__(self, message)
- self.lineno = lineno
- self.name = name
- self.filename = filename
- self.source = None
-
- # this is set to True if the debug.translate_syntax_error
- # function translated the syntax error into a new traceback
- self.translated = False
-
- def __str__(self):
- # for translated errors we only return the message
- if self.translated:
- return self.message
-
- # otherwise attach some stuff
- location = "line %d" % self.lineno
- name = self.filename or self.name
- if name:
- location = 'File "%s", %s' % (name, location)
- lines = [self.message, " " + location]
-
- # if the source is set, add the line to the output
- if self.source is not None:
- try:
- line = self.source.splitlines()[self.lineno - 1]
- except IndexError:
- line = None
- if line:
- lines.append(" " + line.strip())
-
- return u"\n".join(lines)
-
- def __reduce__(self):
- # https://bugs.python.org/issue1692335 Exceptions that take
- # multiple required arguments have problems with pickling.
- # Without this, raises TypeError: __init__() missing 1 required
- # positional argument: 'lineno'
- return self.__class__, (self.message, self.lineno, self.name, self.filename)
-
-
-class TemplateAssertionError(TemplateSyntaxError):
- """Like a template syntax error, but covers cases where something in the
- template caused an error at compile time that wasn't necessarily caused
- by a syntax error. However it's a direct subclass of
- :exc:`TemplateSyntaxError` and has the same attributes.
- """
-
-
-class TemplateRuntimeError(TemplateError):
- """A generic runtime error in the template engine. Under some situations
- Jinja may raise this exception.
- """
-
-
-class UndefinedError(TemplateRuntimeError):
- """Raised if a template tries to operate on :class:`Undefined`."""
-
-
-class SecurityError(TemplateRuntimeError):
- """Raised if a template tries to do something insecure if the
- sandbox is enabled.
- """
-
-
-class FilterArgumentError(TemplateRuntimeError):
- """This error is raised if a filter was called with inappropriate
- arguments.
- """
diff --git a/lib/spack/external/jinja2/ext.py b/lib/spack/external/jinja2/ext.py
deleted file mode 100644
index 9141be4dac..0000000000
--- a/lib/spack/external/jinja2/ext.py
+++ /dev/null
@@ -1,704 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Extension API for adding custom tags and behavior."""
-import pprint
-import re
-from sys import version_info
-
-from markupsafe import Markup
-
-from . import nodes
-from ._compat import iteritems
-from ._compat import string_types
-from ._compat import with_metaclass
-from .defaults import BLOCK_END_STRING
-from .defaults import BLOCK_START_STRING
-from .defaults import COMMENT_END_STRING
-from .defaults import COMMENT_START_STRING
-from .defaults import KEEP_TRAILING_NEWLINE
-from .defaults import LINE_COMMENT_PREFIX
-from .defaults import LINE_STATEMENT_PREFIX
-from .defaults import LSTRIP_BLOCKS
-from .defaults import NEWLINE_SEQUENCE
-from .defaults import TRIM_BLOCKS
-from .defaults import VARIABLE_END_STRING
-from .defaults import VARIABLE_START_STRING
-from .environment import Environment
-from .exceptions import TemplateAssertionError
-from .exceptions import TemplateSyntaxError
-from .nodes import ContextReference
-from .runtime import concat
-from .utils import contextfunction
-from .utils import import_string
-
-# the only really useful gettext functions for a Jinja template. Note
-# that ugettext must be assigned to gettext as Jinja doesn't support
-# non-unicode strings.
-GETTEXT_FUNCTIONS = ("_", "gettext", "ngettext")
-
-_ws_re = re.compile(r"\s*\n\s*")
-
-
-class ExtensionRegistry(type):
- """Gives the extension an unique identifier."""
-
- def __new__(mcs, name, bases, d):
- rv = type.__new__(mcs, name, bases, d)
- rv.identifier = rv.__module__ + "." + rv.__name__
- return rv
-
-
-class Extension(with_metaclass(ExtensionRegistry, object)):
- """Extensions can be used to add extra functionality to the Jinja template
- system at the parser level. Custom extensions are bound to an environment
- but may not store environment specific data on `self`. The reason for
- this is that an extension can be bound to another environment (for
- overlays) by creating a copy and reassigning the `environment` attribute.
-
- As extensions are created by the environment they cannot accept any
- arguments for configuration. One may want to work around that by using
- a factory function, but that is not possible as extensions are identified
- by their import name. The correct way to configure the extension is
- storing the configuration values on the environment. Because the
- environment then ends up acting as central configuration storage, the
- attributes may clash, which is why extensions have to ensure that the
- names they choose for configuration are not too generic. ``prefix`` for
- example is a terrible name; ``fragment_cache_prefix`` on the other hand
- is a good name, as it includes the name of the extension (fragment cache).
- """
-
- #: if this extension parses, this is the set of tags it is listening for.
- tags = set()
-
- #: the priority of that extension. This is especially useful for
- #: extensions that preprocess values. A lower value means higher
- #: priority.
- #:
- #: .. versionadded:: 2.4
- priority = 100
-
- def __init__(self, environment):
- self.environment = environment
-
- def bind(self, environment):
- """Create a copy of this extension bound to another environment."""
- rv = object.__new__(self.__class__)
- rv.__dict__.update(self.__dict__)
- rv.environment = environment
- return rv
-
- def preprocess(self, source, name, filename=None):
- """This method is called before the actual lexing and can be used to
- preprocess the source. The `filename` is optional. The return value
- must be the preprocessed source.
- """
- return source
-
- def filter_stream(self, stream):
- """It's passed a :class:`~jinja2.lexer.TokenStream` that can be used
- to filter tokens returned. This method has to return an iterable of
- :class:`~jinja2.lexer.Token`\\s, but it doesn't have to return a
- :class:`~jinja2.lexer.TokenStream`.
- """
- return stream
-
- def parse(self, parser):
- """If any of the :attr:`tags` matched this method is called with the
- parser as first argument. The token the parser stream is pointing at
- is the name token that matched. This method has to return a single
- node or a list of nodes.
- """
- raise NotImplementedError()
-
- def attr(self, name, lineno=None):
- """Return an attribute node for the current extension. This is useful
- to pass constants on extensions to generated template code.
-
- ::
-
- self.attr('_my_attribute', lineno=lineno)
- """
- return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno)
-
- def call_method(
- self, name, args=None, kwargs=None, dyn_args=None, dyn_kwargs=None, lineno=None
- ):
- """Call a method of the extension. This is a shortcut for
- :meth:`attr` + :class:`jinja2.nodes.Call`.
- """
- if args is None:
- args = []
- if kwargs is None:
- kwargs = []
- return nodes.Call(
- self.attr(name, lineno=lineno),
- args,
- kwargs,
- dyn_args,
- dyn_kwargs,
- lineno=lineno,
- )
-
-
-@contextfunction
-def _gettext_alias(__context, *args, **kwargs):
- return __context.call(__context.resolve("gettext"), *args, **kwargs)
-
-
-def _make_new_gettext(func):
- @contextfunction
- def gettext(__context, __string, **variables):
- rv = __context.call(func, __string)
- if __context.eval_ctx.autoescape:
- rv = Markup(rv)
- # Always treat as a format string, even if there are no
- # variables. This makes translation strings more consistent
- # and predictable. This requires escaping
- return rv % variables
-
- return gettext
-
-
-def _make_new_ngettext(func):
- @contextfunction
- def ngettext(__context, __singular, __plural, __num, **variables):
- variables.setdefault("num", __num)
- rv = __context.call(func, __singular, __plural, __num)
- if __context.eval_ctx.autoescape:
- rv = Markup(rv)
- # Always treat as a format string, see gettext comment above.
- return rv % variables
-
- return ngettext
-
-
-class InternationalizationExtension(Extension):
- """This extension adds gettext support to Jinja."""
-
- tags = {"trans"}
-
- # TODO: the i18n extension is currently reevaluating values in a few
- # situations. Take this example:
- # {% trans count=something() %}{{ count }} foo{% pluralize
- # %}{{ count }} foos{% endtrans %}
- # something is called twice here. One time for the gettext value and
- # the other time for the n-parameter of the ngettext function.
-
- def __init__(self, environment):
- Extension.__init__(self, environment)
- environment.globals["_"] = _gettext_alias
- environment.extend(
- install_gettext_translations=self._install,
- install_null_translations=self._install_null,
- install_gettext_callables=self._install_callables,
- uninstall_gettext_translations=self._uninstall,
- extract_translations=self._extract,
- newstyle_gettext=False,
- )
-
- def _install(self, translations, newstyle=None):
- gettext = getattr(translations, "ugettext", None)
- if gettext is None:
- gettext = translations.gettext
- ngettext = getattr(translations, "ungettext", None)
- if ngettext is None:
- ngettext = translations.ngettext
- self._install_callables(gettext, ngettext, newstyle)
-
- def _install_null(self, newstyle=None):
- self._install_callables(
- lambda x: x, lambda s, p, n: (n != 1 and (p,) or (s,))[0], newstyle
- )
-
- def _install_callables(self, gettext, ngettext, newstyle=None):
- if newstyle is not None:
- self.environment.newstyle_gettext = newstyle
- if self.environment.newstyle_gettext:
- gettext = _make_new_gettext(gettext)
- ngettext = _make_new_ngettext(ngettext)
- self.environment.globals.update(gettext=gettext, ngettext=ngettext)
-
- def _uninstall(self, translations):
- for key in "gettext", "ngettext":
- self.environment.globals.pop(key, None)
-
- def _extract(self, source, gettext_functions=GETTEXT_FUNCTIONS):
- if isinstance(source, string_types):
- source = self.environment.parse(source)
- return extract_from_ast(source, gettext_functions)
-
- def parse(self, parser):
- """Parse a translatable tag."""
- lineno = next(parser.stream).lineno
- num_called_num = False
-
- # find all the variables referenced. Additionally a variable can be
- # defined in the body of the trans block too, but this is checked at
- # a later state.
- plural_expr = None
- plural_expr_assignment = None
- variables = {}
- trimmed = None
- while parser.stream.current.type != "block_end":
- if variables:
- parser.stream.expect("comma")
-
- # skip colon for python compatibility
- if parser.stream.skip_if("colon"):
- break
-
- name = parser.stream.expect("name")
- if name.value in variables:
- parser.fail(
- "translatable variable %r defined twice." % name.value,
- name.lineno,
- exc=TemplateAssertionError,
- )
-
- # expressions
- if parser.stream.current.type == "assign":
- next(parser.stream)
- variables[name.value] = var = parser.parse_expression()
- elif trimmed is None and name.value in ("trimmed", "notrimmed"):
- trimmed = name.value == "trimmed"
- continue
- else:
- variables[name.value] = var = nodes.Name(name.value, "load")
-
- if plural_expr is None:
- if isinstance(var, nodes.Call):
- plural_expr = nodes.Name("_trans", "load")
- variables[name.value] = plural_expr
- plural_expr_assignment = nodes.Assign(
- nodes.Name("_trans", "store"), var
- )
- else:
- plural_expr = var
- num_called_num = name.value == "num"
-
- parser.stream.expect("block_end")
-
- plural = None
- have_plural = False
- referenced = set()
-
- # now parse until endtrans or pluralize
- singular_names, singular = self._parse_block(parser, True)
- if singular_names:
- referenced.update(singular_names)
- if plural_expr is None:
- plural_expr = nodes.Name(singular_names[0], "load")
- num_called_num = singular_names[0] == "num"
-
- # if we have a pluralize block, we parse that too
- if parser.stream.current.test("name:pluralize"):
- have_plural = True
- next(parser.stream)
- if parser.stream.current.type != "block_end":
- name = parser.stream.expect("name")
- if name.value not in variables:
- parser.fail(
- "unknown variable %r for pluralization" % name.value,
- name.lineno,
- exc=TemplateAssertionError,
- )
- plural_expr = variables[name.value]
- num_called_num = name.value == "num"
- parser.stream.expect("block_end")
- plural_names, plural = self._parse_block(parser, False)
- next(parser.stream)
- referenced.update(plural_names)
- else:
- next(parser.stream)
-
- # register free names as simple name expressions
- for var in referenced:
- if var not in variables:
- variables[var] = nodes.Name(var, "load")
-
- if not have_plural:
- plural_expr = None
- elif plural_expr is None:
- parser.fail("pluralize without variables", lineno)
-
- if trimmed is None:
- trimmed = self.environment.policies["ext.i18n.trimmed"]
- if trimmed:
- singular = self._trim_whitespace(singular)
- if plural:
- plural = self._trim_whitespace(plural)
-
- node = self._make_node(
- singular,
- plural,
- variables,
- plural_expr,
- bool(referenced),
- num_called_num and have_plural,
- )
- node.set_lineno(lineno)
- if plural_expr_assignment is not None:
- return [plural_expr_assignment, node]
- else:
- return node
-
- def _trim_whitespace(self, string, _ws_re=_ws_re):
- return _ws_re.sub(" ", string.strip())
-
- def _parse_block(self, parser, allow_pluralize):
- """Parse until the next block tag with a given name."""
- referenced = []
- buf = []
- while 1:
- if parser.stream.current.type == "data":
- buf.append(parser.stream.current.value.replace("%", "%%"))
- next(parser.stream)
- elif parser.stream.current.type == "variable_begin":
- next(parser.stream)
- name = parser.stream.expect("name").value
- referenced.append(name)
- buf.append("%%(%s)s" % name)
- parser.stream.expect("variable_end")
- elif parser.stream.current.type == "block_begin":
- next(parser.stream)
- if parser.stream.current.test("name:endtrans"):
- break
- elif parser.stream.current.test("name:pluralize"):
- if allow_pluralize:
- break
- parser.fail(
- "a translatable section can have only one pluralize section"
- )
- parser.fail(
- "control structures in translatable sections are not allowed"
- )
- elif parser.stream.eos:
- parser.fail("unclosed translation block")
- else:
- raise RuntimeError("internal parser error")
-
- return referenced, concat(buf)
-
- def _make_node(
- self, singular, plural, variables, plural_expr, vars_referenced, num_called_num
- ):
- """Generates a useful node from the data provided."""
- # no variables referenced? then the %% escaping can be undone; it is
- # only needed for old-style gettext invocations that receive vars.
- if not vars_referenced and not self.environment.newstyle_gettext:
- singular = singular.replace("%%", "%")
- if plural:
- plural = plural.replace("%%", "%")
-
- # singular only:
- if plural_expr is None:
- gettext = nodes.Name("gettext", "load")
- node = nodes.Call(gettext, [nodes.Const(singular)], [], None, None)
-
- # singular and plural
- else:
- ngettext = nodes.Name("ngettext", "load")
- node = nodes.Call(
- ngettext,
- [nodes.Const(singular), nodes.Const(plural), plural_expr],
- [],
- None,
- None,
- )
-
- # in case newstyle gettext is used, the method is powerful
- # enough to handle the variable expansion and autoescape
- # handling itself
- if self.environment.newstyle_gettext:
- for key, value in iteritems(variables):
- # the function adds that later anyways in case num was
- # called num, so just skip it.
- if num_called_num and key == "num":
- continue
- node.kwargs.append(nodes.Keyword(key, value))
-
- # otherwise do that here
- else:
- # mark the return value as safe if we are in an
- # environment with autoescaping turned on
- node = nodes.MarkSafeIfAutoescape(node)
- if variables:
- node = nodes.Mod(
- node,
- nodes.Dict(
- [
- nodes.Pair(nodes.Const(key), value)
- for key, value in variables.items()
- ]
- ),
- )
- return nodes.Output([node])
-
-
-class ExprStmtExtension(Extension):
- """Adds a `do` tag to Jinja that works like the print statement just
- that it doesn't print the return value.
- """
-
- tags = set(["do"])
-
- def parse(self, parser):
- node = nodes.ExprStmt(lineno=next(parser.stream).lineno)
- node.node = parser.parse_tuple()
- return node
-
-
-class LoopControlExtension(Extension):
- """Adds break and continue to the template engine."""
-
- tags = set(["break", "continue"])
-
- def parse(self, parser):
- token = next(parser.stream)
- if token.value == "break":
- return nodes.Break(lineno=token.lineno)
- return nodes.Continue(lineno=token.lineno)
-
-
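Both extensions above are enabled by import name when constructing an environment; a minimal sketch:

from jinja2 import Environment

env = Environment(extensions=["jinja2.ext.do", "jinja2.ext.loopcontrols"])
tmpl = env.from_string(
    "{% for c in 'abc' %}{% if c == 'b' %}{% break %}{% endif %}{{ c }}{% endfor %}"
)
print(tmpl.render())  # 'a' -- the loop breaks before printing 'b'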
-class WithExtension(Extension):
- pass
-
-
-class AutoEscapeExtension(Extension):
- pass
-
-
-class DebugExtension(Extension):
- """A ``{% debug %}`` tag that dumps the available variables,
- filters, and tests.
-
- .. code-block:: html+jinja
-
- <pre>{% debug %}</pre>
-
- .. code-block:: text
-
- {'context': {'cycler': <class 'jinja2.utils.Cycler'>,
- ...,
- 'namespace': <class 'jinja2.utils.Namespace'>},
- 'filters': ['abs', 'attr', 'batch', 'capitalize', 'center', 'count', 'd',
- ..., 'urlencode', 'urlize', 'wordcount', 'wordwrap', 'xmlattr'],
- 'tests': ['!=', '<', '<=', '==', '>', '>=', 'callable', 'defined',
- ..., 'odd', 'sameas', 'sequence', 'string', 'undefined', 'upper']}
-
- .. versionadded:: 2.11.0
- """
-
- tags = {"debug"}
-
- def parse(self, parser):
- lineno = parser.stream.expect("name:debug").lineno
- context = ContextReference()
- result = self.call_method("_render", [context], lineno=lineno)
- return nodes.Output([result], lineno=lineno)
-
- def _render(self, context):
- result = {
- "context": context.get_all(),
- "filters": sorted(self.environment.filters.keys()),
- "tests": sorted(self.environment.tests.keys()),
- }
-
- # Set the depth since the intent is to show the top few names.
- if version_info[:2] >= (3, 4):
- return pprint.pformat(result, depth=3, compact=True)
- else:
- return pprint.pformat(result, depth=3)
-
-
-def extract_from_ast(node, gettext_functions=GETTEXT_FUNCTIONS, babel_style=True):
- """Extract localizable strings from the given template node. Per
- default this function returns matches in Babel style, which means
- non-string parameters as well as keyword arguments are returned as
- `None`. This allows Babel to figure out what you really meant if you
- are using gettext functions that allow keyword arguments for
- placeholder expansion. If you don't want that behavior, set the
- `babel_style` parameter to `False`, which causes only strings to be
- returned and parameters to always be stored in tuples. As a
- consequence, invalid gettext calls (calls without a single string
- parameter or string parameters after non-string parameters) are
- skipped.
-
- This example explains the behavior:
-
- >>> from jinja2 import Environment
- >>> env = Environment()
- >>> node = env.parse('{{ (_("foo"), _(), ngettext("foo", "bar", 42)) }}')
- >>> list(extract_from_ast(node))
- [(1, '_', 'foo'), (1, '_', ()), (1, 'ngettext', ('foo', 'bar', None))]
- >>> list(extract_from_ast(node, babel_style=False))
- [(1, '_', ('foo',)), (1, 'ngettext', ('foo', 'bar'))]
-
- For every string found this function yields a ``(lineno, function,
- message)`` tuple, where:
-
- * ``lineno`` is the number of the line on which the string was found,
- * ``function`` is the name of the ``gettext`` function used (if the
- string was extracted from embedded Python code), and
- * ``message`` is the string itself (a ``unicode`` object, or a tuple
- of ``unicode`` objects for functions with multiple string arguments).
-
- This extraction function operates on the AST and is therefore unable
- to extract any comments. For comment support you have to use the babel
- extraction interface or extract comments yourself.
- """
- for node in node.find_all(nodes.Call):
- if (
- not isinstance(node.node, nodes.Name)
- or node.node.name not in gettext_functions
- ):
- continue
-
- strings = []
- for arg in node.args:
- if isinstance(arg, nodes.Const) and isinstance(arg.value, string_types):
- strings.append(arg.value)
- else:
- strings.append(None)
-
- for _ in node.kwargs:
- strings.append(None)
- if node.dyn_args is not None:
- strings.append(None)
- if node.dyn_kwargs is not None:
- strings.append(None)
-
- if not babel_style:
- strings = tuple(x for x in strings if x is not None)
- if not strings:
- continue
- else:
- if len(strings) == 1:
- strings = strings[0]
- else:
- strings = tuple(strings)
- yield node.lineno, node.node.name, strings
-
-
-class _CommentFinder(object):
- """Helper class to find comments in a token stream. Can only
- find comments for gettext calls scanning forwards: once the
- comment for line 4 has been found, a comment for line 1 will no
- longer return a usable value.
- """
-
- def __init__(self, tokens, comment_tags):
- self.tokens = tokens
- self.comment_tags = comment_tags
- self.offset = 0
- self.last_lineno = 0
-
- def find_backwards(self, offset):
- try:
- for _, token_type, token_value in reversed(
- self.tokens[self.offset : offset]
- ):
- if token_type in ("comment", "linecomment"):
- try:
- prefix, comment = token_value.split(None, 1)
- except ValueError:
- continue
- if prefix in self.comment_tags:
- return [comment.rstrip()]
- return []
- finally:
- self.offset = offset
-
- def find_comments(self, lineno):
- if not self.comment_tags or self.last_lineno > lineno:
- return []
- for idx, (token_lineno, _, _) in enumerate(self.tokens[self.offset :]):
- if token_lineno > lineno:
- return self.find_backwards(self.offset + idx)
- return self.find_backwards(len(self.tokens))
-
-
-def babel_extract(fileobj, keywords, comment_tags, options):
- """Babel extraction method for Jinja templates.
-
- .. versionchanged:: 2.3
- Basic support for translation comments was added. If `comment_tags`
- is now set to a list of keywords for extraction, the extractor will
- try to find the best preceding comment that begins with one of the
- keywords. For best results, make sure to not have more than one
- gettext call in one line of code and the matching comment in the
- same line or the line before.
-
- .. versionchanged:: 2.5.1
- The `newstyle_gettext` flag can be set to `True` to enable newstyle
- gettext calls.
-
- .. versionchanged:: 2.7
- A `silent` option can now be provided. If set to `False` template
- syntax errors are propagated instead of being ignored.
-
- :param fileobj: the file-like object the messages should be extracted from
- :param keywords: a list of keywords (i.e. function names) that should be
- recognized as translation functions
- :param comment_tags: a list of translator tags to search for and include
- in the results.
- :param options: a dictionary of additional options (optional)
- :return: an iterator over ``(lineno, funcname, message, comments)`` tuples.
- (comments will be empty currently)
- """
- extensions = set()
- for extension in options.get("extensions", "").split(","):
- extension = extension.strip()
- if not extension:
- continue
- extensions.add(import_string(extension))
- if InternationalizationExtension not in extensions:
- extensions.add(InternationalizationExtension)
-
- def getbool(options, key, default=False):
- return options.get(key, str(default)).lower() in ("1", "on", "yes", "true")
-
- silent = getbool(options, "silent", True)
- environment = Environment(
- options.get("block_start_string", BLOCK_START_STRING),
- options.get("block_end_string", BLOCK_END_STRING),
- options.get("variable_start_string", VARIABLE_START_STRING),
- options.get("variable_end_string", VARIABLE_END_STRING),
- options.get("comment_start_string", COMMENT_START_STRING),
- options.get("comment_end_string", COMMENT_END_STRING),
- options.get("line_statement_prefix") or LINE_STATEMENT_PREFIX,
- options.get("line_comment_prefix") or LINE_COMMENT_PREFIX,
- getbool(options, "trim_blocks", TRIM_BLOCKS),
- getbool(options, "lstrip_blocks", LSTRIP_BLOCKS),
- NEWLINE_SEQUENCE,
- getbool(options, "keep_trailing_newline", KEEP_TRAILING_NEWLINE),
- frozenset(extensions),
- cache_size=0,
- auto_reload=False,
- )
-
- if getbool(options, "trimmed"):
- environment.policies["ext.i18n.trimmed"] = True
- if getbool(options, "newstyle_gettext"):
- environment.newstyle_gettext = True
-
- source = fileobj.read().decode(options.get("encoding", "utf-8"))
- try:
- node = environment.parse(source)
- tokens = list(environment.lex(environment.preprocess(source)))
- except TemplateSyntaxError:
- if not silent:
- raise
- # skip templates with syntax errors
- return
-
- finder = _CommentFinder(tokens, comment_tags)
- for lineno, func, message in extract_from_ast(node, keywords):
- yield lineno, func, message, finder.find_comments(lineno)
-
-
-#: nicer import names
-i18n = InternationalizationExtension
-do = ExprStmtExtension
-loopcontrols = LoopControlExtension
-with_ = WithExtension
-autoescape = AutoEscapeExtension
-debug = DebugExtension
diff --git a/lib/spack/external/jinja2/filters.py b/lib/spack/external/jinja2/filters.py
deleted file mode 100644
index 74b108dcec..0000000000
--- a/lib/spack/external/jinja2/filters.py
+++ /dev/null
@@ -1,1382 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Built-in template filters used with the ``|`` operator."""
-import math
-import random
-import re
-import warnings
-from collections import namedtuple
-from itertools import chain
-from itertools import groupby
-
-from markupsafe import escape
-from markupsafe import Markup
-from markupsafe import soft_unicode
-
-from ._compat import abc
-from ._compat import imap
-from ._compat import iteritems
-from ._compat import string_types
-from ._compat import text_type
-from .exceptions import FilterArgumentError
-from .runtime import Undefined
-from .utils import htmlsafe_json_dumps
-from .utils import pformat
-from .utils import unicode_urlencode
-from .utils import urlize
-
-_word_re = re.compile(r"\w+", re.UNICODE)
-_word_beginning_split_re = re.compile(r"([-\s\(\{\[\<]+)", re.UNICODE)
-
-
-def contextfilter(f):
- """Decorator for marking context dependent filters. The current
- :class:`Context` will be passed as first argument.
- """
- f.contextfilter = True
- return f
-
-
-def evalcontextfilter(f):
- """Decorator for marking eval-context dependent filters. An eval
- context object is passed as first argument. For more information
- about the eval context, see :ref:`eval-context`.
-
- .. versionadded:: 2.4
- """
- f.evalcontextfilter = True
- return f
-
-
-def environmentfilter(f):
- """Decorator for marking environment dependent filters. The current
- :class:`Environment` is passed to the filter as first argument.
- """
- f.environmentfilter = True
- return f
-
-
-def ignore_case(value):
- """For use as a postprocessor for :func:`make_attrgetter`. Converts strings
- to lowercase and returns other types as-is."""
- return value.lower() if isinstance(value, string_types) else value
-
-
-def make_attrgetter(environment, attribute, postprocess=None, default=None):
- """Returns a callable that looks up the given attribute from a
- passed object with the rules of the environment. Dots are allowed
- to access attributes of attributes. Integer parts in paths are
- looked up as integers.
- """
- attribute = _prepare_attribute_parts(attribute)
-
- def attrgetter(item):
- for part in attribute:
- item = environment.getitem(item, part)
-
- if default and isinstance(item, Undefined):
- item = default
-
- if postprocess is not None:
- item = postprocess(item)
-
- return item
-
- return attrgetter
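-
-# Usage sketch (not part of the original file), assuming `env` is a
-# jinja2 Environment:
-#
-#   getter = make_attrgetter(env, "address.city")
-#   getter({"address": {"city": "Oslo"}})   # -> "Oslo"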
-
-
-def make_multi_attrgetter(environment, attribute, postprocess=None):
- """Returns a callable that looks up the given comma separated
- attributes from a passed object with the rules of the environment.
- Dots are allowed to access attributes of each attribute. Integer
- parts in paths are looked up as integers.
-
- The value returned by the returned callable is a list of extracted
- attribute values.
-
- Examples of attribute: "attr1,attr2", "attr1.inner1.0,attr2.inner2.0", etc.
- """
- attribute_parts = (
- attribute.split(",") if isinstance(attribute, string_types) else [attribute]
- )
- attribute = [
- _prepare_attribute_parts(attribute_part) for attribute_part in attribute_parts
- ]
-
- def attrgetter(item):
- items = [None] * len(attribute)
- for i, attribute_part in enumerate(attribute):
- item_i = item
- for part in attribute_part:
- item_i = environment.getitem(item_i, part)
-
- if postprocess is not None:
- item_i = postprocess(item_i)
-
- items[i] = item_i
- return items
-
- return attrgetter
-
-
-def _prepare_attribute_parts(attr):
- if attr is None:
- return []
- elif isinstance(attr, string_types):
- return [int(x) if x.isdigit() else x for x in attr.split(".")]
- else:
- return [attr]
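-
-# Illustrative sketch (not in the original source): the helper above turns
-# a dotted path into lookup parts, with digit segments becoming integers:
-#
-#   _prepare_attribute_parts("users.0.name")   # -> ["users", 0, "name"]
-#   _prepare_attribute_parts(None)             # -> []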
-
-
-def do_forceescape(value):
- """Enforce HTML escaping. This will probably double escape variables."""
- if hasattr(value, "__html__"):
- value = value.__html__()
- return escape(text_type(value))
-
-
-def do_urlencode(value):
- """Quote data for use in a URL path or query using UTF-8.
-
- Basic wrapper around :func:`urllib.parse.quote` when given a
- string, or :func:`urllib.parse.urlencode` for a dict or iterable.
-
- :param value: Data to quote. A string will be quoted directly. A
- dict or iterable of ``(key, value)`` pairs will be joined as a
- query string.
-
- When given a string, "/" is not quoted. HTTP servers treat "/" and
- "%2F" equivalently in paths. If you need quoted slashes, use the
- ``|replace("/", "%2F")`` filter.
-
- .. versionadded:: 2.7
- """
- if isinstance(value, string_types) or not isinstance(value, abc.Iterable):
- return unicode_urlencode(value)
-
- if isinstance(value, dict):
- items = iteritems(value)
- else:
- items = iter(value)
-
- return u"&".join(
- "%s=%s" % (unicode_urlencode(k, for_qs=True), unicode_urlencode(v, for_qs=True))
- for k, v in items
- )
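-
-# For example (illustrative, not in the original source):
-#
-#   do_urlencode("hello world/x")   # -> "hello%20world/x"  ("/" is kept)
-#   do_urlencode({"a": "b&c"})      # -> "a=b%26c"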
-
-
-@evalcontextfilter
-def do_replace(eval_ctx, s, old, new, count=None):
- """Return a copy of the value with all occurrences of a substring
- replaced with a new one. The first argument is the substring
- that should be replaced, the second is the replacement string.
- If the optional third argument ``count`` is given, only the first
- ``count`` occurrences are replaced:
-
- .. sourcecode:: jinja
-
- {{ "Hello World"|replace("Hello", "Goodbye") }}
- -> Goodbye World
-
- {{ "aaaaargh"|replace("a", "d'oh, ", 2) }}
- -> d'oh, d'oh, aaargh
- """
- if count is None:
- count = -1
- if not eval_ctx.autoescape:
- return text_type(s).replace(text_type(old), text_type(new), count)
- if (
- hasattr(old, "__html__")
- or hasattr(new, "__html__")
- and not hasattr(s, "__html__")
- ):
- s = escape(s)
- else:
- s = soft_unicode(s)
- return s.replace(soft_unicode(old), soft_unicode(new), count)
-
-
-def do_upper(s):
- """Convert a value to uppercase."""
- return soft_unicode(s).upper()
-
-
-def do_lower(s):
- """Convert a value to lowercase."""
- return soft_unicode(s).lower()
-
-
-@evalcontextfilter
-def do_xmlattr(_eval_ctx, d, autospace=True):
- """Create an SGML/XML attribute string based on the items in a dict.
- All values that are neither `none` nor `undefined` are automatically
- escaped:
-
- .. sourcecode:: html+jinja
-
- <ul{{ {'class': 'my_list', 'missing': none,
- 'id': 'list-%d'|format(variable)}|xmlattr }}>
- ...
- </ul>
-
- Results in something like this:
-
- .. sourcecode:: html
-
- <ul class="my_list" id="list-42">
- ...
- </ul>
-
- As you can see it automatically prepends a space in front of the item
- if the filter returned something, unless the second parameter is false.
- """
- rv = u" ".join(
- u'%s="%s"' % (escape(key), escape(value))
- for key, value in iteritems(d)
- if value is not None and not isinstance(value, Undefined)
- )
- if autospace and rv:
- rv = u" " + rv
- if _eval_ctx.autoescape:
- rv = Markup(rv)
- return rv
-
-
-def do_capitalize(s):
- """Capitalize a value. The first character will be uppercase, all others
- lowercase.
- """
- return soft_unicode(s).capitalize()
-
-
-def do_title(s):
- """Return a titlecased version of the value. I.e. words will start with
- uppercase letters, all remaining characters are lowercase.
- """
- return "".join(
- [
- item[0].upper() + item[1:].lower()
- for item in _word_beginning_split_re.split(soft_unicode(s))
- if item
- ]
- )
-
-
-def do_dictsort(value, case_sensitive=False, by="key", reverse=False):
- """Sort a dict and yield (key, value) pairs. Because python dicts are
- unsorted you may want to use this function to order them by either
- key or value:
-
- .. sourcecode:: jinja
-
- {% for key, value in mydict|dictsort %}
- sort the dict by key, case insensitive
-
- {% for key, value in mydict|dictsort(reverse=true) %}
- sort the dict by key, case insensitive, reverse order
-
- {% for key, value in mydict|dictsort(true) %}
- sort the dict by key, case sensitive
-
- {% for key, value in mydict|dictsort(false, 'value') %}
- sort the dict by value, case insensitive
- """
- if by == "key":
- pos = 0
- elif by == "value":
- pos = 1
- else:
- raise FilterArgumentError('You can only sort by either "key" or "value"')
-
- def sort_func(item):
- value = item[pos]
-
- if not case_sensitive:
- value = ignore_case(value)
-
- return value
-
- return sorted(value.items(), key=sort_func, reverse=reverse)
-
-
-@environmentfilter
-def do_sort(environment, value, reverse=False, case_sensitive=False, attribute=None):
- """Sort an iterable using Python's :func:`sorted`.
-
- .. sourcecode:: jinja
-
- {% for city in cities|sort %}
- ...
- {% endfor %}
-
- :param reverse: Sort descending instead of ascending.
- :param case_sensitive: When sorting strings, sort upper and lower
- case separately.
- :param attribute: When sorting objects or dicts, an attribute or
- key to sort by. Can use dot notation like ``"address.city"``.
- Can be a list of attributes like ``"age,name"``.
-
- The sort is stable; it does not change the relative order of
- elements that compare equal. This makes it possible to chain
- sorts on different attributes and ordering.
-
- .. sourcecode:: jinja
-
- {% for user in users|sort(attribute="name")
- |sort(reverse=true, attribute="age") %}
- ...
- {% endfor %}
-
- As a shortcut to chaining when the direction is the same for all
- attributes, pass a comma separated list of attributes.
-
- .. sourcecode:: jinja
-
- {% for user in users|sort(attribute="age,name") %}
- ...
- {% endfor %}
-
- .. versionchanged:: 2.11.0
- The ``attribute`` parameter can be a comma separated list of
- attributes, e.g. ``"age,name"``.
-
- .. versionchanged:: 2.6
- The ``attribute`` parameter was added.
- """
- key_func = make_multi_attrgetter(
- environment, attribute, postprocess=ignore_case if not case_sensitive else None
- )
- return sorted(value, key=key_func, reverse=reverse)
-
-
-@environmentfilter
-def do_unique(environment, value, case_sensitive=False, attribute=None):
- """Returns a list of unique items from the given iterable.
-
- .. sourcecode:: jinja
-
- {{ ['foo', 'bar', 'foobar', 'FooBar']|unique|list }}
- -> ['foo', 'bar', 'foobar']
-
- The unique items are yielded in the same order as their first occurrence in
- the iterable passed to the filter.
-
- :param case_sensitive: Treat upper and lower case strings as distinct.
- :param attribute: Filter objects with unique values for this attribute.
- """
- getter = make_attrgetter(
- environment, attribute, postprocess=ignore_case if not case_sensitive else None
- )
- seen = set()
-
- for item in value:
- key = getter(item)
-
- if key not in seen:
- seen.add(key)
- yield item
-
-
-def _min_or_max(environment, value, func, case_sensitive, attribute):
- it = iter(value)
-
- try:
- first = next(it)
- except StopIteration:
- return environment.undefined("No aggregated item, sequence was empty.")
-
- key_func = make_attrgetter(
- environment, attribute, postprocess=ignore_case if not case_sensitive else None
- )
- return func(chain([first], it), key=key_func)
-
-
-@environmentfilter
-def do_min(environment, value, case_sensitive=False, attribute=None):
- """Return the smallest item from the sequence.
-
- .. sourcecode:: jinja
-
- {{ [1, 2, 3]|min }}
- -> 1
-
- :param case_sensitive: Treat upper and lower case strings as distinct.
- :param attribute: Get the object with the min value of this attribute.
- """
- return _min_or_max(environment, value, min, case_sensitive, attribute)
-
-
-@environmentfilter
-def do_max(environment, value, case_sensitive=False, attribute=None):
- """Return the largest item from the sequence.
-
- .. sourcecode:: jinja
-
- {{ [1, 2, 3]|max }}
- -> 3
-
- :param case_sensitive: Treat upper and lower case strings as distinct.
- :param attribute: Get the object with the max value of this attribute.
- """
- return _min_or_max(environment, value, max, case_sensitive, attribute)
-
-
-def do_default(value, default_value=u"", boolean=False):
- """If the value is undefined it will return the passed default value,
- otherwise the value of the variable:
-
- .. sourcecode:: jinja
-
- {{ my_variable|default('my_variable is not defined') }}
-
- This will output the value of ``my_variable`` if the variable was
- defined, otherwise ``'my_variable is not defined'``. If you want
- to use default with variables that evaluate to false you have to
- set the second parameter to `true`:
-
- .. sourcecode:: jinja
-
- {{ ''|default('the string was empty', true) }}
-
- .. versionchanged:: 2.11
- It's now possible to configure the :class:`~jinja2.Environment` with
- :class:`~jinja2.ChainableUndefined` to make the `default` filter work
- on nested elements and attributes that may contain undefined values
- in the chain without getting an :exc:`~jinja2.UndefinedError`.
- """
- if isinstance(value, Undefined) or (boolean and not value):
- return default_value
- return value
-
-
-@evalcontextfilter
-def do_join(eval_ctx, value, d=u"", attribute=None):
- """Return a string which is the concatenation of the strings in the
- sequence. The separator between elements is an empty string per
- default, you can define it with the optional parameter:
-
- .. sourcecode:: jinja
-
- {{ [1, 2, 3]|join('|') }}
- -> 1|2|3
-
- {{ [1, 2, 3]|join }}
- -> 123
-
- It is also possible to join certain attributes of an object:
-
- .. sourcecode:: jinja
-
- {{ users|join(', ', attribute='username') }}
-
- .. versionadded:: 2.6
- The `attribute` parameter was added.
- """
- if attribute is not None:
- value = imap(make_attrgetter(eval_ctx.environment, attribute), value)
-
- # no automatic escaping? joining is a lot easier then
- if not eval_ctx.autoescape:
- return text_type(d).join(imap(text_type, value))
-
- # if the delimiter doesn't have an html representation we check
- # if any of the items has. If yes we do a coercion to Markup
- if not hasattr(d, "__html__"):
- value = list(value)
- do_escape = False
- for idx, item in enumerate(value):
- if hasattr(item, "__html__"):
- do_escape = True
- else:
- value[idx] = text_type(item)
- if do_escape:
- d = escape(d)
- else:
- d = text_type(d)
- return d.join(value)
-
- # no html involved, so normal joining
- return soft_unicode(d).join(imap(soft_unicode, value))
-
-
-def do_center(value, width=80):
- """Centers the value in a field of a given width."""
- return text_type(value).center(width)
-
-
-@environmentfilter
-def do_first(environment, seq):
- """Return the first item of a sequence."""
- try:
- return next(iter(seq))
- except StopIteration:
- return environment.undefined("No first item, sequence was empty.")
-
-
-@environmentfilter
-def do_last(environment, seq):
- """
- Return the last item of a sequence.
-
- Note: Does not work with generators. You may want to explicitly
- convert it to a list:
-
- .. sourcecode:: jinja
-
- {{ data | selectattr('name', '==', 'Jinja') | list | last }}
- """
- try:
- return next(iter(reversed(seq)))
- except StopIteration:
- return environment.undefined("No last item, sequence was empty.")
-
-
-@contextfilter
-def do_random(context, seq):
- """Return a random item from the sequence."""
- try:
- return random.choice(seq)
- except IndexError:
- return context.environment.undefined("No random item, sequence was empty.")
-
-
-def do_filesizeformat(value, binary=False):
- """Format the value like a 'human-readable' file size (i.e. 13 kB,
- 4.1 MB, 102 Bytes, etc). Per default decimal prefixes are used (Mega,
- Giga, etc.), if the second parameter is set to `True` the binary
- prefixes are used (Mebi, Gibi).
- """
- bytes = float(value)
- base = binary and 1024 or 1000
- prefixes = [
- (binary and "KiB" or "kB"),
- (binary and "MiB" or "MB"),
- (binary and "GiB" or "GB"),
- (binary and "TiB" or "TB"),
- (binary and "PiB" or "PB"),
- (binary and "EiB" or "EB"),
- (binary and "ZiB" or "ZB"),
- (binary and "YiB" or "YB"),
- ]
- if bytes == 1:
- return "1 Byte"
- elif bytes < base:
- return "%d Bytes" % bytes
- else:
- for i, prefix in enumerate(prefixes):
- unit = base ** (i + 2)
- if bytes < unit:
- return "%.1f %s" % ((base * bytes / unit), prefix)
- return "%.1f %s" % ((base * bytes / unit), prefix)
-
-
-def do_pprint(value, verbose=False):
- """Pretty print a variable. Useful for debugging.
-
- From Jinja 1.2 onwards you can pass it a parameter. If this parameter
- is truthy the output will be more verbose (this requires `pretty`).
- """
- return pformat(value, verbose=verbose)
-
-
-@evalcontextfilter
-def do_urlize(
- eval_ctx, value, trim_url_limit=None, nofollow=False, target=None, rel=None
-):
- """Converts URLs in plain text into clickable links.
-
- If you pass the filter an additional integer it will shorten the urls
- to that number. Also a third argument exists that makes the urls
- "nofollow":
-
- .. sourcecode:: jinja
-
- {{ mytext|urlize(40, true) }}
- links are shortened to 40 chars and defined with rel="nofollow"
-
- If *target* is specified, the ``target`` attribute will be added to the
- ``<a>`` tag:
-
- .. sourcecode:: jinja
-
- {{ mytext|urlize(40, target='_blank') }}
-
- .. versionchanged:: 2.8
- The *target* parameter was added.
- """
- policies = eval_ctx.environment.policies
- rel = set((rel or "").split() or [])
- if nofollow:
- rel.add("nofollow")
- rel.update((policies["urlize.rel"] or "").split())
- if target is None:
- target = policies["urlize.target"]
- rel = " ".join(sorted(rel)) or None
- rv = urlize(value, trim_url_limit, rel=rel, target=target)
- if eval_ctx.autoescape:
- rv = Markup(rv)
- return rv
-
-
-def do_indent(s, width=4, first=False, blank=False, indentfirst=None):
- """Return a copy of the string with each line indented by 4 spaces. The
- first line and blank lines are not indented by default.
-
- :param width: Number of spaces to indent by.
- :param first: Don't skip indenting the first line.
- :param blank: Don't skip indenting empty lines.
-
- .. versionchanged:: 2.10
- Blank lines are not indented by default.
-
- Rename the ``indentfirst`` argument to ``first``.
- """
- if indentfirst is not None:
- warnings.warn(
- "The 'indentfirst' argument is renamed to 'first' and will"
- " be removed in version 3.0.",
- DeprecationWarning,
- stacklevel=2,
- )
- first = indentfirst
-
- indention = u" " * width
- newline = u"\n"
-
- if isinstance(s, Markup):
- indention = Markup(indention)
- newline = Markup(newline)
-
- s += newline # this quirk is necessary for splitlines method
-
- if blank:
- rv = (newline + indention).join(s.splitlines())
- else:
- lines = s.splitlines()
- rv = lines.pop(0)
-
- if lines:
- rv += newline + newline.join(
- indention + line if line else line for line in lines
- )
-
- if first:
- rv = indention + rv
-
- return rv
-
-
-@environmentfilter
-def do_truncate(env, s, length=255, killwords=False, end="...", leeway=None):
- """Return a truncated copy of the string. The length is specified
- with the first parameter which defaults to ``255``. If the second
- parameter is ``true`` the filter will cut the text at length. Otherwise
- it will discard the last word. If the text was in fact
- truncated it will append an ellipsis sign (``"..."``). If you want a
- different ellipsis sign than ``"..."`` you can specify it using the
- third parameter. Strings that only exceed the length by the tolerance
- margin given in the fourth parameter will not be truncated.
-
- .. sourcecode:: jinja
-
- {{ "foo bar baz qux"|truncate(9) }}
- -> "foo..."
- {{ "foo bar baz qux"|truncate(9, True) }}
- -> "foo ba..."
- {{ "foo bar baz qux"|truncate(11) }}
- -> "foo bar baz qux"
- {{ "foo bar baz qux"|truncate(11, False, '...', 0) }}
- -> "foo bar..."
-
- The default leeway on newer Jinja versions is 5 and was 0 before but
- can be reconfigured globally.
- """
- if leeway is None:
- leeway = env.policies["truncate.leeway"]
- assert length >= len(end), "expected length >= %s, got %s" % (len(end), length)
- assert leeway >= 0, "expected leeway >= 0, got %s" % leeway
- if len(s) <= length + leeway:
- return s
- if killwords:
- return s[: length - len(end)] + end
- result = s[: length - len(end)].rsplit(" ", 1)[0]
- return result + end
-
-
-@environmentfilter
-def do_wordwrap(
- environment,
- s,
- width=79,
- break_long_words=True,
- wrapstring=None,
- break_on_hyphens=True,
-):
- """Wrap a string to the given width. Existing newlines are treated
- as paragraphs to be wrapped separately.
-
- :param s: Original text to wrap.
- :param width: Maximum length of wrapped lines.
- :param break_long_words: If a word is longer than ``width``, break
- it across lines.
- :param break_on_hyphens: If a word contains hyphens, it may be split
- across lines.
- :param wrapstring: String to join each wrapped line. Defaults to
- :attr:`Environment.newline_sequence`.
-
- .. versionchanged:: 2.11
- Existing newlines are treated as paragraphs wrapped separately.
-
- .. versionchanged:: 2.11
- Added the ``break_on_hyphens`` parameter.
-
- .. versionchanged:: 2.7
- Added the ``wrapstring`` parameter.
- """
-
- import textwrap
-
- if not wrapstring:
- wrapstring = environment.newline_sequence
-
- # textwrap.wrap doesn't consider existing newlines when wrapping.
- # If the string has a newline before width, wrap will still insert
- # a newline at width, resulting in a short line. Instead, split and
- # wrap each paragraph individually.
- return wrapstring.join(
- [
- wrapstring.join(
- textwrap.wrap(
- line,
- width=width,
- expand_tabs=False,
- replace_whitespace=False,
- break_long_words=break_long_words,
- break_on_hyphens=break_on_hyphens,
- )
- )
- for line in s.splitlines()
- ]
- )
-
-
-def do_wordcount(s):
- """Count the words in that string."""
- return len(_word_re.findall(soft_unicode(s)))
-
-
-def do_int(value, default=0, base=10):
- """Convert the value into an integer. If the
- conversion doesn't work it will return ``0``. You can
- override this default using the first parameter. You
- can also override the default base (10) in the second
- parameter, which handles input with prefixes such as
- 0b, 0o and 0x for bases 2, 8 and 16 respectively.
- The base is ignored for decimal numbers and non-string values.
- """
- try:
- if isinstance(value, string_types):
- return int(value, base)
- return int(value)
- except (TypeError, ValueError):
- # this quirk is necessary so that "42.23"|int gives 42.
- try:
- return int(float(value))
- except (TypeError, ValueError):
- return default
-
-
-def do_float(value, default=0.0):
- """Convert the value into a floating point number. If the
- conversion doesn't work it will return ``0.0``. You can
- override this default using the first parameter.
- """
- try:
- return float(value)
- except (TypeError, ValueError):
- return default
-
-
-def do_format(value, *args, **kwargs):
- """Apply the given values to a `printf-style`_ format string, like
- ``string % values``.
-
- .. sourcecode:: jinja
-
- {{ "%s, %s!"|format(greeting, name) }}
- Hello, World!
-
- In most cases it should be more convenient and efficient to use the
- ``%`` operator or :meth:`str.format`.
-
- .. code-block:: text
-
- {{ "%s, %s!" % (greeting, name) }}
- {{ "{}, {}!".format(greeting, name) }}
-
- .. _printf-style: https://docs.python.org/library/stdtypes.html
- #printf-style-string-formatting
- """
- if args and kwargs:
- raise FilterArgumentError(
- "can't handle positional and keyword arguments at the same time"
- )
- return soft_unicode(value) % (kwargs or args)
-
-
-def do_trim(value, chars=None):
- """Strip leading and trailing characters, by default whitespace."""
- return soft_unicode(value).strip(chars)
-
-
-def do_striptags(value):
- """Strip SGML/XML tags and replace adjacent whitespace by one space."""
- if hasattr(value, "__html__"):
- value = value.__html__()
- return Markup(text_type(value)).striptags()
-
-
-def do_slice(value, slices, fill_with=None):
- """Slice an iterator and return a list of lists containing
- those items. Useful if you want to create a div containing
- three ul tags that represent columns:
-
- .. sourcecode:: html+jinja
-
- <div class="columnwrapper">
- {%- for column in items|slice(3) %}
- <ul class="column-{{ loop.index }}">
- {%- for item in column %}
- <li>{{ item }}</li>
- {%- endfor %}
- </ul>
- {%- endfor %}
- </div>
-
- If you pass it a second argument it's used to fill missing
- values on the last iteration.
- """
- seq = list(value)
- length = len(seq)
- items_per_slice = length // slices
- slices_with_extra = length % slices
- offset = 0
- for slice_number in range(slices):
- start = offset + slice_number * items_per_slice
- if slice_number < slices_with_extra:
- offset += 1
- end = offset + (slice_number + 1) * items_per_slice
- tmp = seq[start:end]
- if fill_with is not None and slice_number >= slices_with_extra:
- tmp.append(fill_with)
- yield tmp
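-
-# For example (illustrative, not in the original source): ten items split
-# into three columns, with the extra item going to the first column:
-#
-#   list(do_slice(range(1, 11), 3))
-#   # -> [[1, 2, 3, 4], [5, 6, 7], [8, 9, 10]]
-#   list(do_slice(range(1, 11), 3, fill_with=0))
-#   # -> [[1, 2, 3, 4], [5, 6, 7, 0], [8, 9, 10, 0]]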
-
-
-def do_batch(value, linecount, fill_with=None):
- """
- A filter that batches items. It works pretty much like `slice`
- just the other way round. It returns a list of lists with the
- given number of items. If you provide a second parameter this
- is used to fill up missing items. See this example:
-
- .. sourcecode:: html+jinja
-
- <table>
- {%- for row in items|batch(3, '&nbsp;') %}
- <tr>
- {%- for column in row %}
- <td>{{ column }}</td>
- {%- endfor %}
- </tr>
- {%- endfor %}
- </table>
- """
- tmp = []
- for item in value:
- if len(tmp) == linecount:
- yield tmp
- tmp = []
- tmp.append(item)
- if tmp:
- if fill_with is not None and len(tmp) < linecount:
- tmp += [fill_with] * (linecount - len(tmp))
- yield tmp
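-
-# For example (illustrative, not in the original source): in contrast to
-# `slice` above, `batch` fixes the row length instead of the row count:
-#
-#   list(do_batch(range(1, 11), 3, fill_with=0))
-#   # -> [[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 0, 0]]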
-
-
-def do_round(value, precision=0, method="common"):
- """Round the number to a given precision. The first
- parameter specifies the precision (default is ``0``), the
- second the rounding method:
-
- - ``'common'`` rounds either up or down
- - ``'ceil'`` always rounds up
- - ``'floor'`` always rounds down
-
- If you don't specify a method ``'common'`` is used.
-
- .. sourcecode:: jinja
-
- {{ 42.55|round }}
- -> 43.0
- {{ 42.55|round(1, 'floor') }}
- -> 42.5
-
- Note that even if rounded to 0 precision, a float is returned. If
- you need a real integer, pipe it through `int`:
-
- .. sourcecode:: jinja
-
- {{ 42.55|round|int }}
- -> 43
- """
- if method not in {"common", "ceil", "floor"}:
- raise FilterArgumentError("method must be common, ceil or floor")
- if method == "common":
- return round(value, precision)
- func = getattr(math, method)
- return func(value * (10 ** precision)) / (10 ** precision)
-
-
-# Use a regular tuple repr here. This is what we did in the past and we
-# really want to hide this custom type as much as possible. In particular
-# we do not want to accidentally expose an auto generated repr in case
-# people start to print this out in comments or something similar for
-# debugging.
-_GroupTuple = namedtuple("_GroupTuple", ["grouper", "list"])
-_GroupTuple.__repr__ = tuple.__repr__
-_GroupTuple.__str__ = tuple.__str__
-
-
-@environmentfilter
-def do_groupby(environment, value, attribute):
- """Group a sequence of objects by an attribute using Python's
- :func:`itertools.groupby`. The attribute can use dot notation for
- nested access, like ``"address.city"``. Unlike Python's ``groupby``,
- the values are sorted first so only one group is returned for each
- unique value.
-
- For example, a list of ``User`` objects with a ``city`` attribute
- can be rendered in groups. In this example, ``grouper`` refers to
- the ``city`` value of the group.
-
- .. sourcecode:: html+jinja
-
- <ul>{% for city, items in users|groupby("city") %}
- <li>{{ city }}
- <ul>{% for user in items %}
- <li>{{ user.name }}
- {% endfor %}</ul>
- </li>
- {% endfor %}</ul>
-
- ``groupby`` yields namedtuples of ``(grouper, list)``, which
- can be used instead of the tuple unpacking above. ``grouper`` is the
- value of the attribute, and ``list`` is the items with that value.
-
- .. sourcecode:: html+jinja
-
- <ul>{% for group in users|groupby("city") %}
- <li>{{ group.grouper }}: {{ group.list|join(", ") }}
- {% endfor %}</ul>
-
- .. versionchanged:: 2.6
- The attribute supports dot notation for nested access.
- """
- expr = make_attrgetter(environment, attribute)
- return [
- _GroupTuple(key, list(values))
- for key, values in groupby(sorted(value, key=expr), expr)
- ]
-
-
-@environmentfilter
-def do_sum(environment, iterable, attribute=None, start=0):
- """Returns the sum of a sequence of numbers plus the value of parameter
- 'start' (which defaults to 0). When the sequence is empty it returns
- start.
-
- It is also possible to sum up only certain attributes:
-
- .. sourcecode:: jinja
-
- Total: {{ items|sum(attribute='price') }}
-
- .. versionchanged:: 2.6
- The `attribute` parameter was added to allow summing up over
- attributes. Also the `start` parameter was moved to the right.
- """
- if attribute is not None:
- iterable = imap(make_attrgetter(environment, attribute), iterable)
- return sum(iterable, start)
-
-
-def do_list(value):
- """Convert the value into a list. If it was a string the returned list
- will be a list of characters.
- """
- return list(value)
-
-
-def do_mark_safe(value):
- """Mark the value as safe which means that in an environment with automatic
- escaping enabled this variable will not be escaped.
- """
- return Markup(value)
-
-
-def do_mark_unsafe(value):
- """Mark a value as unsafe. This is the reverse operation for :func:`safe`."""
- return text_type(value)
-
-
-def do_reverse(value):
- """Reverse the object or return an iterator that iterates over it the other
- way round.
- """
- if isinstance(value, string_types):
- return value[::-1]
- try:
- return reversed(value)
- except TypeError:
- try:
- rv = list(value)
- rv.reverse()
- return rv
- except TypeError:
- raise FilterArgumentError("argument must be iterable")
-
-
-@environmentfilter
-def do_attr(environment, obj, name):
- """Get an attribute of an object. ``foo|attr("bar")`` works like
- ``foo.bar`` just that always an attribute is returned and items are not
- looked up.
-
- See :ref:`Notes on subscriptions <notes-on-subscriptions>` for more details.
- """
- try:
- name = str(name)
- except UnicodeError:
- pass
- else:
- try:
- value = getattr(obj, name)
- except AttributeError:
- pass
- else:
- if environment.sandboxed and not environment.is_safe_attribute(
- obj, name, value
- ):
- return environment.unsafe_undefined(obj, name)
- return value
- return environment.undefined(obj=obj, name=name)
-
-
-@contextfilter
-def do_map(*args, **kwargs):
- """Applies a filter on a sequence of objects or looks up an attribute.
- This is useful when dealing with lists of objects but you are really
- only interested in a certain value of it.
-
- The basic usage is mapping on an attribute. Imagine you have a list
- of users but you are only interested in a list of usernames:
-
- .. sourcecode:: jinja
-
- Users on this page: {{ users|map(attribute='username')|join(', ') }}
-
- You can specify a ``default`` value to use if an object in the list
- does not have the given attribute.
-
- .. sourcecode:: jinja
-
- {{ users|map(attribute="username", default="Anonymous")|join(", ") }}
-
- Alternatively you can let it invoke a filter by passing the name of the
- filter and the arguments afterwards. A good example would be applying a
- text conversion filter on a sequence:
-
- .. sourcecode:: jinja
-
- Users on this page: {{ titles|map('lower')|join(', ') }}
-
- Similar to a generator comprehension such as:
-
- .. code-block:: python
-
- (u.username for u in users)
- (u.username or "Anonymous" for u in users)
- (do_lower(x) for x in titles)
-
- .. versionchanged:: 2.11.0
- Added the ``default`` parameter.
-
- .. versionadded:: 2.7
- """
- seq, func = prepare_map(args, kwargs)
- if seq:
- for item in seq:
- yield func(item)
-
-
-@contextfilter
-def do_select(*args, **kwargs):
- """Filters a sequence of objects by applying a test to each object,
- and only selecting the objects with the test succeeding.
-
- If no test is specified, each object will be evaluated as a boolean.
-
- Example usage:
-
- .. sourcecode:: jinja
-
- {{ numbers|select("odd") }}
- {{ numbers|select("divisibleby", 3) }}
- {{ numbers|select("lessthan", 42) }}
- {{ strings|select("equalto", "mystring") }}
-
- Similar to a generator comprehension such as:
-
- .. code-block:: python
-
- (n for n in numbers if test_odd(n))
- (n for n in numbers if test_divisibleby(n, 3))
-
- .. versionadded:: 2.7
- """
- return select_or_reject(args, kwargs, lambda x: x, False)
-
-
-@contextfilter
-def do_reject(*args, **kwargs):
- """Filters a sequence of objects by applying a test to each object,
- and rejecting the objects with the test succeeding.
-
- If no test is specified, each object will be evaluated as a boolean.
-
- Example usage:
-
- .. sourcecode:: jinja
-
- {{ numbers|reject("odd") }}
-
- Similar to a generator comprehension such as:
-
- .. code-block:: python
-
- (n for n in numbers if not test_odd(n))
-
- .. versionadded:: 2.7
- """
- return select_or_reject(args, kwargs, lambda x: not x, False)
-
-
-@contextfilter
-def do_selectattr(*args, **kwargs):
- """Filters a sequence of objects by applying a test to the specified
- attribute of each object, and only selecting the objects with the
- test succeeding.
-
- If no test is specified, the attribute's value will be evaluated as
- a boolean.
-
- Example usage:
-
- .. sourcecode:: jinja
-
- {{ users|selectattr("is_active") }}
- {{ users|selectattr("email", "none") }}
-
- Similar to a generator comprehension such as:
-
- .. code-block:: python
-
- (user for user in users if user.is_active)
- (user for user in users if test_none(user.email))
-
- .. versionadded:: 2.7
- """
- return select_or_reject(args, kwargs, lambda x: x, True)
-
-
-@contextfilter
-def do_rejectattr(*args, **kwargs):
- """Filters a sequence of objects by applying a test to the specified
- attribute of each object, and rejecting the objects with the test
- succeeding.
-
- If no test is specified, the attribute's value will be evaluated as
- a boolean.
-
- .. sourcecode:: jinja
-
- {{ users|rejectattr("is_active") }}
- {{ users|rejectattr("email", "none") }}
-
- Similar to a generator comprehension such as:
-
- .. code-block:: python
-
- (user for user in users if not user.is_active)
- (user for user in users if not test_none(user.email))
-
- .. versionadded:: 2.7
- """
- return select_or_reject(args, kwargs, lambda x: not x, True)
-
-
-@evalcontextfilter
-def do_tojson(eval_ctx, value, indent=None):
- """Dumps a structure to JSON so that it's safe to use in ``<script>``
- tags. It accepts the same arguments and returns a JSON string. Note that
- this is available in templates through the ``|tojson`` filter which will
- also mark the result as safe. Due to how this function escapes certain
- characters this is safe even if used outside of ``<script>`` tags.
-
- The following characters are escaped in strings:
-
- - ``<``
- - ``>``
- - ``&``
- - ``'``
-
- This makes it safe to embed such strings in any place in HTML with the
- notable exception of double quoted attributes. In that case,
- single-quote your attributes or HTML-escape them in addition.
-
- The indent parameter can be used to enable pretty printing. Set it to
- the number of spaces that the structures should be indented with.
-
- Note that this filter is for use in HTML contexts only.
-
- .. versionadded:: 2.9
- """
- policies = eval_ctx.environment.policies
- dumper = policies["json.dumps_function"]
- options = policies["json.dumps_kwargs"]
- if indent is not None:
- options = dict(options)
- options["indent"] = indent
- return htmlsafe_json_dumps(value, dumper=dumper, **options)
-
-
-def prepare_map(args, kwargs):
- context = args[0]
- seq = args[1]
- default = None
-
- if len(args) == 2 and "attribute" in kwargs:
- attribute = kwargs.pop("attribute")
- default = kwargs.pop("default", None)
- if kwargs:
- raise FilterArgumentError(
- "Unexpected keyword argument %r" % next(iter(kwargs))
- )
- func = make_attrgetter(context.environment, attribute, default=default)
- else:
- try:
- name = args[2]
- args = args[3:]
- except LookupError:
- raise FilterArgumentError("map requires a filter argument")
-
- def func(item):
- return context.environment.call_filter(
- name, item, args, kwargs, context=context
- )
-
- return seq, func
-
-
-def prepare_select_or_reject(args, kwargs, modfunc, lookup_attr):
- context = args[0]
- seq = args[1]
- if lookup_attr:
- try:
- attr = args[2]
- except LookupError:
- raise FilterArgumentError("Missing parameter for attribute name")
- transfunc = make_attrgetter(context.environment, attr)
- off = 1
- else:
- off = 0
-
- def transfunc(x):
- return x
-
- try:
- name = args[2 + off]
- args = args[3 + off :]
-
- def func(item):
- return context.environment.call_test(name, item, args, kwargs)
-
- except LookupError:
- func = bool
-
- return seq, lambda item: modfunc(func(transfunc(item)))
-
-
-def select_or_reject(args, kwargs, modfunc, lookup_attr):
- seq, func = prepare_select_or_reject(args, kwargs, modfunc, lookup_attr)
- if seq:
- for item in seq:
- if func(item):
- yield item
-
-
-FILTERS = {
- "abs": abs,
- "attr": do_attr,
- "batch": do_batch,
- "capitalize": do_capitalize,
- "center": do_center,
- "count": len,
- "d": do_default,
- "default": do_default,
- "dictsort": do_dictsort,
- "e": escape,
- "escape": escape,
- "filesizeformat": do_filesizeformat,
- "first": do_first,
- "float": do_float,
- "forceescape": do_forceescape,
- "format": do_format,
- "groupby": do_groupby,
- "indent": do_indent,
- "int": do_int,
- "join": do_join,
- "last": do_last,
- "length": len,
- "list": do_list,
- "lower": do_lower,
- "map": do_map,
- "min": do_min,
- "max": do_max,
- "pprint": do_pprint,
- "random": do_random,
- "reject": do_reject,
- "rejectattr": do_rejectattr,
- "replace": do_replace,
- "reverse": do_reverse,
- "round": do_round,
- "safe": do_mark_safe,
- "select": do_select,
- "selectattr": do_selectattr,
- "slice": do_slice,
- "sort": do_sort,
- "string": soft_unicode,
- "striptags": do_striptags,
- "sum": do_sum,
- "title": do_title,
- "trim": do_trim,
- "truncate": do_truncate,
- "unique": do_unique,
- "upper": do_upper,
- "urlencode": do_urlencode,
- "urlize": do_urlize,
- "wordcount": do_wordcount,
- "wordwrap": do_wordwrap,
- "xmlattr": do_xmlattr,
- "tojson": do_tojson,
-}
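-
-# Usage sketch (not part of the original file): this mapping seeds
-# Environment.filters, so entries can also be called as plain functions:
-#
-#   FILTERS["upper"]("jinja")       # -> "JINJA"
-#   FILTERS["wordcount"]("a b c")   # -> 3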
diff --git a/lib/spack/external/jinja2/idtracking.py b/lib/spack/external/jinja2/idtracking.py
deleted file mode 100644
index 9a0d838017..0000000000
--- a/lib/spack/external/jinja2/idtracking.py
+++ /dev/null
@@ -1,290 +0,0 @@
-from ._compat import iteritems
-from .visitor import NodeVisitor
-
-VAR_LOAD_PARAMETER = "param"
-VAR_LOAD_RESOLVE = "resolve"
-VAR_LOAD_ALIAS = "alias"
-VAR_LOAD_UNDEFINED = "undefined"
-
-
-def find_symbols(nodes, parent_symbols=None):
- sym = Symbols(parent=parent_symbols)
- visitor = FrameSymbolVisitor(sym)
- for node in nodes:
- visitor.visit(node)
- return sym
-
-
-def symbols_for_node(node, parent_symbols=None):
- sym = Symbols(parent=parent_symbols)
- sym.analyze_node(node)
- return sym
-
-
-class Symbols(object):
- def __init__(self, parent=None, level=None):
- if level is None:
- if parent is None:
- level = 0
- else:
- level = parent.level + 1
- self.level = level
- self.parent = parent
- self.refs = {}
- self.loads = {}
- self.stores = set()
-
- def analyze_node(self, node, **kwargs):
- visitor = RootVisitor(self)
- visitor.visit(node, **kwargs)
-
- def _define_ref(self, name, load=None):
- ident = "l_%d_%s" % (self.level, name)
- self.refs[name] = ident
- if load is not None:
- self.loads[ident] = load
- return ident
-
- def find_load(self, target):
- if target in self.loads:
- return self.loads[target]
- if self.parent is not None:
- return self.parent.find_load(target)
-
- def find_ref(self, name):
- if name in self.refs:
- return self.refs[name]
- if self.parent is not None:
- return self.parent.find_ref(name)
-
- def ref(self, name):
- rv = self.find_ref(name)
- if rv is None:
- raise AssertionError(
- "Tried to resolve a name to a reference that "
- "was unknown to the frame (%r)" % name
- )
- return rv
-
- def copy(self):
- rv = object.__new__(self.__class__)
- rv.__dict__.update(self.__dict__)
- rv.refs = self.refs.copy()
- rv.loads = self.loads.copy()
- rv.stores = self.stores.copy()
- return rv
-
- def store(self, name):
- self.stores.add(name)
-
- # If we have not seen the name referenced yet, we need to figure
- # out what to set it to.
- if name not in self.refs:
- # If there is a parent scope we check if the name has a
- # reference there. If it does it means we might have to alias
- # to a variable there.
- if self.parent is not None:
- outer_ref = self.parent.find_ref(name)
- if outer_ref is not None:
- self._define_ref(name, load=(VAR_LOAD_ALIAS, outer_ref))
- return
-
- # Otherwise we can just set it to undefined.
- self._define_ref(name, load=(VAR_LOAD_UNDEFINED, None))
-
- def declare_parameter(self, name):
- self.stores.add(name)
- return self._define_ref(name, load=(VAR_LOAD_PARAMETER, None))
-
- def load(self, name):
- target = self.find_ref(name)
- if target is None:
- self._define_ref(name, load=(VAR_LOAD_RESOLVE, name))
-
- def branch_update(self, branch_symbols):
- stores = {}
- for branch in branch_symbols:
- for target in branch.stores:
- if target in self.stores:
- continue
- stores[target] = stores.get(target, 0) + 1
-
- for sym in branch_symbols:
- self.refs.update(sym.refs)
- self.loads.update(sym.loads)
- self.stores.update(sym.stores)
-
- for name, branch_count in iteritems(stores):
- if branch_count == len(branch_symbols):
- continue
- target = self.find_ref(name)
- assert target is not None, "should not happen"
-
- if self.parent is not None:
- outer_target = self.parent.find_ref(name)
- if outer_target is not None:
- self.loads[target] = (VAR_LOAD_ALIAS, outer_target)
- continue
- self.loads[target] = (VAR_LOAD_RESOLVE, name)
-
- def dump_stores(self):
- rv = {}
- node = self
- while node is not None:
- for name in node.stores:
- if name not in rv:
- rv[name] = self.find_ref(name)
- node = node.parent
- return rv
-
- def dump_param_targets(self):
- rv = set()
- node = self
- while node is not None:
- for target, (instr, _) in iteritems(node.loads):  # each scope's own loads
- if instr == VAR_LOAD_PARAMETER:
- rv.add(target)
- node = node.parent
- return rv
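-
-# Usage sketch (not part of the original file), assuming `env` is a
-# jinja2 Environment:
-#
-#   sym = symbols_for_node(env.parse("{% set x = 1 %}{{ x }}"))
-#   sym.stores     # -> {"x"}
-#   sym.ref("x")   # -> "l_0_x"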
-
-
-class RootVisitor(NodeVisitor):
- def __init__(self, symbols):
- self.sym_visitor = FrameSymbolVisitor(symbols)
-
- def _simple_visit(self, node, **kwargs):
- for child in node.iter_child_nodes():
- self.sym_visitor.visit(child)
-
- visit_Template = (
- visit_Block
- ) = (
- visit_Macro
- ) = (
- visit_FilterBlock
- ) = visit_Scope = visit_If = visit_ScopedEvalContextModifier = _simple_visit
-
- def visit_AssignBlock(self, node, **kwargs):
- for child in node.body:
- self.sym_visitor.visit(child)
-
- def visit_CallBlock(self, node, **kwargs):
- for child in node.iter_child_nodes(exclude=("call",)):
- self.sym_visitor.visit(child)
-
- def visit_OverlayScope(self, node, **kwargs):
- for child in node.body:
- self.sym_visitor.visit(child)
-
- def visit_For(self, node, for_branch="body", **kwargs):
- if for_branch == "body":
- self.sym_visitor.visit(node.target, store_as_param=True)
- branch = node.body
- elif for_branch == "else":
- branch = node.else_
- elif for_branch == "test":
- self.sym_visitor.visit(node.target, store_as_param=True)
- if node.test is not None:
- self.sym_visitor.visit(node.test)
- return
- else:
- raise RuntimeError("Unknown for branch")
- for item in branch or ():
- self.sym_visitor.visit(item)
-
- def visit_With(self, node, **kwargs):
- for target in node.targets:
- self.sym_visitor.visit(target)
- for child in node.body:
- self.sym_visitor.visit(child)
-
- def generic_visit(self, node, *args, **kwargs):
- raise NotImplementedError(
- "Cannot find symbols for %r" % node.__class__.__name__
- )
-
-
-class FrameSymbolVisitor(NodeVisitor):
- """A visitor for `Frame.inspect`."""
-
- def __init__(self, symbols):
- self.symbols = symbols
-
- def visit_Name(self, node, store_as_param=False, **kwargs):
- """All assignments to names go through this function."""
- if store_as_param or node.ctx == "param":
- self.symbols.declare_parameter(node.name)
- elif node.ctx == "store":
- self.symbols.store(node.name)
- elif node.ctx == "load":
- self.symbols.load(node.name)
-
- def visit_NSRef(self, node, **kwargs):
- self.symbols.load(node.name)
-
- def visit_If(self, node, **kwargs):
- self.visit(node.test, **kwargs)
-
- original_symbols = self.symbols
-
- def inner_visit(nodes):
- self.symbols = rv = original_symbols.copy()
- for subnode in nodes:
- self.visit(subnode, **kwargs)
- self.symbols = original_symbols
- return rv
-
- body_symbols = inner_visit(node.body)
- elif_symbols = inner_visit(node.elif_)
- else_symbols = inner_visit(node.else_ or ())
-
- self.symbols.branch_update([body_symbols, elif_symbols, else_symbols])
-
- def visit_Macro(self, node, **kwargs):
- self.symbols.store(node.name)
-
- def visit_Import(self, node, **kwargs):
- self.generic_visit(node, **kwargs)
- self.symbols.store(node.target)
-
- def visit_FromImport(self, node, **kwargs):
- self.generic_visit(node, **kwargs)
- for name in node.names:
- if isinstance(name, tuple):
- self.symbols.store(name[1])
- else:
- self.symbols.store(name)
-
- def visit_Assign(self, node, **kwargs):
- """Visit assignments in the correct order."""
- self.visit(node.node, **kwargs)
- self.visit(node.target, **kwargs)
-
- def visit_For(self, node, **kwargs):
- """Visiting stops at for blocks. However the block sequence
- is visited as part of the outer scope.
- """
- self.visit(node.iter, **kwargs)
-
- def visit_CallBlock(self, node, **kwargs):
- self.visit(node.call, **kwargs)
-
- def visit_FilterBlock(self, node, **kwargs):
- self.visit(node.filter, **kwargs)
-
- def visit_With(self, node, **kwargs):
- for target in node.values:
- self.visit(target)
-
- def visit_AssignBlock(self, node, **kwargs):
- """Stop visiting at block assigns."""
- self.visit(node.target, **kwargs)
-
- def visit_Scope(self, node, **kwargs):
- """Stop visiting at scopes."""
-
- def visit_Block(self, node, **kwargs):
- """Stop visiting at blocks."""
-
- def visit_OverlayScope(self, node, **kwargs):
- """Do not visit into overlay scopes."""
diff --git a/lib/spack/external/jinja2/lexer.py b/lib/spack/external/jinja2/lexer.py
deleted file mode 100644
index 552356a12d..0000000000
--- a/lib/spack/external/jinja2/lexer.py
+++ /dev/null
@@ -1,848 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Implements a Jinja / Python combination lexer. The ``Lexer`` class
-is used to do some preprocessing. It filters out invalid operators like
-the bitshift operators we don't allow in templates. It separates
-template code and python code in expressions.
-"""
-import re
-from ast import literal_eval
-from collections import deque
-from operator import itemgetter
-
-from ._compat import implements_iterator
-from ._compat import intern
-from ._compat import iteritems
-from ._compat import text_type
-from .exceptions import TemplateSyntaxError
-from .utils import LRUCache
-
-# cache for the lexers. Exists in order to be able to have multiple
-# environments with the same lexer
-_lexer_cache = LRUCache(50)
-
-# static regular expressions
-whitespace_re = re.compile(r"\s+", re.U)
-newline_re = re.compile(r"(\r\n|\r|\n)")
-string_re = re.compile(
- r"('([^'\\]*(?:\\.[^'\\]*)*)'" r'|"([^"\\]*(?:\\.[^"\\]*)*)")', re.S
-)
-integer_re = re.compile(r"(\d+_)*\d+")
-float_re = re.compile(
- r"""
- (?<!\.) # doesn't start with a .
- (\d+_)*\d+ # digits, possibly _ separated
- (
- (\.(\d+_)*\d+)? # optional fractional part
- e[+\-]?(\d+_)*\d+ # exponent part
- |
- \.(\d+_)*\d+ # required fractional part
- )
- """,
- re.IGNORECASE | re.VERBOSE,
-)
-
-try:
- # check if this Python supports Unicode identifiers
- compile("föö", "<unknown>", "eval")
-except SyntaxError:
- # Python 2, no Unicode support, use ASCII identifiers
- name_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*")
- check_ident = False
-else:
- # Unicode support, import generated re pattern and set flag to use
- # str.isidentifier to validate during lexing.
- from ._identifier import pattern as name_re
-
- check_ident = True
-
-# intern the tokens and keep references to them
-TOKEN_ADD = intern("add")
-TOKEN_ASSIGN = intern("assign")
-TOKEN_COLON = intern("colon")
-TOKEN_COMMA = intern("comma")
-TOKEN_DIV = intern("div")
-TOKEN_DOT = intern("dot")
-TOKEN_EQ = intern("eq")
-TOKEN_FLOORDIV = intern("floordiv")
-TOKEN_GT = intern("gt")
-TOKEN_GTEQ = intern("gteq")
-TOKEN_LBRACE = intern("lbrace")
-TOKEN_LBRACKET = intern("lbracket")
-TOKEN_LPAREN = intern("lparen")
-TOKEN_LT = intern("lt")
-TOKEN_LTEQ = intern("lteq")
-TOKEN_MOD = intern("mod")
-TOKEN_MUL = intern("mul")
-TOKEN_NE = intern("ne")
-TOKEN_PIPE = intern("pipe")
-TOKEN_POW = intern("pow")
-TOKEN_RBRACE = intern("rbrace")
-TOKEN_RBRACKET = intern("rbracket")
-TOKEN_RPAREN = intern("rparen")
-TOKEN_SEMICOLON = intern("semicolon")
-TOKEN_SUB = intern("sub")
-TOKEN_TILDE = intern("tilde")
-TOKEN_WHITESPACE = intern("whitespace")
-TOKEN_FLOAT = intern("float")
-TOKEN_INTEGER = intern("integer")
-TOKEN_NAME = intern("name")
-TOKEN_STRING = intern("string")
-TOKEN_OPERATOR = intern("operator")
-TOKEN_BLOCK_BEGIN = intern("block_begin")
-TOKEN_BLOCK_END = intern("block_end")
-TOKEN_VARIABLE_BEGIN = intern("variable_begin")
-TOKEN_VARIABLE_END = intern("variable_end")
-TOKEN_RAW_BEGIN = intern("raw_begin")
-TOKEN_RAW_END = intern("raw_end")
-TOKEN_COMMENT_BEGIN = intern("comment_begin")
-TOKEN_COMMENT_END = intern("comment_end")
-TOKEN_COMMENT = intern("comment")
-TOKEN_LINESTATEMENT_BEGIN = intern("linestatement_begin")
-TOKEN_LINESTATEMENT_END = intern("linestatement_end")
-TOKEN_LINECOMMENT_BEGIN = intern("linecomment_begin")
-TOKEN_LINECOMMENT_END = intern("linecomment_end")
-TOKEN_LINECOMMENT = intern("linecomment")
-TOKEN_DATA = intern("data")
-TOKEN_INITIAL = intern("initial")
-TOKEN_EOF = intern("eof")
-
-# bind operators to token types
-operators = {
- "+": TOKEN_ADD,
- "-": TOKEN_SUB,
- "/": TOKEN_DIV,
- "//": TOKEN_FLOORDIV,
- "*": TOKEN_MUL,
- "%": TOKEN_MOD,
- "**": TOKEN_POW,
- "~": TOKEN_TILDE,
- "[": TOKEN_LBRACKET,
- "]": TOKEN_RBRACKET,
- "(": TOKEN_LPAREN,
- ")": TOKEN_RPAREN,
- "{": TOKEN_LBRACE,
- "}": TOKEN_RBRACE,
- "==": TOKEN_EQ,
- "!=": TOKEN_NE,
- ">": TOKEN_GT,
- ">=": TOKEN_GTEQ,
- "<": TOKEN_LT,
- "<=": TOKEN_LTEQ,
- "=": TOKEN_ASSIGN,
- ".": TOKEN_DOT,
- ":": TOKEN_COLON,
- "|": TOKEN_PIPE,
- ",": TOKEN_COMMA,
- ";": TOKEN_SEMICOLON,
-}
-
-reverse_operators = dict([(v, k) for k, v in iteritems(operators)])
-assert len(operators) == len(reverse_operators), "operators dropped"
-operator_re = re.compile(
- "(%s)" % "|".join(re.escape(x) for x in sorted(operators, key=lambda x: -len(x)))
-)
-
-ignored_tokens = frozenset(
- [
- TOKEN_COMMENT_BEGIN,
- TOKEN_COMMENT,
- TOKEN_COMMENT_END,
- TOKEN_WHITESPACE,
- TOKEN_LINECOMMENT_BEGIN,
- TOKEN_LINECOMMENT_END,
- TOKEN_LINECOMMENT,
- ]
-)
-ignore_if_empty = frozenset(
- [TOKEN_WHITESPACE, TOKEN_DATA, TOKEN_COMMENT, TOKEN_LINECOMMENT]
-)
-
-
-def _describe_token_type(token_type):
- if token_type in reverse_operators:
- return reverse_operators[token_type]
- return {
- TOKEN_COMMENT_BEGIN: "begin of comment",
- TOKEN_COMMENT_END: "end of comment",
- TOKEN_COMMENT: "comment",
- TOKEN_LINECOMMENT: "comment",
- TOKEN_BLOCK_BEGIN: "begin of statement block",
- TOKEN_BLOCK_END: "end of statement block",
- TOKEN_VARIABLE_BEGIN: "begin of print statement",
- TOKEN_VARIABLE_END: "end of print statement",
- TOKEN_LINESTATEMENT_BEGIN: "begin of line statement",
- TOKEN_LINESTATEMENT_END: "end of line statement",
- TOKEN_DATA: "template data / text",
- TOKEN_EOF: "end of template",
- }.get(token_type, token_type)
-
-
-def describe_token(token):
- """Returns a description of the token."""
- if token.type == TOKEN_NAME:
- return token.value
- return _describe_token_type(token.type)
-
-
-def describe_token_expr(expr):
- """Like `describe_token` but for token expressions."""
- if ":" in expr:
- type, value = expr.split(":", 1)
- if type == TOKEN_NAME:
- return value
- else:
- type = expr
- return _describe_token_type(type)
-
-
-def count_newlines(value):
- """Count the number of newline characters in the string. This is
- useful for extensions that filter a stream.
- """
- return len(newline_re.findall(value))
-
-
-def compile_rules(environment):
- """Compiles all the rules from the environment into a list of rules."""
- e = re.escape
- rules = [
- (
- len(environment.comment_start_string),
- TOKEN_COMMENT_BEGIN,
- e(environment.comment_start_string),
- ),
- (
- len(environment.block_start_string),
- TOKEN_BLOCK_BEGIN,
- e(environment.block_start_string),
- ),
- (
- len(environment.variable_start_string),
- TOKEN_VARIABLE_BEGIN,
- e(environment.variable_start_string),
- ),
- ]
-
- if environment.line_statement_prefix is not None:
- rules.append(
- (
- len(environment.line_statement_prefix),
- TOKEN_LINESTATEMENT_BEGIN,
- r"^[ \t\v]*" + e(environment.line_statement_prefix),
- )
- )
- if environment.line_comment_prefix is not None:
- rules.append(
- (
- len(environment.line_comment_prefix),
- TOKEN_LINECOMMENT_BEGIN,
- r"(?:^|(?<=\S))[^\S\r\n]*" + e(environment.line_comment_prefix),
- )
- )
-
- return [x[1:] for x in sorted(rules, reverse=True)]
-
-
-class Failure(object):
- """Class that raises a `TemplateSyntaxError` if called.
- Used by the `Lexer` to specify known errors.
- """
-
- def __init__(self, message, cls=TemplateSyntaxError):
- self.message = message
- self.error_class = cls
-
- def __call__(self, lineno, filename):
- raise self.error_class(self.message, lineno, filename)
-
-
-class Token(tuple):
- """Token class."""
-
- __slots__ = ()
- lineno, type, value = (property(itemgetter(x)) for x in range(3))
-
- def __new__(cls, lineno, type, value):
- return tuple.__new__(cls, (lineno, intern(str(type)), value))
-
- def __str__(self):
- if self.type in reverse_operators:
- return reverse_operators[self.type]
- elif self.type == "name":
- return self.value
- return self.type
-
- def test(self, expr):
- """Test a token against a token expression. This can either be a
- token type or ``'token_type:token_value'``. This can only test
- against string values and types.
- """
- # here we do a regular string equality check as test_any is usually
- # passed an iterable of non-interned strings.
- if self.type == expr:
- return True
- elif ":" in expr:
- return expr.split(":", 1) == [self.type, self.value]
- return False
-
- def test_any(self, *iterable):
- """Test against multiple token expressions."""
- for expr in iterable:
- if self.test(expr):
- return True
- return False
-
- def __repr__(self):
- return "Token(%r, %r, %r)" % (self.lineno, self.type, self.value)
-
-
-@implements_iterator
-class TokenStreamIterator(object):
- """The iterator for tokenstreams. Iterate over the stream
- until the eof token is reached.
- """
-
- def __init__(self, stream):
- self.stream = stream
-
- def __iter__(self):
- return self
-
- def __next__(self):
- token = self.stream.current
- if token.type is TOKEN_EOF:
- self.stream.close()
- raise StopIteration()
- next(self.stream)
- return token
-
-
-@implements_iterator
-class TokenStream(object):
- """A token stream is an iterable that yields :class:`Token`\\s. The
- parser however does not iterate over it but calls :meth:`next` to go
- one token ahead. The current active token is stored as :attr:`current`.
- """
-
- def __init__(self, generator, name, filename):
- self._iter = iter(generator)
- self._pushed = deque()
- self.name = name
- self.filename = filename
- self.closed = False
- self.current = Token(1, TOKEN_INITIAL, "")
- next(self)
-
- def __iter__(self):
- return TokenStreamIterator(self)
-
- def __bool__(self):
- return bool(self._pushed) or self.current.type is not TOKEN_EOF
-
- __nonzero__ = __bool__ # py2
-
- @property
- def eos(self):
- """Are we at the end of the stream?"""
- return not self
-
- def push(self, token):
- """Push a token back to the stream."""
- self._pushed.append(token)
-
- def look(self):
- """Look at the next token."""
- old_token = next(self)
- result = self.current
- self.push(result)
- self.current = old_token
- return result
-
- def skip(self, n=1):
- """Got n tokens ahead."""
- for _ in range(n):
- next(self)
-
- def next_if(self, expr):
- """Perform the token test and return the token if it matched.
- Otherwise the return value is `None`.
- """
- if self.current.test(expr):
- return next(self)
-
- def skip_if(self, expr):
- """Like :meth:`next_if` but only returns `True` or `False`."""
- return self.next_if(expr) is not None
-
- def __next__(self):
- """Go one token ahead and return the old one.
-
- Use the built-in :func:`next` instead of calling this directly.
- """
- rv = self.current
- if self._pushed:
- self.current = self._pushed.popleft()
- elif self.current.type is not TOKEN_EOF:
- try:
- self.current = next(self._iter)
- except StopIteration:
- self.close()
- return rv
-
- def close(self):
- """Close the stream."""
- self.current = Token(self.current.lineno, TOKEN_EOF, "")
- self._iter = None
- self.closed = True
-
- def expect(self, expr):
- """Expect a given token type and return it. This accepts the same
- argument as :meth:`jinja2.lexer.Token.test`.
- """
- if not self.current.test(expr):
- expr = describe_token_expr(expr)
- if self.current.type is TOKEN_EOF:
- raise TemplateSyntaxError(
- "unexpected end of template, expected %r." % expr,
- self.current.lineno,
- self.name,
- self.filename,
- )
- raise TemplateSyntaxError(
- "expected token %r, got %r" % (expr, describe_token(self.current)),
- self.current.lineno,
- self.name,
- self.filename,
- )
- try:
- return self.current
- finally:
- next(self)
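-
-# Usage sketch (not part of the original file), assuming `env` is a
-# default jinja2 Environment; `Lexer.tokenize` (defined later in this
-# module) produces such streams:
-#
-#   stream = get_lexer(env).tokenize("{{ x }}")
-#   stream.expect("variable_begin")
-#   stream.expect("name:x")
-#   stream.expect("variable_end")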
-
-
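A minimal sketch of driving the stream API above, assuming the vendored jinja2 2.x ``get_lexer`` entry point and its token-type names::

    from jinja2 import Environment
    from jinja2.lexer import get_lexer

    stream = get_lexer(Environment()).tokenize("{{ foo + 1 }}")
    stream.expect("variable_begin")    # raises TemplateSyntaxError on mismatch
    tok = stream.expect("name")        # returns the matched token, then advances
    assert tok.value == "foo"
    stream.skip(1)                     # step over the '+' operator token
    assert stream.current.type == "integer"
    stream.expect("integer")
    stream.expect("variable_end")
    assert stream.eos                  # only the eof token remains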
-def get_lexer(environment):
- """Return a lexer which is probably cached."""
- key = (
- environment.block_start_string,
- environment.block_end_string,
- environment.variable_start_string,
- environment.variable_end_string,
- environment.comment_start_string,
- environment.comment_end_string,
- environment.line_statement_prefix,
- environment.line_comment_prefix,
- environment.trim_blocks,
- environment.lstrip_blocks,
- environment.newline_sequence,
- environment.keep_trailing_newline,
- )
- lexer = _lexer_cache.get(key)
- if lexer is None:
- lexer = Lexer(environment)
- _lexer_cache[key] = lexer
- return lexer
-
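Because the cache key is the tuple of delimiter and whitespace settings above, identically configured environments share one ``Lexer``; a minimal sketch::

    from jinja2 import Environment
    from jinja2.lexer import get_lexer

    a, b = Environment(), Environment()
    assert get_lexer(a) is get_lexer(b)        # identical settings hit the cache
    c = Environment(block_start_string="<%")   # a differing setting changes the key
    assert get_lexer(c) is not get_lexer(a)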
-
-class OptionalLStrip(tuple):
- """A special tuple for marking a point in the state that can have
- lstrip applied.
- """
-
- __slots__ = ()
-
- # Even though it looks like a no-op, creating instances fails
- # without this.
- def __new__(cls, *members, **kwargs):
- return super(OptionalLStrip, cls).__new__(cls, members)
-
-
-class Lexer(object):
-    """Class that implements a lexer for a given environment. It is
-    created automatically by the environment class; you usually don't
-    have to create one yourself.
-
- Note that the lexer is not automatically bound to an environment.
- Multiple environments can share the same lexer.
- """
-
- def __init__(self, environment):
- # shortcuts
- e = re.escape
-
- def c(x):
- return re.compile(x, re.M | re.S)
-
- # lexing rules for tags
- tag_rules = [
- (whitespace_re, TOKEN_WHITESPACE, None),
- (float_re, TOKEN_FLOAT, None),
- (integer_re, TOKEN_INTEGER, None),
- (name_re, TOKEN_NAME, None),
- (string_re, TOKEN_STRING, None),
- (operator_re, TOKEN_OPERATOR, None),
- ]
-
-        # assemble the root lexing rule. because "|" alternation matches
-        # the first alternative rather than the longest, we have to sort
-        # the rules by length so that the lexer keeps working as expected
-        # when there are parsing rules like <% for blocks and <%= for
-        # variables (if someone wants asp-like syntax). variables are
-        # just part of the rules if variable processing is required.
- root_tag_rules = compile_rules(environment)
-
- # block suffix if trimming is enabled
-        block_suffix_re = "\\n?" if environment.trim_blocks else ""
-
- # If lstrip is enabled, it should not be applied if there is any
- # non-whitespace between the newline and block.
- self.lstrip_unless_re = c(r"[^ \t]") if environment.lstrip_blocks else None
-
- self.newline_sequence = environment.newline_sequence
- self.keep_trailing_newline = environment.keep_trailing_newline
-
- # global lexing rules
- self.rules = {
- "root": [
- # directives
- (
- c(
- "(.*?)(?:%s)"
- % "|".join(
- [
- r"(?P<raw_begin>%s(\-|\+|)\s*raw\s*(?:\-%s\s*|%s))"
- % (
- e(environment.block_start_string),
- e(environment.block_end_string),
- e(environment.block_end_string),
- )
- ]
- + [
- r"(?P<%s>%s(\-|\+|))" % (n, r)
- for n, r in root_tag_rules
- ]
- )
- ),
- OptionalLStrip(TOKEN_DATA, "#bygroup"),
- "#bygroup",
- ),
- # data
- (c(".+"), TOKEN_DATA, None),
- ],
- # comments
- TOKEN_COMMENT_BEGIN: [
- (
- c(
- r"(.*?)((?:\-%s\s*|%s)%s)"
- % (
- e(environment.comment_end_string),
- e(environment.comment_end_string),
- block_suffix_re,
- )
- ),
- (TOKEN_COMMENT, TOKEN_COMMENT_END),
- "#pop",
- ),
- (c("(.)"), (Failure("Missing end of comment tag"),), None),
- ],
- # blocks
- TOKEN_BLOCK_BEGIN: [
- (
- c(
- r"(?:\-%s\s*|%s)%s"
- % (
- e(environment.block_end_string),
- e(environment.block_end_string),
- block_suffix_re,
- )
- ),
- TOKEN_BLOCK_END,
- "#pop",
- ),
- ]
- + tag_rules,
- # variables
- TOKEN_VARIABLE_BEGIN: [
- (
- c(
- r"\-%s\s*|%s"
- % (
- e(environment.variable_end_string),
- e(environment.variable_end_string),
- )
- ),
- TOKEN_VARIABLE_END,
- "#pop",
- )
- ]
- + tag_rules,
- # raw block
- TOKEN_RAW_BEGIN: [
- (
- c(
- r"(.*?)((?:%s(\-|\+|))\s*endraw\s*(?:\-%s\s*|%s%s))"
- % (
- e(environment.block_start_string),
- e(environment.block_end_string),
- e(environment.block_end_string),
- block_suffix_re,
- )
- ),
- OptionalLStrip(TOKEN_DATA, TOKEN_RAW_END),
- "#pop",
- ),
- (c("(.)"), (Failure("Missing end of raw directive"),), None),
- ],
- # line statements
- TOKEN_LINESTATEMENT_BEGIN: [
- (c(r"\s*(\n|$)"), TOKEN_LINESTATEMENT_END, "#pop")
- ]
- + tag_rules,
- # line comments
- TOKEN_LINECOMMENT_BEGIN: [
- (
- c(r"(.*?)()(?=\n|$)"),
- (TOKEN_LINECOMMENT, TOKEN_LINECOMMENT_END),
- "#pop",
- )
- ],
- }
-
- def _normalize_newlines(self, value):
-        """Called for strings and template data to normalize the newlines
-        to the configured newline sequence."""
- return newline_re.sub(self.newline_sequence, value)
-
- def tokenize(self, source, name=None, filename=None, state=None):
-        """Calls :meth:`tokeniter` and :meth:`wrap` and returns the
-        wrapped :class:`TokenStream`."""
- stream = self.tokeniter(source, name, filename, state)
- return TokenStream(self.wrap(stream, name, filename), name, filename)
-
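A minimal sketch of the two layers, assuming the vendored jinja2 2.x API: ``tokeniter`` yields raw ``(lineno, token, value)`` triples, while ``tokenize`` returns the wrapped, filtered stream::

    from jinja2 import Environment
    from jinja2.lexer import get_lexer

    lexer = get_lexer(Environment())
    raw = list(lexer.tokeniter(u"{{ 1 }}", "demo"))
    # raw triples still include whitespace tokens and string
    # values, e.g. (1, 'integer', '1')
    cooked = list(lexer.tokenize(u"{{ 1 }}", name="demo"))
    # wrapped tokens are filtered and converted,
    # e.g. Token(1, 'integer', 1)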
- def wrap(self, stream, name=None, filename=None):
- """This is called with the stream as returned by `tokenize` and wraps
- every token in a :class:`Token` and converts the value.
- """
- for lineno, token, value in stream:
- if token in ignored_tokens:
- continue
- elif token == TOKEN_LINESTATEMENT_BEGIN:
- token = TOKEN_BLOCK_BEGIN
- elif token == TOKEN_LINESTATEMENT_END:
- token = TOKEN_BLOCK_END
- # we are not interested in those tokens in the parser
- elif token in (TOKEN_RAW_BEGIN, TOKEN_RAW_END):
- continue
- elif token == TOKEN_DATA:
- value = self._normalize_newlines(value)
- elif token == "keyword":
- token = value
- elif token == TOKEN_NAME:
- value = str(value)
- if check_ident and not value.isidentifier():
- raise TemplateSyntaxError(
- "Invalid character in identifier", lineno, name, filename
- )
- elif token == TOKEN_STRING:
- # try to unescape string
- try:
- value = (
- self._normalize_newlines(value[1:-1])
- .encode("ascii", "backslashreplace")
- .decode("unicode-escape")
- )
- except Exception as e:
- msg = str(e).split(":")[-1].strip()
- raise TemplateSyntaxError(msg, lineno, name, filename)
- elif token == TOKEN_INTEGER:
- value = int(value.replace("_", ""))
- elif token == TOKEN_FLOAT:
- # remove all "_" first to support more Python versions
- value = literal_eval(value.replace("_", ""))
- elif token == TOKEN_OPERATOR:
- token = operators[value]
- yield Token(lineno, token, value)
-
- def tokeniter(self, source, name, filename=None, state=None):
- """This method tokenizes the text and returns the tokens in a
- generator. Use this method if you just want to tokenize a template.
- """
- source = text_type(source)
- lines = source.splitlines()
- if self.keep_trailing_newline and source:
- for newline in ("\r\n", "\r", "\n"):
- if source.endswith(newline):
- lines.append("")
- break
- source = "\n".join(lines)
- pos = 0
- lineno = 1
- stack = ["root"]
- if state is not None and state != "root":
- assert state in ("variable", "block"), "invalid state"
- stack.append(state + "_begin")
- statetokens = self.rules[stack[-1]]
- source_length = len(source)
- balancing_stack = []
- lstrip_unless_re = self.lstrip_unless_re
- newlines_stripped = 0
- line_starting = True
-
- while 1:
- # tokenizer loop
- for regex, tokens, new_state in statetokens:
- m = regex.match(source, pos)
- # if no match we try again with the next rule
- if m is None:
- continue
-
- # we only match blocks and variables if braces / parentheses
- # are balanced. continue parsing with the lower rule which
- # is the operator rule. do this only if the end tags look
- # like operators
- if balancing_stack and tokens in (
- TOKEN_VARIABLE_END,
- TOKEN_BLOCK_END,
- TOKEN_LINESTATEMENT_END,
- ):
- continue
-
- # tuples support more options
- if isinstance(tokens, tuple):
- groups = m.groups()
-
- if isinstance(tokens, OptionalLStrip):
- # Rule supports lstrip. Match will look like
- # text, block type, whitespace control, type, control, ...
- text = groups[0]
-
- # Skipping the text and first type, every other group is the
- # whitespace control for each type. One of the groups will be
- # -, +, or empty string instead of None.
- strip_sign = next(g for g in groups[2::2] if g is not None)
-
- if strip_sign == "-":
- # Strip all whitespace between the text and the tag.
- stripped = text.rstrip()
- newlines_stripped = text[len(stripped) :].count("\n")
- groups = (stripped,) + groups[1:]
- elif (
- # Not marked for preserving whitespace.
- strip_sign != "+"
- # lstrip is enabled.
- and lstrip_unless_re is not None
- # Not a variable expression.
- and not m.groupdict().get(TOKEN_VARIABLE_BEGIN)
- ):
- # The start of text between the last newline and the tag.
- l_pos = text.rfind("\n") + 1
- if l_pos > 0 or line_starting:
- # If there's only whitespace between the newline and the
- # tag, strip it.
- if not lstrip_unless_re.search(text, l_pos):
- groups = (text[:l_pos],) + groups[1:]
-
- for idx, token in enumerate(tokens):
- # failure group
- if token.__class__ is Failure:
- raise token(lineno, filename)
-                        # bygroup is a bit more complex: in that case we
-                        # yield the first named group that matched for
-                        # the current token
- elif token == "#bygroup":
- for key, value in iteritems(m.groupdict()):
- if value is not None:
- yield lineno, key, value
- lineno += value.count("\n")
- break
- else:
- raise RuntimeError(
- "%r wanted to resolve "
- "the token dynamically"
- " but no group matched" % regex
- )
- # normal group
- else:
- data = groups[idx]
- if data or token not in ignore_if_empty:
- yield lineno, token, data
- lineno += data.count("\n") + newlines_stripped
- newlines_stripped = 0
-
-                # strings as tokens are just yielded as-is.
- else:
- data = m.group()
- # update brace/parentheses balance
- if tokens == TOKEN_OPERATOR:
- if data == "{":
- balancing_stack.append("}")
- elif data == "(":
- balancing_stack.append(")")
- elif data == "[":
- balancing_stack.append("]")
- elif data in ("}", ")", "]"):
- if not balancing_stack:
- raise TemplateSyntaxError(
- "unexpected '%s'" % data, lineno, name, filename
- )
- expected_op = balancing_stack.pop()
- if expected_op != data:
- raise TemplateSyntaxError(
- "unexpected '%s', "
- "expected '%s'" % (data, expected_op),
- lineno,
- name,
- filename,
- )
- # yield items
- if data or tokens not in ignore_if_empty:
- yield lineno, tokens, data
- lineno += data.count("\n")
-
- line_starting = m.group()[-1:] == "\n"
-
-                # fetch the new position into a new variable so that we can
-                # check if there is an internal parsing error which would
-                # result in an infinite loop
- pos2 = m.end()
-
- # handle state changes
- if new_state is not None:
- # remove the uppermost state
- if new_state == "#pop":
- stack.pop()
- # resolve the new state by group checking
- elif new_state == "#bygroup":
- for key, value in iteritems(m.groupdict()):
- if value is not None:
- stack.append(key)
- break
- else:
- raise RuntimeError(
- "%r wanted to resolve the "
- "new state dynamically but"
- " no group matched" % regex
- )
- # direct state name given
- else:
- stack.append(new_state)
- statetokens = self.rules[stack[-1]]
-                # we are still at the same position and there was no stack
-                # change: this would loop forever without a break condition,
-                # so raise an error
- elif pos2 == pos:
- raise RuntimeError(
- "%r yielded empty string without stack change" % regex
- )
-                # publish the new position and start again
- pos = pos2
- break
-            # if the loop terminated without a break we haven't found a
-            # single match; either we are at the end of the file or we
-            # have a problem
- else:
- # end of text
- if pos >= source_length:
- return
- # something went wrong
- raise TemplateSyntaxError(
- "unexpected char %r at %d" % (source[pos], pos),
- lineno,
- name,
- filename,
- )
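The whitespace-control handling above is what backs ``{{- ... }}`` markers and the ``trim_blocks``/``lstrip_blocks`` options; a minimal end-to-end sketch::

    from jinja2 import Environment

    env = Environment(trim_blocks=True, lstrip_blocks=True)
    tmpl = env.from_string("    {% if True %}\n  hi\n    {% endif %}\n")
    assert tmpl.render() == "  hi\n"    # tag indentation and newlines stripped
    assert Environment().from_string("a {{- 1 }}").render() == "a1"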
diff --git a/lib/spack/external/jinja2/loaders.py b/lib/spack/external/jinja2/loaders.py
deleted file mode 100644
index 457c4b59a7..0000000000
--- a/lib/spack/external/jinja2/loaders.py
+++ /dev/null
@@ -1,504 +0,0 @@
-# -*- coding: utf-8 -*-
-"""API and implementations for loading templates from different data
-sources.
-"""
-import os
-import sys
-import weakref
-from hashlib import sha1
-from os import path
-from types import ModuleType
-
-from ._compat import abc
-from ._compat import fspath
-from ._compat import iteritems
-from ._compat import string_types
-from .exceptions import TemplateNotFound
-from .utils import internalcode
-from .utils import open_if_exists
-
-
-def split_template_path(template):
- """Split a path into segments and perform a sanity check. If it detects
- '..' in the path it will raise a `TemplateNotFound` error.
- """
- pieces = []
- for piece in template.split("/"):
- if (
- path.sep in piece
- or (path.altsep and path.altsep in piece)
- or piece == path.pardir
- ):
- raise TemplateNotFound(template)
- elif piece and piece != ".":
- pieces.append(piece)
- return pieces
-
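A minimal sketch of what this sanity check accepts and rejects::

    from jinja2 import TemplateNotFound
    from jinja2.loaders import split_template_path

    assert split_template_path("a/./b//c.html") == ["a", "b", "c.html"]
    try:
        split_template_path("../secret.html")   # '..' segments are rejected
    except TemplateNotFound:
        pass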
-
-class BaseLoader(object):
- """Baseclass for all loaders. Subclass this and override `get_source` to
- implement a custom loading mechanism. The environment provides a
- `get_template` method that calls the loader's `load` method to get the
- :class:`Template` object.
-
- A very basic example for a loader that looks up templates on the file
- system could look like this::
-
- from jinja2 import BaseLoader, TemplateNotFound
- from os.path import join, exists, getmtime
-
- class MyLoader(BaseLoader):
-
- def __init__(self, path):
- self.path = path
-
- def get_source(self, environment, template):
- path = join(self.path, template)
- if not exists(path):
- raise TemplateNotFound(template)
- mtime = getmtime(path)
-                with open(path, 'rb') as f:
-                    source = f.read().decode('utf-8')
- return source, path, lambda: mtime == getmtime(path)
- """
-
- #: if set to `False` it indicates that the loader cannot provide access
- #: to the source of templates.
- #:
- #: .. versionadded:: 2.4
- has_source_access = True
-
- def get_source(self, environment, template):
- """Get the template source, filename and reload helper for a template.
- It's passed the environment and template name and has to return a
- tuple in the form ``(source, filename, uptodate)`` or raise a
- `TemplateNotFound` error if it can't locate the template.
-
- The source part of the returned tuple must be the source of the
-        template as a unicode string or an ASCII bytestring. The filename should
- be the name of the file on the filesystem if it was loaded from there,
- otherwise `None`. The filename is used by python for the tracebacks
- if no loader extension is used.
-
- The last item in the tuple is the `uptodate` function. If auto
- reloading is enabled it's always called to check if the template
- changed. No arguments are passed so the function must store the
- old state somewhere (for example in a closure). If it returns `False`
- the template will be reloaded.
- """
- if not self.has_source_access:
- raise RuntimeError(
- "%s cannot provide access to the source" % self.__class__.__name__
- )
- raise TemplateNotFound(template)
-
- def list_templates(self):
- """Iterates over all templates. If the loader does not support that
- it should raise a :exc:`TypeError` which is the default behavior.
- """
- raise TypeError("this loader cannot iterate over all templates")
-
- @internalcode
- def load(self, environment, name, globals=None):
- """Loads a template. This method looks up the template in the cache
- or loads one by calling :meth:`get_source`. Subclasses should not
- override this method as loaders working on collections of other
- loaders (such as :class:`PrefixLoader` or :class:`ChoiceLoader`)
- will not call this method but `get_source` directly.
- """
- code = None
- if globals is None:
- globals = {}
-
- # first we try to get the source for this template together
- # with the filename and the uptodate function.
- source, filename, uptodate = self.get_source(environment, name)
-
- # try to load the code from the bytecode cache if there is a
- # bytecode cache configured.
- bcc = environment.bytecode_cache
- if bcc is not None:
- bucket = bcc.get_bucket(environment, name, filename, source)
- code = bucket.code
-
-        # if we don't have code so far (not cached, or no longer up to
-        # date), we compile the template
- if code is None:
- code = environment.compile(source, name, filename)
-
-        # if the bytecode cache is available and the bucket doesn't
-        # have code so far, we give the bucket the new code and put
-        # it back to the bytecode cache.
- if bcc is not None and bucket.code is None:
- bucket.code = code
- bcc.set_bucket(bucket)
-
- return environment.template_class.from_code(
- environment, code, globals, uptodate
- )
-
-
-class FileSystemLoader(BaseLoader):
- """Loads templates from the file system. This loader can find templates
- in folders on the file system and is the preferred way to load them.
-
-    The loader takes the path to the templates as a string or, if multiple
-    locations are wanted, a list of them, which is then searched in the
-    given order::
-
- >>> loader = FileSystemLoader('/path/to/templates')
- >>> loader = FileSystemLoader(['/path/to/templates', '/other/path'])
-
-    By default the template encoding is ``'utf-8'``, which can be changed
- by setting the `encoding` parameter to something else.
-
- To follow symbolic links, set the *followlinks* parameter to ``True``::
-
- >>> loader = FileSystemLoader('/path/to/templates', followlinks=True)
-
- .. versionchanged:: 2.8
- The ``followlinks`` parameter was added.
- """
-
- def __init__(self, searchpath, encoding="utf-8", followlinks=False):
- if not isinstance(searchpath, abc.Iterable) or isinstance(
- searchpath, string_types
- ):
- searchpath = [searchpath]
-
- # In Python 3.5, os.path.join doesn't support Path. This can be
- # simplified to list(searchpath) when Python 3.5 is dropped.
- self.searchpath = [fspath(p) for p in searchpath]
-
- self.encoding = encoding
- self.followlinks = followlinks
-
- def get_source(self, environment, template):
- pieces = split_template_path(template)
- for searchpath in self.searchpath:
- filename = path.join(searchpath, *pieces)
- f = open_if_exists(filename)
- if f is None:
- continue
- try:
- contents = f.read().decode(self.encoding)
- finally:
- f.close()
-
- mtime = path.getmtime(filename)
-
- def uptodate():
- try:
- return path.getmtime(filename) == mtime
- except OSError:
- return False
-
- return contents, filename, uptodate
- raise TemplateNotFound(template)
-
- def list_templates(self):
- found = set()
- for searchpath in self.searchpath:
- walk_dir = os.walk(searchpath, followlinks=self.followlinks)
- for dirpath, _, filenames in walk_dir:
- for filename in filenames:
- template = (
- os.path.join(dirpath, filename)[len(searchpath) :]
- .strip(os.path.sep)
- .replace(os.path.sep, "/")
- )
- if template[:2] == "./":
- template = template[2:]
- if template not in found:
- found.add(template)
- return sorted(found)
-
-
-class PackageLoader(BaseLoader):
- """Load templates from python eggs or packages. It is constructed with
- the name of the python package and the path to the templates in that
- package::
-
- loader = PackageLoader('mypackage', 'views')
-
- If the package path is not given, ``'templates'`` is assumed.
-
-    By default the template encoding is ``'utf-8'``, which can be changed
- by setting the `encoding` parameter to something else. Due to the nature
- of eggs it's only possible to reload templates if the package was loaded
- from the file system and not a zip file.
- """
-
- def __init__(self, package_name, package_path="templates", encoding="utf-8"):
- from pkg_resources import DefaultProvider
- from pkg_resources import get_provider
- from pkg_resources import ResourceManager
-
- provider = get_provider(package_name)
- self.encoding = encoding
- self.manager = ResourceManager()
- self.filesystem_bound = isinstance(provider, DefaultProvider)
- self.provider = provider
- self.package_path = package_path
-
- def get_source(self, environment, template):
- pieces = split_template_path(template)
- p = "/".join((self.package_path,) + tuple(pieces))
-
- if not self.provider.has_resource(p):
- raise TemplateNotFound(template)
-
- filename = uptodate = None
-
- if self.filesystem_bound:
- filename = self.provider.get_resource_filename(self.manager, p)
- mtime = path.getmtime(filename)
-
- def uptodate():
- try:
- return path.getmtime(filename) == mtime
- except OSError:
- return False
-
- source = self.provider.get_resource_string(self.manager, p)
- return source.decode(self.encoding), filename, uptodate
-
- def list_templates(self):
- path = self.package_path
-
- if path[:2] == "./":
- path = path[2:]
- elif path == ".":
- path = ""
-
- offset = len(path)
- results = []
-
- def _walk(path):
- for filename in self.provider.resource_listdir(path):
- fullname = path + "/" + filename
-
- if self.provider.resource_isdir(fullname):
- _walk(fullname)
- else:
- results.append(fullname[offset:].lstrip("/"))
-
- _walk(path)
- results.sort()
- return results
-
-
-class DictLoader(BaseLoader):
-    """Loads a template from a python dict mapping template names to
-    unicode template sources. This loader is useful for unit testing:
-
- >>> loader = DictLoader({'index.html': 'source here'})
-
-    Because auto reloading is rarely useful, it is disabled by default.
- """
-
- def __init__(self, mapping):
- self.mapping = mapping
-
- def get_source(self, environment, template):
- if template in self.mapping:
- source = self.mapping[template]
- return source, None, lambda: source == self.mapping.get(template)
- raise TemplateNotFound(template)
-
- def list_templates(self):
- return sorted(self.mapping)
-
-
-class FunctionLoader(BaseLoader):
- """A loader that is passed a function which does the loading. The
- function receives the name of the template and has to return either
-    a unicode string with the template source, a tuple in the form ``(source,
-    filename, uptodatefunc)``, or `None` if the template does not exist.
-
- >>> def load_template(name):
- ... if name == 'index.html':
- ... return '...'
- ...
- >>> loader = FunctionLoader(load_template)
-
- The `uptodatefunc` is a function that is called if autoreload is enabled
- and has to return `True` if the template is still up to date. For more
- details have a look at :meth:`BaseLoader.get_source` which has the same
- return value.
- """
-
- def __init__(self, load_func):
- self.load_func = load_func
-
- def get_source(self, environment, template):
- rv = self.load_func(template)
- if rv is None:
- raise TemplateNotFound(template)
- elif isinstance(rv, string_types):
- return rv, None, None
- return rv
-
-
-class PrefixLoader(BaseLoader):
- """A loader that is passed a dict of loaders where each loader is bound
-    to a prefix. By default the prefix is delimited from the template name
-    by a slash, which can be changed by setting the `delimiter` argument to
- something else::
-
- loader = PrefixLoader({
- 'app1': PackageLoader('mypackage.app1'),
- 'app2': PackageLoader('mypackage.app2')
- })
-
-    Loading ``'app1/index.html'`` retrieves the file from the ``app1``
-    package, and loading ``'app2/index.html'`` the file from the second.
- """
-
- def __init__(self, mapping, delimiter="/"):
- self.mapping = mapping
- self.delimiter = delimiter
-
- def get_loader(self, template):
- try:
- prefix, name = template.split(self.delimiter, 1)
- loader = self.mapping[prefix]
- except (ValueError, KeyError):
- raise TemplateNotFound(template)
- return loader, name
-
- def get_source(self, environment, template):
- loader, name = self.get_loader(template)
- try:
- return loader.get_source(environment, name)
- except TemplateNotFound:
- # re-raise the exception with the correct filename here.
- # (the one that includes the prefix)
- raise TemplateNotFound(template)
-
- @internalcode
- def load(self, environment, name, globals=None):
- loader, local_name = self.get_loader(name)
- try:
- return loader.load(environment, local_name, globals)
- except TemplateNotFound:
- # re-raise the exception with the correct filename here.
- # (the one that includes the prefix)
- raise TemplateNotFound(name)
-
- def list_templates(self):
- result = []
- for prefix, loader in iteritems(self.mapping):
- for template in loader.list_templates():
- result.append(prefix + self.delimiter + template)
- return result
-
-
-class ChoiceLoader(BaseLoader):
-    """This loader works like the `PrefixLoader`, except that no prefix is
-    specified. If a template cannot be found by one loader, the next one
-    is tried.
-
- >>> loader = ChoiceLoader([
- ... FileSystemLoader('/path/to/user/templates'),
- ... FileSystemLoader('/path/to/system/templates')
- ... ])
-
- This is useful if you want to allow users to override builtin templates
- from a different location.
- """
-
- def __init__(self, loaders):
- self.loaders = loaders
-
- def get_source(self, environment, template):
- for loader in self.loaders:
- try:
- return loader.get_source(environment, template)
- except TemplateNotFound:
- pass
- raise TemplateNotFound(template)
-
- @internalcode
- def load(self, environment, name, globals=None):
- for loader in self.loaders:
- try:
- return loader.load(environment, name, globals)
- except TemplateNotFound:
- pass
- raise TemplateNotFound(name)
-
- def list_templates(self):
- found = set()
- for loader in self.loaders:
- found.update(loader.list_templates())
- return sorted(found)
-
-
-class _TemplateModule(ModuleType):
- """Like a normal module but with support for weak references"""
-
-
-class ModuleLoader(BaseLoader):
- """This loader loads templates from precompiled templates.
-
- Example usage:
-
- >>> loader = ChoiceLoader([
- ... ModuleLoader('/path/to/compiled/templates'),
- ... FileSystemLoader('/path/to/templates')
- ... ])
-
- Templates can be precompiled with :meth:`Environment.compile_templates`.
- """
-
- has_source_access = False
-
- def __init__(self, path):
- package_name = "_jinja2_module_templates_%x" % id(self)
-
- # create a fake module that looks for the templates in the
- # path given.
- mod = _TemplateModule(package_name)
-
- if not isinstance(path, abc.Iterable) or isinstance(path, string_types):
- path = [path]
-
- mod.__path__ = [fspath(p) for p in path]
-
- sys.modules[package_name] = weakref.proxy(
- mod, lambda x: sys.modules.pop(package_name, None)
- )
-
- # the only strong reference, the sys.modules entry is weak
- # so that the garbage collector can remove it once the
- # loader that created it goes out of business.
- self.module = mod
- self.package_name = package_name
-
- @staticmethod
- def get_template_key(name):
- return "tmpl_" + sha1(name.encode("utf-8")).hexdigest()
-
- @staticmethod
- def get_module_filename(name):
- return ModuleLoader.get_template_key(name) + ".py"
-
- @internalcode
- def load(self, environment, name, globals=None):
- key = self.get_template_key(name)
- module = "%s.%s" % (self.package_name, key)
- mod = getattr(self.module, module, None)
- if mod is None:
- try:
- mod = __import__(module, None, None, ["root"])
- except ImportError:
- raise TemplateNotFound(name)
-
- # remove the entry from sys.modules, we only want the attribute
- # on the module object we have stored on the loader.
- sys.modules.pop(module, None)
-
- return environment.template_class.from_module_dict(
- environment, mod.__dict__, globals
- )
diff --git a/lib/spack/external/jinja2/meta.py b/lib/spack/external/jinja2/meta.py
deleted file mode 100644
index 3795aace59..0000000000
--- a/lib/spack/external/jinja2/meta.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Functions that expose information about templates that might be
-interesting for introspection.
-"""
-from . import nodes
-from ._compat import iteritems
-from ._compat import string_types
-from .compiler import CodeGenerator
-
-
-class TrackingCodeGenerator(CodeGenerator):
- """We abuse the code generator for introspection."""
-
- def __init__(self, environment):
- CodeGenerator.__init__(self, environment, "<introspection>", "<introspection>")
- self.undeclared_identifiers = set()
-
- def write(self, x):
- """Don't write."""
-
- def enter_frame(self, frame):
- """Remember all undeclared identifiers."""
- CodeGenerator.enter_frame(self, frame)
- for _, (action, param) in iteritems(frame.symbols.loads):
- if action == "resolve" and param not in self.environment.globals:
- self.undeclared_identifiers.add(param)
-
-
-def find_undeclared_variables(ast):
- """Returns a set of all variables in the AST that will be looked up from
-    the context at runtime. Because it is not known at compile time which
-    path the execution will take, all such variables are returned.
-
- >>> from jinja2 import Environment, meta
- >>> env = Environment()
- >>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}')
- >>> meta.find_undeclared_variables(ast) == set(['bar'])
- True
-
- .. admonition:: Implementation
-
- Internally the code generator is used for finding undeclared variables.
- This is good to know because the code generator might raise a
- :exc:`TemplateAssertionError` during compilation and as a matter of
- fact this function can currently raise that exception as well.
- """
- codegen = TrackingCodeGenerator(ast.environment)
- codegen.visit(ast)
- return codegen.undeclared_identifiers
-
-
-def find_referenced_templates(ast):
- """Finds all the referenced templates from the AST. This will return an
- iterator over all the hardcoded template extensions, inclusions and
- imports. If dynamic inheritance or inclusion is used, `None` will be
- yielded.
-
- >>> from jinja2 import Environment, meta
- >>> env = Environment()
- >>> ast = env.parse('{% extends "layout.html" %}{% include helper %}')
- >>> list(meta.find_referenced_templates(ast))
- ['layout.html', None]
-
-    This function is useful for dependency tracking. For example, you can
-    use it to rebuild parts of the website after a layout template changes.
- """
- for node in ast.find_all(
- (nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include)
- ):
- if not isinstance(node.template, nodes.Const):
-            # a tuple with some non-const entries in there
- if isinstance(node.template, (nodes.Tuple, nodes.List)):
- for template_name in node.template.items:
- # something const, only yield the strings and ignore
- # non-string consts that really just make no sense
- if isinstance(template_name, nodes.Const):
- if isinstance(template_name.value, string_types):
- yield template_name.value
- # something dynamic in there
- else:
- yield None
- # something dynamic we don't know about here
- else:
- yield None
- continue
- # constant is a basestring, direct template name
- if isinstance(node.template.value, string_types):
- yield node.template.value
- # a tuple or list (latter *should* not happen) made of consts,
- # yield the consts that are strings. We could warn here for
- # non string values
- elif isinstance(node, nodes.Include) and isinstance(
- node.template.value, (tuple, list)
- ):
- for template_name in node.template.value:
- if isinstance(template_name, string_types):
- yield template_name
- # something else we don't care about, we could warn here
- else:
- yield None
diff --git a/lib/spack/external/jinja2/nativetypes.py b/lib/spack/external/jinja2/nativetypes.py
deleted file mode 100644
index a9ead4e2bb..0000000000
--- a/lib/spack/external/jinja2/nativetypes.py
+++ /dev/null
@@ -1,94 +0,0 @@
-from ast import literal_eval
-from itertools import chain
-from itertools import islice
-
-from . import nodes
-from ._compat import text_type
-from .compiler import CodeGenerator
-from .compiler import has_safe_repr
-from .environment import Environment
-from .environment import Template
-
-
-def native_concat(nodes):
- """Return a native Python type from the list of compiled nodes. If
- the result is a single node, its value is returned. Otherwise, the
- nodes are concatenated as strings. If the result can be parsed with
- :func:`ast.literal_eval`, the parsed value is returned. Otherwise,
- the string is returned.
-
- :param nodes: Iterable of nodes to concatenate.
- """
- head = list(islice(nodes, 2))
-
- if not head:
- return None
-
- if len(head) == 1:
- raw = head[0]
- else:
- raw = u"".join([text_type(v) for v in chain(head, nodes)])
-
- try:
- return literal_eval(raw)
- except (ValueError, SyntaxError, MemoryError):
- return raw
-
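A minimal sketch of the outcomes described above, assuming the vendored module path::

    from jinja2.nativetypes import native_concat

    assert native_concat(iter([])) is None                # nothing rendered
    assert native_concat(iter([3])) == 3                  # single node: value as-is
    assert native_concat(iter(["[1, ", "2]"])) == [1, 2]  # parseable: literal_eval
    assert native_concat(iter(["no", "t code"])) == "not code"  # fallback: string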
-
-class NativeCodeGenerator(CodeGenerator):
- """A code generator which renders Python types by not adding
- ``to_string()`` around output nodes.
- """
-
- @staticmethod
- def _default_finalize(value):
- return value
-
- def _output_const_repr(self, group):
- return repr(u"".join([text_type(v) for v in group]))
-
- def _output_child_to_const(self, node, frame, finalize):
- const = node.as_const(frame.eval_ctx)
-
- if not has_safe_repr(const):
- raise nodes.Impossible()
-
- if isinstance(node, nodes.TemplateData):
- return const
-
- return finalize.const(const)
-
- def _output_child_pre(self, node, frame, finalize):
- if finalize.src is not None:
- self.write(finalize.src)
-
- def _output_child_post(self, node, frame, finalize):
- if finalize.src is not None:
- self.write(")")
-
-
-class NativeEnvironment(Environment):
- """An environment that renders templates to native Python types."""
-
- code_generator_class = NativeCodeGenerator
-
-
-class NativeTemplate(Template):
- environment_class = NativeEnvironment
-
- def render(self, *args, **kwargs):
- """Render the template to produce a native Python type. If the
- result is a single node, its value is returned. Otherwise, the
- nodes are concatenated as strings. If the result can be parsed
- with :func:`ast.literal_eval`, the parsed value is returned.
- Otherwise, the string is returned.
- """
- vars = dict(*args, **kwargs)
-
- try:
- return native_concat(self.root_render_func(self.new_context(vars)))
- except Exception:
- return self.environment.handle_exception()
-
-
-NativeEnvironment.template_class = NativeTemplate
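A minimal sketch of rendering to native Python types with this environment::

    from jinja2.nativetypes import NativeEnvironment

    env = NativeEnvironment()
    result = env.from_string("{{ x + y }}").render(x=4, y=2)
    assert result == 6 and isinstance(result, int)
    assert env.from_string("{{ [x, y] }}").render(x=1, y=2) == [1, 2]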
diff --git a/lib/spack/external/jinja2/nodes.py b/lib/spack/external/jinja2/nodes.py
deleted file mode 100644
index 95bd614a14..0000000000
--- a/lib/spack/external/jinja2/nodes.py
+++ /dev/null
@@ -1,1088 +0,0 @@
-# -*- coding: utf-8 -*-
-"""AST nodes generated by the parser for the compiler. Also provides
-some node tree helper functions used by the parser and compiler in order
-to normalize nodes.
-"""
-import operator
-from collections import deque
-
-from markupsafe import Markup
-
-from ._compat import izip
-from ._compat import PY2
-from ._compat import text_type
-from ._compat import with_metaclass
-
-_binop_to_func = {
- "*": operator.mul,
- "/": operator.truediv,
- "//": operator.floordiv,
- "**": operator.pow,
- "%": operator.mod,
- "+": operator.add,
- "-": operator.sub,
-}
-
-_uaop_to_func = {"not": operator.not_, "+": operator.pos, "-": operator.neg}
-
-_cmpop_to_func = {
- "eq": operator.eq,
- "ne": operator.ne,
- "gt": operator.gt,
- "gteq": operator.ge,
- "lt": operator.lt,
- "lteq": operator.le,
- "in": lambda a, b: a in b,
- "notin": lambda a, b: a not in b,
-}
-
-
-class Impossible(Exception):
- """Raised if the node could not perform a requested action."""
-
-
-class NodeType(type):
- """A metaclass for nodes that handles the field and attribute
-    inheritance. Fields and attributes from the parent class are
- automatically forwarded to the child."""
-
- def __new__(mcs, name, bases, d):
- for attr in "fields", "attributes":
- storage = []
- storage.extend(getattr(bases[0], attr, ()))
- storage.extend(d.get(attr, ()))
- assert len(bases) == 1, "multiple inheritance not allowed"
- assert len(storage) == len(set(storage)), "layout conflict"
- d[attr] = tuple(storage)
- d.setdefault("abstract", False)
- return type.__new__(mcs, name, bases, d)
-
-
-class EvalContext(object):
- """Holds evaluation time information. Custom attributes can be attached
- to it in extensions.
- """
-
- def __init__(self, environment, template_name=None):
- self.environment = environment
- if callable(environment.autoescape):
- self.autoescape = environment.autoescape(template_name)
- else:
- self.autoescape = environment.autoescape
- self.volatile = False
-
- def save(self):
- return self.__dict__.copy()
-
- def revert(self, old):
- self.__dict__.clear()
- self.__dict__.update(old)
-
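A minimal sketch of the snapshot API above, as an extension might use it::

    from jinja2 import Environment
    from jinja2.nodes import EvalContext

    eval_ctx = EvalContext(Environment(autoescape=True))
    saved = eval_ctx.save()         # plain dict copy of the attributes
    eval_ctx.autoescape = False     # e.g. inside an {% autoescape %} block
    eval_ctx.revert(saved)
    assert eval_ctx.autoescape is True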
-
-def get_eval_context(node, ctx):
- if ctx is None:
- if node.environment is None:
- raise RuntimeError(
- "if no eval context is passed, the "
- "node must have an attached "
- "environment."
- )
- return EvalContext(node.environment)
- return ctx
-
-
-class Node(with_metaclass(NodeType, object)):
- """Baseclass for all Jinja nodes. There are a number of nodes available
- of different types. There are four major types:
-
- - :class:`Stmt`: statements
- - :class:`Expr`: expressions
- - :class:`Helper`: helper nodes
- - :class:`Template`: the outermost wrapper node
-
- All nodes have fields and attributes. Fields may be other nodes, lists,
- or arbitrary values. Fields are passed to the constructor as regular
- positional arguments, attributes as keyword arguments. Each node has
- two attributes: `lineno` (the line number of the node) and `environment`.
- The `environment` attribute is set at the end of the parsing process for
- all nodes automatically.
- """
-
- fields = ()
- attributes = ("lineno", "environment")
- abstract = True
-
- def __init__(self, *fields, **attributes):
- if self.abstract:
- raise TypeError("abstract nodes are not instantiable")
- if fields:
- if len(fields) != len(self.fields):
- if not self.fields:
- raise TypeError("%r takes 0 arguments" % self.__class__.__name__)
- raise TypeError(
- "%r takes 0 or %d argument%s"
- % (
- self.__class__.__name__,
- len(self.fields),
- len(self.fields) != 1 and "s" or "",
- )
- )
- for name, arg in izip(self.fields, fields):
- setattr(self, name, arg)
- for attr in self.attributes:
- setattr(self, attr, attributes.pop(attr, None))
- if attributes:
- raise TypeError("unknown attribute %r" % next(iter(attributes)))
-
- def iter_fields(self, exclude=None, only=None):
- """This method iterates over all fields that are defined and yields
-        ``(key, value)`` tuples. By default all fields are returned, but
- it's possible to limit that to some fields by providing the `only`
- parameter or to exclude some using the `exclude` parameter. Both
- should be sets or tuples of field names.
- """
- for name in self.fields:
- if (
- (exclude is only is None)
- or (exclude is not None and name not in exclude)
- or (only is not None and name in only)
- ):
- try:
- yield name, getattr(self, name)
- except AttributeError:
- pass
-
- def iter_child_nodes(self, exclude=None, only=None):
- """Iterates over all direct child nodes of the node. This iterates
-        over all fields and yields the values if they are nodes. If the value
-        of a field is a list, all the nodes in that list are returned.
- """
- for _, item in self.iter_fields(exclude, only):
- if isinstance(item, list):
- for n in item:
- if isinstance(n, Node):
- yield n
- elif isinstance(item, Node):
- yield item
-
- def find(self, node_type):
- """Find the first node of a given type. If no such node exists the
- return value is `None`.
- """
- for result in self.find_all(node_type):
- return result
-
- def find_all(self, node_type):
- """Find all the nodes of a given type. If the type is a tuple,
- the check is performed for any of the tuple items.
- """
- for child in self.iter_child_nodes():
- if isinstance(child, node_type):
- yield child
- for result in child.find_all(node_type):
- yield result
-
- def set_ctx(self, ctx):
-        """Reset the context of a node and all child nodes. By default the
-        parser generates nodes that have a 'load' context, as it's the
- most common one. This method is used in the parser to set assignment
- targets and other nodes to a store context.
- """
- todo = deque([self])
- while todo:
- node = todo.popleft()
- if "ctx" in node.fields:
- node.ctx = ctx
- todo.extend(node.iter_child_nodes())
- return self
-
- def set_lineno(self, lineno, override=False):
- """Set the line numbers of the node and children."""
- todo = deque([self])
- while todo:
- node = todo.popleft()
- if "lineno" in node.attributes:
- if node.lineno is None or override:
- node.lineno = lineno
- todo.extend(node.iter_child_nodes())
- return self
-
- def set_environment(self, environment):
- """Set the environment for all nodes."""
- todo = deque([self])
- while todo:
- node = todo.popleft()
- node.environment = environment
- todo.extend(node.iter_child_nodes())
- return self
-
- def __eq__(self, other):
- return type(self) is type(other) and tuple(self.iter_fields()) == tuple(
- other.iter_fields()
- )
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
- # Restore Python 2 hashing behavior on Python 3
- __hash__ = object.__hash__
-
- def __repr__(self):
- return "%s(%s)" % (
- self.__class__.__name__,
- ", ".join("%s=%r" % (arg, getattr(self, arg, None)) for arg in self.fields),
- )
-
- def dump(self):
- def _dump(node):
- if not isinstance(node, Node):
- buf.append(repr(node))
- return
-
- buf.append("nodes.%s(" % node.__class__.__name__)
- if not node.fields:
- buf.append(")")
- return
- for idx, field in enumerate(node.fields):
- if idx:
- buf.append(", ")
- value = getattr(node, field)
- if isinstance(value, list):
- buf.append("[")
- for idx, item in enumerate(value):
- if idx:
- buf.append(", ")
- _dump(item)
- buf.append("]")
- else:
- _dump(value)
- buf.append(")")
-
- buf = []
- _dump(self)
- return "".join(buf)
-
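A minimal sketch of the traversal helpers above, on an AST from ``Environment.parse``::

    from jinja2 import Environment, nodes

    ast = Environment().parse("{% for x in seq %}{{ x }}{% endfor %}")
    assert {n.name for n in ast.find_all(nodes.Name)} >= {"x", "seq"}
    loop = ast.find(nodes.For)      # first matching node, or None
    assert loop is not None and loop.lineno == 1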
-
-class Stmt(Node):
- """Base node for all statements."""
-
- abstract = True
-
-
-class Helper(Node):
- """Nodes that exist in a specific context only."""
-
- abstract = True
-
-
-class Template(Node):
- """Node that represents a template. This must be the outermost node that
- is passed to the compiler.
- """
-
- fields = ("body",)
-
-
-class Output(Stmt):
- """A node that holds multiple expressions which are then printed out.
- This is used both for the `print` statement and the regular template data.
- """
-
- fields = ("nodes",)
-
-
-class Extends(Stmt):
- """Represents an extends statement."""
-
- fields = ("template",)
-
-
-class For(Stmt):
- """The for loop. `target` is the target for the iteration (usually a
- :class:`Name` or :class:`Tuple`), `iter` the iterable. `body` is a list
- of nodes that are used as loop-body, and `else_` a list of nodes for the
- `else` block. If no else node exists it has to be an empty list.
-
- For filtered nodes an expression can be stored as `test`, otherwise `None`.
- """
-
- fields = ("target", "iter", "body", "else_", "test", "recursive")
-
-
-class If(Stmt):
- """If `test` is true, `body` is rendered, else `else_`."""
-
- fields = ("test", "body", "elif_", "else_")
-
-
-class Macro(Stmt):
- """A macro definition. `name` is the name of the macro, `args` a list of
- arguments and `defaults` a list of defaults if there are any. `body` is
- a list of nodes for the macro body.
- """
-
- fields = ("name", "args", "defaults", "body")
-
-
-class CallBlock(Stmt):
-    """Like a macro without a name but with a call instead. `call` is called
-    with the unnamed macro that this node holds as its `caller` argument.
- """
-
- fields = ("call", "args", "defaults", "body")
-
-
-class FilterBlock(Stmt):
- """Node for filter sections."""
-
- fields = ("body", "filter")
-
-
-class With(Stmt):
- """Specific node for with statements. In older versions of Jinja the
- with statement was implemented on the base of the `Scope` node instead.
-
- .. versionadded:: 2.9.3
- """
-
- fields = ("targets", "values", "body")
-
-
-class Block(Stmt):
- """A node that represents a block."""
-
- fields = ("name", "body", "scoped")
-
-
-class Include(Stmt):
- """A node that represents the include tag."""
-
- fields = ("template", "with_context", "ignore_missing")
-
-
-class Import(Stmt):
- """A node that represents the import tag."""
-
- fields = ("template", "target", "with_context")
-
-
-class FromImport(Stmt):
- """A node that represents the from import tag. It's important to not
- pass unsafe names to the name attribute. The compiler translates the
- attribute lookups directly into getattr calls and does *not* use the
- subscript callback of the interface. As exported variables may not
- start with double underscores (which the parser asserts) this is not a
- problem for regular Jinja code, but if this node is used in an extension
- extra care must be taken.
-
- The list of names may contain tuples if aliases are wanted.
- """
-
- fields = ("template", "names", "with_context")
-
-
-class ExprStmt(Stmt):
- """A statement that evaluates an expression and discards the result."""
-
- fields = ("node",)
-
-
-class Assign(Stmt):
- """Assigns an expression to a target."""
-
- fields = ("target", "node")
-
-
-class AssignBlock(Stmt):
- """Assigns a block to a target."""
-
- fields = ("target", "filter", "body")
-
-
-class Expr(Node):
- """Baseclass for all expressions."""
-
- abstract = True
-
- def as_const(self, eval_ctx=None):
- """Return the value of the expression as constant or raise
- :exc:`Impossible` if this was not possible.
-
- An :class:`EvalContext` can be provided, if none is given
- a default context is created which requires the nodes to have
- an attached environment.
-
- .. versionchanged:: 2.4
- the `eval_ctx` parameter was added.
- """
- raise Impossible()
-
- def can_assign(self):
- """Check if it's possible to assign something to this node."""
- return False
-
-
-class BinExpr(Expr):
- """Baseclass for all binary expressions."""
-
- fields = ("left", "right")
- operator = None
- abstract = True
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- # intercepted operators cannot be folded at compile time
- if (
- self.environment.sandboxed
- and self.operator in self.environment.intercepted_binops
- ):
- raise Impossible()
- f = _binop_to_func[self.operator]
- try:
- return f(self.left.as_const(eval_ctx), self.right.as_const(eval_ctx))
- except Exception:
- raise Impossible()
-
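A minimal sketch of constant folding via ``as_const``; as ``get_eval_context`` requires, the node needs an attached environment::

    from jinja2 import Environment, nodes

    expr = nodes.Add(nodes.Const(40), nodes.Const(2))
    expr.set_environment(Environment())   # as_const() needs an environment
    assert expr.as_const() == 42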
-
-class UnaryExpr(Expr):
- """Baseclass for all unary expressions."""
-
- fields = ("node",)
- operator = None
- abstract = True
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- # intercepted operators cannot be folded at compile time
- if (
- self.environment.sandboxed
- and self.operator in self.environment.intercepted_unops
- ):
- raise Impossible()
- f = _uaop_to_func[self.operator]
- try:
- return f(self.node.as_const(eval_ctx))
- except Exception:
- raise Impossible()
-
-
-class Name(Expr):
- """Looks up a name or stores a value in a name.
- The `ctx` of the node can be one of the following values:
-
- - `store`: store a value in the name
- - `load`: load that name
-    - `param`: like `store`, but used when the name is defined as a
-      function parameter.
- """
-
- fields = ("name", "ctx")
-
- def can_assign(self):
- return self.name not in ("true", "false", "none", "True", "False", "None")
-
-
-class NSRef(Expr):
- """Reference to a namespace value assignment"""
-
- fields = ("name", "attr")
-
- def can_assign(self):
- # We don't need any special checks here; NSRef assignments have a
- # runtime check to ensure the target is a namespace object which will
- # have been checked already as it is created using a normal assignment
- # which goes through a `Name` node.
- return True
-
-
-class Literal(Expr):
- """Baseclass for literals."""
-
- abstract = True
-
-
-class Const(Literal):
- """All constant values. The parser will return this node for simple
-    constants such as ``42`` or ``"foo"``, but it can be used to store more
-    complex values such as lists too. Only constants with a safe
-    representation (objects where ``eval(repr(x)) == x`` is true) are allowed.
- """
-
- fields = ("value",)
-
- def as_const(self, eval_ctx=None):
- rv = self.value
- if (
- PY2
- and type(rv) is text_type
- and self.environment.policies["compiler.ascii_str"]
- ):
- try:
- rv = rv.encode("ascii")
- except UnicodeError:
- pass
- return rv
-
- @classmethod
- def from_untrusted(cls, value, lineno=None, environment=None):
- """Return a const object if the value is representable as
- constant value in the generated code, otherwise it will raise
- an `Impossible` exception.
- """
- from .compiler import has_safe_repr
-
- if not has_safe_repr(value):
- raise Impossible()
- return cls(value, lineno=lineno, environment=environment)
-
-
-class TemplateData(Literal):
- """A constant template string."""
-
- fields = ("data",)
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- if eval_ctx.volatile:
- raise Impossible()
- if eval_ctx.autoescape:
- return Markup(self.data)
- return self.data
-
-
-class Tuple(Literal):
- """For loop unpacking and some other things like multiple arguments
-    for subscripts. As with :class:`Name`, `ctx` specifies whether the
-    tuple is used for loading or storing the names.
- """
-
- fields = ("items", "ctx")
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return tuple(x.as_const(eval_ctx) for x in self.items)
-
- def can_assign(self):
- for item in self.items:
- if not item.can_assign():
- return False
- return True
-
-
-class List(Literal):
- """Any list literal such as ``[1, 2, 3]``"""
-
- fields = ("items",)
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return [x.as_const(eval_ctx) for x in self.items]
-
-
-class Dict(Literal):
- """Any dict literal such as ``{1: 2, 3: 4}``. The items must be a list of
- :class:`Pair` nodes.
- """
-
- fields = ("items",)
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return dict(x.as_const(eval_ctx) for x in self.items)
-
-
-class Pair(Helper):
- """A key, value pair for dicts."""
-
- fields = ("key", "value")
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return self.key.as_const(eval_ctx), self.value.as_const(eval_ctx)
-
-
-class Keyword(Helper):
- """A key, value pair for keyword arguments where key is a string."""
-
- fields = ("key", "value")
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return self.key, self.value.as_const(eval_ctx)
-
-
-class CondExpr(Expr):
- """A conditional expression (inline if expression). (``{{
- foo if bar else baz }}``)
- """
-
- fields = ("test", "expr1", "expr2")
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- if self.test.as_const(eval_ctx):
- return self.expr1.as_const(eval_ctx)
-
- # if we evaluate to an undefined object, we better do that at runtime
- if self.expr2 is None:
- raise Impossible()
-
- return self.expr2.as_const(eval_ctx)
-
-
-def args_as_const(node, eval_ctx):
- args = [x.as_const(eval_ctx) for x in node.args]
- kwargs = dict(x.as_const(eval_ctx) for x in node.kwargs)
-
- if node.dyn_args is not None:
- try:
- args.extend(node.dyn_args.as_const(eval_ctx))
- except Exception:
- raise Impossible()
-
- if node.dyn_kwargs is not None:
- try:
- kwargs.update(node.dyn_kwargs.as_const(eval_ctx))
- except Exception:
- raise Impossible()
-
- return args, kwargs
-
-
-class Filter(Expr):
- """This node applies a filter on an expression. `name` is the name of
- the filter, the rest of the fields are the same as for :class:`Call`.
-
- If the `node` of a filter is `None` the contents of the last buffer are
- filtered. Buffers are created by macros and filter blocks.
- """
-
- fields = ("node", "name", "args", "kwargs", "dyn_args", "dyn_kwargs")
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
-
- if eval_ctx.volatile or self.node is None:
- raise Impossible()
-
- # we have to be careful here because we call filter_ below.
- # if this variable would be called filter, 2to3 would wrap the
- # call in a list because it is assuming we are talking about the
- # builtin filter function here which no longer returns a list in
- # python 3. because of that, do not rename filter_ to filter!
- filter_ = self.environment.filters.get(self.name)
-
- if filter_ is None or getattr(filter_, "contextfilter", False) is True:
- raise Impossible()
-
-        # We cannot constant-fold async filters, so we need to make sure
-        # not to go down this path.
- if eval_ctx.environment.is_async and getattr(
- filter_, "asyncfiltervariant", False
- ):
- raise Impossible()
-
- args, kwargs = args_as_const(self, eval_ctx)
- args.insert(0, self.node.as_const(eval_ctx))
-
- if getattr(filter_, "evalcontextfilter", False) is True:
- args.insert(0, eval_ctx)
- elif getattr(filter_, "environmentfilter", False) is True:
- args.insert(0, self.environment)
-
- try:
- return filter_(*args, **kwargs)
- except Exception:
- raise Impossible()
-
-
-class Test(Expr):
- """Applies a test on an expression. `name` is the name of the test, the
- rest of the fields are the same as for :class:`Call`.
- """
-
- fields = ("node", "name", "args", "kwargs", "dyn_args", "dyn_kwargs")
-
- def as_const(self, eval_ctx=None):
- test = self.environment.tests.get(self.name)
-
- if test is None:
- raise Impossible()
-
- eval_ctx = get_eval_context(self, eval_ctx)
- args, kwargs = args_as_const(self, eval_ctx)
- args.insert(0, self.node.as_const(eval_ctx))
-
- try:
- return test(*args, **kwargs)
- except Exception:
- raise Impossible()
-
-
-class Call(Expr):
- """Calls an expression. `args` is a list of arguments, `kwargs` a list
- of keyword arguments (list of :class:`Keyword` nodes), and `dyn_args`
-    and `dyn_kwargs` have to be either `None` or a node that is used as
- node for dynamic positional (``*args``) or keyword (``**kwargs``)
- arguments.
- """
-
- fields = ("node", "args", "kwargs", "dyn_args", "dyn_kwargs")
-
-
-class Getitem(Expr):
- """Get an attribute or item from an expression and prefer the item."""
-
- fields = ("node", "arg", "ctx")
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- if self.ctx != "load":
- raise Impossible()
- try:
- return self.environment.getitem(
- self.node.as_const(eval_ctx), self.arg.as_const(eval_ctx)
- )
- except Exception:
- raise Impossible()
-
- def can_assign(self):
- return False
-
-
-class Getattr(Expr):
-    """Get an attribute or item from an expression, preferring the
-    attribute. The attribute name must be an ASCII-only bytestring.
- """
-
- fields = ("node", "attr", "ctx")
-
- def as_const(self, eval_ctx=None):
- if self.ctx != "load":
- raise Impossible()
- try:
- eval_ctx = get_eval_context(self, eval_ctx)
- return self.environment.getattr(self.node.as_const(eval_ctx), self.attr)
- except Exception:
- raise Impossible()
-
- def can_assign(self):
- return False
-
-
-class Slice(Expr):
-    """Represents a slice object. This must only be used as argument for
-    :class:`Getitem`.
- """
-
- fields = ("start", "stop", "step")
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
-
- def const(obj):
- if obj is None:
- return None
- return obj.as_const(eval_ctx)
-
- return slice(const(self.start), const(self.stop), const(self.step))
-
-
-class Concat(Expr):
- """Concatenates the list of expressions provided after converting them to
- unicode.
- """
-
- fields = ("nodes",)
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return "".join(text_type(x.as_const(eval_ctx)) for x in self.nodes)
-
-
-class Compare(Expr):
- """Compares an expression with some other expressions. `ops` must be a
- list of :class:`Operand`\\s.
- """
-
- fields = ("expr", "ops")
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- result = value = self.expr.as_const(eval_ctx)
-
- try:
- for op in self.ops:
- new_value = op.expr.as_const(eval_ctx)
- result = _cmpop_to_func[op.op](value, new_value)
-
- if not result:
- return False
-
- value = new_value
- except Exception:
- raise Impossible()
-
- return result
-
-
-class Operand(Helper):
- """Holds an operator and an expression."""
-
- fields = ("op", "expr")
-
-
-if __debug__:
- Operand.__doc__ += "\nThe following operators are available: " + ", ".join(
- sorted(
- "``%s``" % x
- for x in set(_binop_to_func) | set(_uaop_to_func) | set(_cmpop_to_func)
- )
- )
-
-
-class Mul(BinExpr):
- """Multiplies the left with the right node."""
-
- operator = "*"
-
-
-class Div(BinExpr):
- """Divides the left by the right node."""
-
- operator = "/"
-
-
-class FloorDiv(BinExpr):
-    """Divides the left by the right node and truncates the result
-    to an integer.
- """
-
- operator = "//"
-
-
-class Add(BinExpr):
- """Add the left to the right node."""
-
- operator = "+"
-
-
-class Sub(BinExpr):
- """Subtract the right from the left node."""
-
- operator = "-"
-
-
-class Mod(BinExpr):
- """Left modulo right."""
-
- operator = "%"
-
-
-class Pow(BinExpr):
- """Left to the power of right."""
-
- operator = "**"
-
-
-class And(BinExpr):
- """Short circuited AND."""
-
- operator = "and"
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return self.left.as_const(eval_ctx) and self.right.as_const(eval_ctx)
-
-
-class Or(BinExpr):
- """Short circuited OR."""
-
- operator = "or"
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return self.left.as_const(eval_ctx) or self.right.as_const(eval_ctx)
-
-
-class Not(UnaryExpr):
- """Negate the expression."""
-
- operator = "not"
-
-
-class Neg(UnaryExpr):
- """Make the expression negative."""
-
- operator = "-"
-
-
-class Pos(UnaryExpr):
- """Make the expression positive (noop for most expressions)"""
-
- operator = "+"
-
-
-# Helpers for extensions
-
-
-class EnvironmentAttribute(Expr):
- """Loads an attribute from the environment object. This is useful for
- extensions that want to call a callback stored on the environment.
- """
-
- fields = ("name",)
-
-
-class ExtensionAttribute(Expr):
- """Returns the attribute of an extension bound to the environment.
- The identifier is the identifier of the :class:`Extension`.
-
- This node is usually constructed by calling the
- :meth:`~jinja2.ext.Extension.attr` method on an extension.
- """
-
- fields = ("identifier", "name")
-
-
-class ImportedName(Expr):
- """If created with an import name the import name is returned on node
- access. For example ``ImportedName('cgi.escape')`` returns the `escape`
- function from the cgi module on evaluation. Imports are optimized by the
- compiler so there is no need to assign them to local variables.
- """
-
- fields = ("importname",)
-
-
-class InternalName(Expr):
- """An internal name in the compiler. You cannot create these nodes
- yourself but the parser provides a
- :meth:`~jinja2.parser.Parser.free_identifier` method that creates
- a new identifier for you. This identifier is not available from the
- template and is not treated specially by the compiler.
- """
-
- fields = ("name",)
-
- def __init__(self):
- raise TypeError(
- "Can't create internal names. Use the "
- "`free_identifier` method on a parser."
- )
-
-
-class MarkSafe(Expr):
- """Mark the wrapped expression as safe (wrap it as `Markup`)."""
-
- fields = ("expr",)
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return Markup(self.expr.as_const(eval_ctx))
-
-
-class MarkSafeIfAutoescape(Expr):
- """Mark the wrapped expression as safe (wrap it as `Markup`) but
- only if autoescaping is active.
-
- .. versionadded:: 2.5
- """
-
- fields = ("expr",)
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- if eval_ctx.volatile:
- raise Impossible()
- expr = self.expr.as_const(eval_ctx)
- if eval_ctx.autoescape:
- return Markup(expr)
- return expr
-
-
-class ContextReference(Expr):
- """Returns the current template context. It can be used like a
- :class:`Name` node, with a ``'load'`` ctx and will return the
- current :class:`~jinja2.runtime.Context` object.
-
- Here is an example that assigns the current template name to a
- variable named `foo`::
-
- Assign(Name('foo', ctx='store'),
- Getattr(ContextReference(), 'name'))
-
- This is basically equivalent to using the
- :func:`~jinja2.contextfunction` decorator when using the
- high-level API, which causes a reference to the context to be passed
- as the first argument to a function.
- """
-
-
-class DerivedContextReference(Expr):
- """Return the current template context including locals. Behaves
- exactly like :class:`ContextReference`, but includes local
- variables, such as from a ``for`` loop.
-
- .. versionadded:: 2.11
- """
-
-
-class Continue(Stmt):
- """Continue a loop."""
-
-
-class Break(Stmt):
- """Break a loop."""
-
-
-class Scope(Stmt):
- """An artificial scope."""
-
- fields = ("body",)
-
-
-class OverlayScope(Stmt):
- """An overlay scope for extensions. This is a largely unoptimized scope
- that however can be used to introduce completely arbitrary variables into
- a sub scope from a dictionary or dictionary like object. The `context`
- field has to evaluate to a dictionary object.
-
- Example usage::
-
- OverlayScope(context=self.call_method('get_context'),
- body=[...])
-
- .. versionadded:: 2.10
- """
-
- fields = ("context", "body")
-
-
-class EvalContextModifier(Stmt):
- """Modifies the eval context. For each option that should be modified,
- a :class:`Keyword` has to be added to the :attr:`options` list.
-
- Example to change the `autoescape` setting::
-
- EvalContextModifier(options=[Keyword('autoescape', Const(True))])
- """
-
- fields = ("options",)
-
-
-class ScopedEvalContextModifier(EvalContextModifier):
- """Modifies the eval context and reverts it later. Works exactly like
- :class:`EvalContextModifier` but will only modify the
- :class:`~jinja2.nodes.EvalContext` for nodes in the :attr:`body`.
- """
-
- fields = ("body",)
-
-
-# make sure nobody creates custom nodes
-def _failing_new(*args, **kwargs):
- raise TypeError("can't create custom node types")
-
-
-NodeType.__new__ = staticmethod(_failing_new)
-del _failing_new
diff --git a/lib/spack/external/jinja2/optimizer.py b/lib/spack/external/jinja2/optimizer.py
deleted file mode 100644
index 7bc78c4524..0000000000
--- a/lib/spack/external/jinja2/optimizer.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# -*- coding: utf-8 -*-
-"""The optimizer tries to constant fold expressions and modify the AST
-in place so that it should be faster to evaluate.
-
-Because the AST does not contain all the scoping information and the
-compiler has to find that out, we cannot do all the optimizations we
-want. For example, loop unrolling doesn't work because unrolled loops
-would have a different scope. The solution would be a second syntax tree
-that stored the scoping rules.
-"""
-from . import nodes
-from .visitor import NodeTransformer
-
-
-def optimize(node, environment):
- """The context hint can be used to perform an static optimization
- based on the context given."""
- optimizer = Optimizer(environment)
- return optimizer.visit(node)
-
-
-class Optimizer(NodeTransformer):
- def __init__(self, environment):
- self.environment = environment
-
- def generic_visit(self, node, *args, **kwargs):
- node = super(Optimizer, self).generic_visit(node, *args, **kwargs)
-
- # Do constant folding. Some other nodes besides Expr have
- # as_const, but folding them causes errors later on.
- if isinstance(node, nodes.Expr):
- try:
- return nodes.Const.from_untrusted(
- node.as_const(args[0] if args else None),
- lineno=node.lineno,
- environment=self.environment,
- )
- except nodes.Impossible:
- pass
-
- return node
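-
- # A folding sketch (assuming ``env`` is a jinja2 Environment): the parsed
- # expression ``1 + 2`` arrives here as Add(Const(1), Const(2)) and leaves
- # ``optimize(node, env)`` as Const(3), computed through ``as_const()``.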
diff --git a/lib/spack/external/jinja2/parser.py b/lib/spack/external/jinja2/parser.py
deleted file mode 100644
index d5881066f7..0000000000
--- a/lib/spack/external/jinja2/parser.py
+++ /dev/null
@@ -1,939 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Parse tokens from the lexer into nodes for the compiler."""
-from . import nodes
-from ._compat import imap
-from .exceptions import TemplateAssertionError
-from .exceptions import TemplateSyntaxError
-from .lexer import describe_token
-from .lexer import describe_token_expr
-
-_statement_keywords = frozenset(
- [
- "for",
- "if",
- "block",
- "extends",
- "print",
- "macro",
- "include",
- "from",
- "import",
- "set",
- "with",
- "autoescape",
- ]
-)
-_compare_operators = frozenset(["eq", "ne", "lt", "lteq", "gt", "gteq"])
-
-_math_nodes = {
- "add": nodes.Add,
- "sub": nodes.Sub,
- "mul": nodes.Mul,
- "div": nodes.Div,
- "floordiv": nodes.FloorDiv,
- "mod": nodes.Mod,
-}
-
-
-class Parser(object):
- """This is the central parsing class Jinja uses. It's passed to
- extensions and can be used to parse expressions or statements.
- """
-
- def __init__(self, environment, source, name=None, filename=None, state=None):
- self.environment = environment
- self.stream = environment._tokenize(source, name, filename, state)
- self.name = name
- self.filename = filename
- self.closed = False
- self.extensions = {}
- for extension in environment.iter_extensions():
- for tag in extension.tags:
- self.extensions[tag] = extension.parse
- self._last_identifier = 0
- self._tag_stack = []
- self._end_token_stack = []
-
- def fail(self, msg, lineno=None, exc=TemplateSyntaxError):
- """Convenience method that raises `exc` with the message, passed
- line number or last line number as well as the current name and
- filename.
- """
- if lineno is None:
- lineno = self.stream.current.lineno
- raise exc(msg, lineno, self.name, self.filename)
-
- def _fail_ut_eof(self, name, end_token_stack, lineno):
- expected = []
- for exprs in end_token_stack:
- expected.extend(imap(describe_token_expr, exprs))
- if end_token_stack:
- currently_looking = " or ".join(
- "'%s'" % describe_token_expr(expr) for expr in end_token_stack[-1]
- )
- else:
- currently_looking = None
-
- if name is None:
- message = ["Unexpected end of template."]
- else:
- message = ["Encountered unknown tag '%s'." % name]
-
- if currently_looking:
- if name is not None and name in expected:
- message.append(
- "You probably made a nesting mistake. Jinja "
- "is expecting this tag, but currently looking "
- "for %s." % currently_looking
- )
- else:
- message.append(
- "Jinja was looking for the following tags: "
- "%s." % currently_looking
- )
-
- if self._tag_stack:
- message.append(
- "The innermost block that needs to be "
- "closed is '%s'." % self._tag_stack[-1]
- )
-
- self.fail(" ".join(message), lineno)
-
- def fail_unknown_tag(self, name, lineno=None):
- """Called if the parser encounters an unknown tag. Tries to fail
- with a human readable error message that could help to identify
- the problem.
- """
- return self._fail_ut_eof(name, self._end_token_stack, lineno)
-
- def fail_eof(self, end_tokens=None, lineno=None):
- """Like fail_unknown_tag but for end of template situations."""
- stack = list(self._end_token_stack)
- if end_tokens is not None:
- stack.append(end_tokens)
- return self._fail_ut_eof(None, stack, lineno)
-
- def is_tuple_end(self, extra_end_rules=None):
- """Are we at the end of a tuple?"""
- if self.stream.current.type in ("variable_end", "block_end", "rparen"):
- return True
- elif extra_end_rules is not None:
- return self.stream.current.test_any(extra_end_rules)
- return False
-
- def free_identifier(self, lineno=None):
- """Return a new free identifier as :class:`~jinja2.nodes.InternalName`."""
- self._last_identifier += 1
- rv = object.__new__(nodes.InternalName)
- nodes.Node.__init__(rv, "fi%d" % self._last_identifier, lineno=lineno)
- return rv
-
- def parse_statement(self):
- """Parse a single statement."""
- token = self.stream.current
- if token.type != "name":
- self.fail("tag name expected", token.lineno)
- self._tag_stack.append(token.value)
- pop_tag = True
- try:
- if token.value in _statement_keywords:
- return getattr(self, "parse_" + self.stream.current.value)()
- if token.value == "call":
- return self.parse_call_block()
- if token.value == "filter":
- return self.parse_filter_block()
- ext = self.extensions.get(token.value)
- if ext is not None:
- return ext(self)
-
- # did not work out, remove the token we pushed by accident
- # from the stack so that the unknown tag fail function can
- # produce a proper error message.
- self._tag_stack.pop()
- pop_tag = False
- self.fail_unknown_tag(token.value, token.lineno)
- finally:
- if pop_tag:
- self._tag_stack.pop()
-
- def parse_statements(self, end_tokens, drop_needle=False):
- """Parse multiple statements into a list until one of the end tokens
- is reached. This is used to parse the body of statements as it also
- parses template data if appropriate. The parser checks first if the
- current token is a colon and skips it if there is one. Then it checks
- for the block end and parses until one of the `end_tokens` is
- reached. By default the active token in the stream at the end of
- the call is the matched end token. If this is not wanted `drop_needle`
- can be set to `True` and the end token is removed.
- """
- # the first token may be a colon for python compatibility
- self.stream.skip_if("colon")
-
- # in the future it would be possible to add whole code sections
- # by adding some sort of end of statement token and parsing those here.
- self.stream.expect("block_end")
- result = self.subparse(end_tokens)
-
- # we reached the end of the template too early, the subparser
- # does not check for this, so we do that now
- if self.stream.current.type == "eof":
- self.fail_eof(end_tokens)
-
- if drop_needle:
- next(self.stream)
- return result
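-
- # For example, ``parse_if`` below collects an ``{% if %}`` body with
- # self.parse_statements(("name:elif", "name:else", "name:endif"))
- # and then inspects the matched end token that is left in the stream.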
-
- def parse_set(self):
- """Parse an assign statement."""
- lineno = next(self.stream).lineno
- target = self.parse_assign_target(with_namespace=True)
- if self.stream.skip_if("assign"):
- expr = self.parse_tuple()
- return nodes.Assign(target, expr, lineno=lineno)
- filter_node = self.parse_filter(None)
- body = self.parse_statements(("name:endset",), drop_needle=True)
- return nodes.AssignBlock(target, filter_node, body, lineno=lineno)
-
- def parse_for(self):
- """Parse a for loop."""
- lineno = self.stream.expect("name:for").lineno
- target = self.parse_assign_target(extra_end_rules=("name:in",))
- self.stream.expect("name:in")
- iter = self.parse_tuple(
- with_condexpr=False, extra_end_rules=("name:recursive",)
- )
- test = None
- if self.stream.skip_if("name:if"):
- test = self.parse_expression()
- recursive = self.stream.skip_if("name:recursive")
- body = self.parse_statements(("name:endfor", "name:else"))
- if next(self.stream).value == "endfor":
- else_ = []
- else:
- else_ = self.parse_statements(("name:endfor",), drop_needle=True)
- return nodes.For(target, iter, body, else_, test, recursive, lineno=lineno)
-
- def parse_if(self):
- """Parse an if construct."""
- node = result = nodes.If(lineno=self.stream.expect("name:if").lineno)
- while 1:
- node.test = self.parse_tuple(with_condexpr=False)
- node.body = self.parse_statements(("name:elif", "name:else", "name:endif"))
- node.elif_ = []
- node.else_ = []
- token = next(self.stream)
- if token.test("name:elif"):
- node = nodes.If(lineno=self.stream.current.lineno)
- result.elif_.append(node)
- continue
- elif token.test("name:else"):
- result.else_ = self.parse_statements(("name:endif",), drop_needle=True)
- break
- return result
-
- def parse_with(self):
- node = nodes.With(lineno=next(self.stream).lineno)
- targets = []
- values = []
- while self.stream.current.type != "block_end":
- if targets:
- self.stream.expect("comma")
- target = self.parse_assign_target()
- target.set_ctx("param")
- targets.append(target)
- self.stream.expect("assign")
- values.append(self.parse_expression())
- node.targets = targets
- node.values = values
- node.body = self.parse_statements(("name:endwith",), drop_needle=True)
- return node
-
- def parse_autoescape(self):
- node = nodes.ScopedEvalContextModifier(lineno=next(self.stream).lineno)
- node.options = [nodes.Keyword("autoescape", self.parse_expression())]
- node.body = self.parse_statements(("name:endautoescape",), drop_needle=True)
- return nodes.Scope([node])
-
- def parse_block(self):
- node = nodes.Block(lineno=next(self.stream).lineno)
- node.name = self.stream.expect("name").value
- node.scoped = self.stream.skip_if("name:scoped")
-
- # common problem people encounter when switching from django
- # to jinja. we do not support hyphens in block names, so let's
- # raise a nicer error message in that case.
- if self.stream.current.type == "sub":
- self.fail(
- "Block names in Jinja have to be valid Python "
- "identifiers and may not contain hyphens, use an "
- "underscore instead."
- )
-
- node.body = self.parse_statements(("name:endblock",), drop_needle=True)
- self.stream.skip_if("name:" + node.name)
- return node
-
- def parse_extends(self):
- node = nodes.Extends(lineno=next(self.stream).lineno)
- node.template = self.parse_expression()
- return node
-
- def parse_import_context(self, node, default):
- if self.stream.current.test_any(
- "name:with", "name:without"
- ) and self.stream.look().test("name:context"):
- node.with_context = next(self.stream).value == "with"
- self.stream.skip()
- else:
- node.with_context = default
- return node
-
- def parse_include(self):
- node = nodes.Include(lineno=next(self.stream).lineno)
- node.template = self.parse_expression()
- if self.stream.current.test("name:ignore") and self.stream.look().test(
- "name:missing"
- ):
- node.ignore_missing = True
- self.stream.skip(2)
- else:
- node.ignore_missing = False
- return self.parse_import_context(node, True)
-
- def parse_import(self):
- node = nodes.Import(lineno=next(self.stream).lineno)
- node.template = self.parse_expression()
- self.stream.expect("name:as")
- node.target = self.parse_assign_target(name_only=True).name
- return self.parse_import_context(node, False)
-
- def parse_from(self):
- node = nodes.FromImport(lineno=next(self.stream).lineno)
- node.template = self.parse_expression()
- self.stream.expect("name:import")
- node.names = []
-
- def parse_context():
- if self.stream.current.value in (
- "with",
- "without",
- ) and self.stream.look().test("name:context"):
- node.with_context = next(self.stream).value == "with"
- self.stream.skip()
- return True
- return False
-
- while 1:
- if node.names:
- self.stream.expect("comma")
- if self.stream.current.type == "name":
- if parse_context():
- break
- target = self.parse_assign_target(name_only=True)
- if target.name.startswith("_"):
- self.fail(
- "names starting with an underline can not be imported",
- target.lineno,
- exc=TemplateAssertionError,
- )
- if self.stream.skip_if("name:as"):
- alias = self.parse_assign_target(name_only=True)
- node.names.append((target.name, alias.name))
- else:
- node.names.append(target.name)
- if parse_context() or self.stream.current.type != "comma":
- break
- else:
- self.stream.expect("name")
- if not hasattr(node, "with_context"):
- node.with_context = False
- return node
-
- def parse_signature(self, node):
- node.args = args = []
- node.defaults = defaults = []
- self.stream.expect("lparen")
- while self.stream.current.type != "rparen":
- if args:
- self.stream.expect("comma")
- arg = self.parse_assign_target(name_only=True)
- arg.set_ctx("param")
- if self.stream.skip_if("assign"):
- defaults.append(self.parse_expression())
- elif defaults:
- self.fail("non-default argument follows default argument")
- args.append(arg)
- self.stream.expect("rparen")
-
- def parse_call_block(self):
- node = nodes.CallBlock(lineno=next(self.stream).lineno)
- if self.stream.current.type == "lparen":
- self.parse_signature(node)
- else:
- node.args = []
- node.defaults = []
-
- node.call = self.parse_expression()
- if not isinstance(node.call, nodes.Call):
- self.fail("expected call", node.lineno)
- node.body = self.parse_statements(("name:endcall",), drop_needle=True)
- return node
-
- def parse_filter_block(self):
- node = nodes.FilterBlock(lineno=next(self.stream).lineno)
- node.filter = self.parse_filter(None, start_inline=True)
- node.body = self.parse_statements(("name:endfilter",), drop_needle=True)
- return node
-
- def parse_macro(self):
- node = nodes.Macro(lineno=next(self.stream).lineno)
- node.name = self.parse_assign_target(name_only=True).name
- self.parse_signature(node)
- node.body = self.parse_statements(("name:endmacro",), drop_needle=True)
- return node
-
- def parse_print(self):
- node = nodes.Output(lineno=next(self.stream).lineno)
- node.nodes = []
- while self.stream.current.type != "block_end":
- if node.nodes:
- self.stream.expect("comma")
- node.nodes.append(self.parse_expression())
- return node
-
- def parse_assign_target(
- self,
- with_tuple=True,
- name_only=False,
- extra_end_rules=None,
- with_namespace=False,
- ):
- """Parse an assignment target. As Jinja allows assignments to
- tuples, this function can parse all allowed assignment targets. By
- default assignments to tuples are parsed; that can be disabled by
- setting `with_tuple` to `False`. If only assignments to names are
- wanted `name_only` can be set to `True`. The `extra_end_rules`
- parameter is forwarded to the tuple parsing function. If
- `with_namespace` is enabled, a namespace assignment may be parsed.
- """
- if with_namespace and self.stream.look().type == "dot":
- token = self.stream.expect("name")
- next(self.stream) # dot
- attr = self.stream.expect("name")
- target = nodes.NSRef(token.value, attr.value, lineno=token.lineno)
- elif name_only:
- token = self.stream.expect("name")
- target = nodes.Name(token.value, "store", lineno=token.lineno)
- else:
- if with_tuple:
- target = self.parse_tuple(
- simplified=True, extra_end_rules=extra_end_rules
- )
- else:
- target = self.parse_primary()
- target.set_ctx("store")
- if not target.can_assign():
- self.fail(
- "can't assign to %r" % target.__class__.__name__.lower(), target.lineno
- )
- return target
-
- def parse_expression(self, with_condexpr=True):
- """Parse an expression. Per default all expressions are parsed, if
- the optional `with_condexpr` parameter is set to `False` conditional
- expressions are not parsed.
- """
- if with_condexpr:
- return self.parse_condexpr()
- return self.parse_or()
-
- def parse_condexpr(self):
- lineno = self.stream.current.lineno
- expr1 = self.parse_or()
- while self.stream.skip_if("name:if"):
- expr2 = self.parse_or()
- if self.stream.skip_if("name:else"):
- expr3 = self.parse_condexpr()
- else:
- expr3 = None
- expr1 = nodes.CondExpr(expr2, expr1, expr3, lineno=lineno)
- lineno = self.stream.current.lineno
- return expr1
-
- def parse_or(self):
- lineno = self.stream.current.lineno
- left = self.parse_and()
- while self.stream.skip_if("name:or"):
- right = self.parse_and()
- left = nodes.Or(left, right, lineno=lineno)
- lineno = self.stream.current.lineno
- return left
-
- def parse_and(self):
- lineno = self.stream.current.lineno
- left = self.parse_not()
- while self.stream.skip_if("name:and"):
- right = self.parse_not()
- left = nodes.And(left, right, lineno=lineno)
- lineno = self.stream.current.lineno
- return left
-
- def parse_not(self):
- if self.stream.current.test("name:not"):
- lineno = next(self.stream).lineno
- return nodes.Not(self.parse_not(), lineno=lineno)
- return self.parse_compare()
-
- def parse_compare(self):
- lineno = self.stream.current.lineno
- expr = self.parse_math1()
- ops = []
- while 1:
- token_type = self.stream.current.type
- if token_type in _compare_operators:
- next(self.stream)
- ops.append(nodes.Operand(token_type, self.parse_math1()))
- elif self.stream.skip_if("name:in"):
- ops.append(nodes.Operand("in", self.parse_math1()))
- elif self.stream.current.test("name:not") and self.stream.look().test(
- "name:in"
- ):
- self.stream.skip(2)
- ops.append(nodes.Operand("notin", self.parse_math1()))
- else:
- break
- lineno = self.stream.current.lineno
- if not ops:
- return expr
- return nodes.Compare(expr, ops, lineno=lineno)
-
- def parse_math1(self):
- lineno = self.stream.current.lineno
- left = self.parse_concat()
- while self.stream.current.type in ("add", "sub"):
- cls = _math_nodes[self.stream.current.type]
- next(self.stream)
- right = self.parse_concat()
- left = cls(left, right, lineno=lineno)
- lineno = self.stream.current.lineno
- return left
-
- def parse_concat(self):
- lineno = self.stream.current.lineno
- args = [self.parse_math2()]
- while self.stream.current.type == "tilde":
- next(self.stream)
- args.append(self.parse_math2())
- if len(args) == 1:
- return args[0]
- return nodes.Concat(args, lineno=lineno)
-
- def parse_math2(self):
- lineno = self.stream.current.lineno
- left = self.parse_pow()
- while self.stream.current.type in ("mul", "div", "floordiv", "mod"):
- cls = _math_nodes[self.stream.current.type]
- next(self.stream)
- right = self.parse_pow()
- left = cls(left, right, lineno=lineno)
- lineno = self.stream.current.lineno
- return left
-
- def parse_pow(self):
- lineno = self.stream.current.lineno
- left = self.parse_unary()
- while self.stream.current.type == "pow":
- next(self.stream)
- right = self.parse_unary()
- left = nodes.Pow(left, right, lineno=lineno)
- lineno = self.stream.current.lineno
- return left
-
- def parse_unary(self, with_filter=True):
- token_type = self.stream.current.type
- lineno = self.stream.current.lineno
- if token_type == "sub":
- next(self.stream)
- node = nodes.Neg(self.parse_unary(False), lineno=lineno)
- elif token_type == "add":
- next(self.stream)
- node = nodes.Pos(self.parse_unary(False), lineno=lineno)
- else:
- node = self.parse_primary()
- node = self.parse_postfix(node)
- if with_filter:
- node = self.parse_filter_expr(node)
- return node
-
- def parse_primary(self):
- token = self.stream.current
- if token.type == "name":
- if token.value in ("true", "false", "True", "False"):
- node = nodes.Const(token.value in ("true", "True"), lineno=token.lineno)
- elif token.value in ("none", "None"):
- node = nodes.Const(None, lineno=token.lineno)
- else:
- node = nodes.Name(token.value, "load", lineno=token.lineno)
- next(self.stream)
- elif token.type == "string":
- next(self.stream)
- buf = [token.value]
- lineno = token.lineno
- while self.stream.current.type == "string":
- buf.append(self.stream.current.value)
- next(self.stream)
- node = nodes.Const("".join(buf), lineno=lineno)
- elif token.type in ("integer", "float"):
- next(self.stream)
- node = nodes.Const(token.value, lineno=token.lineno)
- elif token.type == "lparen":
- next(self.stream)
- node = self.parse_tuple(explicit_parentheses=True)
- self.stream.expect("rparen")
- elif token.type == "lbracket":
- node = self.parse_list()
- elif token.type == "lbrace":
- node = self.parse_dict()
- else:
- self.fail("unexpected '%s'" % describe_token(token), token.lineno)
- return node
-
- def parse_tuple(
- self,
- simplified=False,
- with_condexpr=True,
- extra_end_rules=None,
- explicit_parentheses=False,
- ):
- """Works like `parse_expression` but if multiple expressions are
- delimited by a comma a :class:`~jinja2.nodes.Tuple` node is created.
- This method could also return a regular expression instead of a tuple
- if no commas where found.
-
- The default parsing mode is a full tuple. If `simplified` is `True`
- only names and literals are parsed. The `with_condexpr` parameter is
- forwarded to :meth:`parse_expression`.
-
- Because tuples do not require delimiters and may end in a bogus comma
- an extra hint is needed that marks the end of a tuple. For example
- for loops support tuples between `for` and `in`. In that case the
- `extra_end_rules` is set to ``['name:in']``.
-
- `explicit_parentheses` is true if the parsing was triggered by an
- expression in parentheses. This is used to figure out if an empty
- tuple is a valid expression or not.
- """
- lineno = self.stream.current.lineno
- if simplified:
- parse = self.parse_primary
- elif with_condexpr:
- parse = self.parse_expression
- else:
-
- def parse():
- return self.parse_expression(with_condexpr=False)
-
- args = []
- is_tuple = False
- while 1:
- if args:
- self.stream.expect("comma")
- if self.is_tuple_end(extra_end_rules):
- break
- args.append(parse())
- if self.stream.current.type == "comma":
- is_tuple = True
- else:
- break
- lineno = self.stream.current.lineno
-
- if not is_tuple:
- if args:
- return args[0]
-
- # if we don't have explicit parentheses, an empty tuple is
- # not a valid expression. This would mean nothing (literally
- # nothing) in the spot of an expression would be an empty
- # tuple.
- if not explicit_parentheses:
- self.fail(
- "Expected an expression, got '%s'"
- % describe_token(self.stream.current)
- )
-
- return nodes.Tuple(args, "load", lineno=lineno)
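-
- # For illustration: a stream positioned at ``1, 2`` yields
- # Tuple([Const(1), Const(2)], 'load'), while ``1`` alone comes back as a
- # plain Const(1) and ``()`` with explicit parentheses as an empty Tuple.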
-
- def parse_list(self):
- token = self.stream.expect("lbracket")
- items = []
- while self.stream.current.type != "rbracket":
- if items:
- self.stream.expect("comma")
- if self.stream.current.type == "rbracket":
- break
- items.append(self.parse_expression())
- self.stream.expect("rbracket")
- return nodes.List(items, lineno=token.lineno)
-
- def parse_dict(self):
- token = self.stream.expect("lbrace")
- items = []
- while self.stream.current.type != "rbrace":
- if items:
- self.stream.expect("comma")
- if self.stream.current.type == "rbrace":
- break
- key = self.parse_expression()
- self.stream.expect("colon")
- value = self.parse_expression()
- items.append(nodes.Pair(key, value, lineno=key.lineno))
- self.stream.expect("rbrace")
- return nodes.Dict(items, lineno=token.lineno)
-
- def parse_postfix(self, node):
- while 1:
- token_type = self.stream.current.type
- if token_type == "dot" or token_type == "lbracket":
- node = self.parse_subscript(node)
- # calls are valid both after postfix expressions (getattr
- # and getitem) as well as filters and tests
- elif token_type == "lparen":
- node = self.parse_call(node)
- else:
- break
- return node
-
- def parse_filter_expr(self, node):
- while 1:
- token_type = self.stream.current.type
- if token_type == "pipe":
- node = self.parse_filter(node)
- elif token_type == "name" and self.stream.current.value == "is":
- node = self.parse_test(node)
- # calls are valid both after postfix expressions (getattr
- # and getitem) as well as filters and tests
- elif token_type == "lparen":
- node = self.parse_call(node)
- else:
- break
- return node
-
- def parse_subscript(self, node):
- token = next(self.stream)
- if token.type == "dot":
- attr_token = self.stream.current
- next(self.stream)
- if attr_token.type == "name":
- return nodes.Getattr(
- node, attr_token.value, "load", lineno=token.lineno
- )
- elif attr_token.type != "integer":
- self.fail("expected name or number", attr_token.lineno)
- arg = nodes.Const(attr_token.value, lineno=attr_token.lineno)
- return nodes.Getitem(node, arg, "load", lineno=token.lineno)
- if token.type == "lbracket":
- args = []
- while self.stream.current.type != "rbracket":
- if args:
- self.stream.expect("comma")
- args.append(self.parse_subscribed())
- self.stream.expect("rbracket")
- if len(args) == 1:
- arg = args[0]
- else:
- arg = nodes.Tuple(args, "load", lineno=token.lineno)
- return nodes.Getitem(node, arg, "load", lineno=token.lineno)
- self.fail("expected subscript expression", token.lineno)
-
- def parse_subscribed(self):
- lineno = self.stream.current.lineno
-
- if self.stream.current.type == "colon":
- next(self.stream)
- args = [None]
- else:
- node = self.parse_expression()
- if self.stream.current.type != "colon":
- return node
- next(self.stream)
- args = [node]
-
- if self.stream.current.type == "colon":
- args.append(None)
- elif self.stream.current.type not in ("rbracket", "comma"):
- args.append(self.parse_expression())
- else:
- args.append(None)
-
- if self.stream.current.type == "colon":
- next(self.stream)
- if self.stream.current.type not in ("rbracket", "comma"):
- args.append(self.parse_expression())
- else:
- args.append(None)
- else:
- args.append(None)
-
- return nodes.Slice(lineno=lineno, *args)
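-
- # For illustration: inside ``x[1:]`` this returns
- # Slice(Const(1), None, None), while a plain ``x[1]`` short-circuits and
- # returns Const(1) before any Slice is built.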
-
- def parse_call(self, node):
- token = self.stream.expect("lparen")
- args = []
- kwargs = []
- dyn_args = dyn_kwargs = None
- require_comma = False
-
- def ensure(expr):
- if not expr:
- self.fail("invalid syntax for function call expression", token.lineno)
-
- while self.stream.current.type != "rparen":
- if require_comma:
- self.stream.expect("comma")
- # support for trailing comma
- if self.stream.current.type == "rparen":
- break
- if self.stream.current.type == "mul":
- ensure(dyn_args is None and dyn_kwargs is None)
- next(self.stream)
- dyn_args = self.parse_expression()
- elif self.stream.current.type == "pow":
- ensure(dyn_kwargs is None)
- next(self.stream)
- dyn_kwargs = self.parse_expression()
- else:
- if (
- self.stream.current.type == "name"
- and self.stream.look().type == "assign"
- ):
- # Parsing a kwarg
- ensure(dyn_kwargs is None)
- key = self.stream.current.value
- self.stream.skip(2)
- value = self.parse_expression()
- kwargs.append(nodes.Keyword(key, value, lineno=value.lineno))
- else:
- # Parsing an arg
- ensure(dyn_args is None and dyn_kwargs is None and not kwargs)
- args.append(self.parse_expression())
-
- require_comma = True
- self.stream.expect("rparen")
-
- if node is None:
- return args, kwargs, dyn_args, dyn_kwargs
- return nodes.Call(node, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno)
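-
- # For illustration: ``foo(1, x=2)`` produces
- # Call(node, [Const(1)], [Keyword('x', Const(2))], None, None), while
- # ``foo(*args, **kwargs)`` fills the ``dyn_args``/``dyn_kwargs`` slots.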
-
- def parse_filter(self, node, start_inline=False):
- while self.stream.current.type == "pipe" or start_inline:
- if not start_inline:
- next(self.stream)
- token = self.stream.expect("name")
- name = token.value
- while self.stream.current.type == "dot":
- next(self.stream)
- name += "." + self.stream.expect("name").value
- if self.stream.current.type == "lparen":
- args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None)
- else:
- args = []
- kwargs = []
- dyn_args = dyn_kwargs = None
- node = nodes.Filter(
- node, name, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno
- )
- start_inline = False
- return node
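-
- # For illustration: ``name|upper`` wraps the parsed ``name`` node as
- # Filter(Name('name', 'load'), 'upper', [], [], None, None); chained
- # filters nest these Filter nodes left to right.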
-
- def parse_test(self, node):
- token = next(self.stream)
- if self.stream.current.test("name:not"):
- next(self.stream)
- negated = True
- else:
- negated = False
- name = self.stream.expect("name").value
- while self.stream.current.type == "dot":
- next(self.stream)
- name += "." + self.stream.expect("name").value
- dyn_args = dyn_kwargs = None
- kwargs = []
- if self.stream.current.type == "lparen":
- args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None)
- elif self.stream.current.type in (
- "name",
- "string",
- "integer",
- "float",
- "lparen",
- "lbracket",
- "lbrace",
- ) and not self.stream.current.test_any("name:else", "name:or", "name:and"):
- if self.stream.current.test("name:is"):
- self.fail("You cannot chain multiple tests with is")
- arg_node = self.parse_primary()
- arg_node = self.parse_postfix(arg_node)
- args = [arg_node]
- else:
- args = []
- node = nodes.Test(
- node, name, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno
- )
- if negated:
- node = nodes.Not(node, lineno=token.lineno)
- return node
-
- def subparse(self, end_tokens=None):
- body = []
- data_buffer = []
- add_data = data_buffer.append
-
- if end_tokens is not None:
- self._end_token_stack.append(end_tokens)
-
- def flush_data():
- if data_buffer:
- lineno = data_buffer[0].lineno
- body.append(nodes.Output(data_buffer[:], lineno=lineno))
- del data_buffer[:]
-
- try:
- while self.stream:
- token = self.stream.current
- if token.type == "data":
- if token.value:
- add_data(nodes.TemplateData(token.value, lineno=token.lineno))
- next(self.stream)
- elif token.type == "variable_begin":
- next(self.stream)
- add_data(self.parse_tuple(with_condexpr=True))
- self.stream.expect("variable_end")
- elif token.type == "block_begin":
- flush_data()
- next(self.stream)
- if end_tokens is not None and self.stream.current.test_any(
- *end_tokens
- ):
- return body
- rv = self.parse_statement()
- if isinstance(rv, list):
- body.extend(rv)
- else:
- body.append(rv)
- self.stream.expect("block_end")
- else:
- raise AssertionError("internal parsing error")
-
- flush_data()
- finally:
- if end_tokens is not None:
- self._end_token_stack.pop()
-
- return body
-
- def parse(self):
- """Parse the whole template into a `Template` node."""
- result = nodes.Template(self.subparse(), lineno=1)
- result.set_environment(self.environment)
- return result
diff --git a/lib/spack/external/jinja2/runtime.py b/lib/spack/external/jinja2/runtime.py
deleted file mode 100644
index 3ad7968624..0000000000
--- a/lib/spack/external/jinja2/runtime.py
+++ /dev/null
@@ -1,1011 +0,0 @@
-# -*- coding: utf-8 -*-
-"""The runtime functions and state used by compiled templates."""
-import sys
-from itertools import chain
-from types import MethodType
-
-from markupsafe import escape # noqa: F401
-from markupsafe import Markup
-from markupsafe import soft_unicode
-
-from ._compat import abc
-from ._compat import imap
-from ._compat import implements_iterator
-from ._compat import implements_to_string
-from ._compat import iteritems
-from ._compat import PY2
-from ._compat import string_types
-from ._compat import text_type
-from ._compat import with_metaclass
-from .exceptions import TemplateNotFound # noqa: F401
-from .exceptions import TemplateRuntimeError # noqa: F401
-from .exceptions import UndefinedError
-from .nodes import EvalContext
-from .utils import concat
-from .utils import evalcontextfunction
-from .utils import internalcode
-from .utils import missing
-from .utils import Namespace # noqa: F401
-from .utils import object_type_repr
-
-# these variables are exported to the template runtime
-exported = [
- "LoopContext",
- "TemplateReference",
- "Macro",
- "Markup",
- "TemplateRuntimeError",
- "missing",
- "concat",
- "escape",
- "markup_join",
- "unicode_join",
- "to_string",
- "identity",
- "TemplateNotFound",
- "Namespace",
- "Undefined",
-]
-
-#: the name of the function that is used to convert something into
-#: a string. We can just use the text type here.
-to_string = text_type
-
-
-def identity(x):
- """Returns its argument. Useful for certain things in the
- environment.
- """
- return x
-
-
-def markup_join(seq):
- """Concatenation that escapes if necessary and converts to unicode."""
- buf = []
- iterator = imap(soft_unicode, seq)
- for arg in iterator:
- buf.append(arg)
- if hasattr(arg, "__html__"):
- return Markup(u"").join(chain(buf, iterator))
- return concat(buf)
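-
- # For illustration: markup_join(["<i>", Markup("<b>ok</b>")]) returns
- # Markup(u"&lt;i&gt;<b>ok</b>"); once a value with ``__html__`` appears,
- # Markup.join escapes the buffered plain strings.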
-
-
-def unicode_join(seq):
- """Simple args to unicode conversion and concatenation."""
- return concat(imap(text_type, seq))
-
-
-def new_context(
- environment,
- template_name,
- blocks,
- vars=None,
- shared=None,
- globals=None,
- locals=None,
-):
- """Internal helper for context creation."""
- if vars is None:
- vars = {}
- if shared:
- parent = vars
- else:
- parent = dict(globals or (), **vars)
- if locals:
- # if the parent is shared a copy should be created because
- # we don't want to modify the dict passed
- if shared:
- parent = dict(parent)
- for key, value in iteritems(locals):
- if value is not missing:
- parent[key] = value
- return environment.context_class(environment, parent, template_name, blocks)
-
-
-class TemplateReference(object):
- """The `self` in templates."""
-
- def __init__(self, context):
- self.__context = context
-
- def __getitem__(self, name):
- blocks = self.__context.blocks[name]
- return BlockReference(name, self.__context, blocks, 0)
-
- def __repr__(self):
- return "<%s %r>" % (self.__class__.__name__, self.__context.name)
-
-
-def _get_func(x):
- return getattr(x, "__func__", x)
-
-
-class ContextMeta(type):
- def __new__(mcs, name, bases, d):
- rv = type.__new__(mcs, name, bases, d)
- if bases == ():
- return rv
-
- resolve = _get_func(rv.resolve)
- default_resolve = _get_func(Context.resolve)
- resolve_or_missing = _get_func(rv.resolve_or_missing)
- default_resolve_or_missing = _get_func(Context.resolve_or_missing)
-
- # If resolve was overridden but resolve_or_missing is still the
- # default, we invert the call logic.
- if (
- resolve is not default_resolve
- and resolve_or_missing is default_resolve_or_missing
- ):
- rv._legacy_resolve_mode = True
- elif (
- resolve is default_resolve
- and resolve_or_missing is default_resolve_or_missing
- ):
- rv._fast_resolve_mode = True
-
- return rv
-
-
-def resolve_or_missing(context, key, missing=missing):
- if key in context.vars:
- return context.vars[key]
- if key in context.parent:
- return context.parent[key]
- return missing
-
-
-class Context(with_metaclass(ContextMeta)):
- """The template context holds the variables of a template. It stores the
- values passed to the template and also the names the template exports.
- Creating instances is neither supported nor useful as it's created
- automatically at various stages of the template evaluation and should not
- be created by hand.
-
- The context is immutable. Modifications on :attr:`parent` **must not**
- happen and modifications on :attr:`vars` are allowed from generated
- template code only. Template filters and global functions marked as
- :func:`contextfunction`\\s get the active context passed as first argument
- and are allowed to access the context read-only.
-
- The template context supports read only dict operations (`get`,
- `keys`, `values`, `items`, `iterkeys`, `itervalues`, `iteritems`,
- `__getitem__`, `__contains__`). Additionally there is a :meth:`resolve`
- method that doesn't fail with a `KeyError` but returns an
- :class:`Undefined` object for missing variables.
- """
-
- # XXX: we want to eventually make this be a deprecation warning and
- # remove it.
- _legacy_resolve_mode = False
- _fast_resolve_mode = False
-
- def __init__(self, environment, parent, name, blocks):
- self.parent = parent
- self.vars = {}
- self.environment = environment
- self.eval_ctx = EvalContext(self.environment, name)
- self.exported_vars = set()
- self.name = name
-
- # create the initial mapping of blocks. Whenever template inheritance
- # takes place the runtime will update this mapping with the new blocks
- # from the template.
- self.blocks = dict((k, [v]) for k, v in iteritems(blocks))
-
- # In case we detect the fast resolve mode we can set up an alias
- # here that bypasses the legacy code logic.
- if self._fast_resolve_mode:
- self.resolve_or_missing = MethodType(resolve_or_missing, self)
-
- def super(self, name, current):
- """Render a parent block."""
- try:
- blocks = self.blocks[name]
- index = blocks.index(current) + 1
- blocks[index]
- except LookupError:
- return self.environment.undefined(
- "there is no parent block called %r." % name, name="super"
- )
- return BlockReference(name, self, blocks, index)
-
- def get(self, key, default=None):
- """Returns an item from the template context, if it doesn't exist
- `default` is returned.
- """
- try:
- return self[key]
- except KeyError:
- return default
-
- def resolve(self, key):
- """Looks up a variable like `__getitem__` or `get` but returns an
- :class:`Undefined` object with the name of the variable looked up.
- """
- if self._legacy_resolve_mode:
- rv = resolve_or_missing(self, key)
- else:
- rv = self.resolve_or_missing(key)
- if rv is missing:
- return self.environment.undefined(name=key)
- return rv
-
- def resolve_or_missing(self, key):
- """Resolves a variable like :meth:`resolve` but returns the
- special `missing` value if it cannot be found.
- """
- if self._legacy_resolve_mode:
- rv = self.resolve(key)
- if isinstance(rv, Undefined):
- rv = missing
- return rv
- return resolve_or_missing(self, key)
-
- def get_exported(self):
- """Get a new dict with the exported variables."""
- return dict((k, self.vars[k]) for k in self.exported_vars)
-
- def get_all(self):
- """Return the complete context as dict including the exported
- variables. For optimization reasons this might not return an
- actual copy, so be careful when using it.
- """
- if not self.vars:
- return self.parent
- if not self.parent:
- return self.vars
- return dict(self.parent, **self.vars)
-
- @internalcode
- def call(__self, __obj, *args, **kwargs): # noqa: B902
- """Call the callable with the arguments and keyword arguments
- provided but inject the active context or environment as first
- argument if the callable is a :func:`contextfunction` or
- :func:`environmentfunction`.
- """
- if __debug__:
- __traceback_hide__ = True # noqa
-
- # Allow callable classes to take a context
- if hasattr(__obj, "__call__"): # noqa: B004
- fn = __obj.__call__
- for fn_type in (
- "contextfunction",
- "evalcontextfunction",
- "environmentfunction",
- ):
- if hasattr(fn, fn_type):
- __obj = fn
- break
-
- if callable(__obj):
- if getattr(__obj, "contextfunction", False) is True:
- args = (__self,) + args
- elif getattr(__obj, "evalcontextfunction", False) is True:
- args = (__self.eval_ctx,) + args
- elif getattr(__obj, "environmentfunction", False) is True:
- args = (__self.environment,) + args
- try:
- return __obj(*args, **kwargs)
- except StopIteration:
- return __self.environment.undefined(
- "value was undefined because "
- "a callable raised a "
- "StopIteration exception"
- )
-
- def derived(self, locals=None):
- """Internal helper function to create a derived context. This is
- used in situations where the system needs a new, independent
- context for the same template.
- """
- context = new_context(
- self.environment, self.name, {}, self.get_all(), True, None, locals
- )
- context.eval_ctx = self.eval_ctx
- context.blocks.update((k, list(v)) for k, v in iteritems(self.blocks))
- return context
-
- def _all(meth): # noqa: B902
- def proxy(self):
- return getattr(self.get_all(), meth)()
-
- proxy.__doc__ = getattr(dict, meth).__doc__
- proxy.__name__ = meth
- return proxy
-
- keys = _all("keys")
- values = _all("values")
- items = _all("items")
-
- # not available on python 3
- if PY2:
- iterkeys = _all("iterkeys")
- itervalues = _all("itervalues")
- iteritems = _all("iteritems")
- del _all
-
- def __contains__(self, name):
- return name in self.vars or name in self.parent
-
- def __getitem__(self, key):
- """Lookup a variable or raise `KeyError` if the variable is
- undefined.
- """
- item = self.resolve_or_missing(key)
- if item is missing:
- raise KeyError(key)
- return item
-
- def __repr__(self):
- return "<%s %s of %r>" % (
- self.__class__.__name__,
- repr(self.get_all()),
- self.name,
- )
-
-
-abc.Mapping.register(Context)
-
-
-class BlockReference(object):
- """One block on a template reference."""
-
- def __init__(self, name, context, stack, depth):
- self.name = name
- self._context = context
- self._stack = stack
- self._depth = depth
-
- @property
- def super(self):
- """Super the block."""
- if self._depth + 1 >= len(self._stack):
- return self._context.environment.undefined(
- "there is no parent block called %r." % self.name, name="super"
- )
- return BlockReference(self.name, self._context, self._stack, self._depth + 1)
-
- @internalcode
- def __call__(self):
- rv = concat(self._stack[self._depth](self._context))
- if self._context.eval_ctx.autoescape:
- rv = Markup(rv)
- return rv
-
-
-@implements_iterator
-class LoopContext:
- """A wrapper iterable for dynamic ``for`` loops, with information
- about the loop and iteration.
- """
-
- #: Current iteration of the loop, starting at 0.
- index0 = -1
-
- _length = None
- _after = missing
- _current = missing
- _before = missing
- _last_changed_value = missing
-
- def __init__(self, iterable, undefined, recurse=None, depth0=0):
- """
- :param iterable: Iterable to wrap.
- :param undefined: :class:`Undefined` class to use for next and
- previous items.
- :param recurse: The function to render the loop body when the
- loop is marked recursive.
- :param depth0: Incremented when looping recursively.
- """
- self._iterable = iterable
- self._iterator = self._to_iterator(iterable)
- self._undefined = undefined
- self._recurse = recurse
- #: How many levels deep a recursive loop currently is, starting at 0.
- self.depth0 = depth0
-
- @staticmethod
- def _to_iterator(iterable):
- return iter(iterable)
-
- @property
- def length(self):
- """Length of the iterable.
-
- If the iterable is a generator or otherwise does not have a
- size, it is eagerly evaluated to get a size.
- """
- if self._length is not None:
- return self._length
-
- try:
- self._length = len(self._iterable)
- except TypeError:
- iterable = list(self._iterator)
- self._iterator = self._to_iterator(iterable)
- self._length = len(iterable) + self.index + (self._after is not missing)
-
- return self._length
-
- def __len__(self):
- return self.length
-
- @property
- def depth(self):
- """How many levels deep a recursive loop currently is, starting at 1."""
- return self.depth0 + 1
-
- @property
- def index(self):
- """Current iteration of the loop, starting at 1."""
- return self.index0 + 1
-
- @property
- def revindex0(self):
- """Number of iterations from the end of the loop, ending at 0.
-
- Requires calculating :attr:`length`.
- """
- return self.length - self.index
-
- @property
- def revindex(self):
- """Number of iterations from the end of the loop, ending at 1.
-
- Requires calculating :attr:`length`.
- """
- return self.length - self.index0
-
- @property
- def first(self):
- """Whether this is the first iteration of the loop."""
- return self.index0 == 0
-
- def _peek_next(self):
- """Return the next element in the iterable, or :data:`missing`
- if the iterable is exhausted. Only peeks one item ahead, caching
- the result in :attr:`_after` for use in subsequent checks. The
- cache is reset when :meth:`__next__` is called.
- """
- if self._after is not missing:
- return self._after
-
- self._after = next(self._iterator, missing)
- return self._after
-
- @property
- def last(self):
- """Whether this is the last iteration of the loop.
-
- Causes the iterable to advance early. See
- :func:`itertools.groupby` for issues this can cause.
- The :func:`groupby` filter avoids that issue.
- """
- return self._peek_next() is missing
-
- @property
- def previtem(self):
- """The item in the previous iteration. Undefined during the
- first iteration.
- """
- if self.first:
- return self._undefined("there is no previous item")
-
- return self._before
-
- @property
- def nextitem(self):
- """The item in the next iteration. Undefined during the last
- iteration.
-
- Causes the iterable to advance early. See
- :func:`itertools.groupby` for issues this can cause.
- The :func:`groupby` filter avoids that issue.
- """
- rv = self._peek_next()
-
- if rv is missing:
- return self._undefined("there is no next item")
-
- return rv
-
- def cycle(self, *args):
- """Return a value from the given args, cycling through based on
- the current :attr:`index0`.
-
- :param args: One or more values to cycle through.
- """
- if not args:
- raise TypeError("no items for cycling given")
-
- return args[self.index0 % len(args)]
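-
- # For example, ``{{ loop.cycle('odd', 'even') }}`` in a template
- # alternates between the two values as ``index0`` advances.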
-
- def changed(self, *value):
- """Return ``True`` if previously called with a different value
- (including when called for the first time).
-
- :param value: One or more values to compare to the last call.
- """
- if self._last_changed_value != value:
- self._last_changed_value = value
- return True
-
- return False
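-
- # For example, ``loop.changed(entry.category)`` is true on the first
- # iteration and again whenever the category differs from the value seen
- # on the previous call.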
-
- def __iter__(self):
- return self
-
- def __next__(self):
- if self._after is not missing:
- rv = self._after
- self._after = missing
- else:
- rv = next(self._iterator)
-
- self.index0 += 1
- self._before = self._current
- self._current = rv
- return rv, self
-
- @internalcode
- def __call__(self, iterable):
- """When iterating over nested data, render the body of the loop
- recursively with the given inner iterable data.
-
- The loop must have the ``recursive`` marker for this to work.
- """
- if self._recurse is None:
- raise TypeError(
- "The loop must have the 'recursive' marker to be called recursively."
- )
-
- return self._recurse(iterable, self._recurse, depth=self.depth)
-
- def __repr__(self):
- return "<%s %d/%d>" % (self.__class__.__name__, self.index, self.length)
-
-
-class Macro(object):
- """Wraps a macro function."""
-
- def __init__(
- self,
- environment,
- func,
- name,
- arguments,
- catch_kwargs,
- catch_varargs,
- caller,
- default_autoescape=None,
- ):
- self._environment = environment
- self._func = func
- self._argument_count = len(arguments)
- self.name = name
- self.arguments = arguments
- self.catch_kwargs = catch_kwargs
- self.catch_varargs = catch_varargs
- self.caller = caller
- self.explicit_caller = "caller" in arguments
- if default_autoescape is None:
- default_autoescape = environment.autoescape
- self._default_autoescape = default_autoescape
-
- @internalcode
- @evalcontextfunction
- def __call__(self, *args, **kwargs):
- # This requires a bit of explanation. In the past we used to
- # decide largely based on compile-time information if a macro is
- # safe or unsafe. While there was a volatile mode it was largely
- # unused for deciding on escaping. This turns out to be
- # problematic for macros because whether a macro is safe depends not
- # on the escape mode when it was defined, but rather when it was used.
- #
- # Because however we export macros from the module system and
- # there are historic callers that do not pass an eval context (and
- # will continue to not pass one), we need to perform an instance
- # check here.
- #
- # This is considered safe because an eval context is not a valid
- # argument to callables otherwise anyway. Worst case here is
- # that if no eval context is passed we fall back to the compile
- # time autoescape flag.
- if args and isinstance(args[0], EvalContext):
- autoescape = args[0].autoescape
- args = args[1:]
- else:
- autoescape = self._default_autoescape
-
- # try to consume the positional arguments
- arguments = list(args[: self._argument_count])
- off = len(arguments)
-
- # For information on why this is necessary, refer to the handling
- # of caller in the `macro_body` handler in the compiler.
- found_caller = False
-
- # if the number of arguments consumed is not the number of
- # arguments expected we start filling in keyword arguments
- # and defaults.
- if off != self._argument_count:
- for name in self.arguments[len(arguments) :]:
- try:
- value = kwargs.pop(name)
- except KeyError:
- value = missing
- if name == "caller":
- found_caller = True
- arguments.append(value)
- else:
- found_caller = self.explicit_caller
-
- # it's important that the order of these arguments does not change
- # unless it is also changed in the compiler's `function_scoping`
- # method. The order is caller, keyword arguments, positional arguments!
- if self.caller and not found_caller:
- caller = kwargs.pop("caller", None)
- if caller is None:
- caller = self._environment.undefined("No caller defined", name="caller")
- arguments.append(caller)
-
- if self.catch_kwargs:
- arguments.append(kwargs)
- elif kwargs:
- if "caller" in kwargs:
- raise TypeError(
- "macro %r was invoked with two values for "
- "the special caller argument. This is "
- "most likely a bug." % self.name
- )
- raise TypeError(
- "macro %r takes no keyword argument %r"
- % (self.name, next(iter(kwargs)))
- )
- if self.catch_varargs:
- arguments.append(args[self._argument_count :])
- elif len(args) > self._argument_count:
- raise TypeError(
- "macro %r takes not more than %d argument(s)"
- % (self.name, len(self.arguments))
- )
-
- return self._invoke(arguments, autoescape)
-
- def _invoke(self, arguments, autoescape):
- """This method is being swapped out by the async implementation."""
- rv = self._func(*arguments)
- if autoescape:
- rv = Markup(rv)
- return rv
-
- def __repr__(self):
- return "<%s %s>" % (
- self.__class__.__name__,
- self.name is None and "anonymous" or repr(self.name),
- )
-
-
-@implements_to_string
-class Undefined(object):
- """The default undefined type. This undefined type can be printed and
- iterated over, but every other access will raise an :exc:`UndefinedError`:
-
- >>> foo = Undefined(name='foo')
- >>> str(foo)
- ''
- >>> not foo
- True
- >>> foo + 42
- Traceback (most recent call last):
- ...
- jinja2.exceptions.UndefinedError: 'foo' is undefined
- """
-
- __slots__ = (
- "_undefined_hint",
- "_undefined_obj",
- "_undefined_name",
- "_undefined_exception",
- )
-
- def __init__(self, hint=None, obj=missing, name=None, exc=UndefinedError):
- self._undefined_hint = hint
- self._undefined_obj = obj
- self._undefined_name = name
- self._undefined_exception = exc
-
- @property
- def _undefined_message(self):
- """Build a message about the undefined value based on how it was
- accessed.
- """
- if self._undefined_hint:
- return self._undefined_hint
-
- if self._undefined_obj is missing:
- return "%r is undefined" % self._undefined_name
-
- if not isinstance(self._undefined_name, string_types):
- return "%s has no element %r" % (
- object_type_repr(self._undefined_obj),
- self._undefined_name,
- )
-
- return "%r has no attribute %r" % (
- object_type_repr(self._undefined_obj),
- self._undefined_name,
- )
-
- @internalcode
- def _fail_with_undefined_error(self, *args, **kwargs):
- """Raise an :exc:`UndefinedError` when operations are performed
- on the undefined value.
- """
- raise self._undefined_exception(self._undefined_message)
-
- @internalcode
- def __getattr__(self, name):
- if name[:2] == "__":
- raise AttributeError(name)
- return self._fail_with_undefined_error()
-
- __add__ = __radd__ = __mul__ = __rmul__ = _fail_with_undefined_error
- __div__ = __rdiv__ = __truediv__ = __rtruediv__ = _fail_with_undefined_error
- __floordiv__ = __rfloordiv__ = __mod__ = __rmod__ = _fail_with_undefined_error
- __pos__ = __neg__ = __call__ = __getitem__ = _fail_with_undefined_error
- __lt__ = __le__ = __gt__ = __ge__ = _fail_with_undefined_error
- __int__ = __float__ = __complex__ = _fail_with_undefined_error
- __pow__ = __rpow__ = __sub__ = __rsub__ = _fail_with_undefined_error
-
- def __eq__(self, other):
- return type(self) is type(other)
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
- def __hash__(self):
- return id(type(self))
-
- def __str__(self):
- return u""
-
- def __len__(self):
- return 0
-
- def __iter__(self):
- # an empty generator: iterating an undefined value yields nothing
- # instead of raising
- if 0:
- yield None
-
- def __nonzero__(self):
- return False
-
- __bool__ = __nonzero__
-
- def __repr__(self):
- return "Undefined"
-
-
-def make_logging_undefined(logger=None, base=None):
- """Given a logger object this returns a new undefined class that will
- log certain failures. It will log iterations and printing. If no
- logger is given, a default logger is created.
-
- Example::
-
- logger = logging.getLogger(__name__)
- LoggingUndefined = make_logging_undefined(
- logger=logger,
- base=Undefined
- )
-
- .. versionadded:: 2.8
-
- :param logger: the logger to use. If not provided, a default logger
- is created.
- :param base: the base class to add logging functionality to. This
- defaults to :class:`Undefined`.
- """
- if logger is None:
- import logging
-
- logger = logging.getLogger(__name__)
- logger.addHandler(logging.StreamHandler(sys.stderr))
- if base is None:
- base = Undefined
-
- def _log_message(undef):
- if undef._undefined_hint is None:
- if undef._undefined_obj is missing:
- hint = "%s is undefined" % undef._undefined_name
- elif not isinstance(undef._undefined_name, string_types):
- hint = "%s has no element %s" % (
- object_type_repr(undef._undefined_obj),
- undef._undefined_name,
- )
- else:
- hint = "%s has no attribute %s" % (
- object_type_repr(undef._undefined_obj),
- undef._undefined_name,
- )
- else:
- hint = undef._undefined_hint
- logger.warning("Template variable warning: %s", hint)
-
- class LoggingUndefined(base):
- def _fail_with_undefined_error(self, *args, **kwargs):
- try:
- return base._fail_with_undefined_error(self, *args, **kwargs)
- except self._undefined_exception as e:
- logger.error("Template variable error: %s", str(e))
- raise e
-
- def __str__(self):
- rv = base.__str__(self)
- _log_message(self)
- return rv
-
- def __iter__(self):
- rv = base.__iter__(self)
- _log_message(self)
- return rv
-
- if PY2:
-
- def __nonzero__(self):
- rv = base.__nonzero__(self)
- _log_message(self)
- return rv
-
- def __unicode__(self):
- rv = base.__unicode__(self)
- _log_message(self)
- return rv
-
- else:
-
- def __bool__(self):
- rv = base.__bool__(self)
- _log_message(self)
- return rv
-
- return LoggingUndefined
-
-
-# No @implements_to_string decorator here because __str__
-# is not overridden from Undefined in this class.
-# Applying it again would cause a recursion error in Python 2.
-class ChainableUndefined(Undefined):
- """An undefined that is chainable, where both ``__getattr__`` and
- ``__getitem__`` return itself rather than raising an
- :exc:`UndefinedError`.
-
- >>> foo = ChainableUndefined(name='foo')
- >>> str(foo.bar['baz'])
- ''
- >>> foo.bar['baz'] + 42
- Traceback (most recent call last):
- ...
- jinja2.exceptions.UndefinedError: 'foo' is undefined
-
- .. versionadded:: 2.11.0
- """
-
- __slots__ = ()
-
- def __html__(self):
- return self.__str__()
-
- def __getattr__(self, _):
- return self
-
- __getitem__ = __getattr__
-
-
-@implements_to_string
-class DebugUndefined(Undefined):
- """An undefined that returns the debug info when printed.
-
- >>> foo = DebugUndefined(name='foo')
- >>> str(foo)
- '{{ foo }}'
- >>> not foo
- True
- >>> foo + 42
- Traceback (most recent call last):
- ...
- jinja2.exceptions.UndefinedError: 'foo' is undefined
- """
-
- __slots__ = ()
-
- def __str__(self):
- if self._undefined_hint is None:
- if self._undefined_obj is missing:
- return u"{{ %s }}" % self._undefined_name
- return "{{ no such element: %s[%r] }}" % (
- object_type_repr(self._undefined_obj),
- self._undefined_name,
- )
- return u"{{ undefined value printed: %s }}" % self._undefined_hint
-
-
-@implements_to_string
-class StrictUndefined(Undefined):
- """An undefined that barks on print and iteration as well as boolean
- tests and all kinds of comparisons. In other words: you can do nothing
- with it except checking if it's defined using the `defined` test.
-
- >>> foo = StrictUndefined(name='foo')
- >>> str(foo)
- Traceback (most recent call last):
- ...
- jinja2.exceptions.UndefinedError: 'foo' is undefined
- >>> not foo
- Traceback (most recent call last):
- ...
- jinja2.exceptions.UndefinedError: 'foo' is undefined
- >>> foo + 42
- Traceback (most recent call last):
- ...
- jinja2.exceptions.UndefinedError: 'foo' is undefined
- """
-
- __slots__ = ()
- __iter__ = (
- __str__
- ) = (
- __len__
- ) = (
- __nonzero__
- ) = __eq__ = __ne__ = __bool__ = __hash__ = Undefined._fail_with_undefined_error
-
-
-# remove remaining slots attributes, after the metaclass did the magic they
-# are unneeded and irritating as they contain wrong data for the subclasses.
-del (
- Undefined.__slots__,
- ChainableUndefined.__slots__,
- DebugUndefined.__slots__,
- StrictUndefined.__slots__,
-)
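
For context, the undefined types deleted above were selected through the
``undefined`` parameter of ``Environment``. A minimal sketch of how their
behavior differs (assuming a ``jinja2`` package with these classes is
importable; the template text is illustrative)::

    from jinja2 import DebugUndefined, Environment, StrictUndefined, Undefined

    # default Undefined: missing variables render as empty strings
    env = Environment(undefined=Undefined)
    print(env.from_string("Hello {{ user }}!").render())  # -> "Hello !"

    # DebugUndefined: missing variables are echoed back for troubleshooting
    env = Environment(undefined=DebugUndefined)
    print(env.from_string("Hello {{ user }}!").render())  # -> "Hello {{ user }}!"

    # StrictUndefined: any use of a missing variable raises UndefinedError
    env = Environment(undefined=StrictUndefined)
    env.from_string("Hello {{ user }}!").render()  # raises UndefinedError
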
diff --git a/lib/spack/external/jinja2/sandbox.py b/lib/spack/external/jinja2/sandbox.py
deleted file mode 100644
index cfd7993aee..0000000000
--- a/lib/spack/external/jinja2/sandbox.py
+++ /dev/null
@@ -1,510 +0,0 @@
-# -*- coding: utf-8 -*-
-"""A sandbox layer that ensures unsafe operations cannot be performed.
-Useful when the template itself comes from an untrusted source.
-"""
-import operator
-import types
-import warnings
-from collections import deque
-from string import Formatter
-
-from markupsafe import EscapeFormatter
-from markupsafe import Markup
-
-from ._compat import abc
-from ._compat import PY2
-from ._compat import range_type
-from ._compat import string_types
-from .environment import Environment
-from .exceptions import SecurityError
-
-#: maximum number of items a range may produce
-MAX_RANGE = 100000
-
-#: attributes of function objects that are considered unsafe.
-if PY2:
- UNSAFE_FUNCTION_ATTRIBUTES = {
- "func_closure",
- "func_code",
- "func_dict",
- "func_defaults",
- "func_globals",
- }
-else:
-    # On Python 3 the special attributes on functions are gone,
- # but they remain on methods and generators for whatever reason.
- UNSAFE_FUNCTION_ATTRIBUTES = set()
-
-#: unsafe method attributes. function attributes are unsafe for methods too
-UNSAFE_METHOD_ATTRIBUTES = {"im_class", "im_func", "im_self"}
-
-#: unsafe generator attributes.
-UNSAFE_GENERATOR_ATTRIBUTES = {"gi_frame", "gi_code"}
-
-#: unsafe attributes on coroutines
-UNSAFE_COROUTINE_ATTRIBUTES = {"cr_frame", "cr_code"}
-
-#: unsafe attributes on async generators
-UNSAFE_ASYNC_GENERATOR_ATTRIBUTES = {"ag_code", "ag_frame"}
-
-# make sure we don't warn in python 2.6 about stuff we don't care about
-warnings.filterwarnings(
- "ignore", "the sets module", DeprecationWarning, module=__name__
-)
-
-_mutable_set_types = (set,)
-_mutable_mapping_types = (dict,)
-_mutable_sequence_types = (list,)
-
-# on python 2.x we can register the user collection types
-try:
- from UserDict import UserDict, DictMixin
- from UserList import UserList
-
- _mutable_mapping_types += (UserDict, DictMixin)
-    _mutable_sequence_types += (UserList,)
-except ImportError:
- pass
-
-# if sets is still available, register the mutable set from there as well
-try:
- from sets import Set
-
- _mutable_set_types += (Set,)
-except ImportError:
- pass
-
-#: register Python 2.6 abstract base classes
-_mutable_set_types += (abc.MutableSet,)
-_mutable_mapping_types += (abc.MutableMapping,)
-_mutable_sequence_types += (abc.MutableSequence,)
-
-_mutable_spec = (
- (
- _mutable_set_types,
- frozenset(
- [
- "add",
- "clear",
- "difference_update",
- "discard",
- "pop",
- "remove",
- "symmetric_difference_update",
- "update",
- ]
- ),
- ),
- (
- _mutable_mapping_types,
- frozenset(["clear", "pop", "popitem", "setdefault", "update"]),
- ),
- (
- _mutable_sequence_types,
- frozenset(["append", "reverse", "insert", "sort", "extend", "remove"]),
- ),
- (
- deque,
- frozenset(
- [
- "append",
- "appendleft",
- "clear",
- "extend",
- "extendleft",
- "pop",
- "popleft",
- "remove",
- "rotate",
- ]
- ),
- ),
-)
-
-
-class _MagicFormatMapping(abc.Mapping):
- """This class implements a dummy wrapper to fix a bug in the Python
- standard library for string formatting.
-
- See https://bugs.python.org/issue13598 for information about why
- this is necessary.
- """
-
- def __init__(self, args, kwargs):
- self._args = args
- self._kwargs = kwargs
- self._last_index = 0
-
- def __getitem__(self, key):
- if key == "":
- idx = self._last_index
- self._last_index += 1
- try:
- return self._args[idx]
- except LookupError:
- pass
- key = str(idx)
- return self._kwargs[key]
-
- def __iter__(self):
- return iter(self._kwargs)
-
- def __len__(self):
- return len(self._kwargs)
-
-
-def inspect_format_method(callable):
- if not isinstance(
- callable, (types.MethodType, types.BuiltinMethodType)
- ) or callable.__name__ not in ("format", "format_map"):
- return None
- obj = callable.__self__
- if isinstance(obj, string_types):
- return obj
-
-
-def safe_range(*args):
- """A range that can't generate ranges with a length of more than
- MAX_RANGE items.
- """
- rng = range_type(*args)
-
- if len(rng) > MAX_RANGE:
- raise OverflowError(
- "Range too big. The sandbox blocks ranges larger than"
- " MAX_RANGE (%d)." % MAX_RANGE
- )
-
- return rng
-
-
-def unsafe(f):
- """Marks a function or method as unsafe.
-
- ::
-
- @unsafe
- def delete(self):
- pass
- """
- f.unsafe_callable = True
- return f
-
-
-def is_internal_attribute(obj, attr):
- """Test if the attribute given is an internal python attribute. For
- example this function returns `True` for the `func_code` attribute of
- python objects. This is useful if the environment method
- :meth:`~SandboxedEnvironment.is_safe_attribute` is overridden.
-
- >>> from jinja2.sandbox import is_internal_attribute
- >>> is_internal_attribute(str, "mro")
- True
- >>> is_internal_attribute(str, "upper")
- False
- """
- if isinstance(obj, types.FunctionType):
- if attr in UNSAFE_FUNCTION_ATTRIBUTES:
- return True
- elif isinstance(obj, types.MethodType):
- if attr in UNSAFE_FUNCTION_ATTRIBUTES or attr in UNSAFE_METHOD_ATTRIBUTES:
- return True
- elif isinstance(obj, type):
- if attr == "mro":
- return True
- elif isinstance(obj, (types.CodeType, types.TracebackType, types.FrameType)):
- return True
- elif isinstance(obj, types.GeneratorType):
- if attr in UNSAFE_GENERATOR_ATTRIBUTES:
- return True
- elif hasattr(types, "CoroutineType") and isinstance(obj, types.CoroutineType):
- if attr in UNSAFE_COROUTINE_ATTRIBUTES:
- return True
- elif hasattr(types, "AsyncGeneratorType") and isinstance(
- obj, types.AsyncGeneratorType
- ):
- if attr in UNSAFE_ASYNC_GENERATOR_ATTRIBUTES:
- return True
- return attr.startswith("__")
-
-
-def modifies_known_mutable(obj, attr):
- """This function checks if an attribute on a builtin mutable object
- (list, dict, set or deque) would modify it if called. It also supports
- the "user"-versions of the objects (`sets.Set`, `UserDict.*` etc.) and
- with Python 2.6 onwards the abstract base classes `MutableSet`,
- `MutableMapping`, and `MutableSequence`.
-
- >>> modifies_known_mutable({}, "clear")
- True
- >>> modifies_known_mutable({}, "keys")
- False
- >>> modifies_known_mutable([], "append")
- True
- >>> modifies_known_mutable([], "index")
- False
-
- If called with an unsupported object (such as unicode) `False` is
- returned.
-
- >>> modifies_known_mutable("foo", "upper")
- False
- """
- for typespec, unsafe in _mutable_spec:
- if isinstance(obj, typespec):
- return attr in unsafe
- return False
-
-
-class SandboxedEnvironment(Environment):
- """The sandboxed environment. It works like the regular environment but
- tells the compiler to generate sandboxed code. Additionally subclasses of
- this environment may override the methods that tell the runtime what
- attributes or functions are safe to access.
-
- If the template tries to access insecure code a :exc:`SecurityError` is
-    raised. However, other exceptions may also occur during rendering, so
-    the caller has to ensure that all exceptions are caught.
- """
-
- sandboxed = True
-
- #: default callback table for the binary operators. A copy of this is
- #: available on each instance of a sandboxed environment as
- #: :attr:`binop_table`
- default_binop_table = {
- "+": operator.add,
- "-": operator.sub,
- "*": operator.mul,
- "/": operator.truediv,
- "//": operator.floordiv,
- "**": operator.pow,
- "%": operator.mod,
- }
-
- #: default callback table for the unary operators. A copy of this is
- #: available on each instance of a sandboxed environment as
- #: :attr:`unop_table`
- default_unop_table = {"+": operator.pos, "-": operator.neg}
-
- #: a set of binary operators that should be intercepted. Each operator
- #: that is added to this set (empty by default) is delegated to the
- #: :meth:`call_binop` method that will perform the operator. The default
- #: operator callback is specified by :attr:`binop_table`.
- #:
- #: The following binary operators are interceptable:
- #: ``//``, ``%``, ``+``, ``*``, ``-``, ``/``, and ``**``
- #:
-    #: The default operation from the operator table corresponds to the
- #: builtin function. Intercepted calls are always slower than the native
- #: operator call, so make sure only to intercept the ones you are
- #: interested in.
- #:
- #: .. versionadded:: 2.6
- intercepted_binops = frozenset()
-
- #: a set of unary operators that should be intercepted. Each operator
- #: that is added to this set (empty by default) is delegated to the
- #: :meth:`call_unop` method that will perform the operator. The default
- #: operator callback is specified by :attr:`unop_table`.
- #:
- #: The following unary operators are interceptable: ``+``, ``-``
- #:
-    #: The default operation from the operator table corresponds to the
- #: builtin function. Intercepted calls are always slower than the native
- #: operator call, so make sure only to intercept the ones you are
- #: interested in.
- #:
- #: .. versionadded:: 2.6
- intercepted_unops = frozenset()
-
- def intercept_unop(self, operator):
- """Called during template compilation with the name of a unary
- operator to check if it should be intercepted at runtime. If this
- method returns `True`, :meth:`call_unop` is executed for this unary
- operator. The default implementation of :meth:`call_unop` will use
- the :attr:`unop_table` dictionary to perform the operator with the
- same logic as the builtin one.
-
- The following unary operators are interceptable: ``+`` and ``-``
-
- Intercepted calls are always slower than the native operator call,
- so make sure only to intercept the ones you are interested in.
-
- .. versionadded:: 2.6
- """
- return False
-
- def __init__(self, *args, **kwargs):
- Environment.__init__(self, *args, **kwargs)
- self.globals["range"] = safe_range
- self.binop_table = self.default_binop_table.copy()
- self.unop_table = self.default_unop_table.copy()
-
- def is_safe_attribute(self, obj, attr, value):
- """The sandboxed environment will call this method to check if the
- attribute of an object is safe to access. Per default all attributes
- starting with an underscore are considered private as well as the
- special attributes of internal python objects as returned by the
- :func:`is_internal_attribute` function.
- """
- return not (attr.startswith("_") or is_internal_attribute(obj, attr))
-
- def is_safe_callable(self, obj):
- """Check if an object is safely callable. Per default a function is
- considered safe unless the `unsafe_callable` attribute exists and is
- True. Override this method to alter the behavior, but this won't
- affect the `unsafe` decorator from this module.
- """
- return not (
- getattr(obj, "unsafe_callable", False) or getattr(obj, "alters_data", False)
- )
-
- def call_binop(self, context, operator, left, right):
- """For intercepted binary operator calls (:meth:`intercepted_binops`)
- this function is executed instead of the builtin operator. This can
- be used to fine tune the behavior of certain operators.
-
- .. versionadded:: 2.6
- """
- return self.binop_table[operator](left, right)
-
- def call_unop(self, context, operator, arg):
- """For intercepted unary operator calls (:meth:`intercepted_unops`)
- this function is executed instead of the builtin operator. This can
- be used to fine tune the behavior of certain operators.
-
- .. versionadded:: 2.6
- """
- return self.unop_table[operator](arg)
-
- def getitem(self, obj, argument):
- """Subscribe an object from sandboxed code."""
- try:
- return obj[argument]
- except (TypeError, LookupError):
- if isinstance(argument, string_types):
- try:
- attr = str(argument)
- except Exception:
- pass
- else:
- try:
- value = getattr(obj, attr)
- except AttributeError:
- pass
- else:
- if self.is_safe_attribute(obj, argument, value):
- return value
- return self.unsafe_undefined(obj, argument)
- return self.undefined(obj=obj, name=argument)
-
- def getattr(self, obj, attribute):
- """Subscribe an object from sandboxed code and prefer the
- attribute. The attribute passed *must* be a bytestring.
- """
- try:
- value = getattr(obj, attribute)
- except AttributeError:
- try:
- return obj[attribute]
- except (TypeError, LookupError):
- pass
- else:
- if self.is_safe_attribute(obj, attribute, value):
- return value
- return self.unsafe_undefined(obj, attribute)
- return self.undefined(obj=obj, name=attribute)
-
- def unsafe_undefined(self, obj, attribute):
- """Return an undefined object for unsafe attributes."""
- return self.undefined(
- "access to attribute %r of %r "
- "object is unsafe." % (attribute, obj.__class__.__name__),
- name=attribute,
- obj=obj,
- exc=SecurityError,
- )
-
- def format_string(self, s, args, kwargs, format_func=None):
- """If a format call is detected, then this is routed through this
- method so that our safety sandbox can be used for it.
- """
- if isinstance(s, Markup):
- formatter = SandboxedEscapeFormatter(self, s.escape)
- else:
- formatter = SandboxedFormatter(self)
-
- if format_func is not None and format_func.__name__ == "format_map":
- if len(args) != 1 or kwargs:
- raise TypeError(
- "format_map() takes exactly one argument %d given"
- % (len(args) + (kwargs is not None))
- )
-
- kwargs = args[0]
- args = None
-
- kwargs = _MagicFormatMapping(args, kwargs)
- rv = formatter.vformat(s, args, kwargs)
- return type(s)(rv)
-
- def call(__self, __context, __obj, *args, **kwargs): # noqa: B902
- """Call an object from sandboxed code."""
- fmt = inspect_format_method(__obj)
- if fmt is not None:
- return __self.format_string(fmt, args, kwargs, __obj)
-
- # the double prefixes are to avoid double keyword argument
- # errors when proxying the call.
- if not __self.is_safe_callable(__obj):
- raise SecurityError("%r is not safely callable" % (__obj,))
- return __context.call(__obj, *args, **kwargs)
-
-
-class ImmutableSandboxedEnvironment(SandboxedEnvironment):
- """Works exactly like the regular `SandboxedEnvironment` but does not
- permit modifications on the builtin mutable objects `list`, `set`, and
- `dict` by using the :func:`modifies_known_mutable` function.
- """
-
- def is_safe_attribute(self, obj, attr, value):
- if not SandboxedEnvironment.is_safe_attribute(self, obj, attr, value):
- return False
- return not modifies_known_mutable(obj, attr)
-
-
-# This really is not a public API apparently.
-try:
- from _string import formatter_field_name_split
-except ImportError:
-
- def formatter_field_name_split(field_name):
- return field_name._formatter_field_name_split()
-
-
-class SandboxedFormatterMixin(object):
- def __init__(self, env):
- self._env = env
-
- def get_field(self, field_name, args, kwargs):
- first, rest = formatter_field_name_split(field_name)
- obj = self.get_value(first, args, kwargs)
- for is_attr, i in rest:
- if is_attr:
- obj = self._env.getattr(obj, i)
- else:
- obj = self._env.getitem(obj, i)
- return obj, first
-
-
-class SandboxedFormatter(SandboxedFormatterMixin, Formatter):
- def __init__(self, env):
- SandboxedFormatterMixin.__init__(self, env)
- Formatter.__init__(self)
-
-
-class SandboxedEscapeFormatter(SandboxedFormatterMixin, EscapeFormatter):
- def __init__(self, env, escape):
- SandboxedFormatterMixin.__init__(self, env)
- EscapeFormatter.__init__(self, escape)
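
For context, a minimal sketch of how the deleted sandbox was typically
exercised (assuming a ``jinja2`` package with these classes is importable;
the lambda and the list are illustrative)::

    from jinja2.exceptions import SecurityError
    from jinja2.sandbox import ImmutableSandboxedEnvironment, SandboxedEnvironment

    env = SandboxedEnvironment()
    try:
        # access to function internals is intercepted by is_safe_attribute;
        # subscripting the resulting unsafe undefined raises SecurityError
        env.from_string("{{ f.__globals__['os'] }}").render(f=lambda: None)
    except SecurityError as exc:
        print("blocked:", exc)

    # the immutable variant additionally rejects mutating method calls
    # on builtin containers via modifies_known_mutable
    env = ImmutableSandboxedEnvironment()
    try:
        env.from_string("{{ items.append(4) }}").render(items=[1, 2, 3])
    except SecurityError as exc:
        print("blocked:", exc)
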
diff --git a/lib/spack/external/jinja2/tests.py b/lib/spack/external/jinja2/tests.py
deleted file mode 100644
index fabd4ce51b..0000000000
--- a/lib/spack/external/jinja2/tests.py
+++ /dev/null
@@ -1,215 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Built-in template tests used with the ``is`` operator."""
-import decimal
-import operator
-import re
-
-from ._compat import abc
-from ._compat import integer_types
-from ._compat import string_types
-from ._compat import text_type
-from .runtime import Undefined
-
-number_re = re.compile(r"^-?\d+(\.\d+)?$")
-regex_type = type(number_re)
-test_callable = callable
-
-
-def test_odd(value):
- """Return true if the variable is odd."""
- return value % 2 == 1
-
-
-def test_even(value):
- """Return true if the variable is even."""
- return value % 2 == 0
-
-
-def test_divisibleby(value, num):
- """Check if a variable is divisible by a number."""
- return value % num == 0
-
-
-def test_defined(value):
- """Return true if the variable is defined:
-
- .. sourcecode:: jinja
-
- {% if variable is defined %}
- value of variable: {{ variable }}
- {% else %}
- variable is not defined
- {% endif %}
-
- See the :func:`default` filter for a simple way to set undefined
- variables.
- """
- return not isinstance(value, Undefined)
-
-
-def test_undefined(value):
- """Like :func:`defined` but the other way round."""
- return isinstance(value, Undefined)
-
-
-def test_none(value):
- """Return true if the variable is none."""
- return value is None
-
-
-def test_boolean(value):
- """Return true if the object is a boolean value.
-
- .. versionadded:: 2.11
- """
- return value is True or value is False
-
-
-def test_false(value):
- """Return true if the object is False.
-
- .. versionadded:: 2.11
- """
- return value is False
-
-
-def test_true(value):
- """Return true if the object is True.
-
- .. versionadded:: 2.11
- """
- return value is True
-
-
-# NOTE: The existing 'number' test matches booleans and floats
-def test_integer(value):
- """Return true if the object is an integer.
-
- .. versionadded:: 2.11
- """
- return isinstance(value, integer_types) and value is not True and value is not False
-
-
-# NOTE: The existing 'number' test matches booleans and integers
-def test_float(value):
- """Return true if the object is a float.
-
- .. versionadded:: 2.11
- """
- return isinstance(value, float)
-
-
-def test_lower(value):
- """Return true if the variable is lowercased."""
- return text_type(value).islower()
-
-
-def test_upper(value):
- """Return true if the variable is uppercased."""
- return text_type(value).isupper()
-
-
-def test_string(value):
- """Return true if the object is a string."""
- return isinstance(value, string_types)
-
-
-def test_mapping(value):
- """Return true if the object is a mapping (dict etc.).
-
- .. versionadded:: 2.6
- """
- return isinstance(value, abc.Mapping)
-
-
-def test_number(value):
- """Return true if the variable is a number."""
- return isinstance(value, integer_types + (float, complex, decimal.Decimal))
-
-
-def test_sequence(value):
- """Return true if the variable is a sequence. Sequences are variables
-    that have a length and support item access.
- """
- try:
- len(value)
- value.__getitem__
- except Exception:
- return False
- return True
-
-
-def test_sameas(value, other):
- """Check if an object points to the same memory address than another
- object:
-
- .. sourcecode:: jinja
-
- {% if foo.attribute is sameas false %}
- the foo attribute really is the `False` singleton
- {% endif %}
- """
- return value is other
-
-
-def test_iterable(value):
- """Check if it's possible to iterate over an object."""
- try:
- iter(value)
- except TypeError:
- return False
- return True
-
-
-def test_escaped(value):
- """Check if the value is escaped."""
- return hasattr(value, "__html__")
-
-
-def test_in(value, seq):
- """Check if value is in seq.
-
- .. versionadded:: 2.10
- """
- return value in seq
-
-
-TESTS = {
- "odd": test_odd,
- "even": test_even,
- "divisibleby": test_divisibleby,
- "defined": test_defined,
- "undefined": test_undefined,
- "none": test_none,
- "boolean": test_boolean,
- "false": test_false,
- "true": test_true,
- "integer": test_integer,
- "float": test_float,
- "lower": test_lower,
- "upper": test_upper,
- "string": test_string,
- "mapping": test_mapping,
- "number": test_number,
- "sequence": test_sequence,
- "iterable": test_iterable,
- "callable": test_callable,
- "sameas": test_sameas,
- "escaped": test_escaped,
- "in": test_in,
- "==": operator.eq,
- "eq": operator.eq,
- "equalto": operator.eq,
- "!=": operator.ne,
- "ne": operator.ne,
- ">": operator.gt,
- "gt": operator.gt,
- "greaterthan": operator.gt,
- "ge": operator.ge,
- ">=": operator.ge,
- "<": operator.lt,
- "lt": operator.lt,
- "lessthan": operator.lt,
- "<=": operator.le,
- "le": operator.le,
-}
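
For context, these tests are invoked with the ``is`` operator, and the
``TESTS`` table above is what an ``Environment`` exposes as ``env.tests``.
A minimal sketch (the ``palindrome`` test is a hypothetical addition)::

    from jinja2 import Environment

    env = Environment()

    # built-in tests are used with the "is" operator
    tmpl = env.from_string("{{ 4 is divisibleby 2 }} / {{ x is defined }}")
    print(tmpl.render())  # -> "True / False"

    # a custom test is just a callable registered in env.tests
    env.tests["palindrome"] = lambda s: s == s[::-1]
    print(env.from_string("{{ 'level' is palindrome }}").render())  # -> "True"
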
diff --git a/lib/spack/external/jinja2/utils.py b/lib/spack/external/jinja2/utils.py
deleted file mode 100644
index 6afca81055..0000000000
--- a/lib/spack/external/jinja2/utils.py
+++ /dev/null
@@ -1,737 +0,0 @@
-# -*- coding: utf-8 -*-
-import json
-import os
-import re
-import warnings
-from collections import deque
-from random import choice
-from random import randrange
-from string import ascii_letters as _letters
-from string import digits as _digits
-from threading import Lock
-
-from markupsafe import escape
-from markupsafe import Markup
-
-from ._compat import abc
-from ._compat import string_types
-from ._compat import text_type
-from ._compat import url_quote
-
-# special singleton representing missing values for the runtime
-missing = type("MissingType", (), {"__repr__": lambda x: "missing"})()
-
-# internal code
-internal_code = set()
-
-concat = u"".join
-
-_slash_escape = "\\/" not in json.dumps("/")
-
-
-def contextfunction(f):
- """This decorator can be used to mark a function or method context callable.
- A context callable is passed the active :class:`Context` as first argument when
- called from the template. This is useful if a function wants to get access
- to the context or functions provided on the context object. For example
- a function that returns a sorted list of template variables the current
- template exports could look like this::
-
- @contextfunction
- def get_exported_names(context):
- return sorted(context.exported_vars)
- """
- f.contextfunction = True
- return f
-
-
-def evalcontextfunction(f):
- """This decorator can be used to mark a function or method as an eval
- context callable. This is similar to the :func:`contextfunction`
- but instead of passing the context, an evaluation context object is
- passed. For more information about the eval context, see
- :ref:`eval-context`.
-
- .. versionadded:: 2.4
- """
- f.evalcontextfunction = True
- return f
-
-
-def environmentfunction(f):
- """This decorator can be used to mark a function or method as environment
- callable. This decorator works exactly like the :func:`contextfunction`
- decorator just that the first argument is the active :class:`Environment`
- and not context.
- """
- f.environmentfunction = True
- return f
-
-
-def internalcode(f):
- """Marks the function as internally used"""
- internal_code.add(f.__code__)
- return f
-
-
-def is_undefined(obj):
- """Check if the object passed is undefined. This does nothing more than
-    perform an instance check against :class:`Undefined`, but it looks nicer.
- This can be used for custom filters or tests that want to react to
- undefined variables. For example a custom default filter can look like
- this::
-
- def default(var, default=''):
- if is_undefined(var):
- return default
- return var
- """
- from .runtime import Undefined
-
- return isinstance(obj, Undefined)
-
-
-def consume(iterable):
- """Consumes an iterable without doing anything with it."""
- for _ in iterable:
- pass
-
-
-def clear_caches():
- """Jinja keeps internal caches for environments and lexers. These are
- used so that Jinja doesn't have to recreate environments and lexers all
- the time. Normally you don't have to care about that but if you are
- measuring memory consumption you may want to clean the caches.
- """
- from .environment import _spontaneous_environments
- from .lexer import _lexer_cache
-
- _spontaneous_environments.clear()
- _lexer_cache.clear()
-
-
-def import_string(import_name, silent=False):
- """Imports an object based on a string. This is useful if you want to
- use import paths as endpoints or something similar. An import path can
- be specified either in dotted notation (``xml.sax.saxutils.escape``)
- or with a colon as object delimiter (``xml.sax.saxutils:escape``).
-
- If the `silent` is True the return value will be `None` if the import
- fails.
-
- :return: imported object
- """
- try:
- if ":" in import_name:
- module, obj = import_name.split(":", 1)
- elif "." in import_name:
- module, _, obj = import_name.rpartition(".")
- else:
- return __import__(import_name)
- return getattr(__import__(module, None, None, [obj]), obj)
- except (ImportError, AttributeError):
- if not silent:
- raise
-
-
-def open_if_exists(filename, mode="rb"):
- """Returns a file descriptor for the filename if that file exists,
- otherwise ``None``.
- """
- if not os.path.isfile(filename):
- return None
-
- return open(filename, mode)
-
-
-def object_type_repr(obj):
- """Returns the name of the object's type. For some recognized
- singletons the name of the object is returned instead. (For
- example for `None` and `Ellipsis`).
- """
- if obj is None:
- return "None"
- elif obj is Ellipsis:
- return "Ellipsis"
-
- cls = type(obj)
-
- # __builtin__ in 2.x, builtins in 3.x
- if cls.__module__ in ("__builtin__", "builtins"):
- name = cls.__name__
- else:
- name = cls.__module__ + "." + cls.__name__
-
- return "%s object" % name
-
-
-def pformat(obj, verbose=False):
- """Prettyprint an object. Either use the `pretty` library or the
- builtin `pprint`.
- """
- try:
- from pretty import pretty
-
- return pretty(obj, verbose=verbose)
- except ImportError:
- from pprint import pformat
-
- return pformat(obj)
-
-
-def urlize(text, trim_url_limit=None, rel=None, target=None):
- """Converts any URLs in text into clickable links. Works on http://,
- https:// and www. links. Links can have trailing punctuation (periods,
- commas, close-parens) and leading punctuation (opening parens) and
- it'll still do the right thing.
-
- If trim_url_limit is not None, the URLs in link text will be limited
- to trim_url_limit characters.
-
-    If rel is not None, the links will get a ``rel`` attribute with the
-    given value.
-
- If target is not None, a target attribute will be added to the link.
- """
- trim_url = (
- lambda x, limit=trim_url_limit: limit is not None
- and (x[:limit] + (len(x) >= limit and "..." or ""))
- or x
- )
- words = re.split(r"(\s+)", text_type(escape(text)))
- rel_attr = rel and ' rel="%s"' % text_type(escape(rel)) or ""
- target_attr = target and ' target="%s"' % escape(target) or ""
-
- for i, word in enumerate(words):
- head, middle, tail = "", word, ""
- match = re.match(r"^([(<]|&lt;)+", middle)
-
- if match:
- head = match.group()
- middle = middle[match.end() :]
-
- # Unlike lead, which is anchored to the start of the string,
- # need to check that the string ends with any of the characters
- # before trying to match all of them, to avoid backtracking.
- if middle.endswith((")", ">", ".", ",", "\n", "&gt;")):
- match = re.search(r"([)>.,\n]|&gt;)+$", middle)
-
- if match:
- tail = match.group()
- middle = middle[: match.start()]
-
- if middle.startswith("www.") or (
- "@" not in middle
- and not middle.startswith("http://")
- and not middle.startswith("https://")
- and len(middle) > 0
- and middle[0] in _letters + _digits
- and (
- middle.endswith(".org")
- or middle.endswith(".net")
- or middle.endswith(".com")
- )
- ):
- middle = '<a href="http://%s"%s%s>%s</a>' % (
- middle,
- rel_attr,
- target_attr,
- trim_url(middle),
- )
-
- if middle.startswith("http://") or middle.startswith("https://"):
- middle = '<a href="%s"%s%s>%s</a>' % (
- middle,
- rel_attr,
- target_attr,
- trim_url(middle),
- )
-
- if (
- "@" in middle
- and not middle.startswith("www.")
- and ":" not in middle
- and re.match(r"^\S+@\w[\w.-]*\.\w+$", middle)
- ):
- middle = '<a href="mailto:%s">%s</a>' % (middle, middle)
-
- words[i] = head + middle + tail
-
- return u"".join(words)
-
-
-def generate_lorem_ipsum(n=5, html=True, min=20, max=100):
- """Generate some lorem ipsum for the template."""
- from .constants import LOREM_IPSUM_WORDS
-
- words = LOREM_IPSUM_WORDS.split()
- result = []
-
- for _ in range(n):
- next_capitalized = True
- last_comma = last_fullstop = 0
- word = None
- last = None
- p = []
-
-        # each paragraph contains 20 to 100 words.
- for idx, _ in enumerate(range(randrange(min, max))):
- while True:
- word = choice(words)
- if word != last:
- last = word
- break
- if next_capitalized:
- word = word.capitalize()
- next_capitalized = False
- # add commas
- if idx - randrange(3, 8) > last_comma:
- last_comma = idx
- last_fullstop += 2
- word += ","
- # add end of sentences
- if idx - randrange(10, 20) > last_fullstop:
- last_comma = last_fullstop = idx
- word += "."
- next_capitalized = True
- p.append(word)
-
- # ensure that the paragraph ends with a dot.
- p = u" ".join(p)
- if p.endswith(","):
- p = p[:-1] + "."
- elif not p.endswith("."):
- p += "."
- result.append(p)
-
- if not html:
- return u"\n\n".join(result)
- return Markup(u"\n".join(u"<p>%s</p>" % escape(x) for x in result))
-
-
-def unicode_urlencode(obj, charset="utf-8", for_qs=False):
- """Quote a string for use in a URL using the given charset.
-
-    This function is misnamed; it is a wrapper around
- :func:`urllib.parse.quote`.
-
- :param obj: String or bytes to quote. Other types are converted to
- string then encoded to bytes using the given charset.
- :param charset: Encode text to bytes using this charset.
- :param for_qs: Quote "/" and use "+" for spaces.
- """
- if not isinstance(obj, string_types):
- obj = text_type(obj)
-
- if isinstance(obj, text_type):
- obj = obj.encode(charset)
-
- safe = b"" if for_qs else b"/"
- rv = url_quote(obj, safe)
-
- if not isinstance(rv, text_type):
- rv = rv.decode("utf-8")
-
- if for_qs:
- rv = rv.replace("%20", "+")
-
- return rv
-
-
-class LRUCache(object):
- """A simple LRU Cache implementation."""
-
- # this is fast for small capacities (something below 1000) but doesn't
- # scale. But as long as it's only used as storage for templates this
- # won't do any harm.
-
- def __init__(self, capacity):
- self.capacity = capacity
- self._mapping = {}
- self._queue = deque()
- self._postinit()
-
- def _postinit(self):
- # alias all queue methods for faster lookup
- self._popleft = self._queue.popleft
- self._pop = self._queue.pop
- self._remove = self._queue.remove
- self._wlock = Lock()
- self._append = self._queue.append
-
- def __getstate__(self):
- return {
- "capacity": self.capacity,
- "_mapping": self._mapping,
- "_queue": self._queue,
- }
-
- def __setstate__(self, d):
- self.__dict__.update(d)
- self._postinit()
-
- def __getnewargs__(self):
- return (self.capacity,)
-
- def copy(self):
- """Return a shallow copy of the instance."""
- rv = self.__class__(self.capacity)
- rv._mapping.update(self._mapping)
- rv._queue.extend(self._queue)
- return rv
-
- def get(self, key, default=None):
- """Return an item from the cache dict or `default`"""
- try:
- return self[key]
- except KeyError:
- return default
-
- def setdefault(self, key, default=None):
- """Set `default` if the key is not in the cache otherwise
- leave unchanged. Return the value of this key.
- """
- try:
- return self[key]
- except KeyError:
- self[key] = default
- return default
-
- def clear(self):
- """Clear the cache."""
- self._wlock.acquire()
- try:
- self._mapping.clear()
- self._queue.clear()
- finally:
- self._wlock.release()
-
- def __contains__(self, key):
- """Check if a key exists in this cache."""
- return key in self._mapping
-
- def __len__(self):
- """Return the current size of the cache."""
- return len(self._mapping)
-
- def __repr__(self):
- return "<%s %r>" % (self.__class__.__name__, self._mapping)
-
- def __getitem__(self, key):
- """Get an item from the cache. Moves the item up so that it has the
-        highest priority.
-
- Raise a `KeyError` if it does not exist.
- """
- self._wlock.acquire()
- try:
- rv = self._mapping[key]
- if self._queue[-1] != key:
- try:
- self._remove(key)
- except ValueError:
- # if something removed the key from the container
- # when we read, ignore the ValueError that we would
- # get otherwise.
- pass
- self._append(key)
- return rv
- finally:
- self._wlock.release()
-
- def __setitem__(self, key, value):
- """Sets the value for an item. Moves the item up so that it
-        has the highest priority.
- """
- self._wlock.acquire()
- try:
- if key in self._mapping:
- self._remove(key)
- elif len(self._mapping) == self.capacity:
- del self._mapping[self._popleft()]
- self._append(key)
- self._mapping[key] = value
- finally:
- self._wlock.release()
-
- def __delitem__(self, key):
- """Remove an item from the cache dict.
- Raise a `KeyError` if it does not exist.
- """
- self._wlock.acquire()
- try:
- del self._mapping[key]
- try:
- self._remove(key)
- except ValueError:
- pass
- finally:
- self._wlock.release()
-
- def items(self):
- """Return a list of items."""
- result = [(key, self._mapping[key]) for key in list(self._queue)]
- result.reverse()
- return result
-
- def iteritems(self):
- """Iterate over all items."""
- warnings.warn(
- "'iteritems()' will be removed in version 3.0. Use"
- " 'iter(cache.items())' instead.",
- DeprecationWarning,
- stacklevel=2,
- )
- return iter(self.items())
-
- def values(self):
- """Return a list of all values."""
- return [x[1] for x in self.items()]
-
- def itervalue(self):
- """Iterate over all values."""
- warnings.warn(
- "'itervalue()' will be removed in version 3.0. Use"
- " 'iter(cache.values())' instead.",
- DeprecationWarning,
- stacklevel=2,
- )
- return iter(self.values())
-
- def itervalues(self):
- """Iterate over all values."""
- warnings.warn(
- "'itervalues()' will be removed in version 3.0. Use"
- " 'iter(cache.values())' instead.",
- DeprecationWarning,
- stacklevel=2,
- )
- return iter(self.values())
-
- def keys(self):
- """Return a list of all keys ordered by most recent usage."""
- return list(self)
-
- def iterkeys(self):
- """Iterate over all keys in the cache dict, ordered by
- the most recent usage.
- """
- warnings.warn(
- "'iterkeys()' will be removed in version 3.0. Use"
- " 'iter(cache.keys())' instead.",
- DeprecationWarning,
- stacklevel=2,
- )
- return iter(self)
-
- def __iter__(self):
- return reversed(tuple(self._queue))
-
- def __reversed__(self):
- """Iterate over the keys in the cache dict, oldest items
- coming first.
- """
- return iter(tuple(self._queue))
-
- __copy__ = copy
-
-
-abc.MutableMapping.register(LRUCache)
-
-
-def select_autoescape(
- enabled_extensions=("html", "htm", "xml"),
- disabled_extensions=(),
- default_for_string=True,
- default=False,
-):
- """Intelligently sets the initial value of autoescaping based on the
- filename of the template. This is the recommended way to configure
- autoescaping if you do not want to write a custom function yourself.
-
- If you want to enable it for all templates created from strings or
- for all templates with `.html` and `.xml` extensions::
-
- from jinja2 import Environment, select_autoescape
- env = Environment(autoescape=select_autoescape(
- enabled_extensions=('html', 'xml'),
- default_for_string=True,
- ))
-
- Example configuration to turn it on at all times except if the template
- ends with `.txt`::
-
- from jinja2 import Environment, select_autoescape
- env = Environment(autoescape=select_autoescape(
- disabled_extensions=('txt',),
- default_for_string=True,
- default=True,
- ))
-
- The `enabled_extensions` is an iterable of all the extensions that
- autoescaping should be enabled for. Likewise `disabled_extensions` is
- a list of all templates it should be disabled for. If a template is
- loaded from a string then the default from `default_for_string` is used.
- If nothing matches then the initial value of autoescaping is set to the
- value of `default`.
-
-    For security reasons this function operates case-insensitively.
-
- .. versionadded:: 2.9
- """
- enabled_patterns = tuple("." + x.lstrip(".").lower() for x in enabled_extensions)
- disabled_patterns = tuple("." + x.lstrip(".").lower() for x in disabled_extensions)
-
- def autoescape(template_name):
- if template_name is None:
- return default_for_string
- template_name = template_name.lower()
- if template_name.endswith(enabled_patterns):
- return True
- if template_name.endswith(disabled_patterns):
- return False
- return default
-
- return autoescape
-
-
-def htmlsafe_json_dumps(obj, dumper=None, **kwargs):
- """Works exactly like :func:`dumps` but is safe for use in ``<script>``
- tags. It accepts the same arguments and returns a JSON string. Note that
- this is available in templates through the ``|tojson`` filter which will
- also mark the result as safe. Due to how this function escapes certain
- characters this is safe even if used outside of ``<script>`` tags.
-
- The following characters are escaped in strings:
-
- - ``<``
- - ``>``
- - ``&``
- - ``'``
-
- This makes it safe to embed such strings in any place in HTML with the
-    notable exception of double quoted attributes. In that case, single
-    quote your attributes or HTML escape them in addition.
- """
- if dumper is None:
- dumper = json.dumps
- rv = (
- dumper(obj, **kwargs)
- .replace(u"<", u"\\u003c")
- .replace(u">", u"\\u003e")
- .replace(u"&", u"\\u0026")
- .replace(u"'", u"\\u0027")
- )
- return Markup(rv)
-
-
-class Cycler(object):
- """Cycle through values by yield them one at a time, then restarting
- once the end is reached. Available as ``cycler`` in templates.
-
- Similar to ``loop.cycle``, but can be used outside loops or across
- multiple loops. For example, render a list of folders and files in a
-    list, alternately giving them "odd" and "even" classes.
-
- .. code-block:: html+jinja
-
- {% set row_class = cycler("odd", "even") %}
- <ul class="browser">
- {% for folder in folders %}
- <li class="folder {{ row_class.next() }}">{{ folder }}
- {% endfor %}
- {% for file in files %}
- <li class="file {{ row_class.next() }}">{{ file }}
- {% endfor %}
- </ul>
-
- :param items: Each positional argument will be yielded in the order
- given for each cycle.
-
- .. versionadded:: 2.1
- """
-
- def __init__(self, *items):
- if not items:
- raise RuntimeError("at least one item has to be provided")
- self.items = items
- self.pos = 0
-
- def reset(self):
- """Resets the current item to the first item."""
- self.pos = 0
-
- @property
- def current(self):
- """Return the current item. Equivalent to the item that will be
- returned next time :meth:`next` is called.
- """
- return self.items[self.pos]
-
- def next(self):
- """Return the current item, then advance :attr:`current` to the
- next item.
- """
- rv = self.current
- self.pos = (self.pos + 1) % len(self.items)
- return rv
-
- __next__ = next
-
-
-class Joiner(object):
- """A joining helper for templates."""
-
- def __init__(self, sep=u", "):
- self.sep = sep
- self.used = False
-
- def __call__(self):
- if not self.used:
- self.used = True
- return u""
- return self.sep
-
-
-class Namespace(object):
- """A namespace object that can hold arbitrary attributes. It may be
- initialized from a dictionary or with keyword arguments."""
-
- def __init__(*args, **kwargs): # noqa: B902
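-        # "self" is unpacked from *args by hand so that a template may also
-        # pass a keyword argument literally named "self"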
- self, args = args[0], args[1:]
- self.__attrs = dict(*args, **kwargs)
-
- def __getattribute__(self, name):
- # __class__ is needed for the awaitable check in async mode
- if name in {"_Namespace__attrs", "__class__"}:
- return object.__getattribute__(self, name)
- try:
- return self.__attrs[name]
- except KeyError:
- raise AttributeError(name)
-
- def __setitem__(self, name, value):
- self.__attrs[name] = value
-
- def __repr__(self):
- return "<Namespace %r>" % self.__attrs
-
-
-# does this python version support async for in and async generators?
-try:
- exec("async def _():\n async for _ in ():\n yield _")
- have_async_gen = True
-except SyntaxError:
- have_async_gen = False
-
-
-def soft_unicode(s):
- from markupsafe import soft_unicode
-
- warnings.warn(
- "'jinja2.utils.soft_unicode' will be removed in version 3.0."
- " Use 'markupsafe.soft_unicode' instead.",
- DeprecationWarning,
- stacklevel=2,
- )
- return soft_unicode(s)
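
For context, the ``LRUCache`` deleted above backs Jinja's template cache.
A minimal sketch of its eviction behavior (keys and values are
illustrative)::

    from jinja2.utils import LRUCache

    cache = LRUCache(2)     # capacity of two entries
    cache["a"] = 1
    cache["b"] = 2
    _ = cache["a"]          # touch "a" so it becomes most recently used
    cache["c"] = 3          # evicts "b", the least recently used key
    print(cache.keys())     # -> ["c", "a"], most recent first
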
diff --git a/lib/spack/external/jinja2/visitor.py b/lib/spack/external/jinja2/visitor.py
deleted file mode 100644
index d1365bf10e..0000000000
--- a/lib/spack/external/jinja2/visitor.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# -*- coding: utf-8 -*-
-"""API for traversing the AST nodes. Implemented by the compiler and
-meta introspection.
-"""
-from .nodes import Node
-
-
-class NodeVisitor(object):
- """Walks the abstract syntax tree and call visitor functions for every
- node found. The visitor functions may return values which will be
- forwarded by the `visit` method.
-
- Per default the visitor functions for the nodes are ``'visit_'`` +
- class name of the node. So a `TryFinally` node visit function would
- be `visit_TryFinally`. This behavior can be changed by overriding
- the `get_visitor` function. If no visitor function exists for a node
- (return value `None`) the `generic_visit` visitor is used instead.
- """
-
- def get_visitor(self, node):
- """Return the visitor function for this node or `None` if no visitor
- exists for this node. In that case the generic visit function is
- used instead.
- """
- method = "visit_" + node.__class__.__name__
- return getattr(self, method, None)
-
- def visit(self, node, *args, **kwargs):
- """Visit a node."""
- f = self.get_visitor(node)
- if f is not None:
- return f(node, *args, **kwargs)
- return self.generic_visit(node, *args, **kwargs)
-
- def generic_visit(self, node, *args, **kwargs):
- """Called if no explicit visitor function exists for a node."""
- for node in node.iter_child_nodes():
- self.visit(node, *args, **kwargs)
-
-
-class NodeTransformer(NodeVisitor):
- """Walks the abstract syntax tree and allows modifications of nodes.
-
- The `NodeTransformer` will walk the AST and use the return value of the
- visitor functions to replace or remove the old node. If the return
- value of the visitor function is `None` the node will be removed
-    from its previous location, otherwise it's replaced with the return
-    value. The return value may be the original node, in which case no
- replacement takes place.
- """
-
- def generic_visit(self, node, *args, **kwargs):
- for field, old_value in node.iter_fields():
- if isinstance(old_value, list):
- new_values = []
- for value in old_value:
- if isinstance(value, Node):
- value = self.visit(value, *args, **kwargs)
- if value is None:
- continue
- elif not isinstance(value, Node):
- new_values.extend(value)
- continue
- new_values.append(value)
- old_value[:] = new_values
- elif isinstance(old_value, Node):
- new_node = self.visit(old_value, *args, **kwargs)
- if new_node is None:
- delattr(node, field)
- else:
- setattr(node, field, new_node)
- return node
-
- def visit_list(self, node, *args, **kwargs):
- """As transformers may return lists in some places this method
- can be used to enforce a list as return value.
- """
- rv = self.visit(node, *args, **kwargs)
- if not isinstance(rv, list):
- rv = [rv]
- return rv
diff --git a/lib/spack/external/jsonschema/COPYING b/lib/spack/external/jsonschema/COPYING
deleted file mode 100644
index af9cfbdb13..0000000000
--- a/lib/spack/external/jsonschema/COPYING
+++ /dev/null
@@ -1,19 +0,0 @@
-Copyright (c) 2013 Julian Berman
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/lib/spack/external/jsonschema/__init__.py b/lib/spack/external/jsonschema/__init__.py
deleted file mode 100644
index 6dfdb9419a..0000000000
--- a/lib/spack/external/jsonschema/__init__.py
+++ /dev/null
@@ -1,37 +0,0 @@
-"""
-An implementation of JSON Schema for Python
-
-The main functionality is provided by the validator classes for each of the
-supported JSON Schema versions.
-
-Most commonly, `validate` is the quickest way to simply validate a given
-instance under a schema, and will create a validator for you.
-"""
-
-from jsonschema.exceptions import (
- ErrorTree, FormatError, RefResolutionError, SchemaError, ValidationError
-)
-from jsonschema._format import (
- FormatChecker,
- draft3_format_checker,
- draft4_format_checker,
- draft6_format_checker,
- draft7_format_checker,
-)
-from jsonschema._types import TypeChecker
-from jsonschema.validators import (
- Draft3Validator,
- Draft4Validator,
- Draft6Validator,
- Draft7Validator,
- RefResolver,
- validate,
-)
-# try:
-# from importlib import metadata
-# except ImportError: # for Python<3.8
-# import importlib_metadata as metadata
-# __version__ = metadata.version("jsonschema")
-# set the version manually here, as we don't install dist-info or egg-info
-# files for vendored spack externals.
-__version__ = '3.2.0'
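
For context, a minimal sketch of the ``validate`` entry point mentioned in
the docstring above (the schema is illustrative)::

    from jsonschema import ValidationError, validate

    schema = {
        "type": "object",
        "properties": {"version": {"type": "string"}},
        "required": ["version"],
    }

    validate(instance={"version": "3.2.0"}, schema=schema)  # passes silently

    try:
        validate(instance={}, schema=schema)
    except ValidationError as exc:
        print(exc.message)  # -> "'version' is a required property"
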
diff --git a/lib/spack/external/jsonschema/__main__.py b/lib/spack/external/jsonschema/__main__.py
deleted file mode 100644
index 82c29fd39e..0000000000
--- a/lib/spack/external/jsonschema/__main__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from jsonschema.cli import main
-main()
diff --git a/lib/spack/external/jsonschema/_format.py b/lib/spack/external/jsonschema/_format.py
deleted file mode 100644
index 281a7cfcff..0000000000
--- a/lib/spack/external/jsonschema/_format.py
+++ /dev/null
@@ -1,425 +0,0 @@
-import datetime
-import re
-import socket
-import struct
-
-from jsonschema.compat import str_types
-from jsonschema.exceptions import FormatError
-
-
-class FormatChecker(object):
- """
- A ``format`` property checker.
-
- JSON Schema does not mandate that the ``format`` property actually do any
- validation. If validation is desired however, instances of this class can
- be hooked into validators to enable format validation.
-
- `FormatChecker` objects always return ``True`` when asked about
- formats that they do not know how to validate.
-
- To check a custom format using a function that takes an instance and
- returns a ``bool``, use the `FormatChecker.checks` or
- `FormatChecker.cls_checks` decorators.
-
- Arguments:
-
- formats (~collections.Iterable):
-
- The known formats to validate. This argument can be used to
- limit which formats will be used during validation.
- """
-
- checkers = {}
-
- def __init__(self, formats=None):
- if formats is None:
- self.checkers = self.checkers.copy()
- else:
- self.checkers = dict((k, self.checkers[k]) for k in formats)
-
- def __repr__(self):
- return "<FormatChecker checkers={}>".format(sorted(self.checkers))
-
- def checks(self, format, raises=()):
- """
- Register a decorated function as validating a new format.
-
- Arguments:
-
- format (str):
-
- The format that the decorated function will check.
-
- raises (Exception):
-
- The exception(s) raised by the decorated function when an
- invalid instance is found.
-
- The exception object will be accessible as the
- `jsonschema.exceptions.ValidationError.cause` attribute of the
- resulting validation error.
- """
-
- def _checks(func):
- self.checkers[format] = (func, raises)
- return func
- return _checks
-
- cls_checks = classmethod(checks)
-
- def check(self, instance, format):
- """
- Check whether the instance conforms to the given format.
-
- Arguments:
-
- instance (*any primitive type*, i.e. str, number, bool):
-
- The instance to check
-
- format (str):
-
- The format that instance should conform to
-
-
- Raises:
-
- FormatError: if the instance does not conform to ``format``
- """
-
- if format not in self.checkers:
- return
-
- func, raises = self.checkers[format]
- result, cause = None, None
- try:
- result = func(instance)
- except raises as e:
- cause = e
- if not result:
- raise FormatError(
- "%r is not a %r" % (instance, format), cause=cause,
- )
-
- def conforms(self, instance, format):
- """
- Check whether the instance conforms to the given format.
-
- Arguments:
-
- instance (*any primitive type*, i.e. str, number, bool):
-
- The instance to check
-
- format (str):
-
- The format that instance should conform to
-
- Returns:
-
- bool: whether it conformed
- """
-
- try:
- self.check(instance, format)
- except FormatError:
- return False
- else:
- return True
-
-
-draft3_format_checker = FormatChecker()
-draft4_format_checker = FormatChecker()
-draft6_format_checker = FormatChecker()
-draft7_format_checker = FormatChecker()
-
-
-_draft_checkers = dict(
- draft3=draft3_format_checker,
- draft4=draft4_format_checker,
- draft6=draft6_format_checker,
- draft7=draft7_format_checker,
-)
-
-
-def _checks_drafts(
- name=None,
- draft3=None,
- draft4=None,
- draft6=None,
- draft7=None,
- raises=(),
-):
- draft3 = draft3 or name
- draft4 = draft4 or name
- draft6 = draft6 or name
- draft7 = draft7 or name
-
- def wrap(func):
- if draft3:
- func = _draft_checkers["draft3"].checks(draft3, raises)(func)
- if draft4:
- func = _draft_checkers["draft4"].checks(draft4, raises)(func)
- if draft6:
- func = _draft_checkers["draft6"].checks(draft6, raises)(func)
- if draft7:
- func = _draft_checkers["draft7"].checks(draft7, raises)(func)
-
- # Oy. This is bad global state, but relied upon for now, until
- # deprecation. See https://github.com/Julian/jsonschema/issues/519
- # and test_format_checkers_come_with_defaults
- FormatChecker.cls_checks(draft7 or draft6 or draft4 or draft3, raises)(
- func,
- )
- return func
- return wrap
-
-
-@_checks_drafts(name="idn-email")
-@_checks_drafts(name="email")
-def is_email(instance):
- if not isinstance(instance, str_types):
- return True
- return "@" in instance
-
-
-_ipv4_re = re.compile(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$")
-
-
-@_checks_drafts(
- draft3="ip-address", draft4="ipv4", draft6="ipv4", draft7="ipv4",
-)
-def is_ipv4(instance):
- if not isinstance(instance, str_types):
- return True
- if not _ipv4_re.match(instance):
- return False
- return all(0 <= int(component) <= 255 for component in instance.split("."))
-
-
-if hasattr(socket, "inet_pton"):
- # FIXME: Really this only should raise struct.error, but see the sadness
- # that is https://twistedmatrix.com/trac/ticket/9409
- @_checks_drafts(
- name="ipv6", raises=(socket.error, struct.error, ValueError),
- )
- def is_ipv6(instance):
- if not isinstance(instance, str_types):
- return True
- return socket.inet_pton(socket.AF_INET6, instance)
-
-
-_host_name_re = re.compile(r"^[A-Za-z0-9][A-Za-z0-9\.\-]{1,255}$")
-
-
-@_checks_drafts(
- draft3="host-name",
- draft4="hostname",
- draft6="hostname",
- draft7="hostname",
-)
-def is_host_name(instance):
- if not isinstance(instance, str_types):
- return True
- if not _host_name_re.match(instance):
- return False
- components = instance.split(".")
- for component in components:
- if len(component) > 63:
- return False
- return True
-
-
-try:
-    # The built-in `idna` codec only implements RFC 3490, so we go elsewhere.
- import idna
-except ImportError:
- pass
-else:
- @_checks_drafts(draft7="idn-hostname", raises=idna.IDNAError)
- def is_idn_host_name(instance):
- if not isinstance(instance, str_types):
- return True
- idna.encode(instance)
- return True
-
-
-try:
- import rfc3987
-except ImportError:
- try:
- from rfc3986_validator import validate_rfc3986
- except ImportError:
- pass
- else:
- @_checks_drafts(name="uri")
- def is_uri(instance):
- if not isinstance(instance, str_types):
- return True
- return validate_rfc3986(instance, rule="URI")
-
- @_checks_drafts(
- draft6="uri-reference",
- draft7="uri-reference",
- raises=ValueError,
- )
- def is_uri_reference(instance):
- if not isinstance(instance, str_types):
- return True
- return validate_rfc3986(instance, rule="URI_reference")
-
-else:
- @_checks_drafts(draft7="iri", raises=ValueError)
- def is_iri(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="IRI")
-
- @_checks_drafts(draft7="iri-reference", raises=ValueError)
- def is_iri_reference(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="IRI_reference")
-
- @_checks_drafts(name="uri", raises=ValueError)
- def is_uri(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="URI")
-
- @_checks_drafts(
- draft6="uri-reference",
- draft7="uri-reference",
- raises=ValueError,
- )
- def is_uri_reference(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="URI_reference")
-
-
-try:
- from strict_rfc3339 import validate_rfc3339
-except ImportError:
- try:
- from rfc3339_validator import validate_rfc3339
- except ImportError:
- validate_rfc3339 = None
-
-if validate_rfc3339:
- @_checks_drafts(name="date-time")
- def is_datetime(instance):
- if not isinstance(instance, str_types):
- return True
- return validate_rfc3339(instance)
-
- @_checks_drafts(draft7="time")
- def is_time(instance):
- if not isinstance(instance, str_types):
- return True
- return is_datetime("1970-01-01T" + instance)
-
-
-@_checks_drafts(name="regex", raises=re.error)
-def is_regex(instance):
- if not isinstance(instance, str_types):
- return True
- return re.compile(instance)
-
-
-@_checks_drafts(draft3="date", draft7="date", raises=ValueError)
-def is_date(instance):
- if not isinstance(instance, str_types):
- return True
- return datetime.datetime.strptime(instance, "%Y-%m-%d")
-
-
-@_checks_drafts(draft3="time", raises=ValueError)
-def is_draft3_time(instance):
- if not isinstance(instance, str_types):
- return True
- return datetime.datetime.strptime(instance, "%H:%M:%S")
-
-
-try:
- import webcolors
-except ImportError:
- pass
-else:
- def is_css_color_code(instance):
- return webcolors.normalize_hex(instance)
-
- @_checks_drafts(draft3="color", raises=(ValueError, TypeError))
- def is_css21_color(instance):
- if (
- not isinstance(instance, str_types) or
- instance.lower() in webcolors.css21_names_to_hex
- ):
- return True
- return is_css_color_code(instance)
-
- def is_css3_color(instance):
- if instance.lower() in webcolors.css3_names_to_hex:
- return True
- return is_css_color_code(instance)
-
-
-try:
- import jsonpointer
-except ImportError:
- pass
-else:
- @_checks_drafts(
- draft6="json-pointer",
- draft7="json-pointer",
- raises=jsonpointer.JsonPointerException,
- )
- def is_json_pointer(instance):
- if not isinstance(instance, str_types):
- return True
- return jsonpointer.JsonPointer(instance)
-
- # TODO: I don't want to maintain this, so it
- # needs to go either into jsonpointer (pending
- # https://github.com/stefankoegl/python-json-pointer/issues/34) or
- # into a new external library.
- @_checks_drafts(
- draft7="relative-json-pointer",
- raises=jsonpointer.JsonPointerException,
- )
- def is_relative_json_pointer(instance):
- # Definition taken from:
- # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
- if not isinstance(instance, str_types):
- return True
- non_negative_integer, rest = [], ""
- for i, character in enumerate(instance):
- if character.isdigit():
- non_negative_integer.append(character)
- continue
-
- if not non_negative_integer:
- return False
-
- rest = instance[i:]
- break
- return (rest == "#") or jsonpointer.JsonPointer(rest)
-
-
-try:
- import uritemplate.exceptions
-except ImportError:
- pass
-else:
- @_checks_drafts(
- draft6="uri-template",
- draft7="uri-template",
- raises=uritemplate.exceptions.InvalidTemplate,
- )
- def is_uri_template(
- instance,
- template_validator=uritemplate.Validator().force_balanced_braces(),
- ):
- template = uritemplate.URITemplate(instance)
- return template_validator.validate(template)
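A minimal sketch of how these per-draft format checkers were consumed, assuming the standalone `jsonschema` distribution that this vendored copy tracked (where `draft7_format_checker` is the public registry):

import jsonschema

schema = {"type": "string", "format": "ipv4"}
validator = jsonschema.Draft7Validator(
    schema, format_checker=jsonschema.draft7_format_checker,
)
print(validator.is_valid("127.0.0.1"))  # True
print(validator.is_valid("999.0.0.1"))  # False: every octet must be <= 255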
diff --git a/lib/spack/external/jsonschema/_legacy_validators.py b/lib/spack/external/jsonschema/_legacy_validators.py
deleted file mode 100644
index 264ff7d713..0000000000
--- a/lib/spack/external/jsonschema/_legacy_validators.py
+++ /dev/null
@@ -1,141 +0,0 @@
-from jsonschema import _utils
-from jsonschema.compat import iteritems
-from jsonschema.exceptions import ValidationError
-
-
-def dependencies_draft3(validator, dependencies, instance, schema):
- if not validator.is_type(instance, "object"):
- return
-
- for property, dependency in iteritems(dependencies):
- if property not in instance:
- continue
-
- if validator.is_type(dependency, "object"):
- for error in validator.descend(
- instance, dependency, schema_path=property,
- ):
- yield error
- elif validator.is_type(dependency, "string"):
- if dependency not in instance:
- yield ValidationError(
- "%r is a dependency of %r" % (dependency, property)
- )
- else:
- for each in dependency:
- if each not in instance:
- message = "%r is a dependency of %r"
- yield ValidationError(message % (each, property))
-
-
-def disallow_draft3(validator, disallow, instance, schema):
- for disallowed in _utils.ensure_list(disallow):
- if validator.is_valid(instance, {"type": [disallowed]}):
- yield ValidationError(
- "%r is disallowed for %r" % (disallowed, instance)
- )
-
-
-def extends_draft3(validator, extends, instance, schema):
- if validator.is_type(extends, "object"):
- for error in validator.descend(instance, extends):
- yield error
- return
- for index, subschema in enumerate(extends):
- for error in validator.descend(instance, subschema, schema_path=index):
- yield error
-
-
-def items_draft3_draft4(validator, items, instance, schema):
- if not validator.is_type(instance, "array"):
- return
-
- if validator.is_type(items, "object"):
- for index, item in enumerate(instance):
- for error in validator.descend(item, items, path=index):
- yield error
- else:
- for (index, item), subschema in zip(enumerate(instance), items):
- for error in validator.descend(
- item, subschema, path=index, schema_path=index,
- ):
- yield error
-
-
-def minimum_draft3_draft4(validator, minimum, instance, schema):
- if not validator.is_type(instance, "number"):
- return
-
- if schema.get("exclusiveMinimum", False):
- failed = instance <= minimum
- cmp = "less than or equal to"
- else:
- failed = instance < minimum
- cmp = "less than"
-
- if failed:
- yield ValidationError(
- "%r is %s the minimum of %r" % (instance, cmp, minimum)
- )
-
-
-def maximum_draft3_draft4(validator, maximum, instance, schema):
- if not validator.is_type(instance, "number"):
- return
-
- if schema.get("exclusiveMaximum", False):
- failed = instance >= maximum
- cmp = "greater than or equal to"
- else:
- failed = instance > maximum
- cmp = "greater than"
-
- if failed:
- yield ValidationError(
- "%r is %s the maximum of %r" % (instance, cmp, maximum)
- )
-
-
-def properties_draft3(validator, properties, instance, schema):
- if not validator.is_type(instance, "object"):
- return
-
- for property, subschema in iteritems(properties):
- if property in instance:
- for error in validator.descend(
- instance[property],
- subschema,
- path=property,
- schema_path=property,
- ):
- yield error
- elif subschema.get("required", False):
- error = ValidationError("%r is a required property" % property)
- error._set(
- validator="required",
- validator_value=subschema["required"],
- instance=instance,
- schema=schema,
- )
- error.path.appendleft(property)
- error.schema_path.extend([property, "required"])
- yield error
-
-
-def type_draft3(validator, types, instance, schema):
- types = _utils.ensure_list(types)
-
- all_errors = []
- for index, type in enumerate(types):
- if validator.is_type(type, "object"):
- errors = list(validator.descend(instance, type, schema_path=index))
- if not errors:
- return
- all_errors.extend(errors)
- else:
- if validator.is_type(instance, type):
- return
- else:
- yield ValidationError(
- _utils.types_msg(instance, types), context=all_errors,
- )
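The functions above implement legacy (draft 3/4) keyword semantics. A sketch of the string form of `dependencies` they handle, assuming the standalone `jsonschema` package:

import jsonschema

schema = {"dependencies": {"quux": "foo"}}
validator = jsonschema.Draft3Validator(schema)
validator.validate({"quux": 1, "foo": 2})  # passes: "foo" is present
# validator.validate({"quux": 1}) would raise:
#   ValidationError: 'foo' is a dependency of 'quux'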
diff --git a/lib/spack/external/jsonschema/_reflect.py b/lib/spack/external/jsonschema/_reflect.py
deleted file mode 100644
index d09e38fbdc..0000000000
--- a/lib/spack/external/jsonschema/_reflect.py
+++ /dev/null
@@ -1,155 +0,0 @@
-# -*- test-case-name: twisted.test.test_reflect -*-
-# Copyright (c) Twisted Matrix Laboratories.
-# See LICENSE for details.
-
-"""
-Standardized versions of various cool and/or strange things that you can do
-with Python's reflection capabilities.
-"""
-
-import sys
-
-from jsonschema.compat import PY3
-
-
-class _NoModuleFound(Exception):
- """
- No module was found because none exists.
- """
-
-
-
-class InvalidName(ValueError):
- """
- The given name is not a dot-separated list of Python objects.
- """
-
-
-
-class ModuleNotFound(InvalidName):
- """
- The module associated with the given name doesn't exist and it can't be
- imported.
- """
-
-
-
-class ObjectNotFound(InvalidName):
- """
- The object associated with the given name doesn't exist and it can't be
- imported.
- """
-
-
-
-if PY3:
- def reraise(exception, traceback):
- raise exception.with_traceback(traceback)
-else:
- exec("""def reraise(exception, traceback):
- raise exception.__class__, exception, traceback""")
-
-reraise.__doc__ = """
-Re-raise an exception, with an optional traceback, in a way that is compatible
-with both Python 2 and Python 3.
-
-Note that on Python 3, re-raised exceptions will be mutated, with their
-C{__traceback__} attribute being set.
-
-@param exception: The exception instance.
-@param traceback: The traceback to use, or C{None} indicating a new traceback.
-"""
-
-
-def _importAndCheckStack(importName):
- """
- Import the given name as a module, then walk the stack to determine whether
- the failure was the module not existing, or some code in the module (for
- example a dependent import) failing. This can be helpful to determine
- whether any actual application code was run. For example, to distinguish
- an administrative error (entering the wrong module name) from a
- programmer error (writing buggy code in a module that fails to import).
-
- @param importName: The name of the module to import.
- @type importName: C{str}
- @raise Exception: if something bad happens. This can be any type of
- exception, since nobody knows what loading some arbitrary code might
- do.
- @raise _NoModuleFound: if no module was found.
- """
- try:
- return __import__(importName)
- except ImportError:
- excType, excValue, excTraceback = sys.exc_info()
- while excTraceback:
- execName = excTraceback.tb_frame.f_globals["__name__"]
- # in Python 2 execName is None when an ImportError is encountered,
- # whereas in Python 3 execName is equal to the importName.
- if execName is None or execName == importName:
- reraise(excValue, excTraceback)
- excTraceback = excTraceback.tb_next
- raise _NoModuleFound()
-
-
-
-def namedAny(name):
- """
- Retrieve a Python object by its fully qualified name from the global Python
- module namespace. The first part of the name, that describes a module,
- will be discovered and imported. Each subsequent part of the name is
- treated as the name of an attribute of the object specified by all of the
- name which came before it. For example, the fully-qualified name of this
- object is 'twisted.python.reflect.namedAny'.
-
- @type name: L{str}
- @param name: The name of the object to return.
-
- @raise InvalidName: If the name is an empty string, starts or ends with
- a '.', or is otherwise syntactically incorrect.
-
- @raise ModuleNotFound: If the name is syntactically correct but the
- module it specifies cannot be imported because it does not appear to
- exist.
-
- @raise ObjectNotFound: If the name is syntactically correct, includes at
- least one '.', but the module it specifies cannot be imported because
- it does not appear to exist.
-
- @raise AttributeError: If an attribute of an object along the way cannot be
- accessed, or a module along the way is not found.
-
- @return: the Python object identified by 'name'.
- """
- if not name:
- raise InvalidName('Empty module name')
-
- names = name.split('.')
-
- # if the name starts or ends with a '.' or contains '..', the __import__
- # will raise an 'Empty module name' error. This will provide a better error
- # message.
- if '' in names:
- raise InvalidName(
- "name must be a string giving a '.'-separated list of Python "
- "identifiers, not %r" % (name,))
-
- topLevelPackage = None
- moduleNames = names[:]
- while not topLevelPackage:
- if moduleNames:
- trialname = '.'.join(moduleNames)
- try:
- topLevelPackage = _importAndCheckStack(trialname)
- except _NoModuleFound:
- moduleNames.pop()
- else:
- if len(names) == 1:
- raise ModuleNotFound("No module named %r" % (name,))
- else:
- raise ObjectNotFound('%r does not name an object' % (name,))
-
- obj = topLevelPackage
- for n in names[1:]:
- obj = getattr(obj, n)
-
- return obj
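`namedAny` backed the CLI's `--validator` flag (see `cli.py` below). A sketch of its behavior against the standard library, assuming a pre-4.0 `jsonschema` that still ships `_reflect`:

from jsonschema._reflect import namedAny

dumps = namedAny("json.dumps")  # imports `json`, then walks its attributes
assert dumps({"a": 1}) == '{"a": 1}'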
diff --git a/lib/spack/external/jsonschema/_types.py b/lib/spack/external/jsonschema/_types.py
deleted file mode 100644
index a71a4e34bd..0000000000
--- a/lib/spack/external/jsonschema/_types.py
+++ /dev/null
@@ -1,188 +0,0 @@
-import numbers
-
-from pyrsistent import pmap
-import attr
-
-from jsonschema.compat import int_types, str_types
-from jsonschema.exceptions import UndefinedTypeCheck
-
-
-def is_array(checker, instance):
- return isinstance(instance, list)
-
-
-def is_bool(checker, instance):
- return isinstance(instance, bool)
-
-
-def is_integer(checker, instance):
- # bool inherits from int, so ensure bools aren't reported as ints
- if isinstance(instance, bool):
- return False
- return isinstance(instance, int_types)
-
-
-def is_null(checker, instance):
- return instance is None
-
-
-def is_number(checker, instance):
- # bool inherits from int, so ensure bools aren't reported as ints
- if isinstance(instance, bool):
- return False
- return isinstance(instance, numbers.Number)
-
-
-def is_object(checker, instance):
- return isinstance(instance, dict)
-
-
-def is_string(checker, instance):
- return isinstance(instance, str_types)
-
-
-def is_any(checker, instance):
- return True
-
-
-@attr.s(frozen=True)
-class TypeChecker(object):
- """
- A ``type`` property checker.
-
- A `TypeChecker` performs type checking for an `IValidator`. Type
- checks to perform are updated using `TypeChecker.redefine` or
- `TypeChecker.redefine_many` and removed via `TypeChecker.remove`.
- Each of these returns a new `TypeChecker` object.
-
- Arguments:
-
- type_checkers (dict):
-
- The initial mapping of types to their checking functions.
- """
- _type_checkers = attr.ib(default=pmap(), converter=pmap)
-
- def is_type(self, instance, type):
- """
- Check if the instance is of the appropriate type.
-
- Arguments:
-
- instance (object):
-
- The instance to check
-
- type (str):
-
- The name of the type that is expected.
-
- Returns:
-
- bool: Whether it conformed.
-
-
- Raises:
-
- `jsonschema.exceptions.UndefinedTypeCheck`:
- if type is unknown to this object.
- """
- try:
- fn = self._type_checkers[type]
- except KeyError:
- raise UndefinedTypeCheck(type)
-
- return fn(self, instance)
-
- def redefine(self, type, fn):
- """
- Produce a new checker with the given type redefined.
-
- Arguments:
-
- type (str):
-
- The name of the type to check.
-
- fn (collections.Callable):
-
- A function taking exactly two parameters - the type
- checker calling the function and the instance to check.
- The function should return true if instance is of this
- type and false otherwise.
-
- Returns:
-
- A new `TypeChecker` instance.
- """
- return self.redefine_many({type: fn})
-
- def redefine_many(self, definitions=()):
- """
- Produce a new checker with the given types redefined.
-
- Arguments:
-
- definitions (dict):
-
- A dictionary mapping types to their checking functions.
-
- Returns:
-
- A new `TypeChecker` instance.
- """
- return attr.evolve(
- self, type_checkers=self._type_checkers.update(definitions),
- )
-
- def remove(self, *types):
- """
- Produce a new checker with the given types forgotten.
-
- Arguments:
-
- types (~collections.Iterable):
-
- the names of the types to remove.
-
- Returns:
-
- A new `TypeChecker` instance
-
- Raises:
-
- `jsonschema.exceptions.UndefinedTypeCheck`:
-
- if any given type is unknown to this object
- """
-
- checkers = self._type_checkers
- for each in types:
- try:
- checkers = checkers.remove(each)
- except KeyError:
- raise UndefinedTypeCheck(each)
- return attr.evolve(self, type_checkers=checkers)
-
-
-draft3_type_checker = TypeChecker(
- {
- u"any": is_any,
- u"array": is_array,
- u"boolean": is_bool,
- u"integer": is_integer,
- u"object": is_object,
- u"null": is_null,
- u"number": is_number,
- u"string": is_string,
- },
-)
-draft4_type_checker = draft3_type_checker.remove(u"any")
-draft6_type_checker = draft4_type_checker.redefine(
- u"integer",
- lambda checker, instance: (
- is_integer(checker, instance) or
- isinstance(instance, float) and instance.is_integer()
- ),
-)
-draft7_type_checker = draft6_type_checker
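A sketch of extending these type checkers, e.g. to let draft-7 "array" also accept tuples; `TYPE_CHECKER` and `validators.extend` are assumed from the standalone `jsonschema` package:

from jsonschema import validators
from jsonschema.validators import Draft7Validator

def is_array_or_tuple(checker, instance):
    # treat tuples as JSON arrays too
    return isinstance(instance, (list, tuple))

checker = Draft7Validator.TYPE_CHECKER.redefine("array", is_array_or_tuple)
TupleValidator = validators.extend(Draft7Validator, type_checker=checker)
TupleValidator({"type": "array"}).validate((1, 2, 3))  # no error raised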
diff --git a/lib/spack/external/jsonschema/_utils.py b/lib/spack/external/jsonschema/_utils.py
deleted file mode 100644
index ceb880198d..0000000000
--- a/lib/spack/external/jsonschema/_utils.py
+++ /dev/null
@@ -1,212 +0,0 @@
-import itertools
-import json
-import pkgutil
-import re
-
-from jsonschema.compat import MutableMapping, str_types, urlsplit
-
-
-class URIDict(MutableMapping):
- """
- Dictionary which uses normalized URIs as keys.
- """
-
- def normalize(self, uri):
- return urlsplit(uri).geturl()
-
- def __init__(self, *args, **kwargs):
- self.store = dict()
- self.store.update(*args, **kwargs)
-
- def __getitem__(self, uri):
- return self.store[self.normalize(uri)]
-
- def __setitem__(self, uri, value):
- self.store[self.normalize(uri)] = value
-
- def __delitem__(self, uri):
- del self.store[self.normalize(uri)]
-
- def __iter__(self):
- return iter(self.store)
-
- def __len__(self):
- return len(self.store)
-
- def __repr__(self):
- return repr(self.store)
-
-
-class Unset(object):
- """
- An as-of-yet unset attribute or unprovided default parameter.
- """
-
- def __repr__(self):
- return "<unset>"
-
-
-def load_schema(name):
- """
- Load a schema from ./schemas/``name``.json and return it.
- """
-
- data = pkgutil.get_data("jsonschema", "schemas/{0}.json".format(name))
- return json.loads(data.decode("utf-8"))
-
-
-def indent(string, times=1):
- """
- A dumb version of `textwrap.indent` from Python 3.3.
- """
-
- return "\n".join(" " * (4 * times) + line for line in string.splitlines())
-
-
-def format_as_index(indices):
- """
- Construct a single string containing indexing operations for the indices.
-
- For example, [1, 2, "foo"] -> [1][2]["foo"]
-
- Arguments:
-
- indices (sequence):
-
- The indices to format.
- """
-
- if not indices:
- return ""
- return "[%s]" % "][".join(repr(index) for index in indices)
-
-
-def find_additional_properties(instance, schema):
- """
- Return the set of additional properties for the given ``instance``.
-
- Weeds out properties that should have been validated by ``properties`` and
- / or ``patternProperties``.
-
- Assumes ``instance`` is dict-like already.
- """
-
- properties = schema.get("properties", {})
- patterns = "|".join(schema.get("patternProperties", {}))
- for property in instance:
- if property not in properties:
- if patterns and re.search(patterns, property):
- continue
- yield property
-
-
-def extras_msg(extras):
- """
- Create an error message for extra items or properties.
- """
-
- if len(extras) == 1:
- verb = "was"
- else:
- verb = "were"
- return ", ".join(repr(extra) for extra in extras), verb
-
-
-def types_msg(instance, types):
- """
- Create an error message for a failure to match the given types.
-
- If the ``instance`` is an object and contains a ``name`` property, it will
- be considered to be a description of that object and used as its type.
-
- Otherwise the message is simply the reprs of the given ``types``.
- """
-
- reprs = []
- for type in types:
- try:
- reprs.append(repr(type["name"]))
- except Exception:
- reprs.append(repr(type))
- return "%r is not of type %s" % (instance, ", ".join(reprs))
-
-
-def flatten(suitable_for_isinstance):
- """
- isinstance() can accept a bunch of really annoying different types:
- * a single type
- * a tuple of types
- * an arbitrary nested tree of tuples
-
- Return a flattened tuple of the given argument.
- """
-
- types = set()
-
- if not isinstance(suitable_for_isinstance, tuple):
- suitable_for_isinstance = (suitable_for_isinstance,)
- for thing in suitable_for_isinstance:
- if isinstance(thing, tuple):
- types.update(flatten(thing))
- else:
- types.add(thing)
- return tuple(types)
-
-
-def ensure_list(thing):
- """
- Wrap ``thing`` in a list if it's a single str.
-
- Otherwise, return it unchanged.
- """
-
- if isinstance(thing, str_types):
- return [thing]
- return thing
-
-
-def equal(one, two):
- """
- Check if two things are equal, but avoid treating booleans and ints as equal.
- """
- return unbool(one) == unbool(two)
-
-
-def unbool(element, true=object(), false=object()):
- """
- A hack to make True and 1 and False and 0 unique for ``uniq``.
- """
-
- if element is True:
- return true
- elif element is False:
- return false
- return element
-
-
-def uniq(container):
- """
- Check if all of a container's elements are unique.
-
- Tries first to rely on the elements being hashable, then falls back on
- sorting them, and finally on brute force.
- """
-
- try:
- return len(set(unbool(i) for i in container)) == len(container)
- except TypeError:
- try:
- sort = sorted(unbool(i) for i in container)
- sliced = itertools.islice(sort, 1, None)
- for i, j in zip(sort, sliced):
- if i == j:
- return False
- except (NotImplementedError, TypeError):
- seen = []
- for e in container:
- e = unbool(e)
- if e in seen:
- return False
- seen.append(e)
- return True
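The `unbool` hack above is what keeps `uniq` from conflating booleans with 0 and 1. A sketch, assuming the module is importable as in the standalone package:

from jsonschema._utils import uniq

print(uniq([1, True]))   # True: not duplicates, even though 1 == True
print(uniq([0, False]))  # True
print(uniq([1, 1.0]))    # False: genuinely equal numbers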
diff --git a/lib/spack/external/jsonschema/_validators.py b/lib/spack/external/jsonschema/_validators.py
deleted file mode 100644
index 179fec09a9..0000000000
--- a/lib/spack/external/jsonschema/_validators.py
+++ /dev/null
@@ -1,373 +0,0 @@
-import re
-
-from jsonschema._utils import (
- ensure_list,
- equal,
- extras_msg,
- find_additional_properties,
- types_msg,
- unbool,
- uniq,
-)
-from jsonschema.exceptions import FormatError, ValidationError
-from jsonschema.compat import iteritems
-
-
-def patternProperties(validator, patternProperties, instance, schema):
- if not validator.is_type(instance, "object"):
- return
-
- for pattern, subschema in iteritems(patternProperties):
- for k, v in iteritems(instance):
- if re.search(pattern, k):
- for error in validator.descend(
- v, subschema, path=k, schema_path=pattern,
- ):
- yield error
-
-
-def propertyNames(validator, propertyNames, instance, schema):
- if not validator.is_type(instance, "object"):
- return
-
- for property in instance:
- for error in validator.descend(
- instance=property,
- schema=propertyNames,
- ):
- yield error
-
-
-def additionalProperties(validator, aP, instance, schema):
- if not validator.is_type(instance, "object"):
- return
-
- extras = set(find_additional_properties(instance, schema))
-
- if validator.is_type(aP, "object"):
- for extra in extras:
- for error in validator.descend(instance[extra], aP, path=extra):
- yield error
- elif not aP and extras:
- if "patternProperties" in schema:
- patterns = sorted(schema["patternProperties"])
- if len(extras) == 1:
- verb = "does"
- else:
- verb = "do"
- error = "%s %s not match any of the regexes: %s" % (
- ", ".join(map(repr, sorted(extras))),
- verb,
- ", ".join(map(repr, patterns)),
- )
- yield ValidationError(error)
- else:
- error = "Additional properties are not allowed (%s %s unexpected)"
- yield ValidationError(error % extras_msg(extras))
-
-
-def items(validator, items, instance, schema):
- if not validator.is_type(instance, "array"):
- return
-
- if validator.is_type(items, "array"):
- for (index, item), subschema in zip(enumerate(instance), items):
- for error in validator.descend(
- item, subschema, path=index, schema_path=index,
- ):
- yield error
- else:
- for index, item in enumerate(instance):
- for error in validator.descend(item, items, path=index):
- yield error
-
-
-def additionalItems(validator, aI, instance, schema):
- if (
- not validator.is_type(instance, "array") or
- validator.is_type(schema.get("items", {}), "object")
- ):
- return
-
- len_items = len(schema.get("items", []))
- if validator.is_type(aI, "object"):
- for index, item in enumerate(instance[len_items:], start=len_items):
- for error in validator.descend(item, aI, path=index):
- yield error
- elif not aI and len(instance) > len(schema.get("items", [])):
- error = "Additional items are not allowed (%s %s unexpected)"
- yield ValidationError(
- error %
- extras_msg(instance[len(schema.get("items", [])):])
- )
-
-
-def const(validator, const, instance, schema):
- if not equal(instance, const):
- yield ValidationError("%r was expected" % (const,))
-
-
-def contains(validator, contains, instance, schema):
- if not validator.is_type(instance, "array"):
- return
-
- if not any(validator.is_valid(element, contains) for element in instance):
- yield ValidationError(
- "None of %r are valid under the given schema" % (instance,)
- )
-
-
-def exclusiveMinimum(validator, minimum, instance, schema):
- if not validator.is_type(instance, "number"):
- return
-
- if instance <= minimum:
- yield ValidationError(
- "%r is less than or equal to the minimum of %r" % (
- instance, minimum,
- ),
- )
-
-
-def exclusiveMaximum(validator, maximum, instance, schema):
- if not validator.is_type(instance, "number"):
- return
-
- if instance >= maximum:
- yield ValidationError(
- "%r is greater than or equal to the maximum of %r" % (
- instance, maximum,
- ),
- )
-
-
-def minimum(validator, minimum, instance, schema):
- if not validator.is_type(instance, "number"):
- return
-
- if instance < minimum:
- yield ValidationError(
- "%r is less than the minimum of %r" % (instance, minimum)
- )
-
-
-def maximum(validator, maximum, instance, schema):
- if not validator.is_type(instance, "number"):
- return
-
- if instance > maximum:
- yield ValidationError(
- "%r is greater than the maximum of %r" % (instance, maximum)
- )
-
-
-def multipleOf(validator, dB, instance, schema):
- if not validator.is_type(instance, "number"):
- return
-
- if isinstance(dB, float):
- quotient = instance / dB
- failed = int(quotient) != quotient
- else:
- failed = instance % dB
-
- if failed:
- yield ValidationError("%r is not a multiple of %r" % (instance, dB))
-
-
-def minItems(validator, mI, instance, schema):
- if validator.is_type(instance, "array") and len(instance) < mI:
- yield ValidationError("%r is too short" % (instance,))
-
-
-def maxItems(validator, mI, instance, schema):
- if validator.is_type(instance, "array") and len(instance) > mI:
- yield ValidationError("%r is too long" % (instance,))
-
-
-def uniqueItems(validator, uI, instance, schema):
- if (
- uI and
- validator.is_type(instance, "array") and
- not uniq(instance)
- ):
- yield ValidationError("%r has non-unique elements" % (instance,))
-
-
-def pattern(validator, patrn, instance, schema):
- if (
- validator.is_type(instance, "string") and
- not re.search(patrn, instance)
- ):
- yield ValidationError("%r does not match %r" % (instance, patrn))
-
-
-def format(validator, format, instance, schema):
- if validator.format_checker is not None:
- try:
- validator.format_checker.check(instance, format)
- except FormatError as error:
- yield ValidationError(error.message, cause=error.cause)
-
-
-def minLength(validator, mL, instance, schema):
- if validator.is_type(instance, "string") and len(instance) < mL:
- yield ValidationError("%r is too short" % (instance,))
-
-
-def maxLength(validator, mL, instance, schema):
- if validator.is_type(instance, "string") and len(instance) > mL:
- yield ValidationError("%r is too long" % (instance,))
-
-
-def dependencies(validator, dependencies, instance, schema):
- if not validator.is_type(instance, "object"):
- return
-
- for property, dependency in iteritems(dependencies):
- if property not in instance:
- continue
-
- if validator.is_type(dependency, "array"):
- for each in dependency:
- if each not in instance:
- message = "%r is a dependency of %r"
- yield ValidationError(message % (each, property))
- else:
- for error in validator.descend(
- instance, dependency, schema_path=property,
- ):
- yield error
-
-
-def enum(validator, enums, instance, schema):
- if instance == 0 or instance == 1:
- unbooled = unbool(instance)
- if all(unbooled != unbool(each) for each in enums):
- yield ValidationError("%r is not one of %r" % (instance, enums))
- elif instance not in enums:
- yield ValidationError("%r is not one of %r" % (instance, enums))
-
-
-def ref(validator, ref, instance, schema):
- resolve = getattr(validator.resolver, "resolve", None)
- if resolve is None:
- with validator.resolver.resolving(ref) as resolved:
- for error in validator.descend(instance, resolved):
- yield error
- else:
- scope, resolved = validator.resolver.resolve(ref)
- validator.resolver.push_scope(scope)
-
- try:
- for error in validator.descend(instance, resolved):
- yield error
- finally:
- validator.resolver.pop_scope()
-
-
-def type(validator, types, instance, schema):
- types = ensure_list(types)
-
- if not any(validator.is_type(instance, type) for type in types):
- yield ValidationError(types_msg(instance, types))
-
-
-def properties(validator, properties, instance, schema):
- if not validator.is_type(instance, "object"):
- return
-
- for property, subschema in iteritems(properties):
- if property in instance:
- for error in validator.descend(
- instance[property],
- subschema,
- path=property,
- schema_path=property,
- ):
- yield error
-
-
-def required(validator, required, instance, schema):
- if not validator.is_type(instance, "object"):
- return
- for property in required:
- if property not in instance:
- yield ValidationError("%r is a required property" % property)
-
-
-def minProperties(validator, mP, instance, schema):
- if validator.is_type(instance, "object") and len(instance) < mP:
- yield ValidationError(
- "%r does not have enough properties" % (instance,)
- )
-
-
-def maxProperties(validator, mP, instance, schema):
- if not validator.is_type(instance, "object"):
- return
- if len(instance) > mP:
- yield ValidationError("%r has too many properties" % (instance,))
-
-
-def allOf(validator, allOf, instance, schema):
- for index, subschema in enumerate(allOf):
- for error in validator.descend(instance, subschema, schema_path=index):
- yield error
-
-
-def anyOf(validator, anyOf, instance, schema):
- all_errors = []
- for index, subschema in enumerate(anyOf):
- errs = list(validator.descend(instance, subschema, schema_path=index))
- if not errs:
- break
- all_errors.extend(errs)
- else:
- yield ValidationError(
- "%r is not valid under any of the given schemas" % (instance,),
- context=all_errors,
- )
-
-
-def oneOf(validator, oneOf, instance, schema):
- subschemas = enumerate(oneOf)
- all_errors = []
- for index, subschema in subschemas:
- errs = list(validator.descend(instance, subschema, schema_path=index))
- if not errs:
- first_valid = subschema
- break
- all_errors.extend(errs)
- else:
- yield ValidationError(
- "%r is not valid under any of the given schemas" % (instance,),
- context=all_errors,
- )
-
- more_valid = [s for i, s in subschemas if validator.is_valid(instance, s)]
- if more_valid:
- more_valid.append(first_valid)
- reprs = ", ".join(repr(schema) for schema in more_valid)
- yield ValidationError(
- "%r is valid under each of %s" % (instance, reprs)
- )
-
-
-def not_(validator, not_schema, instance, schema):
- if validator.is_valid(instance, not_schema):
- yield ValidationError(
- "%r is not allowed for %r" % (not_schema, instance)
- )
-
-
-def if_(validator, if_schema, instance, schema):
- if validator.is_valid(instance, if_schema):
- if u"then" in schema:
- then = schema[u"then"]
- for error in validator.descend(instance, then, schema_path="then"):
- yield error
- elif u"else" in schema:
- else_ = schema[u"else"]
- for error in validator.descend(instance, else_, schema_path="else"):
- yield error
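Each keyword function above is a generator taking `(validator, <keyword value>, instance, schema)` and yielding `ValidationError`s; they are not called directly but through a validator's `iter_errors`. A sketch with the standalone package:

import jsonschema

validator = jsonschema.Draft7Validator({"type": "integer", "minimum": 3})
for error in validator.iter_errors(1):
    print(error.message)  # 1 is less than the minimum of 3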
diff --git a/lib/spack/external/jsonschema/cli.py b/lib/spack/external/jsonschema/cli.py
deleted file mode 100644
index ab3335b27c..0000000000
--- a/lib/spack/external/jsonschema/cli.py
+++ /dev/null
@@ -1,90 +0,0 @@
-"""
-The ``jsonschema`` command line.
-"""
-from __future__ import absolute_import
-import argparse
-import json
-import sys
-
-from jsonschema import __version__
-from jsonschema._reflect import namedAny
-from jsonschema.validators import validator_for
-
-
-def _namedAnyWithDefault(name):
- if "." not in name:
- name = "jsonschema." + name
- return namedAny(name)
-
-
-def _json_file(path):
- with open(path) as file:
- return json.load(file)
-
-
-parser = argparse.ArgumentParser(
- description="JSON Schema Validation CLI",
-)
-parser.add_argument(
- "-i", "--instance",
- action="append",
- dest="instances",
- type=_json_file,
- help=(
- "a path to a JSON instance (i.e. filename.json) "
- "to validate (may be specified multiple times)"
- ),
-)
-parser.add_argument(
- "-F", "--error-format",
- default="{error.instance}: {error.message}\n",
- help=(
- "the format to use for each error output message, specified in "
- "a form suitable for passing to str.format, which will be called "
- "with 'error' for each error"
- ),
-)
-parser.add_argument(
- "-V", "--validator",
- type=_namedAnyWithDefault,
- help=(
- "the fully qualified object name of a validator to use, or, for "
- "validators that are registered with jsonschema, simply the name "
- "of the class."
- ),
-)
-parser.add_argument(
- "--version",
- action="version",
- version=__version__,
-)
-parser.add_argument(
- "schema",
- help="the JSON Schema to validate with (i.e. schema.json)",
- type=_json_file,
-)
-
-
-def parse_args(args):
- arguments = vars(parser.parse_args(args=args or ["--help"]))
- if arguments["validator"] is None:
- arguments["validator"] = validator_for(arguments["schema"])
- return arguments
-
-
-def main(args=sys.argv[1:]):
- sys.exit(run(arguments=parse_args(args=args)))
-
-
-def run(arguments, stdout=sys.stdout, stderr=sys.stderr):
- error_format = arguments["error_format"]
- validator = arguments["validator"](schema=arguments["schema"])
-
- validator.check_schema(arguments["schema"])
-
- errored = False
- for instance in arguments["instances"] or ():
- for error in validator.iter_errors(instance):
- stderr.write(error_format.format(error=error))
- errored = True
- return errored
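A sketch of driving this CLI programmatically; the file names are hypothetical, `-i` opens them at argument-parsing time, and `run` returns truthy when any instance failed validation:

from jsonschema import cli

arguments = cli.parse_args(["-i", "instance.json", "schema.json"])
errored = cli.run(arguments=arguments)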
diff --git a/lib/spack/external/jsonschema/compat.py b/lib/spack/external/jsonschema/compat.py
deleted file mode 100644
index 47e0980455..0000000000
--- a/lib/spack/external/jsonschema/compat.py
+++ /dev/null
@@ -1,55 +0,0 @@
-"""
-Python 2/3 compatibility helpers.
-
-Note: This module is *not* public API.
-"""
-import contextlib
-import operator
-import sys
-
-
-try:
- from collections.abc import MutableMapping, Sequence # noqa
-except ImportError:
- from collections import MutableMapping, Sequence # noqa
-
-PY3 = sys.version_info[0] >= 3
-
-if PY3:
- zip = zip
- from functools import lru_cache
- from io import StringIO as NativeIO
- from urllib.parse import (
- unquote, urljoin, urlunsplit, SplitResult, urlsplit
- )
- from urllib.request import pathname2url, urlopen
- str_types = str,
- int_types = int,
- iteritems = operator.methodcaller("items")
-else:
- from itertools import izip as zip # noqa
- from io import BytesIO as NativeIO
- from urlparse import urljoin, urlunsplit, SplitResult, urlsplit
- from urllib import pathname2url, unquote # noqa
- import urllib2 # noqa
- def urlopen(*args, **kwargs):
- return contextlib.closing(urllib2.urlopen(*args, **kwargs))
-
- str_types = basestring
- int_types = int, long
- iteritems = operator.methodcaller("iteritems")
-
- from functools32 import lru_cache
-
-
-def urldefrag(url):
- if "#" in url:
- s, n, p, q, frag = urlsplit(url)
- defrag = urlunsplit((s, n, p, q, ""))
- else:
- defrag = url
- frag = ""
- return defrag, frag
-
-
-# flake8: noqa
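The shim's `urldefrag` splits a URL from its fragment, which the resolver uses when following `$ref`s. A sketch:

from jsonschema.compat import urldefrag

print(urldefrag("http://example.com/schema.json#/definitions/foo"))
# ('http://example.com/schema.json', '/definitions/foo')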
diff --git a/lib/spack/external/jsonschema/exceptions.py b/lib/spack/external/jsonschema/exceptions.py
deleted file mode 100644
index 691dcffe6c..0000000000
--- a/lib/spack/external/jsonschema/exceptions.py
+++ /dev/null
@@ -1,374 +0,0 @@
-"""
-Validation errors, and some surrounding helpers.
-"""
-from collections import defaultdict, deque
-import itertools
-import pprint
-import textwrap
-
-import attr
-
-from jsonschema import _utils
-from jsonschema.compat import PY3, iteritems
-
-
-WEAK_MATCHES = frozenset(["anyOf", "oneOf"])
-STRONG_MATCHES = frozenset()
-
-_unset = _utils.Unset()
-
-
-class _Error(Exception):
- def __init__(
- self,
- message,
- validator=_unset,
- path=(),
- cause=None,
- context=(),
- validator_value=_unset,
- instance=_unset,
- schema=_unset,
- schema_path=(),
- parent=None,
- ):
- super(_Error, self).__init__(
- message,
- validator,
- path,
- cause,
- context,
- validator_value,
- instance,
- schema,
- schema_path,
- parent,
- )
- self.message = message
- self.path = self.relative_path = deque(path)
- self.schema_path = self.relative_schema_path = deque(schema_path)
- self.context = list(context)
- self.cause = self.__cause__ = cause
- self.validator = validator
- self.validator_value = validator_value
- self.instance = instance
- self.schema = schema
- self.parent = parent
-
- for error in context:
- error.parent = self
-
- def __repr__(self):
- return "<%s: %r>" % (self.__class__.__name__, self.message)
-
- def __unicode__(self):
- essential_for_verbose = (
- self.validator, self.validator_value, self.instance, self.schema,
- )
- if any(m is _unset for m in essential_for_verbose):
- return self.message
-
- pschema = pprint.pformat(self.schema, width=72)
- pinstance = pprint.pformat(self.instance, width=72)
- return self.message + textwrap.dedent("""
-
- Failed validating %r in %s%s:
- %s
-
- On %s%s:
- %s
- """.rstrip()
- ) % (
- self.validator,
- self._word_for_schema_in_error_message,
- _utils.format_as_index(list(self.relative_schema_path)[:-1]),
- _utils.indent(pschema),
- self._word_for_instance_in_error_message,
- _utils.format_as_index(self.relative_path),
- _utils.indent(pinstance),
- )
-
- if PY3:
- __str__ = __unicode__
- else:
- def __str__(self):
- return unicode(self).encode("utf-8")
-
- @classmethod
- def create_from(cls, other):
- return cls(**other._contents())
-
- @property
- def absolute_path(self):
- parent = self.parent
- if parent is None:
- return self.relative_path
-
- path = deque(self.relative_path)
- path.extendleft(reversed(parent.absolute_path))
- return path
-
- @property
- def absolute_schema_path(self):
- parent = self.parent
- if parent is None:
- return self.relative_schema_path
-
- path = deque(self.relative_schema_path)
- path.extendleft(reversed(parent.absolute_schema_path))
- return path
-
- def _set(self, **kwargs):
- for k, v in iteritems(kwargs):
- if getattr(self, k) is _unset:
- setattr(self, k, v)
-
- def _contents(self):
- attrs = (
- "message", "cause", "context", "validator", "validator_value",
- "path", "schema_path", "instance", "schema", "parent",
- )
- return dict((attr, getattr(self, attr)) for attr in attrs)
-
-
-class ValidationError(_Error):
- """
- An instance was invalid under a provided schema.
- """
-
- _word_for_schema_in_error_message = "schema"
- _word_for_instance_in_error_message = "instance"
-
-
-class SchemaError(_Error):
- """
- A schema was invalid under its corresponding metaschema.
- """
-
- _word_for_schema_in_error_message = "metaschema"
- _word_for_instance_in_error_message = "schema"
-
-
-@attr.s(hash=True)
-class RefResolutionError(Exception):
- """
- A ref could not be resolved.
- """
-
- _cause = attr.ib()
-
- def __str__(self):
- return str(self._cause)
-
-
-class UndefinedTypeCheck(Exception):
- """
- A type checker was asked to check a type it did not have registered.
- """
-
- def __init__(self, type):
- self.type = type
-
- def __unicode__(self):
- return "Type %r is unknown to this type checker" % self.type
-
- if PY3:
- __str__ = __unicode__
- else:
- def __str__(self):
- return unicode(self).encode("utf-8")
-
-
-class UnknownType(Exception):
- """
- A validator was asked to validate an instance against an unknown type.
- """
-
- def __init__(self, type, instance, schema):
- self.type = type
- self.instance = instance
- self.schema = schema
-
- def __unicode__(self):
- pschema = pprint.pformat(self.schema, width=72)
- pinstance = pprint.pformat(self.instance, width=72)
- return textwrap.dedent("""
- Unknown type %r for validator with schema:
- %s
-
- While checking instance:
- %s
- """.rstrip()
- ) % (self.type, _utils.indent(pschema), _utils.indent(pinstance))
-
- if PY3:
- __str__ = __unicode__
- else:
- def __str__(self):
- return unicode(self).encode("utf-8")
-
-
-class FormatError(Exception):
- """
- Validating a format failed.
- """
-
- def __init__(self, message, cause=None):
- super(FormatError, self).__init__(message, cause)
- self.message = message
- self.cause = self.__cause__ = cause
-
- def __unicode__(self):
- return self.message
-
- if PY3:
- __str__ = __unicode__
- else:
- def __str__(self):
- return self.message.encode("utf-8")
-
-
-class ErrorTree(object):
- """
- ErrorTrees make it easier to check which validations failed.
- """
-
- _instance = _unset
-
- def __init__(self, errors=()):
- self.errors = {}
- self._contents = defaultdict(self.__class__)
-
- for error in errors:
- container = self
- for element in error.path:
- container = container[element]
- container.errors[error.validator] = error
-
- container._instance = error.instance
-
- def __contains__(self, index):
- """
- Check whether ``instance[index]`` has any errors.
- """
-
- return index in self._contents
-
- def __getitem__(self, index):
- """
- Retrieve the child tree one level down at the given ``index``.
-
- If the index is not in the instance that this tree corresponds to and
- is not known by this tree, whatever error would be raised by
- ``instance.__getitem__`` will be propagated (usually this is some
- subclass of `exceptions.LookupError`).
- """
-
- if self._instance is not _unset and index not in self:
- self._instance[index]
- return self._contents[index]
-
- def __setitem__(self, index, value):
- """
- Add an error to the tree at the given ``index``.
- """
- self._contents[index] = value
-
- def __iter__(self):
- """
- Iterate (non-recursively) over the indices in the instance with errors.
- """
-
- return iter(self._contents)
-
- def __len__(self):
- """
- Return the `total_errors`.
- """
- return self.total_errors
-
- def __repr__(self):
- return "<%s (%s total errors)>" % (self.__class__.__name__, len(self))
-
- @property
- def total_errors(self):
- """
- The total number of errors in the entire tree, including children.
- """
-
- child_errors = sum(len(tree) for _, tree in iteritems(self._contents))
- return len(self.errors) + child_errors
-
-
-def by_relevance(weak=WEAK_MATCHES, strong=STRONG_MATCHES):
- """
- Create a key function that can be used to sort errors by relevance.
-
- Arguments:
- weak (set):
- a collection of validator names to consider to be "weak".
- If there are two errors at the same level of the instance
- and one is in the set of weak validator names, the other
- error will take priority. By default, :validator:`anyOf` and
- :validator:`oneOf` are considered weak validators and will
- be superseded by other same-level validation errors.
-
- strong (set):
- a collection of validator names to consider to be "strong"
- """
- def relevance(error):
- validator = error.validator
- return -len(error.path), validator not in weak, validator in strong
- return relevance
-
-
-relevance = by_relevance()
-
-
-def best_match(errors, key=relevance):
- """
- Try to find an error that appears to be the best match among given errors.
-
- In general, errors that are higher up in the instance (i.e. for which
- `ValidationError.path` is shorter) are considered better matches,
- since they indicate "more" is wrong with the instance.
-
- If the resulting match is either :validator:`oneOf` or :validator:`anyOf`,
- the *opposite* assumption is made -- i.e. the deepest error is picked,
- since these validators only need to match once, and any other errors may
- not be relevant.
-
- Arguments:
- errors (collections.Iterable):
-
- the errors to select from. Do not provide a mixture of
- errors from different validation attempts (i.e. from
- different instances or schemas), since it won't produce
- sensible output.
-
- key (collections.Callable):
-
- the key to use when sorting errors. See `relevance` and
- transitively `by_relevance` for more details (the default is
- to sort with the defaults of that function). Changing the
- default is only useful if you want to change the function
- that rates errors but still want the error context descent
- done by this function.
-
- Returns:
- the best matching error, or ``None`` if the iterable was empty
-
- .. note::
-
- This function is a heuristic. Its return value may change for a given
- set of inputs from version to version if better heuristics are added.
- """
- errors = iter(errors)
- best = next(errors, None)
- if best is None:
- return
- best = max(itertools.chain([best], errors), key=key)
-
- while best.context:
- best = min(best.context, key=key)
- return best
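A sketch of `best_match` choosing among several errors, assuming the standalone package:

import jsonschema
from jsonschema.exceptions import best_match

validator = jsonschema.Draft7Validator(
    {"properties": {"n": {"type": "integer", "minimum": 0}}},
)
error = best_match(validator.iter_errors({"n": -1}))
print(error.message)     # -1 is less than the minimum of 0
print(list(error.path))  # ['n']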
diff --git a/lib/spack/external/jsonschema/schemas/draft3.json b/lib/spack/external/jsonschema/schemas/draft3.json
deleted file mode 100644
index f8a09c563b..0000000000
--- a/lib/spack/external/jsonschema/schemas/draft3.json
+++ /dev/null
@@ -1,199 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-03/schema#",
- "dependencies": {
- "exclusiveMaximum": "maximum",
- "exclusiveMinimum": "minimum"
- },
- "id": "http://json-schema.org/draft-03/schema#",
- "properties": {
- "$ref": {
- "format": "uri",
- "type": "string"
- },
- "$schema": {
- "format": "uri",
- "type": "string"
- },
- "additionalItems": {
- "default": {},
- "type": [
- {
- "$ref": "#"
- },
- "boolean"
- ]
- },
- "additionalProperties": {
- "default": {},
- "type": [
- {
- "$ref": "#"
- },
- "boolean"
- ]
- },
- "default": {
- "type": "any"
- },
- "dependencies": {
- "additionalProperties": {
- "items": {
- "type": "string"
- },
- "type": [
- "string",
- "array",
- {
- "$ref": "#"
- }
- ]
- },
- "default": {},
- "type": [
- "string",
- "array",
- "object"
- ]
- },
- "description": {
- "type": "string"
- },
- "disallow": {
- "items": {
- "type": [
- "string",
- {
- "$ref": "#"
- }
- ]
- },
- "type": [
- "string",
- "array"
- ],
- "uniqueItems": true
- },
- "divisibleBy": {
- "default": 1,
- "exclusiveMinimum": true,
- "minimum": 0,
- "type": "number"
- },
- "enum": {
- "type": "array"
- },
- "exclusiveMaximum": {
- "default": false,
- "type": "boolean"
- },
- "exclusiveMinimum": {
- "default": false,
- "type": "boolean"
- },
- "extends": {
- "default": {},
- "items": {
- "$ref": "#"
- },
- "type": [
- {
- "$ref": "#"
- },
- "array"
- ]
- },
- "format": {
- "type": "string"
- },
- "id": {
- "format": "uri",
- "type": "string"
- },
- "items": {
- "default": {},
- "items": {
- "$ref": "#"
- },
- "type": [
- {
- "$ref": "#"
- },
- "array"
- ]
- },
- "maxDecimal": {
- "minimum": 0,
- "type": "number"
- },
- "maxItems": {
- "minimum": 0,
- "type": "integer"
- },
- "maxLength": {
- "type": "integer"
- },
- "maximum": {
- "type": "number"
- },
- "minItems": {
- "default": 0,
- "minimum": 0,
- "type": "integer"
- },
- "minLength": {
- "default": 0,
- "minimum": 0,
- "type": "integer"
- },
- "minimum": {
- "type": "number"
- },
- "pattern": {
- "format": "regex",
- "type": "string"
- },
- "patternProperties": {
- "additionalProperties": {
- "$ref": "#"
- },
- "default": {},
- "type": "object"
- },
- "properties": {
- "additionalProperties": {
- "$ref": "#",
- "type": "object"
- },
- "default": {},
- "type": "object"
- },
- "required": {
- "default": false,
- "type": "boolean"
- },
- "title": {
- "type": "string"
- },
- "type": {
- "default": "any",
- "items": {
- "type": [
- "string",
- {
- "$ref": "#"
- }
- ]
- },
- "type": [
- "string",
- "array"
- ],
- "uniqueItems": true
- },
- "uniqueItems": {
- "default": false,
- "type": "boolean"
- }
- },
- "type": "object"
-}
diff --git a/lib/spack/external/jsonschema/schemas/draft4.json b/lib/spack/external/jsonschema/schemas/draft4.json
deleted file mode 100644
index 9b666cff88..0000000000
--- a/lib/spack/external/jsonschema/schemas/draft4.json
+++ /dev/null
@@ -1,222 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-04/schema#",
- "default": {},
- "definitions": {
- "positiveInteger": {
- "minimum": 0,
- "type": "integer"
- },
- "positiveIntegerDefault0": {
- "allOf": [
- {
- "$ref": "#/definitions/positiveInteger"
- },
- {
- "default": 0
- }
- ]
- },
- "schemaArray": {
- "items": {
- "$ref": "#"
- },
- "minItems": 1,
- "type": "array"
- },
- "simpleTypes": {
- "enum": [
- "array",
- "boolean",
- "integer",
- "null",
- "number",
- "object",
- "string"
- ]
- },
- "stringArray": {
- "items": {
- "type": "string"
- },
- "minItems": 1,
- "type": "array",
- "uniqueItems": true
- }
- },
- "dependencies": {
- "exclusiveMaximum": [
- "maximum"
- ],
- "exclusiveMinimum": [
- "minimum"
- ]
- },
- "description": "Core schema meta-schema",
- "id": "http://json-schema.org/draft-04/schema#",
- "properties": {
- "$schema": {
- "format": "uri",
- "type": "string"
- },
- "additionalItems": {
- "anyOf": [
- {
- "type": "boolean"
- },
- {
- "$ref": "#"
- }
- ],
- "default": {}
- },
- "additionalProperties": {
- "anyOf": [
- {
- "type": "boolean"
- },
- {
- "$ref": "#"
- }
- ],
- "default": {}
- },
- "allOf": {
- "$ref": "#/definitions/schemaArray"
- },
- "anyOf": {
- "$ref": "#/definitions/schemaArray"
- },
- "default": {},
- "definitions": {
- "additionalProperties": {
- "$ref": "#"
- },
- "default": {},
- "type": "object"
- },
- "dependencies": {
- "additionalProperties": {
- "anyOf": [
- {
- "$ref": "#"
- },
- {
- "$ref": "#/definitions/stringArray"
- }
- ]
- },
- "type": "object"
- },
- "description": {
- "type": "string"
- },
- "enum": {
- "type": "array"
- },
- "exclusiveMaximum": {
- "default": false,
- "type": "boolean"
- },
- "exclusiveMinimum": {
- "default": false,
- "type": "boolean"
- },
- "format": {
- "type": "string"
- },
- "id": {
- "format": "uri",
- "type": "string"
- },
- "items": {
- "anyOf": [
- {
- "$ref": "#"
- },
- {
- "$ref": "#/definitions/schemaArray"
- }
- ],
- "default": {}
- },
- "maxItems": {
- "$ref": "#/definitions/positiveInteger"
- },
- "maxLength": {
- "$ref": "#/definitions/positiveInteger"
- },
- "maxProperties": {
- "$ref": "#/definitions/positiveInteger"
- },
- "maximum": {
- "type": "number"
- },
- "minItems": {
- "$ref": "#/definitions/positiveIntegerDefault0"
- },
- "minLength": {
- "$ref": "#/definitions/positiveIntegerDefault0"
- },
- "minProperties": {
- "$ref": "#/definitions/positiveIntegerDefault0"
- },
- "minimum": {
- "type": "number"
- },
- "multipleOf": {
- "exclusiveMinimum": true,
- "minimum": 0,
- "type": "number"
- },
- "not": {
- "$ref": "#"
- },
- "oneOf": {
- "$ref": "#/definitions/schemaArray"
- },
- "pattern": {
- "format": "regex",
- "type": "string"
- },
- "patternProperties": {
- "additionalProperties": {
- "$ref": "#"
- },
- "default": {},
- "type": "object"
- },
- "properties": {
- "additionalProperties": {
- "$ref": "#"
- },
- "default": {},
- "type": "object"
- },
- "required": {
- "$ref": "#/definitions/stringArray"
- },
- "title": {
- "type": "string"
- },
- "type": {
- "anyOf": [
- {
- "$ref": "#/definitions/simpleTypes"
- },
- {
- "items": {
- "$ref": "#/definitions/simpleTypes"
- },
- "minItems": 1,
- "type": "array",
- "uniqueItems": true
- }
- ]
- },
- "uniqueItems": {
- "default": false,
- "type": "boolean"
- }
- },
- "type": "object"
-}
diff --git a/lib/spack/external/jsonschema/schemas/draft6.json b/lib/spack/external/jsonschema/schemas/draft6.json
deleted file mode 100644
index a0d2bf7896..0000000000
--- a/lib/spack/external/jsonschema/schemas/draft6.json
+++ /dev/null
@@ -1,153 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-06/schema#",
- "$id": "http://json-schema.org/draft-06/schema#",
- "title": "Core schema meta-schema",
- "definitions": {
- "schemaArray": {
- "type": "array",
- "minItems": 1,
- "items": { "$ref": "#" }
- },
- "nonNegativeInteger": {
- "type": "integer",
- "minimum": 0
- },
- "nonNegativeIntegerDefault0": {
- "allOf": [
- { "$ref": "#/definitions/nonNegativeInteger" },
- { "default": 0 }
- ]
- },
- "simpleTypes": {
- "enum": [
- "array",
- "boolean",
- "integer",
- "null",
- "number",
- "object",
- "string"
- ]
- },
- "stringArray": {
- "type": "array",
- "items": { "type": "string" },
- "uniqueItems": true,
- "default": []
- }
- },
- "type": ["object", "boolean"],
- "properties": {
- "$id": {
- "type": "string",
- "format": "uri-reference"
- },
- "$schema": {
- "type": "string",
- "format": "uri"
- },
- "$ref": {
- "type": "string",
- "format": "uri-reference"
- },
- "title": {
- "type": "string"
- },
- "description": {
- "type": "string"
- },
- "default": {},
- "examples": {
- "type": "array",
- "items": {}
- },
- "multipleOf": {
- "type": "number",
- "exclusiveMinimum": 0
- },
- "maximum": {
- "type": "number"
- },
- "exclusiveMaximum": {
- "type": "number"
- },
- "minimum": {
- "type": "number"
- },
- "exclusiveMinimum": {
- "type": "number"
- },
- "maxLength": { "$ref": "#/definitions/nonNegativeInteger" },
- "minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
- "pattern": {
- "type": "string",
- "format": "regex"
- },
- "additionalItems": { "$ref": "#" },
- "items": {
- "anyOf": [
- { "$ref": "#" },
- { "$ref": "#/definitions/schemaArray" }
- ],
- "default": {}
- },
- "maxItems": { "$ref": "#/definitions/nonNegativeInteger" },
- "minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
- "uniqueItems": {
- "type": "boolean",
- "default": false
- },
- "contains": { "$ref": "#" },
- "maxProperties": { "$ref": "#/definitions/nonNegativeInteger" },
- "minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
- "required": { "$ref": "#/definitions/stringArray" },
- "additionalProperties": { "$ref": "#" },
- "definitions": {
- "type": "object",
- "additionalProperties": { "$ref": "#" },
- "default": {}
- },
- "properties": {
- "type": "object",
- "additionalProperties": { "$ref": "#" },
- "default": {}
- },
- "patternProperties": {
- "type": "object",
- "additionalProperties": { "$ref": "#" },
- "propertyNames": { "format": "regex" },
- "default": {}
- },
- "dependencies": {
- "type": "object",
- "additionalProperties": {
- "anyOf": [
- { "$ref": "#" },
- { "$ref": "#/definitions/stringArray" }
- ]
- }
- },
- "propertyNames": { "$ref": "#" },
- "const": {},
- "enum": {
- "type": "array"
- },
- "type": {
- "anyOf": [
- { "$ref": "#/definitions/simpleTypes" },
- {
- "type": "array",
- "items": { "$ref": "#/definitions/simpleTypes" },
- "minItems": 1,
- "uniqueItems": true
- }
- ]
- },
- "format": { "type": "string" },
- "allOf": { "$ref": "#/definitions/schemaArray" },
- "anyOf": { "$ref": "#/definitions/schemaArray" },
- "oneOf": { "$ref": "#/definitions/schemaArray" },
- "not": { "$ref": "#" }
- },
- "default": {}
-}
diff --git a/lib/spack/external/jsonschema/schemas/draft7.json b/lib/spack/external/jsonschema/schemas/draft7.json
deleted file mode 100644
index 746cde9690..0000000000
--- a/lib/spack/external/jsonschema/schemas/draft7.json
+++ /dev/null
@@ -1,166 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "http://json-schema.org/draft-07/schema#",
- "title": "Core schema meta-schema",
- "definitions": {
- "schemaArray": {
- "type": "array",
- "minItems": 1,
- "items": { "$ref": "#" }
- },
- "nonNegativeInteger": {
- "type": "integer",
- "minimum": 0
- },
- "nonNegativeIntegerDefault0": {
- "allOf": [
- { "$ref": "#/definitions/nonNegativeInteger" },
- { "default": 0 }
- ]
- },
- "simpleTypes": {
- "enum": [
- "array",
- "boolean",
- "integer",
- "null",
- "number",
- "object",
- "string"
- ]
- },
- "stringArray": {
- "type": "array",
- "items": { "type": "string" },
- "uniqueItems": true,
- "default": []
- }
- },
- "type": ["object", "boolean"],
- "properties": {
- "$id": {
- "type": "string",
- "format": "uri-reference"
- },
- "$schema": {
- "type": "string",
- "format": "uri"
- },
- "$ref": {
- "type": "string",
- "format": "uri-reference"
- },
- "$comment": {
- "type": "string"
- },
- "title": {
- "type": "string"
- },
- "description": {
- "type": "string"
- },
- "default": true,
- "readOnly": {
- "type": "boolean",
- "default": false
- },
- "examples": {
- "type": "array",
- "items": true
- },
- "multipleOf": {
- "type": "number",
- "exclusiveMinimum": 0
- },
- "maximum": {
- "type": "number"
- },
- "exclusiveMaximum": {
- "type": "number"
- },
- "minimum": {
- "type": "number"
- },
- "exclusiveMinimum": {
- "type": "number"
- },
- "maxLength": { "$ref": "#/definitions/nonNegativeInteger" },
- "minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
- "pattern": {
- "type": "string",
- "format": "regex"
- },
- "additionalItems": { "$ref": "#" },
- "items": {
- "anyOf": [
- { "$ref": "#" },
- { "$ref": "#/definitions/schemaArray" }
- ],
- "default": true
- },
- "maxItems": { "$ref": "#/definitions/nonNegativeInteger" },
- "minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
- "uniqueItems": {
- "type": "boolean",
- "default": false
- },
- "contains": { "$ref": "#" },
- "maxProperties": { "$ref": "#/definitions/nonNegativeInteger" },
- "minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
- "required": { "$ref": "#/definitions/stringArray" },
- "additionalProperties": { "$ref": "#" },
- "definitions": {
- "type": "object",
- "additionalProperties": { "$ref": "#" },
- "default": {}
- },
- "properties": {
- "type": "object",
- "additionalProperties": { "$ref": "#" },
- "default": {}
- },
- "patternProperties": {
- "type": "object",
- "additionalProperties": { "$ref": "#" },
- "propertyNames": { "format": "regex" },
- "default": {}
- },
- "dependencies": {
- "type": "object",
- "additionalProperties": {
- "anyOf": [
- { "$ref": "#" },
- { "$ref": "#/definitions/stringArray" }
- ]
- }
- },
- "propertyNames": { "$ref": "#" },
- "const": true,
- "enum": {
- "type": "array",
- "items": true
- },
- "type": {
- "anyOf": [
- { "$ref": "#/definitions/simpleTypes" },
- {
- "type": "array",
- "items": { "$ref": "#/definitions/simpleTypes" },
- "minItems": 1,
- "uniqueItems": true
- }
- ]
- },
- "format": { "type": "string" },
- "contentMediaType": { "type": "string" },
- "contentEncoding": { "type": "string" },
- "if": {"$ref": "#"},
- "then": {"$ref": "#"},
- "else": {"$ref": "#"},
- "allOf": { "$ref": "#/definitions/schemaArray" },
- "anyOf": { "$ref": "#/definitions/schemaArray" },
- "oneOf": { "$ref": "#/definitions/schemaArray" },
- "not": { "$ref": "#" }
- },
- "default": true
-}
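
These bundled meta-schemas are what check_schema() in the deleted validators.py below validates user schemas against. A short sketch, again assuming a standalone jsonschema installation:

    from jsonschema import Draft7Validator
    from jsonschema.exceptions import SchemaError

    # Valid against the draft 7 meta-schema above.
    Draft7Validator.check_schema({"type": "array", "minItems": 1})

    try:
        # "arry" is not one of the simpleTypes enumerated in the meta-schema.
        Draft7Validator.check_schema({"type": "arry"})
    except SchemaError as err:
        print(err.message)
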
diff --git a/lib/spack/external/jsonschema/validators.py b/lib/spack/external/jsonschema/validators.py
deleted file mode 100644
index 1dc420c70d..0000000000
--- a/lib/spack/external/jsonschema/validators.py
+++ /dev/null
@@ -1,970 +0,0 @@
-"""
-Creation and extension of validators, with implementations for existing drafts.
-"""
-from __future__ import division
-
-from warnings import warn
-import contextlib
-import json
-import numbers
-
-from six import add_metaclass
-
-from jsonschema import (
- _legacy_validators,
- _types,
- _utils,
- _validators,
- exceptions,
-)
-from jsonschema.compat import (
- Sequence,
- int_types,
- iteritems,
- lru_cache,
- str_types,
- unquote,
- urldefrag,
- urljoin,
- urlopen,
- urlsplit,
-)
-
-# Sigh. https://gitlab.com/pycqa/flake8/issues/280
-# https://github.com/pyga/ebb-lint/issues/7
-# Imported for backwards compatibility.
-from jsonschema.exceptions import ErrorTree
-ErrorTree
-
-
-class _DontDoThat(Exception):
- """
- Raised when a Validator with a non-default type checker is misused.
-
- Asking such a validator for DEFAULT_TYPES doesn't make sense, since
- type checkers exist precisely for the cases whose type relationships
- DEFAULT_TYPES cannot represent.
- """
-
- def __str__(self):
- return "DEFAULT_TYPES cannot be used on Validators using TypeCheckers"
-
-
-validators = {}
-meta_schemas = _utils.URIDict()
-
-
-def _generate_legacy_type_checks(types=()):
- """
- Generate newer-style type checks out of JSON-type-name-to-type mappings.
-
- Arguments:
-
- types (dict):
-
- A mapping of type names to their Python types
-
- Returns:
-
- A dictionary of definitions to pass to `TypeChecker`
- """
- types = dict(types)
-
- def gen_type_check(pytypes):
- pytypes = _utils.flatten(pytypes)
-
- def type_check(checker, instance):
- if isinstance(instance, bool):
- if bool not in pytypes:
- return False
- return isinstance(instance, pytypes)
-
- return type_check
-
- definitions = {}
- for typename, pytypes in iteritems(types):
- definitions[typename] = gen_type_check(pytypes)
-
- return definitions
-
-
-_DEPRECATED_DEFAULT_TYPES = {
- u"array": list,
- u"boolean": bool,
- u"integer": int_types,
- u"null": type(None),
- u"number": numbers.Number,
- u"object": dict,
- u"string": str_types,
-}
-_TYPE_CHECKER_FOR_DEPRECATED_DEFAULT_TYPES = _types.TypeChecker(
- type_checkers=_generate_legacy_type_checks(_DEPRECATED_DEFAULT_TYPES),
-)
-
-
-def validates(version):
- """
- Register the decorated validator for a ``version`` of the specification.
-
- Registered validators and their meta schemas will be considered when
- parsing ``$schema`` properties' URIs.
-
- Arguments:
-
- version (str):
-
- An identifier to use as the version's name
-
- Returns:
-
- collections.Callable:
-
- a class decorator to decorate the validator with the version
- """
-
- def _validates(cls):
- validators[version] = cls
- meta_schema_id = cls.ID_OF(cls.META_SCHEMA)
- if meta_schema_id:
- meta_schemas[meta_schema_id] = cls
- return cls
- return _validates
-
-
-def _DEFAULT_TYPES(self):
- if self._CREATED_WITH_DEFAULT_TYPES is None:
- raise _DontDoThat()
-
- warn(
- (
- "The DEFAULT_TYPES attribute is deprecated. "
- "See the type checker attached to this validator instead."
- ),
- DeprecationWarning,
- stacklevel=2,
- )
- return self._DEFAULT_TYPES
-
-
-class _DefaultTypesDeprecatingMetaClass(type):
- DEFAULT_TYPES = property(_DEFAULT_TYPES)
-
-
-def _id_of(schema):
- if schema is True or schema is False:
- return u""
- return schema.get(u"$id", u"")
-
-
-def create(
- meta_schema,
- validators=(),
- version=None,
- default_types=None,
- type_checker=None,
- id_of=_id_of,
-):
- """
- Create a new validator class.
-
- Arguments:
-
- meta_schema (collections.Mapping):
-
- the meta schema for the new validator class
-
- validators (collections.Mapping):
-
- a mapping from names to callables, where each callable will
- validate the schema property with the given name.
-
- Each callable should take 4 arguments:
-
- 1. a validator instance,
- 2. the value of the property being validated within the
- instance
- 3. the instance
- 4. the schema
-
- version (str):
-
- an identifier for the version that this validator class will
- validate. If provided, the returned validator class will
- have its ``__name__`` set to include the version, and also
- will have `jsonschema.validators.validates` automatically
- called for the given version.
-
- type_checker (jsonschema.TypeChecker):
-
- a type checker, used when applying the :validator:`type` validator.
-
- If unprovided, a `jsonschema.TypeChecker` will be created
- with a set of default types typical of JSON Schema drafts.
-
- default_types (collections.Mapping):
-
- .. deprecated:: 3.0.0
-
- Please use the type_checker argument instead.
-
- If set, it provides mappings of JSON types to Python types
- that will be converted to functions and redefined in this
- object's `jsonschema.TypeChecker`.
-
- id_of (collections.Callable):
-
- A function that given a schema, returns its ID.
-
- Returns:
-
- a new `jsonschema.IValidator` class
- """
-
- if default_types is not None:
- if type_checker is not None:
- raise TypeError(
- "Do not specify default_types when providing a type checker.",
- )
- _created_with_default_types = True
- warn(
- (
- "The default_types argument is deprecated. "
- "Use the type_checker argument instead."
- ),
- DeprecationWarning,
- stacklevel=2,
- )
- type_checker = _types.TypeChecker(
- type_checkers=_generate_legacy_type_checks(default_types),
- )
- else:
- default_types = _DEPRECATED_DEFAULT_TYPES
- if type_checker is None:
- _created_with_default_types = False
- type_checker = _TYPE_CHECKER_FOR_DEPRECATED_DEFAULT_TYPES
- elif type_checker is _TYPE_CHECKER_FOR_DEPRECATED_DEFAULT_TYPES:
- _created_with_default_types = False
- else:
- _created_with_default_types = None
-
- @add_metaclass(_DefaultTypesDeprecatingMetaClass)
- class Validator(object):
-
- VALIDATORS = dict(validators)
- META_SCHEMA = dict(meta_schema)
- TYPE_CHECKER = type_checker
- ID_OF = staticmethod(id_of)
-
- DEFAULT_TYPES = property(_DEFAULT_TYPES)
- _DEFAULT_TYPES = dict(default_types)
- _CREATED_WITH_DEFAULT_TYPES = _created_with_default_types
-
- def __init__(
- self,
- schema,
- types=(),
- resolver=None,
- format_checker=None,
- ):
- if types:
- warn(
- (
- "The types argument is deprecated. Provide "
- "a type_checker to jsonschema.validators.extend "
- "instead."
- ),
- DeprecationWarning,
- stacklevel=2,
- )
-
- self.TYPE_CHECKER = self.TYPE_CHECKER.redefine_many(
- _generate_legacy_type_checks(types),
- )
-
- if resolver is None:
- resolver = RefResolver.from_schema(schema, id_of=id_of)
-
- self.resolver = resolver
- self.format_checker = format_checker
- self.schema = schema
-
- @classmethod
- def check_schema(cls, schema):
- for error in cls(cls.META_SCHEMA).iter_errors(schema):
- raise exceptions.SchemaError.create_from(error)
-
- def iter_errors(self, instance, _schema=None):
- if _schema is None:
- _schema = self.schema
-
- if _schema is True:
- return
- elif _schema is False:
- yield exceptions.ValidationError(
- "False schema does not allow %r" % (instance,),
- validator=None,
- validator_value=None,
- instance=instance,
- schema=_schema,
- )
- return
-
- scope = id_of(_schema)
- if scope:
- self.resolver.push_scope(scope)
- try:
- ref = _schema.get(u"$ref")
- if ref is not None:
- validators = [(u"$ref", ref)]
- else:
- validators = iteritems(_schema)
-
- for k, v in validators:
- validator = self.VALIDATORS.get(k)
- if validator is None:
- continue
-
- errors = validator(self, v, instance, _schema) or ()
- for error in errors:
- # set details if not already set by the called fn
- error._set(
- validator=k,
- validator_value=v,
- instance=instance,
- schema=_schema,
- )
- if k != u"$ref":
- error.schema_path.appendleft(k)
- yield error
- finally:
- if scope:
- self.resolver.pop_scope()
-
- def descend(self, instance, schema, path=None, schema_path=None):
- for error in self.iter_errors(instance, schema):
- if path is not None:
- error.path.appendleft(path)
- if schema_path is not None:
- error.schema_path.appendleft(schema_path)
- yield error
-
- def validate(self, *args, **kwargs):
- for error in self.iter_errors(*args, **kwargs):
- raise error
-
- def is_type(self, instance, type):
- try:
- return self.TYPE_CHECKER.is_type(instance, type)
- except exceptions.UndefinedTypeCheck:
- raise exceptions.UnknownType(type, instance, self.schema)
-
- def is_valid(self, instance, _schema=None):
- error = next(self.iter_errors(instance, _schema), None)
- return error is None
-
- if version is not None:
- Validator = validates(version)(Validator)
- Validator.__name__ = version.title().replace(" ", "") + "Validator"
-
- return Validator
-
-
-def extend(validator, validators=(), version=None, type_checker=None):
- """
- Create a new validator class by extending an existing one.
-
- Arguments:
-
- validator (jsonschema.IValidator):
-
- an existing validator class
-
- validators (collections.Mapping):
-
- a mapping of new validator callables to extend with, whose
- structure is as in `create`.
-
- .. note::
-
- Any validator callables with the same name as an
- existing one will (silently) replace the old validator
- callable entirely, effectively overriding any validation
- done in the "parent" validator class.
-
- If you wish to instead extend the behavior of a parent's
- validator callable, delegate and call it directly in
- the new validator function by retrieving it using
- ``OldValidator.VALIDATORS["validator_name"]``.
-
- version (str):
-
- a version for the new validator class
-
- type_checker (jsonschema.TypeChecker):
-
- a type checker, used when applying the :validator:`type` validator.
-
- If unprovided, the type checker of the extended
- `jsonschema.IValidator` will be carried along.
-
- Returns:
-
- a new `jsonschema.IValidator` class extending the one provided
-
- .. note:: Meta Schemas
-
- The new validator class will have its parent's meta schema.
-
- If you wish to change or extend the meta schema in the new
- validator class, modify ``META_SCHEMA`` directly on the returned
- class. Note that no implicit copying is done, so a copy should
- likely be made before modifying it, in order to not affect the
- old validator.
- """
-
- all_validators = dict(validator.VALIDATORS)
- all_validators.update(validators)
-
- if type_checker is None:
- type_checker = validator.TYPE_CHECKER
- elif validator._CREATED_WITH_DEFAULT_TYPES:
- raise TypeError(
- "Cannot extend a validator created with default_types "
- "with a type_checker. Update the validator to use a "
- "type_checker when created."
- )
- return create(
- meta_schema=validator.META_SCHEMA,
- validators=all_validators,
- version=version,
- type_checker=type_checker,
- id_of=validator.ID_OF,
- )
-
-
-Draft3Validator = create(
- meta_schema=_utils.load_schema("draft3"),
- validators={
- u"$ref": _validators.ref,
- u"additionalItems": _validators.additionalItems,
- u"additionalProperties": _validators.additionalProperties,
- u"dependencies": _legacy_validators.dependencies_draft3,
- u"disallow": _legacy_validators.disallow_draft3,
- u"divisibleBy": _validators.multipleOf,
- u"enum": _validators.enum,
- u"extends": _legacy_validators.extends_draft3,
- u"format": _validators.format,
- u"items": _legacy_validators.items_draft3_draft4,
- u"maxItems": _validators.maxItems,
- u"maxLength": _validators.maxLength,
- u"maximum": _legacy_validators.maximum_draft3_draft4,
- u"minItems": _validators.minItems,
- u"minLength": _validators.minLength,
- u"minimum": _legacy_validators.minimum_draft3_draft4,
- u"pattern": _validators.pattern,
- u"patternProperties": _validators.patternProperties,
- u"properties": _legacy_validators.properties_draft3,
- u"type": _legacy_validators.type_draft3,
- u"uniqueItems": _validators.uniqueItems,
- },
- type_checker=_types.draft3_type_checker,
- version="draft3",
- id_of=lambda schema: schema.get(u"id", ""),
-)
-
-Draft4Validator = create(
- meta_schema=_utils.load_schema("draft4"),
- validators={
- u"$ref": _validators.ref,
- u"additionalItems": _validators.additionalItems,
- u"additionalProperties": _validators.additionalProperties,
- u"allOf": _validators.allOf,
- u"anyOf": _validators.anyOf,
- u"dependencies": _validators.dependencies,
- u"enum": _validators.enum,
- u"format": _validators.format,
- u"items": _legacy_validators.items_draft3_draft4,
- u"maxItems": _validators.maxItems,
- u"maxLength": _validators.maxLength,
- u"maxProperties": _validators.maxProperties,
- u"maximum": _legacy_validators.maximum_draft3_draft4,
- u"minItems": _validators.minItems,
- u"minLength": _validators.minLength,
- u"minProperties": _validators.minProperties,
- u"minimum": _legacy_validators.minimum_draft3_draft4,
- u"multipleOf": _validators.multipleOf,
- u"not": _validators.not_,
- u"oneOf": _validators.oneOf,
- u"pattern": _validators.pattern,
- u"patternProperties": _validators.patternProperties,
- u"properties": _validators.properties,
- u"required": _validators.required,
- u"type": _validators.type,
- u"uniqueItems": _validators.uniqueItems,
- },
- type_checker=_types.draft4_type_checker,
- version="draft4",
- id_of=lambda schema: schema.get(u"id", ""),
-)
-
-Draft6Validator = create(
- meta_schema=_utils.load_schema("draft6"),
- validators={
- u"$ref": _validators.ref,
- u"additionalItems": _validators.additionalItems,
- u"additionalProperties": _validators.additionalProperties,
- u"allOf": _validators.allOf,
- u"anyOf": _validators.anyOf,
- u"const": _validators.const,
- u"contains": _validators.contains,
- u"dependencies": _validators.dependencies,
- u"enum": _validators.enum,
- u"exclusiveMaximum": _validators.exclusiveMaximum,
- u"exclusiveMinimum": _validators.exclusiveMinimum,
- u"format": _validators.format,
- u"items": _validators.items,
- u"maxItems": _validators.maxItems,
- u"maxLength": _validators.maxLength,
- u"maxProperties": _validators.maxProperties,
- u"maximum": _validators.maximum,
- u"minItems": _validators.minItems,
- u"minLength": _validators.minLength,
- u"minProperties": _validators.minProperties,
- u"minimum": _validators.minimum,
- u"multipleOf": _validators.multipleOf,
- u"not": _validators.not_,
- u"oneOf": _validators.oneOf,
- u"pattern": _validators.pattern,
- u"patternProperties": _validators.patternProperties,
- u"properties": _validators.properties,
- u"propertyNames": _validators.propertyNames,
- u"required": _validators.required,
- u"type": _validators.type,
- u"uniqueItems": _validators.uniqueItems,
- },
- type_checker=_types.draft6_type_checker,
- version="draft6",
-)
-
-Draft7Validator = create(
- meta_schema=_utils.load_schema("draft7"),
- validators={
- u"$ref": _validators.ref,
- u"additionalItems": _validators.additionalItems,
- u"additionalProperties": _validators.additionalProperties,
- u"allOf": _validators.allOf,
- u"anyOf": _validators.anyOf,
- u"const": _validators.const,
- u"contains": _validators.contains,
- u"dependencies": _validators.dependencies,
- u"enum": _validators.enum,
- u"exclusiveMaximum": _validators.exclusiveMaximum,
- u"exclusiveMinimum": _validators.exclusiveMinimum,
- u"format": _validators.format,
- u"if": _validators.if_,
- u"items": _validators.items,
- u"maxItems": _validators.maxItems,
- u"maxLength": _validators.maxLength,
- u"maxProperties": _validators.maxProperties,
- u"maximum": _validators.maximum,
- u"minItems": _validators.minItems,
- u"minLength": _validators.minLength,
- u"minProperties": _validators.minProperties,
- u"minimum": _validators.minimum,
- u"multipleOf": _validators.multipleOf,
- u"oneOf": _validators.oneOf,
- u"not": _validators.not_,
- u"pattern": _validators.pattern,
- u"patternProperties": _validators.patternProperties,
- u"properties": _validators.properties,
- u"propertyNames": _validators.propertyNames,
- u"required": _validators.required,
- u"type": _validators.type,
- u"uniqueItems": _validators.uniqueItems,
- },
- type_checker=_types.draft7_type_checker,
- version="draft7",
-)
-
-_LATEST_VERSION = Draft7Validator
-
-
-class RefResolver(object):
- """
- Resolve JSON References.
-
- Arguments:
-
- base_uri (str):
-
- The URI of the referring document
-
- referrer:
-
- The actual referring document
-
- store (dict):
-
- A mapping from URIs to documents to cache
-
- cache_remote (bool):
-
- Whether remote refs should be cached after first resolution
-
- handlers (dict):
-
- A mapping from URI schemes to functions that should be used
- to retrieve them
-
- urljoin_cache (:func:`functools.lru_cache`):
-
- A cache that will be used for caching the results of joining
- the resolution scope to subscopes.
-
- remote_cache (:func:`functools.lru_cache`):
-
- A cache that will be used for caching the results of
- resolved remote URLs.
-
- Attributes:
-
- cache_remote (bool):
-
- Whether remote refs should be cached after first resolution
- """
-
- def __init__(
- self,
- base_uri,
- referrer,
- store=(),
- cache_remote=True,
- handlers=(),
- urljoin_cache=None,
- remote_cache=None,
- ):
- if urljoin_cache is None:
- urljoin_cache = lru_cache(1024)(urljoin)
- if remote_cache is None:
- remote_cache = lru_cache(1024)(self.resolve_from_url)
-
- self.referrer = referrer
- self.cache_remote = cache_remote
- self.handlers = dict(handlers)
-
- self._scopes_stack = [base_uri]
- self.store = _utils.URIDict(
- (id, validator.META_SCHEMA)
- for id, validator in iteritems(meta_schemas)
- )
- self.store.update(store)
- self.store[base_uri] = referrer
-
- self._urljoin_cache = urljoin_cache
- self._remote_cache = remote_cache
-
- @classmethod
- def from_schema(cls, schema, id_of=_id_of, *args, **kwargs):
- """
- Construct a resolver from a JSON schema object.
-
- Arguments:
-
- schema:
-
- the referring schema
-
- Returns:
-
- `RefResolver`
- """
-
- return cls(base_uri=id_of(schema), referrer=schema, *args, **kwargs)
-
- def push_scope(self, scope):
- """
- Enter a given sub-scope.
-
- Treats further dereferences as being performed underneath the
- given scope.
- """
- self._scopes_stack.append(
- self._urljoin_cache(self.resolution_scope, scope),
- )
-
- def pop_scope(self):
- """
- Exit the most recently entered scope.
-
- Treats further dereferences as being performed underneath the
- original scope.
-
- Don't call this method more times than `push_scope` has been
- called.
- """
- try:
- self._scopes_stack.pop()
- except IndexError:
- raise exceptions.RefResolutionError(
- "Failed to pop the scope from an empty stack. "
- "`pop_scope()` should only be called once for every "
- "`push_scope()`"
- )
-
- @property
- def resolution_scope(self):
- """
- Retrieve the current resolution scope.
- """
- return self._scopes_stack[-1]
-
- @property
- def base_uri(self):
- """
- Retrieve the current base URI, not including any fragment.
- """
- uri, _ = urldefrag(self.resolution_scope)
- return uri
-
- @contextlib.contextmanager
- def in_scope(self, scope):
- """
- Temporarily enter the given scope for the duration of the context.
- """
- self.push_scope(scope)
- try:
- yield
- finally:
- self.pop_scope()
-
- @contextlib.contextmanager
- def resolving(self, ref):
- """
- Resolve the given ``ref`` and enter its resolution scope.
-
- Exits the scope on exit of this context manager.
-
- Arguments:
-
- ref (str):
-
- The reference to resolve
- """
-
- url, resolved = self.resolve(ref)
- self.push_scope(url)
- try:
- yield resolved
- finally:
- self.pop_scope()
-
- def resolve(self, ref):
- """
- Resolve the given reference.
- """
- url = self._urljoin_cache(self.resolution_scope, ref)
- return url, self._remote_cache(url)
-
- def resolve_from_url(self, url):
- """
- Resolve the given remote URL.
- """
- url, fragment = urldefrag(url)
- try:
- document = self.store[url]
- except KeyError:
- try:
- document = self.resolve_remote(url)
- except Exception as exc:
- raise exceptions.RefResolutionError(exc)
-
- return self.resolve_fragment(document, fragment)
-
- def resolve_fragment(self, document, fragment):
- """
- Resolve a ``fragment`` within the referenced ``document``.
-
- Arguments:
-
- document:
-
- The referent document
-
- fragment (str):
-
- a URI fragment to resolve within it
- """
-
- fragment = fragment.lstrip(u"/")
- parts = unquote(fragment).split(u"/") if fragment else []
-
- for part in parts:
- part = part.replace(u"~1", u"/").replace(u"~0", u"~")
-
- if isinstance(document, Sequence):
- # Array indexes should be turned into integers
- try:
- part = int(part)
- except ValueError:
- pass
- try:
- document = document[part]
- except (TypeError, LookupError):
- raise exceptions.RefResolutionError(
- "Unresolvable JSON pointer: %r" % fragment
- )
-
- return document
-
- def resolve_remote(self, uri):
- """
- Resolve a remote ``uri``.
-
- If called directly, this does not check the store first; after
- retrieving the document at the specified URI, it will be saved in
- the store if :attr:`cache_remote` is True.
-
- .. note::
-
- If the requests_ library is present, ``jsonschema`` will use it to
- request the remote ``uri``, so that the correct encoding is
- detected and used.
-
- If it isn't, or if the scheme of the ``uri`` is not ``http`` or
- ``https``, UTF-8 is assumed.
-
- Arguments:
-
- uri (str):
-
- The URI to resolve
-
- Returns:
-
- The retrieved document
-
- .. _requests: https://pypi.org/project/requests/
- """
- try:
- import requests
- except ImportError:
- requests = None
-
- scheme = urlsplit(uri).scheme
-
- if scheme in self.handlers:
- result = self.handlers[scheme](uri)
- elif scheme in [u"http", u"https"] and requests:
- # Requests has support for detecting the correct encoding of
- # json over http
- result = requests.get(uri).json()
- else:
- # Otherwise, pass off to urllib and assume utf-8
- with urlopen(uri) as url:
- result = json.loads(url.read().decode("utf-8"))
-
- if self.cache_remote:
- self.store[uri] = result
- return result
-
-
-def validate(instance, schema, cls=None, *args, **kwargs):
- """
- Validate an instance under the given schema.
-
- >>> validate([2, 3, 4], {"maxItems": 2})
- Traceback (most recent call last):
- ...
- ValidationError: [2, 3, 4] is too long
-
-:func:`validate` will first verify that the provided schema is
-itself valid; skipping that check can lead to less obvious error
-messages and to failures in less obvious or consistent ways.
-
- If you know you have a valid schema already, especially if you
- intend to validate multiple instances with the same schema, you
- likely would prefer using the `IValidator.validate` method directly
- on a specific validator (e.g. ``Draft7Validator.validate``).
-
-
- Arguments:
-
- instance:
-
- The instance to validate
-
- schema:
-
- The schema to validate with
-
- cls (IValidator):
-
- The class that will be used to validate the instance.
-
- If the ``cls`` argument is not provided, two things will happen
- in accordance with the specification. First, if the schema has a
- :validator:`$schema` property containing a known meta-schema [#]_
- then the proper validator will be used. The specification recommends
- that all schemas contain :validator:`$schema` properties for this
- reason. If no :validator:`$schema` property is found, the default
- validator class is the latest released draft.
-
- Any other provided positional and keyword arguments will be passed
- on when instantiating the ``cls``.
-
- Raises:
-
- `jsonschema.exceptions.ValidationError` if the instance
- is invalid
-
- `jsonschema.exceptions.SchemaError` if the schema itself
- is invalid
-
- .. rubric:: Footnotes
- .. [#] known by a validator registered with
- `jsonschema.validators.validates`
- """
- if cls is None:
- cls = validator_for(schema)
-
- cls.check_schema(schema)
- validator = cls(schema, *args, **kwargs)
- error = exceptions.best_match(validator.iter_errors(instance))
- if error is not None:
- raise error
-
-
-def validator_for(schema, default=_LATEST_VERSION):
- """
- Retrieve the validator class appropriate for validating the given schema.
-
- Uses the :validator:`$schema` property that should be present in the
- given schema to look up the appropriate validator class.
-
- Arguments:
-
- schema (collections.Mapping or bool):
-
- the schema to look at
-
- default:
-
- the default to return if the appropriate validator class
- cannot be determined.
-
- If unprovided, the default is to return the latest supported
- draft.
- """
- if schema is True or schema is False or u"$schema" not in schema:
- return default
- if schema[u"$schema"] not in meta_schemas:
- warn(
- (
- "The metaschema specified by $schema was not found. "
- "Using the latest draft to validate, but this will raise "
- "an error in the future."
- ),
- DeprecationWarning,
- stacklevel=2,
- )
- return meta_schemas.get(schema[u"$schema"], _LATEST_VERSION)
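
For reference, the public entry points of the module deleted above are validate(), validator_for(), and the per-draft validator classes. A minimal sketch of typical use, assuming jsonschema 3.x is available as an ordinary dependency:

    import jsonschema
    from jsonschema import validators

    # validator_for() resolves the validator class from the $schema URI,
    # falling back to the latest supported draft when $schema is absent.
    schema = {"$schema": "http://json-schema.org/draft-07/schema#", "maxItems": 2}
    cls = validators.validator_for(schema)  # Draft7Validator
    cls.check_schema(schema)

    try:
        jsonschema.validate([1, 2, 3], schema)  # instance has too many items
    except jsonschema.ValidationError as err:
        print(err.message)
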
diff --git a/lib/spack/external/macholib/MachO.py b/lib/spack/external/macholib/MachO.py
deleted file mode 100644
index 3db95201f0..0000000000
--- a/lib/spack/external/macholib/MachO.py
+++ /dev/null
@@ -1,471 +0,0 @@
-"""
-Utilities for reading and writing Mach-O headers
-"""
-from __future__ import print_function
-
-import os
-import struct
-import sys
-
-from macholib.util import fileview
-
-from .mach_o import (
- FAT_MAGIC,
- FAT_MAGIC_64,
- LC_DYSYMTAB,
- LC_ID_DYLIB,
- LC_LOAD_DYLIB,
- LC_LOAD_UPWARD_DYLIB,
- LC_LOAD_WEAK_DYLIB,
- LC_PREBOUND_DYLIB,
- LC_REEXPORT_DYLIB,
- LC_REGISTRY,
- LC_SEGMENT,
- LC_SEGMENT_64,
- LC_SYMTAB,
- MH_CIGAM,
- MH_CIGAM_64,
- MH_FILETYPE_SHORTNAMES,
- MH_MAGIC,
- MH_MAGIC_64,
- S_ZEROFILL,
- fat_arch,
- fat_arch64,
- fat_header,
- load_command,
- mach_header,
- mach_header_64,
- section,
- section_64,
-)
-from .ptypes import sizeof
-
-try:
- from macholib.compat import bytes
-except ImportError:
- pass
-
-try:
- unicode
-except NameError:
- unicode = str
-
-if sys.version_info[0] == 2:
- range = xrange # noqa: F821
-
-__all__ = ["MachO"]
-
-_RELOCATABLE = {
- # relocatable commands that should be used for dependency walking
- LC_LOAD_DYLIB,
- LC_LOAD_UPWARD_DYLIB,
- LC_LOAD_WEAK_DYLIB,
- LC_PREBOUND_DYLIB,
- LC_REEXPORT_DYLIB,
-}
-
-_RELOCATABLE_NAMES = {
- LC_LOAD_DYLIB: "load_dylib",
- LC_LOAD_UPWARD_DYLIB: "load_upward_dylib",
- LC_LOAD_WEAK_DYLIB: "load_weak_dylib",
- LC_PREBOUND_DYLIB: "prebound_dylib",
- LC_REEXPORT_DYLIB: "reexport_dylib",
-}
-
-
-def _shouldRelocateCommand(cmd):
- """
- Should this command id be investigated for relocation?
- """
- return cmd in _RELOCATABLE
-
-
-def lc_str_value(offset, cmd_info):
- """
- Fetch the actual value of a field of type "lc_str"
- """
- cmd_load, cmd_cmd, cmd_data = cmd_info
-
- offset -= sizeof(cmd_load) + sizeof(cmd_cmd)
- return cmd_data[offset:].strip(b"\x00")
-
-
-class MachO(object):
- """
- Provides reading/writing the Mach-O header of a specific existing file
- """
-
- # filename - the original filename of this mach-o
- # sizediff - the current deviation from the initial mach-o size
- # header - the mach-o header
- # commands - a list of (load_command, somecommand, data)
- # data is either a str, or a list of segment structures
- # total_size - the current mach-o header size (including header)
- # low_offset - essentially, the maximum mach-o header size
- # id_cmd - the index of my id command, or None
-
- def __init__(self, filename):
-
- # supports the ObjectGraph protocol
- self.graphident = filename
- self.filename = filename
- self.loader_path = os.path.dirname(filename)
-
- # initialized by load
- self.fat = None
- self.headers = []
- with open(filename, "rb") as fp:
- self.load(fp)
-
- def __repr__(self):
- return "<MachO filename=%r>" % (self.filename,)
-
- def load(self, fh):
- assert fh.tell() == 0
- header = struct.unpack(">I", fh.read(4))[0]
- fh.seek(0)
- if header in (FAT_MAGIC, FAT_MAGIC_64):
- self.load_fat(fh)
- else:
- fh.seek(0, 2)
- size = fh.tell()
- fh.seek(0)
- self.load_header(fh, 0, size)
-
- def load_fat(self, fh):
- self.fat = fat_header.from_fileobj(fh)
- if self.fat.magic == FAT_MAGIC:
- archs = [fat_arch.from_fileobj(fh) for i in range(self.fat.nfat_arch)]
- elif self.fat.magic == FAT_MAGIC_64:
- archs = [fat_arch64.from_fileobj(fh) for i in range(self.fat.nfat_arch)]
- else:
- raise ValueError("Unknown fat header magic: %r" % (self.fat.magic))
-
- for arch in archs:
- self.load_header(fh, arch.offset, arch.size)
-
- def rewriteLoadCommands(self, *args, **kw):
- changed = False
- for header in self.headers:
- if header.rewriteLoadCommands(*args, **kw):
- changed = True
- return changed
-
- def load_header(self, fh, offset, size):
- fh.seek(offset)
- header = struct.unpack(">I", fh.read(4))[0]
- fh.seek(offset)
- if header == MH_MAGIC:
- magic, hdr, endian = MH_MAGIC, mach_header, ">"
- elif header == MH_CIGAM:
- magic, hdr, endian = MH_CIGAM, mach_header, "<"
- elif header == MH_MAGIC_64:
- magic, hdr, endian = MH_MAGIC_64, mach_header_64, ">"
- elif header == MH_CIGAM_64:
- magic, hdr, endian = MH_CIGAM_64, mach_header_64, "<"
- else:
- raise ValueError("Unknown Mach-O header: 0x%08x in %r" % (header, fh))
- hdr = MachOHeader(self, fh, offset, size, magic, hdr, endian)
- self.headers.append(hdr)
-
- def write(self, f):
- for header in self.headers:
- header.write(f)
-
-
-class MachOHeader(object):
- """
- Provides reading/writing the Mach-O header of a specific existing file
- """
-
- # filename - the original filename of this mach-o
- # sizediff - the current deviation from the initial mach-o size
- # header - the mach-o header
- # commands - a list of (load_command, somecommand, data)
- # data is either a str, or a list of segment structures
- # total_size - the current mach-o header size (including header)
- # low_offset - essentially, the maximum mach-o header size
- # id_cmd - the index of my id command, or None
-
- def __init__(self, parent, fh, offset, size, magic, hdr, endian):
- self.MH_MAGIC = magic
- self.mach_header = hdr
-
- # These are all initialized by self.load()
- self.parent = parent
- self.offset = offset
- self.size = size
-
- self.endian = endian
- self.header = None
- self.commands = None
- self.id_cmd = None
- self.sizediff = None
- self.total_size = None
- self.low_offset = None
- self.filetype = None
- self.headers = []
-
- self.load(fh)
-
- def __repr__(self):
- return "<%s filename=%r offset=%d size=%d endian=%r>" % (
- type(self).__name__,
- self.parent.filename,
- self.offset,
- self.size,
- self.endian,
- )
-
- def load(self, fh):
- fh = fileview(fh, self.offset, self.size)
- fh.seek(0)
-
- self.sizediff = 0
- kw = {"_endian_": self.endian}
- header = self.mach_header.from_fileobj(fh, **kw)
- self.header = header
- # if header.magic != self.MH_MAGIC:
- # raise ValueError("header has magic %08x, expecting %08x" % (
- # header.magic, self.MH_MAGIC))
-
- cmd = self.commands = []
-
- self.filetype = self.get_filetype_shortname(header.filetype)
-
- read_bytes = 0
- low_offset = sys.maxsize
- for i in range(header.ncmds):
- # read the load command
- cmd_load = load_command.from_fileobj(fh, **kw)
-
- # read the specific command
- klass = LC_REGISTRY.get(cmd_load.cmd, None)
- if klass is None:
- raise ValueError("Unknown load command: %d" % (cmd_load.cmd,))
- cmd_cmd = klass.from_fileobj(fh, **kw)
-
- if cmd_load.cmd == LC_ID_DYLIB:
- # remember where this command was
- if self.id_cmd is not None:
- raise ValueError("This dylib already has an id")
- self.id_cmd = i
-
- if cmd_load.cmd in (LC_SEGMENT, LC_SEGMENT_64):
- # for segment commands, read the list of segments
- segs = []
- # assert that the size makes sense
- if cmd_load.cmd == LC_SEGMENT:
- section_cls = section
- else: # LC_SEGMENT_64
- section_cls = section_64
-
- expected_size = (
- sizeof(klass)
- + sizeof(load_command)
- + (sizeof(section_cls) * cmd_cmd.nsects)
- )
- if cmd_load.cmdsize != expected_size:
- raise ValueError("Segment size mismatch")
- # a segment with no sections is a zero-fill block or similar,
- # so its beginning is wherever the fileoff of this command is
- if cmd_cmd.nsects == 0:
- if cmd_cmd.filesize != 0:
- low_offset = min(low_offset, cmd_cmd.fileoff)
- else:
- # this one has multiple segments
- for _j in range(cmd_cmd.nsects):
- # read the segment
- seg = section_cls.from_fileobj(fh, **kw)
- # if the segment has a size and is not zero filled
- # then its beginning is the offset of this segment
- not_zerofill = (seg.flags & S_ZEROFILL) != S_ZEROFILL
- if seg.offset > 0 and seg.size > 0 and not_zerofill:
- low_offset = min(low_offset, seg.offset)
- if not_zerofill:
- c = fh.tell()
- fh.seek(seg.offset)
- sd = fh.read(seg.size)
- seg.add_section_data(sd)
- fh.seek(c)
- segs.append(seg)
- # data is a list of segments
- cmd_data = segs
-
- # These are disabled for now because writing back doesn't work
- # elif cmd_load.cmd == LC_CODE_SIGNATURE:
- # c = fh.tell()
- # fh.seek(cmd_cmd.dataoff)
- # cmd_data = fh.read(cmd_cmd.datasize)
- # fh.seek(c)
- # elif cmd_load.cmd == LC_SYMTAB:
- # c = fh.tell()
- # fh.seek(cmd_cmd.stroff)
- # cmd_data = fh.read(cmd_cmd.strsize)
- # fh.seek(c)
-
- else:
- # data is a raw str
- data_size = cmd_load.cmdsize - sizeof(klass) - sizeof(load_command)
- cmd_data = fh.read(data_size)
- cmd.append((cmd_load, cmd_cmd, cmd_data))
- read_bytes += cmd_load.cmdsize
-
- # make sure the header made sense
- if read_bytes != header.sizeofcmds:
- raise ValueError(
- "Read %d bytes, header reports %d bytes"
- % (read_bytes, header.sizeofcmds)
- )
- self.total_size = sizeof(self.mach_header) + read_bytes
- self.low_offset = low_offset
-
- def walkRelocatables(self, shouldRelocateCommand=_shouldRelocateCommand):
- """
- for all relocatable commands
- yield (command_index, command_name, filename)
- """
- for (idx, (lc, cmd, data)) in enumerate(self.commands):
- if shouldRelocateCommand(lc.cmd):
- name = _RELOCATABLE_NAMES[lc.cmd]
- ofs = cmd.name - sizeof(lc.__class__) - sizeof(cmd.__class__)
- yield idx, name, data[
- ofs : data.find(b"\x00", ofs) # noqa: E203
- ].decode(sys.getfilesystemencoding())
-
- def rewriteInstallNameCommand(self, loadcmd):
- """Rewrite the load command of this dylib"""
- if self.id_cmd is not None:
- self.rewriteDataForCommand(self.id_cmd, loadcmd)
- return True
- return False
-
- def changedHeaderSizeBy(self, bytes):
- self.sizediff += bytes
- if (self.total_size + self.sizediff) > self.low_offset:
- print(
- "WARNING: Mach-O header in %r may be too large to relocate"
- % (self.parent.filename,)
- )
-
- def rewriteLoadCommands(self, changefunc):
- """
- Rewrite the load commands based upon a change dictionary
- """
- data = changefunc(self.parent.filename)
- changed = False
- if data is not None:
- if self.rewriteInstallNameCommand(data.encode(sys.getfilesystemencoding())):
- changed = True
- for idx, _name, filename in self.walkRelocatables():
- data = changefunc(filename)
- if data is not None:
- if self.rewriteDataForCommand(
- idx, data.encode(sys.getfilesystemencoding())
- ):
- changed = True
- return changed
-
- def rewriteDataForCommand(self, idx, data):
- lc, cmd, old_data = self.commands[idx]
- hdrsize = sizeof(lc.__class__) + sizeof(cmd.__class__)
- align = struct.calcsize("Q")
- data = data + (b"\x00" * (align - (len(data) % align)))
- newsize = hdrsize + len(data)
- self.commands[idx] = (lc, cmd, data)
- self.changedHeaderSizeBy(newsize - lc.cmdsize)
- lc.cmdsize, cmd.name = newsize, hdrsize
- return True
-
- def synchronize_size(self):
- if (self.total_size + self.sizediff) > self.low_offset:
- raise ValueError(
- (
- "New Mach-O header is too large to relocate in %r "
- "(new size=%r, max size=%r, delta=%r)"
- )
- % (
- self.parent.filename,
- self.total_size + self.sizediff,
- self.low_offset,
- self.sizediff,
- )
- )
- self.header.sizeofcmds += self.sizediff
- self.total_size = sizeof(self.mach_header) + self.header.sizeofcmds
- self.sizediff = 0
-
- def write(self, fileobj):
- fileobj = fileview(fileobj, self.offset, self.size)
- fileobj.seek(0)
-
- # serialize all the mach-o commands
- self.synchronize_size()
-
- self.header.to_fileobj(fileobj)
- for lc, cmd, data in self.commands:
- lc.to_fileobj(fileobj)
- cmd.to_fileobj(fileobj)
-
- if sys.version_info[0] == 2:
- if isinstance(data, unicode):
- fileobj.write(data.encode(sys.getfilesystemencoding()))
-
- elif isinstance(data, (bytes, str)):
- fileobj.write(data)
- else:
- # segments..
- for obj in data:
- obj.to_fileobj(fileobj)
- else:
- if isinstance(data, str):
- fileobj.write(data.encode(sys.getfilesystemencoding()))
-
- elif isinstance(data, bytes):
- fileobj.write(data)
-
- else:
- # segments..
- for obj in data:
- obj.to_fileobj(fileobj)
-
- # zero out the unused space; this is probably not strictly
- # necessary and is generally already the case
- fileobj.write(b"\x00" * (self.low_offset - fileobj.tell()))
-
- def getSymbolTableCommand(self):
- for lc, cmd, _data in self.commands:
- if lc.cmd == LC_SYMTAB:
- return cmd
- return None
-
- def getDynamicSymbolTableCommand(self):
- for lc, cmd, _data in self.commands:
- if lc.cmd == LC_DYSYMTAB:
- return cmd
- return None
-
- def get_filetype_shortname(self, filetype):
- if filetype in MH_FILETYPE_SHORTNAMES:
- return MH_FILETYPE_SHORTNAMES[filetype]
- else:
- return "unknown"
-
-
-def main(fn):
- m = MachO(fn)
- seen = set()
- for header in m.headers:
- for _idx, name, other in header.walkRelocatables():
- if other not in seen:
- seen.add(other)
- print("\t" + name + ": " + other)
-
-
-if __name__ == "__main__":
- import sys
-
- files = sys.argv[1:] or ["/bin/ls"]
- for fn in files:
- print(fn)
- main(fn)
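
The main() above is the module's smoke test; the same dependency walk in library form (a sketch, assuming macholib is installed separately and run against a Mach-O binary such as /bin/ls on macOS):

    from macholib.MachO import MachO

    # A fat binary yields one MachOHeader per architecture; each header's
    # walkRelocatables() lists the load commands that reference other dylibs.
    m = MachO("/bin/ls")
    for idx_header in m.headers:
        for idx, name, path in idx_header.walkRelocatables():
            print(idx, name, path)
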
diff --git a/lib/spack/external/macholib/MachOGraph.py b/lib/spack/external/macholib/MachOGraph.py
deleted file mode 100644
index 8943ed8bd3..0000000000
--- a/lib/spack/external/macholib/MachOGraph.py
+++ /dev/null
@@ -1,141 +0,0 @@
-"""
-Utilities for reading and writing Mach-O headers
-"""
-
-import os
-import sys
-
-from altgraph.ObjectGraph import ObjectGraph
-
-from macholib.dyld import dyld_find
-from macholib.itergraphreport import itergraphreport
-from macholib.MachO import MachO
-
-__all__ = ["MachOGraph"]
-
-try:
- unicode
-except NameError:
- unicode = str
-
-
-class MissingMachO(object):
- def __init__(self, filename):
- self.graphident = filename
- self.headers = ()
-
- def __repr__(self):
- return "<%s graphident=%r>" % (type(self).__name__, self.graphident)
-
-
-class MachOGraph(ObjectGraph):
- """
- Graph data structure of Mach-O dependencies
- """
-
- def __init__(self, debug=0, graph=None, env=None, executable_path=None):
- super(MachOGraph, self).__init__(debug=debug, graph=graph)
- self.env = env
- self.trans_table = {}
- self.executable_path = executable_path
-
- def locate(self, filename, loader=None):
- if not isinstance(filename, (str, unicode)):
- raise TypeError("%r is not a string" % (filename,))
- if filename.startswith("@loader_path/") and loader is not None:
- fn = self.trans_table.get((loader.filename, filename))
- if fn is None:
- loader_path = loader.loader_path
-
- try:
- fn = dyld_find(
- filename,
- env=self.env,
- executable_path=self.executable_path,
- loader_path=loader_path,
- )
- self.trans_table[(loader.filename, filename)] = fn
- except ValueError:
- return None
-
- else:
- fn = self.trans_table.get(filename)
- if fn is None:
- try:
- fn = dyld_find(
- filename, env=self.env, executable_path=self.executable_path
- )
- self.trans_table[filename] = fn
- except ValueError:
- return None
- return fn
-
- def findNode(self, name, loader=None):
- assert isinstance(name, (str, unicode))
- data = super(MachOGraph, self).findNode(name)
- if data is not None:
- return data
- newname = self.locate(name, loader=loader)
- if newname is not None and newname != name:
- return self.findNode(newname)
- return None
-
- def run_file(self, pathname, caller=None):
- assert isinstance(pathname, (str, unicode))
- self.msgin(2, "run_file", pathname)
- m = self.findNode(pathname, loader=caller)
- if m is None:
- if not os.path.exists(pathname):
- raise ValueError("%r does not exist" % (pathname,))
- m = self.createNode(MachO, pathname)
- self.createReference(caller, m, edge_data="run_file")
- self.scan_node(m)
- self.msgout(2, "")
- return m
-
- def load_file(self, name, caller=None):
- assert isinstance(name, (str, unicode))
- self.msgin(2, "load_file", name, caller)
- m = self.findNode(name, loader=caller)
- if m is None:
- newname = self.locate(name, loader=caller)
- if newname is not None and newname != name:
- return self.load_file(newname, caller=caller)
- if os.path.exists(name):
- m = self.createNode(MachO, name)
- self.scan_node(m)
- else:
- m = self.createNode(MissingMachO, name)
- self.msgout(2, "")
- return m
-
- def scan_node(self, node):
- self.msgin(2, "scan_node", node)
- for header in node.headers:
- for _idx, name, filename in header.walkRelocatables():
- assert isinstance(name, (str, unicode))
- assert isinstance(filename, (str, unicode))
- m = self.load_file(filename, caller=node)
- self.createReference(node, m, edge_data=name)
- self.msgout(2, "", node)
-
- def itergraphreport(self, name="G"):
- nodes = map(self.graph.describe_node, self.graph.iterdfs(self))
- describe_edge = self.graph.describe_edge
- return itergraphreport(nodes, describe_edge, name=name)
-
- def graphreport(self, fileobj=None):
- if fileobj is None:
- fileobj = sys.stdout
- fileobj.writelines(self.itergraphreport())
-
-
-def main(args):
- g = MachOGraph()
- for arg in args:
- g.run_file(arg)
- g.graphreport()
-
-
-if __name__ == "__main__":
- main(sys.argv[1:] or ["/bin/ls"])
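
A sketch of driving MachOGraph directly, mirroring main() above (assumes macholib on macOS):

    from macholib.MachOGraph import MachOGraph

    # Build the dependency graph rooted at an executable, then emit the
    # graphviz-style report that graphreport() writes to stdout.
    g = MachOGraph()
    g.run_file("/bin/ls")
    g.graphreport()
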
diff --git a/lib/spack/external/macholib/MachOStandalone.py b/lib/spack/external/macholib/MachOStandalone.py
deleted file mode 100644
index 6ce154227c..0000000000
--- a/lib/spack/external/macholib/MachOStandalone.py
+++ /dev/null
@@ -1,173 +0,0 @@
-import os
-from collections import deque
-
-from macholib.dyld import framework_info
-from macholib.MachOGraph import MachOGraph, MissingMachO
-from macholib.util import (
- flipwritable,
- has_filename_filter,
- in_system_path,
- iter_platform_files,
- mergecopy,
- mergetree,
-)
-
-
-class ExcludedMachO(MissingMachO):
- pass
-
-
-class FilteredMachOGraph(MachOGraph):
- def __init__(self, delegate, *args, **kwargs):
- super(FilteredMachOGraph, self).__init__(*args, **kwargs)
- self.delegate = delegate
-
- def createNode(self, cls, name):
- cls = self.delegate.getClass(name, cls)
- res = super(FilteredMachOGraph, self).createNode(cls, name)
- return self.delegate.update_node(res)
-
- def locate(self, filename, loader=None):
- newname = super(FilteredMachOGraph, self).locate(filename, loader)
- if newname is None:
- return None
- return self.delegate.locate(newname, loader=loader)
-
-
-class MachOStandalone(object):
- def __init__(self, base, dest=None, graph=None, env=None, executable_path=None):
- self.base = os.path.join(os.path.abspath(base), "")
- if dest is None:
- dest = os.path.join(self.base, "Contents", "Frameworks")
- self.dest = dest
- self.mm = FilteredMachOGraph(
- self, graph=graph, env=env, executable_path=executable_path
- )
- self.changemap = {}
- self.excludes = []
- self.pending = deque()
-
- def update_node(self, m):
- return m
-
- def getClass(self, name, cls):
- if in_system_path(name):
- return ExcludedMachO
- for base in self.excludes:
- if name.startswith(base):
- return ExcludedMachO
- return cls
-
- def locate(self, filename, loader=None):
- if in_system_path(filename):
- return filename
- if filename.startswith(self.base):
- return filename
- for base in self.excludes:
- if filename.startswith(base):
- return filename
- if filename in self.changemap:
- return self.changemap[filename]
- info = framework_info(filename)
- if info is None:
- res = self.copy_dylib(filename)
- self.changemap[filename] = res
- return res
- else:
- res = self.copy_framework(info)
- self.changemap[filename] = res
- return res
-
- def copy_dylib(self, filename):
- # When the filename is a symlink, use the basename of the link's
- # target as the name in the standalone bundle. This avoids problems
- # when two libraries link to the same dylib via different symlinks.
- if os.path.islink(filename):
- dest = os.path.join(self.dest, os.path.basename(os.path.realpath(filename)))
- else:
- dest = os.path.join(self.dest, os.path.basename(filename))
-
- if not os.path.exists(dest):
- self.mergecopy(filename, dest)
- return dest
-
- def mergecopy(self, src, dest):
- return mergecopy(src, dest)
-
- def mergetree(self, src, dest):
- return mergetree(src, dest)
-
- def copy_framework(self, info):
- dest = os.path.join(self.dest, info["shortname"] + ".framework")
- destfn = os.path.join(self.dest, info["name"])
- src = os.path.join(info["location"], info["shortname"] + ".framework")
- if not os.path.exists(dest):
- self.mergetree(src, dest)
- self.pending.append((destfn, iter_platform_files(dest)))
- return destfn
-
- def run(self, platfiles=None, contents=None):
- mm = self.mm
- if contents is None:
- contents = "@executable_path/.."
- if platfiles is None:
- platfiles = iter_platform_files(self.base)
-
- for fn in platfiles:
- mm.run_file(fn)
-
- while self.pending:
- fmwk, files = self.pending.popleft()
- ref = mm.findNode(fmwk)
- for fn in files:
- mm.run_file(fn, caller=ref)
-
- changemap = {}
- skipcontents = os.path.join(os.path.dirname(self.dest), "")
- machfiles = []
-
- for node in mm.flatten(has_filename_filter):
- machfiles.append(node)
- dest = os.path.join(
- contents,
- os.path.normpath(node.filename[len(skipcontents) :]), # noqa: E203
- )
- changemap[node.filename] = dest
-
- def changefunc(path):
- if path.startswith("@loader_path/"):
- # This is a quick hack for py2app: in that use case, paths
- # like this are found in the load commands of relocatable
- # wheels and don't need rewriting.
- return path
-
- res = mm.locate(path)
- rv = changemap.get(res)
- if rv is None and path.startswith("@loader_path/"):
- rv = changemap.get(mm.locate(mm.trans_table.get((node.filename, path))))
- return rv
-
- for node in machfiles:
- fn = mm.locate(node.filename)
- if fn is None:
- continue
- rewroteAny = False
- for _header in node.headers:
- if node.rewriteLoadCommands(changefunc):
- rewroteAny = True
- if rewroteAny:
- old_mode = flipwritable(fn)
- try:
- with open(fn, "rb+") as f:
- for _header in node.headers:
- f.seek(0)
- node.write(f)
- f.seek(0, 2)
- f.flush()
- finally:
- flipwritable(fn, old_mode)
-
- allfiles = [mm.locate(node.filename) for node in machfiles]
- return set(filter(None, allfiles))
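
MachOStandalone rewrites binaries in place, so the sketch below uses a placeholder bundle path; run() copies non-system dependencies into Contents/Frameworks and returns the set of files it relocated (assumes macholib on macOS):

    from macholib.MachOStandalone import MachOStandalone

    # "/path/to/MyApp.app" is a placeholder; run() modifies files under it.
    standalone = MachOStandalone("/path/to/MyApp.app")
    copied = standalone.run()
    print(sorted(copied))
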
diff --git a/lib/spack/external/macholib/SymbolTable.py b/lib/spack/external/macholib/SymbolTable.py
deleted file mode 100644
index 006abab59f..0000000000
--- a/lib/spack/external/macholib/SymbolTable.py
+++ /dev/null
@@ -1,104 +0,0 @@
-"""
-Class to read the symbol table from a Mach-O header
-"""
-from __future__ import with_statement
-
-import sys
-
-from macholib.mach_o import (
- MH_CIGAM_64,
- MH_MAGIC_64,
- dylib_module,
- dylib_reference,
- dylib_table_of_contents,
- nlist,
- nlist_64,
- relocation_info,
-)
-
-__all__ = ["SymbolTable"]
-
-if sys.version_info[0] == 2:
- range = xrange # noqa: F821
-
-
-class SymbolTable(object):
- def __init__(self, macho, header=None, openfile=None):
- if openfile is None:
- openfile = open
- if header is None:
- header = macho.headers[0]
- self.macho_header = header
- with openfile(macho.filename, "rb") as fh:
- self.symtab = header.getSymbolTableCommand()
- self.dysymtab = header.getDynamicSymbolTableCommand()
-
- if self.symtab is not None:
- self.nlists = self.readSymbolTable(fh)
-
- if self.dysymtab is not None:
- self.readDynamicSymbolTable(fh)
-
- def readSymbolTable(self, fh):
- cmd = self.symtab
- fh.seek(self.macho_header.offset + cmd.stroff)
- strtab = fh.read(cmd.strsize)
- fh.seek(self.macho_header.offset + cmd.symoff)
- nlists = []
-
- if self.macho_header.MH_MAGIC in [MH_MAGIC_64, MH_CIGAM_64]:
- cls = nlist_64
- else:
- cls = nlist
-
- for _i in range(cmd.nsyms):
- cmd = cls.from_fileobj(fh, _endian_=self.macho_header.endian)
- if cmd.n_un == 0:
- nlists.append((cmd, ""))
- else:
- nlists.append(
- (
- cmd,
- strtab[cmd.n_un : strtab.find(b"\x00", cmd.n_un)], # noqa: E203
- )
- )
- return nlists
-
- def readDynamicSymbolTable(self, fh):
- cmd = self.dysymtab
- nlists = self.nlists
-
- self.localsyms = nlists[
- cmd.ilocalsym : cmd.ilocalsym + cmd.nlocalsym # noqa: E203
- ]
- self.extdefsyms = nlists[
- cmd.iextdefsym : cmd.iextdefsym + cmd.nextdefsym # noqa: E203
- ]
- self.undefsyms = nlists[
- cmd.iundefsym : cmd.iundefsym + cmd.nundefsym # noqa: E203
- ]
- if cmd.tocoff == 0:
- self.toc = None
- else:
- self.toc = self.readtoc(fh, cmd.tocoff, cmd.ntoc)
-
- def readtoc(self, fh, off, n):
- fh.seek(self.macho_header.offset + off)
- return [dylib_table_of_contents.from_fileobj(fh) for i in range(n)]
-
- def readmodtab(self, fh, off, n):
- fh.seek(self.macho_header.offset + off)
- return [dylib_module.from_fileobj(fh) for i in range(n)]
-
- def readsym(self, fh, off, n):
- fh.seek(self.macho_header.offset + off)
- refs = []
- for _i in range(n):
- ref = dylib_reference.from_fileobj(fh)
- isym, flags = divmod(ref.isym_flags, 256)
- refs.append((self.nlists[isym], flags))
- return refs
-
- def readrel(self, fh, off, n):
- fh.seek(self.macho_header.offset + off)
- return [relocation_info.from_fileobj(fh) for i in range(n)]
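
A sketch of reading a binary's symbol table with the class above (assumes macholib on macOS and a binary that carries an LC_SYMTAB command):

    from macholib.MachO import MachO
    from macholib.SymbolTable import SymbolTable

    # Parses LC_SYMTAB (and LC_DYSYMTAB, if present) of the first header.
    st = SymbolTable(MachO("/bin/ls"))
    for nlist_entry, name in st.nlists[:10]:
        print(name)
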
diff --git a/lib/spack/external/macholib/__init__.py b/lib/spack/external/macholib/__init__.py
deleted file mode 100644
index f2945849f8..0000000000
--- a/lib/spack/external/macholib/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-"""
-Enough Mach-O to make your head spin.
-
-See the relevant header files in /usr/include/mach-o
-
-And also Apple's documentation.
-"""
-__version__ = "1.15.2"
diff --git a/lib/spack/external/macholib/__main__.py b/lib/spack/external/macholib/__main__.py
deleted file mode 100644
index dd3cc11610..0000000000
--- a/lib/spack/external/macholib/__main__.py
+++ /dev/null
@@ -1,80 +0,0 @@
-from __future__ import absolute_import, print_function
-
-import os
-import sys
-
-from macholib import macho_dump, macho_standalone
-from macholib.util import is_platform_file
-
-gCommand = None
-
-
-def check_file(fp, path, callback):
- if not os.path.exists(path):
- print("%s: %s: No such file or directory" % (gCommand, path), file=sys.stderr)
- return 1
-
- try:
- is_plat = is_platform_file(path)
-
- except IOError as msg:
- print("%s: %s: %s" % (gCommand, path, msg), file=sys.stderr)
- return 1
-
- else:
- if is_plat:
- callback(fp, path)
- return 0
-
-
-def walk_tree(callback, paths):
- err = 0
-
- for base in paths:
- if os.path.isdir(base):
- for root, _dirs, files in os.walk(base):
- for fn in files:
- err |= check_file(sys.stdout, os.path.join(root, fn), callback)
- else:
- err |= check_file(sys.stdout, base, callback)
-
- return err
-
-
-def print_usage(fp):
- print("Usage:", file=fp)
- print(" python -mmacholib [help|--help]", file=fp)
- print(" python -mmacholib dump FILE ...", file=fp)
- print(" python -mmacholib find DIR ...", file=fp)
- print(" python -mmacholib standalone DIR ...", file=fp)
-
-
-def main():
- global gCommand
- if len(sys.argv) < 3:
- print_usage(sys.stderr)
- sys.exit(1)
-
- gCommand = sys.argv[1]
-
- if gCommand == "dump":
- walk_tree(macho_dump.print_file, sys.argv[2:])
-
- elif gCommand == "find":
- walk_tree(lambda fp, path: print(path, file=fp), sys.argv[2:])
-
- elif gCommand == "standalone":
- for dn in sys.argv[2:]:
- macho_standalone.standaloneApp(dn)
-
- elif gCommand in ("help", "--help"):
- print_usage(sys.stdout)
- sys.exit(0)
-
- else:
- print_usage(sys.stderr)
- sys.exit(1)
-
-
-if __name__ == "__main__":
- main()
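
The usage printed above can also be exercised from Python; a sketch using runpy to emulate `python -mmacholib dump /bin/ls`:

    import runpy
    import sys

    # Equivalent to `python -mmacholib dump /bin/ls`.
    sys.argv = ["macholib", "dump", "/bin/ls"]
    runpy.run_module("macholib", run_name="__main__")
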
diff --git a/lib/spack/external/macholib/_cmdline.py b/lib/spack/external/macholib/_cmdline.py
deleted file mode 100644
index 9304fb6257..0000000000
--- a/lib/spack/external/macholib/_cmdline.py
+++ /dev/null
@@ -1,49 +0,0 @@
-"""
-Internal helpers for basic commandline tools
-"""
-from __future__ import absolute_import, print_function
-
-import os
-import sys
-
-from macholib.util import is_platform_file
-
-
-def check_file(fp, path, callback):
- if not os.path.exists(path):
- print(
- "%s: %s: No such file or directory" % (sys.argv[0], path), file=sys.stderr
- )
- return 1
-
- try:
- is_plat = is_platform_file(path)
-
- except IOError as msg:
- print("%s: %s: %s" % (sys.argv[0], path, msg), file=sys.stderr)
- return 1
-
- else:
- if is_plat:
- callback(fp, path)
- return 0
-
-
-def main(callback):
- args = sys.argv[1:]
- name = os.path.basename(sys.argv[0])
- err = 0
-
- if not args:
- print("Usage: %s filename..." % (name,), file=sys.stderr)
- return 1
-
- for base in args:
- if os.path.isdir(base):
- for root, _dirs, files in os.walk(base):
- for fn in files:
- err |= check_file(sys.stdout, os.path.join(root, fn), callback)
- else:
- err |= check_file(sys.stdout, base, callback)
-
- return err
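
A sketch of a tiny tool built on the helper above: it prints every Mach-O platform file found under the paths given on the command line (the callback signature is (fp, path), as check_file shows):

    import sys

    from macholib._cmdline import main

    def print_path(fp, path):
        # Called only for files that is_platform_file() accepts.
        print(path, file=fp)

    if __name__ == "__main__":
        sys.exit(main(print_path))
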
diff --git a/lib/spack/external/macholib/dyld.py b/lib/spack/external/macholib/dyld.py
deleted file mode 100644
index 5282a08d42..0000000000
--- a/lib/spack/external/macholib/dyld.py
+++ /dev/null
@@ -1,230 +0,0 @@
-"""
-dyld emulation
-"""
-
-import ctypes
-import os
-import platform
-import sys
-from itertools import chain
-
-from macholib.dylib import dylib_info
-from macholib.framework import framework_info
-
-__all__ = ["dyld_find", "framework_find", "framework_info", "dylib_info"]
-
-if sys.platform == "darwin" and [
- int(x) for x in platform.mac_ver()[0].split(".")[:2]
-] >= [10, 16]:
- try:
- libc = ctypes.CDLL("libSystem.dylib")
-
- except OSError:
- _dyld_shared_cache_contains_path = None
-
- else:
- try:
- _dyld_shared_cache_contains_path = libc._dyld_shared_cache_contains_path
- except AttributeError:
- _dyld_shared_cache_contains_path = None
-
- else:
- _dyld_shared_cache_contains_path.restype = ctypes.c_bool
- _dyld_shared_cache_contains_path.argtypes = [ctypes.c_char_p]
-
- if sys.version_info[0] != 2:
- __dyld_shared_cache_contains_path = _dyld_shared_cache_contains_path
-
- def _dyld_shared_cache_contains_path(path):
- return __dyld_shared_cache_contains_path(path.encode())
-
-
-else:
- _dyld_shared_cache_contains_path = None
-
-# These are the defaults as per man dyld(1)
-#
-_DEFAULT_FRAMEWORK_FALLBACK = [
- os.path.expanduser("~/Library/Frameworks"),
- "/Library/Frameworks",
- "/Network/Library/Frameworks",
- "/System/Library/Frameworks",
-]
-
-_DEFAULT_LIBRARY_FALLBACK = [
- os.path.expanduser("~/lib"),
- "/usr/local/lib",
- "/lib",
- "/usr/lib",
-]
-
-if sys.version_info[0] == 2:
-
- def _ensure_utf8(s):
- if isinstance(s, unicode): # noqa: F821
- return s.encode("utf8")
- return s
-
-
-else:
-
- def _ensure_utf8(s):
- if s is not None and not isinstance(s, str):
- raise ValueError(s)
- return s
-
-
-def _dyld_env(env, var):
- if env is None:
- env = os.environ
- rval = env.get(var)
- if rval is None or rval == "":
- return []
- return rval.split(":")
-
-
-def dyld_image_suffix(env=None):
- if env is None:
- env = os.environ
- return env.get("DYLD_IMAGE_SUFFIX")
-
-
-def dyld_framework_path(env=None):
- return _dyld_env(env, "DYLD_FRAMEWORK_PATH")
-
-
-def dyld_library_path(env=None):
- return _dyld_env(env, "DYLD_LIBRARY_PATH")
-
-
-def dyld_fallback_framework_path(env=None):
- return _dyld_env(env, "DYLD_FALLBACK_FRAMEWORK_PATH")
-
-
-def dyld_fallback_library_path(env=None):
- return _dyld_env(env, "DYLD_FALLBACK_LIBRARY_PATH")
-
-
-def dyld_image_suffix_search(iterator, env=None):
- """For a potential path iterator, add DYLD_IMAGE_SUFFIX semantics"""
- suffix = dyld_image_suffix(env)
- if suffix is None:
- return iterator
-
- def _inject(iterator=iterator, suffix=suffix):
- for path in iterator:
- if path.endswith(".dylib"):
- yield path[: -len(".dylib")] + suffix + ".dylib"
- else:
- yield path + suffix
- yield path
-
- return _inject()
-
-
-def dyld_override_search(name, env=None):
- # If DYLD_FRAMEWORK_PATH is set and this dylib_name is a
- # framework name, use the first file that exists in the framework
- # path if any. If there is none go on to search the DYLD_LIBRARY_PATH
- # if any.
-
- framework = framework_info(name)
-
- if framework is not None:
- for path in dyld_framework_path(env):
- yield os.path.join(path, framework["name"])
-
- # If DYLD_LIBRARY_PATH is set then use the first file that exists
- # in the path. If none use the original name.
- for path in dyld_library_path(env):
- yield os.path.join(path, os.path.basename(name))
-
-
-def dyld_executable_path_search(name, executable_path=None):
- # If we haven't done any searching and found a library and the
- # dylib_name starts with "@executable_path/" then construct the
- # library name.
- if name.startswith("@executable_path/") and executable_path is not None:
- yield os.path.join(
- executable_path, name[len("@executable_path/") :] # noqa: E203
- )
-
-
-def dyld_loader_search(name, loader_path=None):
- # If we haven't done any searching and found a library and the
- # dylib_name starts with "@loader_path/" then construct the
- # library name.
- if name.startswith("@loader_path/") and loader_path is not None:
- yield os.path.join(loader_path, name[len("@loader_path/") :]) # noqa: E203
-
-
-def dyld_default_search(name, env=None):
- yield name
-
- framework = framework_info(name)
-
- if framework is not None:
- fallback_framework_path = dyld_fallback_framework_path(env)
-
- if fallback_framework_path:
- for path in fallback_framework_path:
- yield os.path.join(path, framework["name"])
-
- else:
- for path in _DEFAULT_FRAMEWORK_FALLBACK:
- yield os.path.join(path, framework["name"])
-
- fallback_library_path = dyld_fallback_library_path(env)
- if fallback_library_path:
- for path in fallback_library_path:
- yield os.path.join(path, os.path.basename(name))
-
- else:
- for path in _DEFAULT_LIBRARY_FALLBACK:
- yield os.path.join(path, os.path.basename(name))
-
-
-def dyld_find(name, executable_path=None, env=None, loader_path=None):
- """
- Find a library or framework using dyld semantics
- """
- name = _ensure_utf8(name)
- executable_path = _ensure_utf8(executable_path)
- for path in dyld_image_suffix_search(
- chain(
- dyld_override_search(name, env),
- dyld_executable_path_search(name, executable_path),
- dyld_loader_search(name, loader_path),
- dyld_default_search(name, env),
- ),
- env,
- ):
- if (
- _dyld_shared_cache_contains_path is not None
- and _dyld_shared_cache_contains_path(path)
- ):
- return path
- if os.path.isfile(path):
- return path
- raise ValueError("dylib %s could not be found" % (name,))
-
-
-def framework_find(fn, executable_path=None, env=None):
- """
- Find a framework using dyld semantics in a very loose manner.
-
- Will take input such as:
- Python
- Python.framework
- Python.framework/Versions/Current
- """
- try:
- return dyld_find(fn, executable_path=executable_path, env=env)
- except ValueError:
- pass
- fmwk_index = fn.rfind(".framework")
- if fmwk_index == -1:
- fmwk_index = len(fn)
- fn += ".framework"
- fn = os.path.join(fn, os.path.basename(fn[:fmwk_index]))
- return dyld_find(fn, executable_path=executable_path, env=env)
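
dyld.py emulates the dynamic linker's search order: DYLD_* overrides first, then @executable_path/@loader_path expansion, then the fallback lists above. A usage sketch — the library and framework names are illustrative, and on a non-macOS host (or when nothing matches) each lookup raises ValueError:

    from macholib.dyld import dyld_find, framework_find

    try:
        # Honors DYLD_LIBRARY_PATH etc. from the current environment.
        print(dyld_find("/usr/lib/libSystem.B.dylib"))
    except ValueError:
        print("not found (non-macOS host?)")

    # framework_find() is looser: all three spellings resolve to the same file.
    for name in ("Python", "Python.framework", "Python.framework/Versions/Current"):
        try:
            print(name, "->", framework_find(name))
        except ValueError:
            pass  # framework not present on this machine
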
diff --git a/lib/spack/external/macholib/dylib.py b/lib/spack/external/macholib/dylib.py
deleted file mode 100644
index 72301a8f19..0000000000
--- a/lib/spack/external/macholib/dylib.py
+++ /dev/null
@@ -1,45 +0,0 @@
-"""
-Generic dylib path manipulation
-"""
-
-import re
-
-__all__ = ["dylib_info"]
-
-_DYLIB_RE = re.compile(
- r"""(?x)
-(?P<location>^.*)(?:^|/)
-(?P<name>
- (?P<shortname>\w+?)
- (?:\.(?P<version>[^._]+))?
- (?:_(?P<suffix>[^._]+))?
- \.dylib$
-)
-"""
-)
-
-
-def dylib_info(filename):
- """
- A dylib name can take one of the following four forms:
- Location/Name.SomeVersion_Suffix.dylib
- Location/Name.SomeVersion.dylib
- Location/Name_Suffix.dylib
- Location/Name.dylib
-
- returns None if not found or a mapping equivalent to:
- dict(
- location='Location',
- name='Name.SomeVersion_Suffix.dylib',
- shortname='Name',
- version='SomeVersion',
- suffix='Suffix',
- )
-
- Note that SomeVersion and Suffix are optional and may be None
- if not present.
- """
- is_dylib = _DYLIB_RE.match(filename)
- if not is_dylib:
- return None
- return is_dylib.groupdict()
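
The regular expression above splits a dylib path into its optional version and suffix parts; note that version and suffix may not contain dots or underscores. A small sketch (the path is illustrative):

    from macholib.dylib import dylib_info

    info = dylib_info("/usr/lib/liblzma.5.dylib")
    # -> {'location': '/usr/lib', 'name': 'liblzma.5.dylib',
    #     'shortname': 'liblzma', 'version': '5', 'suffix': None}
    print(info)

    print(dylib_info("/usr/lib/libfoo.txt"))  # -> None (not a .dylib path)
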
diff --git a/lib/spack/external/macholib/framework.py b/lib/spack/external/macholib/framework.py
deleted file mode 100644
index 027001e413..0000000000
--- a/lib/spack/external/macholib/framework.py
+++ /dev/null
@@ -1,45 +0,0 @@
-"""
-Generic framework path manipulation
-"""
-
-import re
-
-__all__ = ["framework_info"]
-
-_STRICT_FRAMEWORK_RE = re.compile(
- r"""(?x)
-(?P<location>^.*)(?:^|/)
-(?P<name>
- (?P<shortname>[-_A-Za-z0-9]+).framework/
- (?:Versions/(?P<version>[^/]+)/)?
- (?P=shortname)
- (?:_(?P<suffix>[^_]+))?
-)$
-"""
-)
-
-
-def framework_info(filename):
- """
- A framework name can take one of the following four forms:
- Location/Name.framework/Versions/SomeVersion/Name_Suffix
- Location/Name.framework/Versions/SomeVersion/Name
- Location/Name.framework/Name_Suffix
- Location/Name.framework/Name
-
- returns None if not found, or a mapping equivalent to:
- dict(
- location='Location',
- name='Name.framework/Versions/SomeVersion/Name_Suffix',
- shortname='Name',
- version='SomeVersion',
- suffix='Suffix',
- )
-
- Note that SomeVersion and Suffix are optional and may be None
- if not present
- """
- is_framework = _STRICT_FRAMEWORK_RE.match(filename)
- if not is_framework:
- return None
- return is_framework.groupdict()
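
Same idea for framework paths; the backreference (?P=shortname) is what forces the binary inside the bundle to repeat the framework name. An illustrative parse (the path is hypothetical):

    from macholib.framework import framework_info

    info = framework_info(
        "/System/Library/Frameworks/Python.framework/Versions/3.9/Python"
    )
    # -> {'location': '/System/Library/Frameworks',
    #     'name': 'Python.framework/Versions/3.9/Python',
    #     'shortname': 'Python', 'version': '3.9', 'suffix': None}
    print(info)
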
diff --git a/lib/spack/external/macholib/itergraphreport.py b/lib/spack/external/macholib/itergraphreport.py
deleted file mode 100644
index 08692c057c..0000000000
--- a/lib/spack/external/macholib/itergraphreport.py
+++ /dev/null
@@ -1,73 +0,0 @@
-"""
-Utilities for creating dot output from a MachOGraph
-"""
-
-from collections import deque
-
-try:
- from itertools import imap
-except ImportError:
- imap = map
-
-__all__ = ["itergraphreport"]
-
-
-def itergraphreport(nodes, describe_edge, name="G"):
- edges = deque()
- nodetoident = {}
-
- def nodevisitor(node, data, outgoing, incoming):
- return {"label": str(node)}
-
- def edgevisitor(edge, data, head, tail):
- return {}
-
- yield "digraph %s {\n" % (name,)
- attr = {"rankdir": "LR", "concentrate": "true"}
- cpatt = '%s="%s"'
- for item in attr.items():
- yield "\t%s;\n" % (cpatt % item,)
-
- # find all packages (subgraphs)
- for (node, data, _outgoing, _incoming) in nodes:
- nodetoident[node] = getattr(data, "identifier", node)
-
- # create sets for subgraph, write out descriptions
- for (node, data, outgoing, incoming) in nodes:
- # update edges
- for edge in imap(describe_edge, outgoing):
- edges.append(edge)
-
- # describe node
- yield '\t"%s" [%s];\n' % (
- node,
- ",".join(
- [
- (cpatt % item)
- for item in nodevisitor(node, data, outgoing, incoming).items()
- ]
- ),
- )
-
- graph = []
-
- while edges:
- edge, data, head, tail = edges.popleft()
- if data in ("run_file", "load_dylib"):
- graph.append((edge, data, head, tail))
-
- def do_graph(edges, tabs):
- edgestr = tabs + '"%s" -> "%s" [%s];\n'
- # describe edge
- for (edge, data, head, tail) in edges:
- attribs = edgevisitor(edge, data, head, tail)
- yield edgestr % (
- head,
- tail,
- ",".join([(cpatt % item) for item in attribs.items()]),
- )
-
- for s in do_graph(graph, "\t"):
- yield s
-
- yield "}\n"
diff --git a/lib/spack/external/macholib/mach_o.py b/lib/spack/external/macholib/mach_o.py
deleted file mode 100644
index 7768ee38d2..0000000000
--- a/lib/spack/external/macholib/mach_o.py
+++ /dev/null
@@ -1,1636 +0,0 @@
-"""
-Other than changing the load commands in such a way that they do not
-contain the load command itself, this is largely a by-hand conversion
-of the C headers. Hopefully everything in here should be at least as
-obvious as the C headers, and you should be using the C headers as a real
-reference because the documentation didn't come along for the ride.
-
-Doing much of anything with the symbol tables or segments is really
-not covered at this point.
-
-See /usr/include/mach-o and friends.
-"""
-
-import time
-
-from macholib.ptypes import (
- Structure,
- p_int32,
- p_int64,
- p_long,
- p_short,
- p_uint8,
- p_uint32,
- p_uint64,
- p_ulong,
- pypackable,
-)
-
-_CPU_ARCH_ABI64 = 0x01000000
-
-CPU_TYPE_NAMES = {
- -1: "ANY",
- 1: "VAX",
- 6: "MC680x0",
- 7: "i386",
- _CPU_ARCH_ABI64 | 7: "x86_64",
- 8: "MIPS",
- 10: "MC98000",
- 11: "HPPA",
- 12: "ARM",
- _CPU_ARCH_ABI64 | 12: "ARM64",
- 13: "MC88000",
- 14: "SPARC",
- 15: "i860",
- 16: "Alpha",
- 18: "PowerPC",
- _CPU_ARCH_ABI64 | 18: "PowerPC64",
-}
-
-INTEL64_SUBTYPE = {
- 3: "CPU_SUBTYPE_X86_64_ALL",
- 4: "CPU_SUBTYPE_X86_ARCH1",
- 8: "CPU_SUBTYPE_X86_64_H",
-}
-
-# define CPU_SUBTYPE_INTEL(f, m) ((cpu_subtype_t) (f) + ((m) << 4))
-INTEL_SUBTYPE = {
- 0: "CPU_SUBTYPE_INTEL_MODEL_ALL",
- 1: "CPU_THREADTYPE_INTEL_HTT",
- 3: "CPU_SUBTYPE_I386_ALL",
- 4: "CPU_SUBTYPE_486",
- 5: "CPU_SUBTYPE_586",
- 8: "CPU_SUBTYPE_PENTIUM_3",
- 9: "CPU_SUBTYPE_PENTIUM_M",
- 10: "CPU_SUBTYPE_PENTIUM_4",
- 11: "CPU_SUBTYPE_ITANIUM",
- 12: "CPU_SUBTYPE_XEON",
- 34: "CPU_SUBTYPE_XEON_MP",
- 42: "CPU_SUBTYPE_PENTIUM_4_M",
- 43: "CPU_SUBTYPE_ITANIUM_2",
- 38: "CPU_SUBTYPE_PENTPRO",
- 40: "CPU_SUBTYPE_PENTIUM_3_M",
- 52: "CPU_SUBTYPE_PENTIUM_3_XEON",
- 102: "CPU_SUBTYPE_PENTII_M3",
- 132: "CPU_SUBTYPE_486SX",
- 166: "CPU_SUBTYPE_PENTII_M5",
- 199: "CPU_SUBTYPE_CELERON",
- 231: "CPU_SUBTYPE_CELERON_MOBILE",
-}
-
-MC680_SUBTYPE = {
- 1: "CPU_SUBTYPE_MC680x0_ALL",
- 2: "CPU_SUBTYPE_MC68040",
- 3: "CPU_SUBTYPE_MC68030_ONLY",
-}
-
-MIPS_SUBTYPE = {
- 0: "CPU_SUBTYPE_MIPS_ALL",
- 1: "CPU_SUBTYPE_MIPS_R2300",
- 2: "CPU_SUBTYPE_MIPS_R2600",
- 3: "CPU_SUBTYPE_MIPS_R2800",
- 4: "CPU_SUBTYPE_MIPS_R2000a",
- 5: "CPU_SUBTYPE_MIPS_R2000",
- 6: "CPU_SUBTYPE_MIPS_R3000a",
- 7: "CPU_SUBTYPE_MIPS_R3000",
-}
-
-MC98000_SUBTYPE = {0: "CPU_SUBTYPE_MC98000_ALL", 1: "CPU_SUBTYPE_MC98601"}
-
-HPPA_SUBTYPE = {0: "CPU_SUBTYPE_HPPA_7100", 1: "CPU_SUBTYPE_HPPA_7100LC"}
-
-MC88_SUBTYPE = {
- 0: "CPU_SUBTYPE_MC88000_ALL",
- 1: "CPU_SUBTYPE_MC88100",
- 2: "CPU_SUBTYPE_MC88110",
-}
-
-SPARC_SUBTYPE = {0: "CPU_SUBTYPE_SPARC_ALL"}
-
-I860_SUBTYPE = {0: "CPU_SUBTYPE_I860_ALL", 1: "CPU_SUBTYPE_I860_860"}
-
-POWERPC_SUBTYPE = {
- 0: "CPU_SUBTYPE_POWERPC_ALL",
- 1: "CPU_SUBTYPE_POWERPC_601",
- 2: "CPU_SUBTYPE_POWERPC_602",
- 3: "CPU_SUBTYPE_POWERPC_603",
- 4: "CPU_SUBTYPE_POWERPC_603e",
- 5: "CPU_SUBTYPE_POWERPC_603ev",
- 6: "CPU_SUBTYPE_POWERPC_604",
- 7: "CPU_SUBTYPE_POWERPC_604e",
- 8: "CPU_SUBTYPE_POWERPC_620",
- 9: "CPU_SUBTYPE_POWERPC_750",
- 10: "CPU_SUBTYPE_POWERPC_7400",
- 11: "CPU_SUBTYPE_POWERPC_7450",
- 100: "CPU_SUBTYPE_POWERPC_970",
-}
-
-ARM_SUBTYPE = {
- 0: "CPU_SUBTYPE_ARM_ALL12",
- 5: "CPU_SUBTYPE_ARM_V4T",
- 6: "CPU_SUBTYPE_ARM_V6",
- 7: "CPU_SUBTYPE_ARM_V5TEJ",
- 8: "CPU_SUBTYPE_ARM_XSCALE",
- 9: "CPU_SUBTYPE_ARM_V7",
- 10: "CPU_SUBTYPE_ARM_V7F",
- 11: "CPU_SUBTYPE_ARM_V7S",
- 12: "CPU_SUBTYPE_ARM_V7K",
- 13: "CPU_SUBTYPE_ARM_V8",
- 14: "CPU_SUBTYPE_ARM_V6M",
- 15: "CPU_SUBTYPE_ARM_V7M",
- 16: "CPU_SUBTYPE_ARM_V7EM",
-}
-
-ARM64_SUBTYPE = {0: "CPU_SUBTYPE_ARM64_ALL", 1: "CPU_SUBTYPE_ARM64_V8"}
-
-VAX_SUBTYPE = {
- 0: "CPU_SUBTYPE_VAX_ALL",
- 1: "CPU_SUBTYPE_VAX780",
- 2: "CPU_SUBTYPE_VAX785",
- 3: "CPU_SUBTYPE_VAX750",
- 4: "CPU_SUBTYPE_VAX730",
- 5: "CPU_SUBTYPE_UVAXI",
- 6: "CPU_SUBTYPE_UVAXII",
- 7: "CPU_SUBTYPE_VAX8200",
- 8: "CPU_SUBTYPE_VAX8500",
- 9: "CPU_SUBTYPE_VAX8600",
- 10: "CPU_SUBTYPE_VAX8650",
- 11: "CPU_SUBTYPE_VAX8800",
- 12: "CPU_SUBTYPE_UVAXIII",
-}
-
-
-def get_cpu_subtype(cpu_type, cpu_subtype):
- st = cpu_subtype & 0x0FFFFFFF
-
- if cpu_type == 1:
- subtype = VAX_SUBTYPE.get(st, st)
- elif cpu_type == 6:
- subtype = MC680_SUBTYPE.get(st, st)
- elif cpu_type == 7:
- subtype = INTEL_SUBTYPE.get(st, st)
- elif cpu_type == 7 | _CPU_ARCH_ABI64:
- subtype = INTEL64_SUBTYPE.get(st, st)
- elif cpu_type == 8:
- subtype = MIPS_SUBTYPE.get(st, st)
- elif cpu_type == 10:
- subtype = MC98000_SUBTYPE.get(st, st)
- elif cpu_type == 11:
- subtype = HPPA_SUBTYPE.get(st, st)
- elif cpu_type == 12:
- subtype = ARM_SUBTYPE.get(st, st)
- elif cpu_type == 12 | _CPU_ARCH_ABI64:
- subtype = ARM64_SUBTYPE.get(st, st)
- elif cpu_type == 13:
- subtype = MC88_SUBTYPE.get(st, st)
- elif cpu_type == 14:
- subtype = SPARC_SUBTYPE.get(st, st)
- elif cpu_type == 15:
- subtype = I860_SUBTYPE.get(st, st)
- elif cpu_type == 16:
- subtype = MIPS_SUBTYPE.get(st, st)
- elif cpu_type == 18:
- subtype = POWERPC_SUBTYPE.get(st, st)
- elif cpu_type == 18 | _CPU_ARCH_ABI64:
- subtype = POWERPC_SUBTYPE.get(st, st)
- else:
- subtype = str(st)
-
- return subtype
-
-
-_MH_EXECUTE_SYM = "__mh_execute_header"
-MH_EXECUTE_SYM = "_mh_execute_header"
-_MH_BUNDLE_SYM = "__mh_bundle_header"
-MH_BUNDLE_SYM = "_mh_bundle_header"
-_MH_DYLIB_SYM = "__mh_dylib_header"
-MH_DYLIB_SYM = "_mh_dylib_header"
-_MH_DYLINKER_SYM = "__mh_dylinker_header"
-MH_DYLINKER_SYM = "_mh_dylinker_header"
-
-(
- MH_OBJECT,
- MH_EXECUTE,
- MH_FVMLIB,
- MH_CORE,
- MH_PRELOAD,
- MH_DYLIB,
- MH_DYLINKER,
- MH_BUNDLE,
- MH_DYLIB_STUB,
- MH_DSYM,
-) = range(0x1, 0xB)
-
-MH_FILESET = 0xC
-
-(
- MH_NOUNDEFS,
- MH_INCRLINK,
- MH_DYLDLINK,
- MH_BINDATLOAD,
- MH_PREBOUND,
- MH_SPLIT_SEGS,
- MH_LAZY_INIT,
- MH_TWOLEVEL,
- MH_FORCE_FLAT,
- MH_NOMULTIDEFS,
- MH_NOFIXPREBINDING,
- MH_PREBINDABLE,
- MH_ALLMODSBOUND,
- MH_SUBSECTIONS_VIA_SYMBOLS,
- MH_CANONICAL,
- MH_WEAK_DEFINES,
- MH_BINDS_TO_WEAK,
- MH_ALLOW_STACK_EXECUTION,
- MH_ROOT_SAFE,
- MH_SETUID_SAFE,
- MH_NO_REEXPORTED_DYLIBS,
- MH_PIE,
- MH_DEAD_STRIPPABLE_DYLIB,
- MH_HAS_TLV_DESCRIPTORS,
- MH_NO_HEAP_EXECUTION,
- MH_APP_EXTENSION_SAFE,
-) = map((1).__lshift__, range(26))
-
-MH_MAGIC = 0xFEEDFACE
-MH_CIGAM = 0xCEFAEDFE
-MH_MAGIC_64 = 0xFEEDFACF
-MH_CIGAM_64 = 0xCFFAEDFE
-
-integer_t = p_int32
-cpu_type_t = integer_t
-cpu_subtype_t = p_uint32
-
-MH_FILETYPE_NAMES = {
- MH_OBJECT: "relocatable object",
- MH_EXECUTE: "demand paged executable",
- MH_FVMLIB: "fixed vm shared library",
- MH_CORE: "core",
- MH_PRELOAD: "preloaded executable",
- MH_DYLIB: "dynamically bound shared library",
- MH_DYLINKER: "dynamic link editor",
- MH_BUNDLE: "dynamically bound bundle",
- MH_DYLIB_STUB: "shared library stub for static linking",
- MH_DSYM: "symbol information",
- MH_FILESET: "fileset object",
-}
-
-MH_FILETYPE_SHORTNAMES = {
- MH_OBJECT: "object",
- MH_EXECUTE: "execute",
- MH_FVMLIB: "fvmlib",
- MH_CORE: "core",
- MH_PRELOAD: "preload",
- MH_DYLIB: "dylib",
- MH_DYLINKER: "dylinker",
- MH_BUNDLE: "bundle",
- MH_DYLIB_STUB: "dylib_stub",
- MH_DSYM: "dsym",
-}
-
-MH_FLAGS_NAMES = {
- MH_NOUNDEFS: "MH_NOUNDEFS",
- MH_INCRLINK: "MH_INCRLINK",
- MH_DYLDLINK: "MH_DYLDLINK",
- MH_BINDATLOAD: "MH_BINDATLOAD",
- MH_PREBOUND: "MH_PREBOUND",
- MH_SPLIT_SEGS: "MH_SPLIT_SEGS",
- MH_LAZY_INIT: "MH_LAZY_INIT",
- MH_TWOLEVEL: "MH_TWOLEVEL",
- MH_FORCE_FLAT: "MH_FORCE_FLAT",
- MH_NOMULTIDEFS: "MH_NOMULTIDEFS",
- MH_NOFIXPREBINDING: "MH_NOFIXPREBINDING",
- MH_PREBINDABLE: "MH_PREBINDABLE",
- MH_ALLMODSBOUND: "MH_ALLMODSBOUND",
- MH_SUBSECTIONS_VIA_SYMBOLS: "MH_SUBSECTIONS_VIA_SYMBOLS",
- MH_CANONICAL: "MH_CANONICAL",
- MH_WEAK_DEFINES: "MH_WEAK_DEFINES",
- MH_BINDS_TO_WEAK: "MH_BINDS_TO_WEAK",
- MH_ALLOW_STACK_EXECUTION: "MH_ALLOW_STACK_EXECUTION",
- MH_ROOT_SAFE: "MH_ROOT_SAFE",
- MH_SETUID_SAFE: "MH_SETUID_SAFE",
- MH_NO_REEXPORTED_DYLIBS: "MH_NO_REEXPORTED_DYLIBS",
- MH_PIE: "MH_PIE",
- MH_DEAD_STRIPPABLE_DYLIB: "MH_DEAD_STRIPPABLE_DYLIB",
- MH_HAS_TLV_DESCRIPTORS: "MH_HAS_TLV_DESCRIPTORS",
- MH_NO_HEAP_EXECUTION: "MH_NO_HEAP_EXECUTION",
- MH_APP_EXTENSION_SAFE: "MH_APP_EXTENSION_SAFE",
-}
-
-MH_FLAGS_DESCRIPTIONS = {
- MH_NOUNDEFS: "no undefined references",
- MH_INCRLINK: "output of an incremental link",
- MH_DYLDLINK: "input for the dynamic linker",
- MH_BINDATLOAD: "undefined references bound dynamically when loaded",
- MH_PREBOUND: "dynamic undefined references prebound",
- MH_SPLIT_SEGS: "split read-only and read-write segments",
- MH_LAZY_INIT: "(obsolete)",
- MH_TWOLEVEL: "using two-level name space bindings",
- MH_FORCE_FLAT: "forcing all imagges to use flat name space bindings",
- MH_NOMULTIDEFS: "umbrella guarantees no multiple definitions",
- MH_NOFIXPREBINDING: "do not notify prebinding agent about this executable",
- MH_PREBINDABLE: "the binary is not prebound but can have its prebinding redone",
- MH_ALLMODSBOUND: "indicates that this binary binds to all "
- "two-level namespace modules of its dependent libraries",
- MH_SUBSECTIONS_VIA_SYMBOLS: "safe to divide up the sections into "
- "sub-sections via symbols for dead code stripping",
- MH_CANONICAL: "the binary has been canonicalized via the unprebind operation",
- MH_WEAK_DEFINES: "the final linked image contains external weak symbols",
- MH_BINDS_TO_WEAK: "the final linked image uses weak symbols",
- MH_ALLOW_STACK_EXECUTION: "all stacks in the task will be given "
- "stack execution privilege",
- MH_ROOT_SAFE: "the binary declares it is safe for use in processes with uid zero",
- MH_SETUID_SAFE: "the binary declares it is safe for use in processes "
- "when issetugid() is true",
- MH_NO_REEXPORTED_DYLIBS: "the static linker does not need to examine dependent "
- "dylibs to see if any are re-exported",
- MH_PIE: "the OS will load the main executable at a random address",
- MH_DEAD_STRIPPABLE_DYLIB: "the static linker will automatically not create a "
- "LC_LOAD_DYLIB load command to the dylib if no symbols are being "
- "referenced from the dylib",
- MH_HAS_TLV_DESCRIPTORS: "contains a section of type S_THREAD_LOCAL_VARIABLES",
- MH_NO_HEAP_EXECUTION: "the OS will run the main executable with a "
- "non-executable heap even on platforms that don't require it",
- MH_APP_EXTENSION_SAFE: "the code was linked for use in an application extension.",
-}
-
-
-class mach_version_helper(Structure):
- _fields_ = (("_version", p_uint32),)
-
- @property
- def major(self):
- return self._version >> 16 & 0xFFFF
-
- @major.setter
- def major(self, v):
- self._version = (self._version & 0xFFFF) | (v << 16)
-
- @property
- def minor(self):
- return self._version >> 8 & 0xFF
-
- @minor.setter
- def minor(self, v):
- self._version = (self._version & 0xFFFF00FF) | (v << 8)
-
- @property
- def rev(self):
- return self._version & 0xFF
-
- @rev.setter
- def rev(self, v):
- self._version = (self._version & 0xFFFFFF00) | v
-
- def __str__(self):
- return "%s.%s.%s" % (self.major, self.minor, self.rev)
-
-
-class mach_timestamp_helper(p_uint32):
- def __str__(self):
- return time.ctime(self)
-
-
-def read_struct(f, s, **kw):
- return s.from_fileobj(f, **kw)
-
-
-class mach_header(Structure):
- _fields_ = (
- ("magic", p_uint32),
- ("cputype", cpu_type_t),
- ("cpusubtype", cpu_subtype_t),
- ("filetype", p_uint32),
- ("ncmds", p_uint32),
- ("sizeofcmds", p_uint32),
- ("flags", p_uint32),
- )
-
- def _describe(self):
- bit = 1
- flags = self.flags
- dflags = []
- while flags and bit < (1 << 32):
- if flags & bit:
- dflags.append(
- {
- "name": MH_FLAGS_NAMES.get(bit, str(bit)),
- "description": MH_FLAGS_DESCRIPTIONS.get(bit, str(bit)),
- }
- )
- flags = flags ^ bit
- bit <<= 1
- return (
- ("magic", int(self.magic)),
- ("cputype_string", CPU_TYPE_NAMES.get(self.cputype, self.cputype)),
- ("cputype", int(self.cputype)),
- ("cpusubtype_string", get_cpu_subtype(self.cputype, self.cpusubtype)),
- ("cpusubtype", int(self.cpusubtype)),
- ("filetype_string", MH_FILETYPE_NAMES.get(self.filetype, self.filetype)),
- ("filetype", int(self.filetype)),
- ("ncmds", self.ncmds),
- ("sizeofcmds", self.sizeofcmds),
- ("flags", dflags),
- ("raw_flags", int(self.flags)),
- )
-
-
-class mach_header_64(mach_header):
- _fields_ = mach_header._fields_ + (("reserved", p_uint32),)
-
-
-class load_command(Structure):
- _fields_ = (("cmd", p_uint32), ("cmdsize", p_uint32))
-
- def get_cmd_name(self):
- return LC_NAMES.get(self.cmd, self.cmd)
-
-
-LC_REQ_DYLD = 0x80000000
-
-(
- LC_SEGMENT,
- LC_SYMTAB,
- LC_SYMSEG,
- LC_THREAD,
- LC_UNIXTHREAD,
- LC_LOADFVMLIB,
- LC_IDFVMLIB,
- LC_IDENT,
- LC_FVMFILE,
- LC_PREPAGE,
- LC_DYSYMTAB,
- LC_LOAD_DYLIB,
- LC_ID_DYLIB,
- LC_LOAD_DYLINKER,
- LC_ID_DYLINKER,
- LC_PREBOUND_DYLIB,
- LC_ROUTINES,
- LC_SUB_FRAMEWORK,
- LC_SUB_UMBRELLA,
- LC_SUB_CLIENT,
- LC_SUB_LIBRARY,
- LC_TWOLEVEL_HINTS,
- LC_PREBIND_CKSUM,
-) = range(0x1, 0x18)
-
-LC_LOAD_WEAK_DYLIB = LC_REQ_DYLD | 0x18
-
-LC_SEGMENT_64 = 0x19
-LC_ROUTINES_64 = 0x1A
-LC_UUID = 0x1B
-LC_RPATH = 0x1C | LC_REQ_DYLD
-LC_CODE_SIGNATURE = 0x1D
-LC_CODE_SEGMENT_SPLIT_INFO = 0x1E
-LC_REEXPORT_DYLIB = 0x1F | LC_REQ_DYLD
-LC_LAZY_LOAD_DYLIB = 0x20
-LC_ENCRYPTION_INFO = 0x21
-LC_DYLD_INFO = 0x22
-LC_DYLD_INFO_ONLY = 0x22 | LC_REQ_DYLD
-LC_LOAD_UPWARD_DYLIB = 0x23 | LC_REQ_DYLD
-LC_VERSION_MIN_MACOSX = 0x24
-LC_VERSION_MIN_IPHONEOS = 0x25
-LC_FUNCTION_STARTS = 0x26
-LC_DYLD_ENVIRONMENT = 0x27
-LC_MAIN = 0x28 | LC_REQ_DYLD
-LC_DATA_IN_CODE = 0x29
-LC_SOURCE_VERSION = 0x2A
-LC_DYLIB_CODE_SIGN_DRS = 0x2B
-LC_ENCRYPTION_INFO_64 = 0x2C
-LC_LINKER_OPTION = 0x2D
-LC_LINKER_OPTIMIZATION_HINT = 0x2E
-LC_VERSION_MIN_TVOS = 0x2F
-LC_VERSION_MIN_WATCHOS = 0x30
-LC_NOTE = 0x31
-LC_BUILD_VERSION = 0x32
-LC_DYLD_EXPORTS_TRIE = 0x33 | LC_REQ_DYLD
-LC_DYLD_CHAINED_FIXUPS = 0x34 | LC_REQ_DYLD
-LC_FILESET_ENTRY = 0x35 | LC_REQ_DYLD
-
-
-# this is really a union.. but whatever
-class lc_str(p_uint32):
- pass
-
-
-p_str16 = pypackable("p_str16", bytes, "16s")
-
-vm_prot_t = p_int32
-
-
-class segment_command(Structure):
- _fields_ = (
- ("segname", p_str16),
- ("vmaddr", p_uint32),
- ("vmsize", p_uint32),
- ("fileoff", p_uint32),
- ("filesize", p_uint32),
- ("maxprot", vm_prot_t),
- ("initprot", vm_prot_t),
- ("nsects", p_uint32), # read the section structures ?
- ("flags", p_uint32),
- )
-
- def describe(self):
- s = {}
- s["segname"] = self.segname.rstrip("\x00")
- s["vmaddr"] = int(self.vmaddr)
- s["vmsize"] = int(self.vmsize)
- s["fileoff"] = int(self.fileoff)
- s["filesize"] = int(self.filesize)
- s["initprot"] = self.get_initial_virtual_memory_protections()
- s["initprot_raw"] = int(self.initprot)
- s["maxprot"] = self.get_max_virtual_memory_protections()
- s["maxprot_raw"] = int(self.maxprot)
- s["nsects"] = int(self.nsects)
- s["flags"] = self.flags
- return s
-
- def get_initial_virtual_memory_protections(self):
- vm = []
- if self.initprot == 0:
- vm.append("VM_PROT_NONE")
- if self.initprot & 1:
- vm.append("VM_PROT_READ")
- if self.initprot & 2:
- vm.append("VM_PROT_WRITE")
- if self.initprot & 4:
- vm.append("VM_PROT_EXECUTE")
- return vm
-
- def get_max_virtual_memory_protections(self):
- vm = []
- if self.maxprot == 0:
- vm.append("VM_PROT_NONE")
- if self.maxprot & 1:
- vm.append("VM_PROT_READ")
- if self.maxprot & 2:
- vm.append("VM_PROT_WRITE")
- if self.maxprot & 4:
- vm.append("VM_PROT_EXECUTE")
- return vm
-
-
-class segment_command_64(Structure):
- _fields_ = (
- ("segname", p_str16),
- ("vmaddr", p_uint64),
- ("vmsize", p_uint64),
- ("fileoff", p_uint64),
- ("filesize", p_uint64),
- ("maxprot", vm_prot_t),
- ("initprot", vm_prot_t),
- ("nsects", p_uint32), # read the section structures ?
- ("flags", p_uint32),
- )
-
- def describe(self):
- s = {}
- s["segname"] = self.segname.rstrip("\x00")
- s["vmaddr"] = int(self.vmaddr)
- s["vmsize"] = int(self.vmsize)
- s["fileoff"] = int(self.fileoff)
- s["filesize"] = int(self.filesize)
- s["initprot"] = self.get_initial_virtual_memory_protections()
- s["initprot_raw"] = int(self.initprot)
- s["maxprot"] = self.get_max_virtual_memory_protections()
- s["maxprot_raw"] = int(self.maxprot)
- s["nsects"] = int(self.nsects)
- s["flags"] = self.flags
- return s
-
- def get_initial_virtual_memory_protections(self):
- vm = []
- if self.initprot == 0:
- vm.append("VM_PROT_NONE")
- if self.initprot & 1:
- vm.append("VM_PROT_READ")
- if self.initprot & 2:
- vm.append("VM_PROT_WRITE")
- if self.initprot & 4:
- vm.append("VM_PROT_EXECUTE")
- return vm
-
- def get_max_virtual_memory_protections(self):
- vm = []
- if self.maxprot == 0:
- vm.append("VM_PROT_NONE")
- if self.maxprot & 1:
- vm.append("VM_PROT_READ")
- if self.maxprot & 2:
- vm.append("VM_PROT_WRITE")
- if self.maxprot & 4:
- vm.append("VM_PROT_EXECUTE")
- return vm
-
-
-SG_HIGHVM = 0x1
-SG_FVMLIB = 0x2
-SG_NORELOC = 0x4
-SG_PROTECTED_VERSION_1 = 0x8
-
-
-class section(Structure):
- _fields_ = (
- ("sectname", p_str16),
- ("segname", p_str16),
- ("addr", p_uint32),
- ("size", p_uint32),
- ("offset", p_uint32),
- ("align", p_uint32),
- ("reloff", p_uint32),
- ("nreloc", p_uint32),
- ("flags", p_uint32),
- ("reserved1", p_uint32),
- ("reserved2", p_uint32),
- )
-
- def describe(self):
- s = {}
- s["sectname"] = self.sectname.rstrip("\x00")
- s["segname"] = self.segname.rstrip("\x00")
- s["addr"] = int(self.addr)
- s["size"] = int(self.size)
- s["offset"] = int(self.offset)
- s["align"] = int(self.align)
- s["reloff"] = int(self.reloff)
- s["nreloc"] = int(self.nreloc)
- f = {}
- f["type"] = FLAG_SECTION_TYPES[int(self.flags) & 0xFF]
- f["attributes"] = []
- for k in FLAG_SECTION_ATTRIBUTES:
- if k & self.flags:
- f["attributes"].append(FLAG_SECTION_ATTRIBUTES[k])
- if not f["attributes"]:
- del f["attributes"]
- s["flags"] = f
- s["reserved1"] = int(self.reserved1)
- s["reserved2"] = int(self.reserved2)
- return s
-
- def add_section_data(self, data):
- self.section_data = data
-
-
-class section_64(Structure):
- _fields_ = (
- ("sectname", p_str16),
- ("segname", p_str16),
- ("addr", p_uint64),
- ("size", p_uint64),
- ("offset", p_uint32),
- ("align", p_uint32),
- ("reloff", p_uint32),
- ("nreloc", p_uint32),
- ("flags", p_uint32),
- ("reserved1", p_uint32),
- ("reserved2", p_uint32),
- ("reserved3", p_uint32),
- )
-
- def describe(self):
- s = {}
- s["sectname"] = self.sectname.rstrip("\x00")
- s["segname"] = self.segname.rstrip("\x00")
- s["addr"] = int(self.addr)
- s["size"] = int(self.size)
- s["offset"] = int(self.offset)
- s["align"] = int(self.align)
- s["reloff"] = int(self.reloff)
- s["nreloc"] = int(self.nreloc)
- f = {}
- f["type"] = FLAG_SECTION_TYPES[int(self.flags) & 0xFF]
- f["attributes"] = []
- for k in FLAG_SECTION_ATTRIBUTES:
- if k & self.flags:
- f["attributes"].append(FLAG_SECTION_ATTRIBUTES[k])
- if not f["attributes"]:
- del f["attributes"]
- s["flags"] = f
- s["reserved1"] = int(self.reserved1)
- s["reserved2"] = int(self.reserved2)
- s["reserved3"] = int(self.reserved3)
- return s
-
- def add_section_data(self, data):
- self.section_data = data
-
-
-SECTION_TYPE = 0xFF
-SECTION_ATTRIBUTES = 0xFFFFFF00
-S_REGULAR = 0x0
-S_ZEROFILL = 0x1
-S_CSTRING_LITERALS = 0x2
-S_4BYTE_LITERALS = 0x3
-S_8BYTE_LITERALS = 0x4
-S_LITERAL_POINTERS = 0x5
-S_NON_LAZY_SYMBOL_POINTERS = 0x6
-S_LAZY_SYMBOL_POINTERS = 0x7
-S_SYMBOL_STUBS = 0x8
-S_MOD_INIT_FUNC_POINTERS = 0x9
-S_MOD_TERM_FUNC_POINTERS = 0xA
-S_COALESCED = 0xB
-S_GB_ZEROFILL = 0xC
-S_INTERPOSING = 0xD
-S_16BYTE_LITERALS = 0xE
-S_DTRACE_DOF = 0xF
-S_LAZY_DYLIB_SYMBOL_POINTERS = 0x10
-S_THREAD_LOCAL_REGULAR = 0x11
-S_THREAD_LOCAL_ZEROFILL = 0x12
-S_THREAD_LOCAL_VARIABLES = 0x13
-S_THREAD_LOCAL_VARIABLE_POINTERS = 0x14
-S_THREAD_LOCAL_INIT_FUNCTION_POINTERS = 0x15
-
-FLAG_SECTION_TYPES = {
- S_REGULAR: "S_REGULAR",
- S_ZEROFILL: "S_ZEROFILL",
- S_CSTRING_LITERALS: "S_CSTRING_LITERALS",
- S_4BYTE_LITERALS: "S_4BYTE_LITERALS",
- S_8BYTE_LITERALS: "S_8BYTE_LITERALS",
- S_LITERAL_POINTERS: "S_LITERAL_POINTERS",
- S_NON_LAZY_SYMBOL_POINTERS: "S_NON_LAZY_SYMBOL_POINTERS",
- S_LAZY_SYMBOL_POINTERS: "S_LAZY_SYMBOL_POINTERS",
- S_SYMBOL_STUBS: "S_SYMBOL_STUBS",
- S_MOD_INIT_FUNC_POINTERS: "S_MOD_INIT_FUNC_POINTERS",
- S_MOD_TERM_FUNC_POINTERS: "S_MOD_TERM_FUNC_POINTERS",
- S_COALESCED: "S_COALESCED",
- S_GB_ZEROFILL: "S_GB_ZEROFILL",
- S_INTERPOSING: "S_INTERPOSING",
- S_16BYTE_LITERALS: "S_16BYTE_LITERALS",
- S_DTRACE_DOF: "S_DTRACE_DOF",
- S_LAZY_DYLIB_SYMBOL_POINTERS: "S_LAZY_DYLIB_SYMBOL_POINTERS",
- S_THREAD_LOCAL_REGULAR: "S_THREAD_LOCAL_REGULAR",
- S_THREAD_LOCAL_ZEROFILL: "S_THREAD_LOCAL_ZEROFILL",
- S_THREAD_LOCAL_VARIABLES: "S_THREAD_LOCAL_VARIABLES",
- S_THREAD_LOCAL_VARIABLE_POINTERS: "S_THREAD_LOCAL_VARIABLE_POINTERS",
- S_THREAD_LOCAL_INIT_FUNCTION_POINTERS: "S_THREAD_LOCAL_INIT_FUNCTION_POINTERS",
-}
-
-SECTION_ATTRIBUTES_USR = 0xFF000000
-S_ATTR_PURE_INSTRUCTIONS = 0x80000000
-S_ATTR_NO_TOC = 0x40000000
-S_ATTR_STRIP_STATIC_SYMS = 0x20000000
-S_ATTR_NO_DEAD_STRIP = 0x10000000
-S_ATTR_LIVE_SUPPORT = 0x08000000
-S_ATTR_SELF_MODIFYING_CODE = 0x04000000
-S_ATTR_DEBUG = 0x02000000
-
-SECTION_ATTRIBUTES_SYS = 0x00FFFF00
-S_ATTR_SOME_INSTRUCTIONS = 0x00000400
-S_ATTR_EXT_RELOC = 0x00000200
-S_ATTR_LOC_RELOC = 0x00000100
-
-FLAG_SECTION_ATTRIBUTES = {
- S_ATTR_PURE_INSTRUCTIONS: "S_ATTR_PURE_INSTRUCTIONS",
- S_ATTR_NO_TOC: "S_ATTR_NO_TOC",
- S_ATTR_STRIP_STATIC_SYMS: "S_ATTR_STRIP_STATIC_SYMS",
- S_ATTR_NO_DEAD_STRIP: "S_ATTR_NO_DEAD_STRIP",
- S_ATTR_LIVE_SUPPORT: "S_ATTR_LIVE_SUPPORT",
- S_ATTR_SELF_MODIFYING_CODE: "S_ATTR_SELF_MODIFYING_CODE",
- S_ATTR_DEBUG: "S_ATTR_DEBUG",
- S_ATTR_SOME_INSTRUCTIONS: "S_ATTR_SOME_INSTRUCTIONS",
- S_ATTR_EXT_RELOC: "S_ATTR_EXT_RELOC",
- S_ATTR_LOC_RELOC: "S_ATTR_LOC_RELOC",
-}
-
-SEG_PAGEZERO = "__PAGEZERO"
-SEG_TEXT = "__TEXT"
-SECT_TEXT = "__text"
-SECT_FVMLIB_INIT0 = "__fvmlib_init0"
-SECT_FVMLIB_INIT1 = "__fvmlib_init1"
-SEG_DATA = "__DATA"
-SECT_DATA = "__data"
-SECT_BSS = "__bss"
-SECT_COMMON = "__common"
-SEG_OBJC = "__OBJC"
-SECT_OBJC_SYMBOLS = "__symbol_table"
-SECT_OBJC_MODULES = "__module_info"
-SECT_OBJC_STRINGS = "__selector_strs"
-SECT_OBJC_REFS = "__selector_refs"
-SEG_ICON = "__ICON"
-SECT_ICON_HEADER = "__header"
-SECT_ICON_TIFF = "__tiff"
-SEG_LINKEDIT = "__LINKEDIT"
-SEG_UNIXSTACK = "__UNIXSTACK"
-SEG_IMPORT = "__IMPORT"
-
-#
-# I really should remove all these _command classes because they
-# are no different. I decided to keep the load commands separate,
-# so classes like fvmlib and fvmlib_command are equivalent.
-#
-
-
-class fvmlib(Structure):
- _fields_ = (
- ("name", lc_str),
- ("minor_version", mach_version_helper),
- ("header_addr", p_uint32),
- )
-
-
-class fvmlib_command(Structure):
- _fields_ = fvmlib._fields_
-
- def describe(self):
- s = {}
- s["header_addr"] = int(self.header_addr)
- return s
-
-
-class dylib(Structure):
- _fields_ = (
- ("name", lc_str),
- ("timestamp", mach_timestamp_helper),
- ("current_version", mach_version_helper),
- ("compatibility_version", mach_version_helper),
- )
-
-
-# merged dylib structure
-class dylib_command(Structure):
- _fields_ = dylib._fields_
-
- def describe(self):
- s = {}
- s["timestamp"] = str(self.timestamp)
- s["current_version"] = str(self.current_version)
- s["compatibility_version"] = str(self.compatibility_version)
- return s
-
-
-class sub_framework_command(Structure):
- _fields_ = (("umbrella", lc_str),)
-
- def describe(self):
- return {}
-
-
-class sub_client_command(Structure):
- _fields_ = (("client", lc_str),)
-
- def describe(self):
- return {}
-
-
-class sub_umbrella_command(Structure):
- _fields_ = (("sub_umbrella", lc_str),)
-
- def describe(self):
- return {}
-
-
-class sub_library_command(Structure):
- _fields_ = (("sub_library", lc_str),)
-
- def describe(self):
- return {}
-
-
-class prebound_dylib_command(Structure):
- _fields_ = (("name", lc_str), ("nmodules", p_uint32), ("linked_modules", lc_str))
-
- def describe(self):
- return {"nmodules": int(self.nmodules)}
-
-
-class dylinker_command(Structure):
- _fields_ = (("name", lc_str),)
-
- def describe(self):
- return {}
-
-
-class thread_command(Structure):
- _fields_ = (("flavor", p_uint32), ("count", p_uint32))
-
- def describe(self):
- s = {}
- s["flavor"] = int(self.flavor)
- s["count"] = int(self.count)
- return s
-
-
-class entry_point_command(Structure):
- _fields_ = (("entryoff", p_uint64), ("stacksize", p_uint64))
-
- def describe(self):
- s = {}
- s["entryoff"] = int(self.entryoff)
- s["stacksize"] = int(self.stacksize)
- return s
-
-
-class routines_command(Structure):
- _fields_ = (
- ("init_address", p_uint32),
- ("init_module", p_uint32),
- ("reserved1", p_uint32),
- ("reserved2", p_uint32),
- ("reserved3", p_uint32),
- ("reserved4", p_uint32),
- ("reserved5", p_uint32),
- ("reserved6", p_uint32),
- )
-
- def describe(self):
- s = {}
- s["init_address"] = int(self.init_address)
- s["init_module"] = int(self.init_module)
- s["reserved1"] = int(self.reserved1)
- s["reserved2"] = int(self.reserved2)
- s["reserved3"] = int(self.reserved3)
- s["reserved4"] = int(self.reserved4)
- s["reserved5"] = int(self.reserved5)
- s["reserved6"] = int(self.reserved6)
- return s
-
-
-class routines_command_64(Structure):
- _fields_ = (
- ("init_address", p_uint64),
- ("init_module", p_uint64),
- ("reserved1", p_uint64),
- ("reserved2", p_uint64),
- ("reserved3", p_uint64),
- ("reserved4", p_uint64),
- ("reserved5", p_uint64),
- ("reserved6", p_uint64),
- )
-
- def describe(self):
- s = {}
- s["init_address"] = int(self.init_address)
- s["init_module"] = int(self.init_module)
- s["reserved1"] = int(self.reserved1)
- s["reserved2"] = int(self.reserved2)
- s["reserved3"] = int(self.reserved3)
- s["reserved4"] = int(self.reserved4)
- s["reserved5"] = int(self.reserved5)
- s["reserved6"] = int(self.reserved6)
- return s
-
-
-class symtab_command(Structure):
- _fields_ = (
- ("symoff", p_uint32),
- ("nsyms", p_uint32),
- ("stroff", p_uint32),
- ("strsize", p_uint32),
- )
-
- def describe(self):
- s = {}
- s["symoff"] = int(self.symoff)
- s["nsyms"] = int(self.nsyms)
- s["stroff"] = int(self.stroff)
- s["strsize"] = int(self.strsize)
- return s
-
-
-class dysymtab_command(Structure):
- _fields_ = (
- ("ilocalsym", p_uint32),
- ("nlocalsym", p_uint32),
- ("iextdefsym", p_uint32),
- ("nextdefsym", p_uint32),
- ("iundefsym", p_uint32),
- ("nundefsym", p_uint32),
- ("tocoff", p_uint32),
- ("ntoc", p_uint32),
- ("modtaboff", p_uint32),
- ("nmodtab", p_uint32),
- ("extrefsymoff", p_uint32),
- ("nextrefsyms", p_uint32),
- ("indirectsymoff", p_uint32),
- ("nindirectsyms", p_uint32),
- ("extreloff", p_uint32),
- ("nextrel", p_uint32),
- ("locreloff", p_uint32),
- ("nlocrel", p_uint32),
- )
-
- def describe(self):
- dys = {}
- dys["ilocalsym"] = int(self.ilocalsym)
- dys["nlocalsym"] = int(self.nlocalsym)
- dys["iextdefsym"] = int(self.iextdefsym)
- dys["nextdefsym"] = int(self.nextdefsym)
- dys["iundefsym"] = int(self.iundefsym)
- dys["nundefsym"] = int(self.nundefsym)
- dys["tocoff"] = int(self.tocoff)
- dys["ntoc"] = int(self.ntoc)
- dys["modtaboff"] = int(self.modtaboff)
- dys["nmodtab"] = int(self.nmodtab)
- dys["extrefsymoff"] = int(self.extrefsymoff)
- dys["nextrefsyms"] = int(self.nextrefsyms)
- dys["indirectsymoff"] = int(self.indirectsymoff)
- dys["nindirectsyms"] = int(self.nindirectsyms)
- dys["extreloff"] = int(self.extreloff)
- dys["nextrel"] = int(self.nextrel)
- dys["locreloff"] = int(self.locreloff)
- dys["nlocrel"] = int(self.nlocrel)
- return dys
-
-
-INDIRECT_SYMBOL_LOCAL = 0x80000000
-INDIRECT_SYMBOL_ABS = 0x40000000
-
-
-class dylib_table_of_contents(Structure):
- _fields_ = (("symbol_index", p_uint32), ("module_index", p_uint32))
-
-
-class dylib_module(Structure):
- _fields_ = (
- ("module_name", p_uint32),
- ("iextdefsym", p_uint32),
- ("nextdefsym", p_uint32),
- ("irefsym", p_uint32),
- ("nrefsym", p_uint32),
- ("ilocalsym", p_uint32),
- ("nlocalsym", p_uint32),
- ("iextrel", p_uint32),
- ("nextrel", p_uint32),
- ("iinit_iterm", p_uint32),
- ("ninit_nterm", p_uint32),
- ("objc_module_info_addr", p_uint32),
- ("objc_module_info_size", p_uint32),
- )
-
-
-class dylib_module_64(Structure):
- _fields_ = (
- ("module_name", p_uint32),
- ("iextdefsym", p_uint32),
- ("nextdefsym", p_uint32),
- ("irefsym", p_uint32),
- ("nrefsym", p_uint32),
- ("ilocalsym", p_uint32),
- ("nlocalsym", p_uint32),
- ("iextrel", p_uint32),
- ("nextrel", p_uint32),
- ("iinit_iterm", p_uint32),
- ("ninit_nterm", p_uint32),
- ("objc_module_info_size", p_uint32),
- ("objc_module_info_addr", p_uint64),
- )
-
-
-class dylib_reference(Structure):
- _fields_ = (
- ("isym_flags", p_uint32),
- # ('isym', p_uint8 * 3),
- # ('flags', p_uint8),
- )
-
-
-class twolevel_hints_command(Structure):
- _fields_ = (("offset", p_uint32), ("nhints", p_uint32))
-
- def describe(self):
- s = {}
- s["offset"] = int(self.offset)
- s["nhints"] = int(self.nhints)
- return s
-
-
-class twolevel_hint(Structure):
- _fields_ = (
- ("isub_image_itoc", p_uint32),
- # ('isub_image', p_uint8),
- # ('itoc', p_uint8 * 3),
- )
-
-
-class prebind_cksum_command(Structure):
- _fields_ = (("cksum", p_uint32),)
-
- def describe(self):
- return {"cksum": int(self.cksum)}
-
-
-class symseg_command(Structure):
- _fields_ = (("offset", p_uint32), ("size", p_uint32))
-
- def describe(self):
- s = {}
- s["offset"] = int(self.offset)
- s["size"] = int(self.size)
-
-
-class ident_command(Structure):
- _fields_ = ()
-
- def describe(self):
- return {}
-
-
-class fvmfile_command(Structure):
- _fields_ = (("name", lc_str), ("header_addr", p_uint32))
-
- def describe(self):
- return {"header_addr": int(self.header_addr)}
-
-
-class uuid_command(Structure):
- _fields_ = (("uuid", p_str16),)
-
- def describe(self):
- return {"uuid": self.uuid.rstrip("\x00")}
-
-
-class rpath_command(Structure):
- _fields_ = (("path", lc_str),)
-
- def describe(self):
- return {}
-
-
-class linkedit_data_command(Structure):
- _fields_ = (("dataoff", p_uint32), ("datasize", p_uint32))
-
- def describe(self):
- s = {}
- s["dataoff"] = int(self.dataoff)
- s["datasize"] = int(self.datasize)
- return s
-
-
-class version_min_command(Structure):
- _fields_ = (
- ("version", p_uint32), # X.Y.Z is encoded in nibbles xxxx.yy.zz
- ("sdk", p_uint32),
- )
-
- def describe(self):
- v = int(self.version)
- v3 = v & 0xFF
- v = v >> 8
- v2 = v & 0xFF
- v = v >> 8
- v1 = v & 0xFFFF
- s = int(self.sdk)
- s3 = s & 0xFF
- s = s >> 8
- s2 = s & 0xFF
- s = s >> 8
- s1 = s & 0xFFFF
- return {
- "version": str(int(v1)) + "." + str(int(v2)) + "." + str(int(v3)),
- "sdk": str(int(s1)) + "." + str(int(s2)) + "." + str(int(s3)),
- }
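
version_min_command packs an X.Y.Z version into one 32-bit word — two bytes of major, one byte each of minor and patch — exactly as describe() above unpacks it. A standalone sketch with a made-up raw value:

    raw = 0x000A0F06  # 10.15.6 in the xxxx.yy.zz encoding noted above

    major = (raw >> 16) & 0xFFFF
    minor = (raw >> 8) & 0xFF
    patch = raw & 0xFF
    print("%d.%d.%d" % (major, minor, patch))  # -> 10.15.6
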
-
-
-class source_version_command(Structure):
- _fields_ = (("version", p_uint64),)
-
- def describe(self):
- v = int(self.version)
- a = v >> 40
- b = (v >> 30) & 0x3FF
- c = (v >> 20) & 0x3FF
- d = (v >> 10) & 0x3FF
- e = v & 0x3FF
- r = str(a) + "." + str(b) + "." + str(c) + "." + str(d) + "." + str(e)
- return {"version": r}
-
-
-class note_command(Structure):
- _fields_ = (("data_owner", p_str16), ("offset", p_uint64), ("size", p_uint64))
-
-
-class build_version_command(Structure):
- _fields_ = (
- ("platform", p_uint32),
- ("minos", p_uint32),
- ("sdk", p_uint32),
- ("ntools", p_uint32),
- )
-
- def describe(self):
- return {}
-
-
-class build_tool_version(Structure):
- _fields_ = (("tool", p_uint32), ("version", p_uint32))
-
-
-class data_in_code_entry(Structure):
- _fields_ = (("offset", p_uint32), ("length", p_uint32), ("kind", p_uint32))
-
- def describe(self):
- return {"offset": self.offset, "length": self.length, "kind": self.kind}
-
-
-DICE_KIND_DATA = 0x0001
-DICE_KIND_JUMP_TABLE8 = 0x0002
-DICE_KIND_JUMP_TABLE16 = 0x0003
-DICE_KIND_JUMP_TABLE32 = 0x0004
-DICE_KIND_ABS_JUMP_TABLE32 = 0x0005
-
-DATA_IN_CODE_KINDS = {
- DICE_KIND_DATA: "DICE_KIND_DATA",
- DICE_KIND_JUMP_TABLE8: "DICE_KIND_JUMP_TABLE8",
- DICE_KIND_JUMP_TABLE16: "DICE_KIND_JUMP_TABLE16",
- DICE_KIND_JUMP_TABLE32: "DICE_KIND_JUMP_TABLE32",
- DICE_KIND_ABS_JUMP_TABLE32: "DICE_KIND_ABS_JUMP_TABLE32",
-}
-
-
-class tlv_descriptor(Structure):
- _fields_ = (
- ("thunk", p_long), # Actually a pointer to a function
- ("key", p_ulong),
- ("offset", p_ulong),
- )
-
- def describe(self):
- return {"thunk": self.thunk, "key": self.key, "offset": self.offset}
-
-
-class encryption_info_command(Structure):
- _fields_ = (("cryptoff", p_uint32), ("cryptsize", p_uint32), ("cryptid", p_uint32))
-
- def describe(self):
- s = {}
- s["cryptoff"] = int(self.cryptoff)
- s["cryptsize"] = int(self.cryptsize)
- s["cryptid"] = int(self.cryptid)
- return s
-
-
-class encryption_info_command_64(Structure):
- _fields_ = (
- ("cryptoff", p_uint32),
- ("cryptsize", p_uint32),
- ("cryptid", p_uint32),
- ("pad", p_uint32),
- )
-
- def describe(self):
- s = {}
- s["cryptoff"] = int(self.cryptoff)
- s["cryptsize"] = int(self.cryptsize)
- s["cryptid"] = int(self.cryptid)
- s["pad"] = int(self.pad)
- return s
-
-
-class dyld_info_command(Structure):
- _fields_ = (
- ("rebase_off", p_uint32),
- ("rebase_size", p_uint32),
- ("bind_off", p_uint32),
- ("bind_size", p_uint32),
- ("weak_bind_off", p_uint32),
- ("weak_bind_size", p_uint32),
- ("lazy_bind_off", p_uint32),
- ("lazy_bind_size", p_uint32),
- ("export_off", p_uint32),
- ("export_size", p_uint32),
- )
-
- def describe(self):
- dyld = {}
- dyld["rebase_off"] = int(self.rebase_off)
- dyld["rebase_size"] = int(self.rebase_size)
- dyld["bind_off"] = int(self.bind_off)
- dyld["bind_size"] = int(self.bind_size)
- dyld["weak_bind_off"] = int(self.weak_bind_off)
- dyld["weak_bind_size"] = int(self.weak_bind_size)
- dyld["lazy_bind_off"] = int(self.lazy_bind_off)
- dyld["lazy_bind_size"] = int(self.lazy_bind_size)
- dyld["export_off"] = int(self.export_off)
- dyld["export_size"] = int(self.export_size)
- return dyld
-
-
-class linker_option_command(Structure):
- _fields_ = (("count", p_uint32),)
-
- def describe(self):
- return {"count": int(self.count)}
-
-
-class fileset_entry_command(Structure):
- _fields_ = (
- ("vmaddr", p_uint64),
- ("fileoff", p_uint64),
- ("entry_id", lc_str),
- ("reserved", p_uint32),
- )
-
-
-LC_REGISTRY = {
- LC_SEGMENT: segment_command,
- LC_IDFVMLIB: fvmlib_command,
- LC_LOADFVMLIB: fvmlib_command,
- LC_ID_DYLIB: dylib_command,
- LC_LOAD_DYLIB: dylib_command,
- LC_LOAD_WEAK_DYLIB: dylib_command,
- LC_SUB_FRAMEWORK: sub_framework_command,
- LC_SUB_CLIENT: sub_client_command,
- LC_SUB_UMBRELLA: sub_umbrella_command,
- LC_SUB_LIBRARY: sub_library_command,
- LC_PREBOUND_DYLIB: prebound_dylib_command,
- LC_ID_DYLINKER: dylinker_command,
- LC_LOAD_DYLINKER: dylinker_command,
- LC_THREAD: thread_command,
- LC_UNIXTHREAD: thread_command,
- LC_ROUTINES: routines_command,
- LC_SYMTAB: symtab_command,
- LC_DYSYMTAB: dysymtab_command,
- LC_TWOLEVEL_HINTS: twolevel_hints_command,
- LC_PREBIND_CKSUM: prebind_cksum_command,
- LC_SYMSEG: symseg_command,
- LC_IDENT: ident_command,
- LC_FVMFILE: fvmfile_command,
- LC_SEGMENT_64: segment_command_64,
- LC_ROUTINES_64: routines_command_64,
- LC_UUID: uuid_command,
- LC_RPATH: rpath_command,
- LC_CODE_SIGNATURE: linkedit_data_command,
- LC_CODE_SEGMENT_SPLIT_INFO: linkedit_data_command,
- LC_REEXPORT_DYLIB: dylib_command,
- LC_LAZY_LOAD_DYLIB: dylib_command,
- LC_ENCRYPTION_INFO: encryption_info_command,
- LC_DYLD_INFO: dyld_info_command,
- LC_DYLD_INFO_ONLY: dyld_info_command,
- LC_LOAD_UPWARD_DYLIB: dylib_command,
- LC_VERSION_MIN_MACOSX: version_min_command,
- LC_VERSION_MIN_IPHONEOS: version_min_command,
- LC_FUNCTION_STARTS: linkedit_data_command,
- LC_DYLD_ENVIRONMENT: dylinker_command,
- LC_MAIN: entry_point_command,
- LC_DATA_IN_CODE: linkedit_data_command,
- LC_SOURCE_VERSION: source_version_command,
- LC_DYLIB_CODE_SIGN_DRS: linkedit_data_command,
- LC_ENCRYPTION_INFO_64: encryption_info_command_64,
- LC_LINKER_OPTION: linker_option_command,
- LC_LINKER_OPTIMIZATION_HINT: linkedit_data_command,
- LC_VERSION_MIN_TVOS: version_min_command,
- LC_VERSION_MIN_WATCHOS: version_min_command,
- LC_NOTE: note_command,
- LC_BUILD_VERSION: build_version_command,
- LC_DYLD_EXPORTS_TRIE: linkedit_data_command,
- LC_DYLD_CHAINED_FIXUPS: linkedit_data_command,
- LC_FILESET_ENTRY: fileset_entry_command,
-}
-
-LC_NAMES = {
- LC_SEGMENT: "LC_SEGMENT",
- LC_IDFVMLIB: "LC_IDFVMLIB",
- LC_LOADFVMLIB: "LC_LOADFVMLIB",
- LC_ID_DYLIB: "LC_ID_DYLIB",
- LC_LOAD_DYLIB: "LC_LOAD_DYLIB",
- LC_LOAD_WEAK_DYLIB: "LC_LOAD_WEAK_DYLIB",
- LC_SUB_FRAMEWORK: "LC_SUB_FRAMEWORK",
- LC_SUB_CLIENT: "LC_SUB_CLIENT",
- LC_SUB_UMBRELLA: "LC_SUB_UMBRELLA",
- LC_SUB_LIBRARY: "LC_SUB_LIBRARY",
- LC_PREBOUND_DYLIB: "LC_PREBOUND_DYLIB",
- LC_ID_DYLINKER: "LC_ID_DYLINKER",
- LC_LOAD_DYLINKER: "LC_LOAD_DYLINKER",
- LC_THREAD: "LC_THREAD",
- LC_UNIXTHREAD: "LC_UNIXTHREAD",
- LC_ROUTINES: "LC_ROUTINES",
- LC_SYMTAB: "LC_SYMTAB",
- LC_DYSYMTAB: "LC_DYSYMTAB",
- LC_TWOLEVEL_HINTS: "LC_TWOLEVEL_HINTS",
- LC_PREBIND_CKSUM: "LC_PREBIND_CKSUM",
- LC_SYMSEG: "LC_SYMSEG",
- LC_IDENT: "LC_IDENT",
- LC_FVMFILE: "LC_FVMFILE",
- LC_SEGMENT_64: "LC_SEGMENT_64",
- LC_ROUTINES_64: "LC_ROUTINES_64",
- LC_UUID: "LC_UUID",
- LC_RPATH: "LC_RPATH",
- LC_CODE_SIGNATURE: "LC_CODE_SIGNATURE",
- LC_CODE_SEGMENT_SPLIT_INFO: "LC_CODE_SEGMENT_SPLIT_INFO",
- LC_REEXPORT_DYLIB: "LC_REEXPORT_DYLIB",
- LC_LAZY_LOAD_DYLIB: "LC_LAZY_LOAD_DYLIB",
- LC_ENCRYPTION_INFO: "LC_ENCRYPTION_INFO",
- LC_DYLD_INFO: "LC_DYLD_INFO",
- LC_DYLD_INFO_ONLY: "LC_DYLD_INFO_ONLY",
- LC_LOAD_UPWARD_DYLIB: "LC_LOAD_UPWARD_DYLIB",
- LC_VERSION_MIN_MACOSX: "LC_VERSION_MIN_MACOSX",
- LC_VERSION_MIN_IPHONEOS: "LC_VERSION_MIN_IPHONEOS",
- LC_FUNCTION_STARTS: "LC_FUNCTION_STARTS",
- LC_DYLD_ENVIRONMENT: "LC_DYLD_ENVIRONMENT",
- LC_MAIN: "LC_MAIN",
- LC_DATA_IN_CODE: "LC_DATA_IN_CODE",
- LC_SOURCE_VERSION: "LC_SOURCE_VERSION",
- LC_DYLIB_CODE_SIGN_DRS: "LC_DYLIB_CODE_SIGN_DRS",
- LC_LINKER_OPTIMIZATION_HINT: "LC_LINKER_OPTIMIZATION_HINT",
- LC_VERSION_MIN_TVOS: "LC_VERSION_MIN_TVOS",
- LC_VERSION_MIN_WATCHOS: "LC_VERSION_MIN_WATCHOS",
- LC_NOTE: "LC_NOTE",
- LC_BUILD_VERSION: "LC_BUILD_VERSION",
- LC_DYLD_EXPORTS_TRIE: "LC_DYLD_EXPORTS_TRIE",
- LC_DYLD_CHAINED_FIXUPS: "LC_DYLD_CHAINED_FIXUPS",
- LC_ENCRYPTION_INFO_64: "LC_ENCRYPTION_INFO_64",
- LC_LINKER_OPTION: "LC_LINKER_OPTION",
- LC_PREPAGE: "LC_PREPAGE",
- LC_FILESET_ENTRY: "LC_FILESET_ENTRY",
-}
-
-
-# this is another union.
-class n_un(p_int32):
- pass
-
-
-class nlist(Structure):
- _fields_ = (
- ("n_un", n_un),
- ("n_type", p_uint8),
- ("n_sect", p_uint8),
- ("n_desc", p_short),
- ("n_value", p_uint32),
- )
-
-
-class nlist_64(Structure):
- _fields_ = [
- ("n_un", n_un),
- ("n_type", p_uint8),
- ("n_sect", p_uint8),
- ("n_desc", p_short),
- ("n_value", p_int64),
- ]
-
-
-N_STAB = 0xE0
-N_PEXT = 0x10
-N_TYPE = 0x0E
-N_EXT = 0x01
-
-N_UNDF = 0x0
-N_ABS = 0x2
-N_SECT = 0xE
-N_PBUD = 0xC
-N_INDR = 0xA
-
-NO_SECT = 0
-MAX_SECT = 255
-
-
-class relocation_info(Structure):
- _fields_ = (("r_address", p_uint32), ("_r_bitfield", p_uint32))
-
- def _describe(self):
- return (("r_address", self.r_address), ("_r_bitfield", self._r_bitfield))
-
-
-def GET_COMM_ALIGN(n_desc):
- return (n_desc >> 8) & 0x0F
-
-
-def SET_COMM_ALIGN(n_desc, align):
- return (n_desc & 0xF0FF) | ((align & 0x0F) << 8)
-
-
-REFERENCE_TYPE = 0xF
-REFERENCE_FLAG_UNDEFINED_NON_LAZY = 0
-REFERENCE_FLAG_UNDEFINED_LAZY = 1
-REFERENCE_FLAG_DEFINED = 2
-REFERENCE_FLAG_PRIVATE_DEFINED = 3
-REFERENCE_FLAG_PRIVATE_UNDEFINED_NON_LAZY = 4
-REFERENCE_FLAG_PRIVATE_UNDEFINED_LAZY = 5
-
-REFERENCED_DYNAMICALLY = 0x0010
-
-
-def GET_LIBRARY_ORDINAL(n_desc):
- return ((n_desc) >> 8) & 0xFF
-
-
-def SET_LIBRARY_ORDINAL(n_desc, ordinal):
- return ((n_desc) & 0x00FF) | (((ordinal & 0xFF) << 8))
-
-
-SELF_LIBRARY_ORDINAL = 0x0
-MAX_LIBRARY_ORDINAL = 0xFD
-DYNAMIC_LOOKUP_ORDINAL = 0xFE
-EXECUTABLE_ORDINAL = 0xFF
-
-N_NO_DEAD_STRIP = 0x0020
-N_DESC_DISCARDED = 0x0020
-N_WEAK_REF = 0x0040
-N_WEAK_DEF = 0x0080
-N_REF_TO_WEAK = 0x0080
-N_ARM_THUMB_DEF = 0x0008
-N_SYMBOL_RESOLVER = 0x0100
-N_ALT_ENTRY = 0x0200
-
-# /usr/include/mach-o/fat.h
-FAT_MAGIC = 0xCAFEBABE
-FAT_CIGAM = 0xBEBAFECA
-FAT_MAGIC_64 = 0xCAFEBABF
-FAT_CIGAM_64 = 0xBFBAFECA
-
-
-class fat_header(Structure):
- _fields_ = (("magic", p_uint32), ("nfat_arch", p_uint32))
-
-
-class fat_arch(Structure):
- _fields_ = (
- ("cputype", cpu_type_t),
- ("cpusubtype", cpu_subtype_t),
- ("offset", p_uint32),
- ("size", p_uint32),
- ("align", p_uint32),
- )
-
-
-class fat_arch64(Structure):
- _fields_ = (
- ("cputype", cpu_type_t),
- ("cpusubtype", cpu_subtype_t),
- ("offset", p_uint64),
- ("size", p_uint64),
- ("align", p_uint32),
- ("reserved", p_uint32),
- )
-
-
-REBASE_TYPE_POINTER = 1 # noqa: E221
-REBASE_TYPE_TEXT_ABSOLUTE32 = 2 # noqa: E221
-REBASE_TYPE_TEXT_PCREL32 = 3 # noqa: E221
-
-REBASE_OPCODE_MASK = 0xF0 # noqa: E221
-REBASE_IMMEDIATE_MASK = 0x0F # noqa: E221
-REBASE_OPCODE_DONE = 0x00 # noqa: E221
-REBASE_OPCODE_SET_TYPE_IMM = 0x10 # noqa: E221
-REBASE_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB = 0x20 # noqa: E221
-REBASE_OPCODE_ADD_ADDR_ULEB = 0x30 # noqa: E221
-REBASE_OPCODE_ADD_ADDR_IMM_SCALED = 0x40 # noqa: E221
-REBASE_OPCODE_DO_REBASE_IMM_TIMES = 0x50 # noqa: E221
-REBASE_OPCODE_DO_REBASE_ULEB_TIMES = 0x60 # noqa: E221
-REBASE_OPCODE_DO_REBASE_ADD_ADDR_ULEB = 0x70 # noqa: E221
-REBASE_OPCODE_DO_REBASE_ULEB_TIMES_SKIPPING_ULEB = 0x80 # noqa: E221
-
-BIND_TYPE_POINTER = 1 # noqa: E221
-BIND_TYPE_TEXT_ABSOLUTE32 = 2 # noqa: E221
-BIND_TYPE_TEXT_PCREL32 = 3 # noqa: E221
-
-BIND_SPECIAL_DYLIB_SELF = 0 # noqa: E221
-BIND_SPECIAL_DYLIB_MAIN_EXECUTABLE = -1 # noqa: E221
-BIND_SPECIAL_DYLIB_FLAT_LOOKUP = -2 # noqa: E221
-
-BIND_SYMBOL_FLAGS_WEAK_IMPORT = 0x1 # noqa: E221
-BIND_SYMBOL_FLAGS_NON_WEAK_DEFINITION = 0x8 # noqa: E221
-
-BIND_OPCODE_MASK = 0xF0 # noqa: E221
-BIND_IMMEDIATE_MASK = 0x0F # noqa: E221
-BIND_OPCODE_DONE = 0x00 # noqa: E221
-BIND_OPCODE_SET_DYLIB_ORDINAL_IMM = 0x10 # noqa: E221
-BIND_OPCODE_SET_DYLIB_ORDINAL_ULEB = 0x20 # noqa: E221
-BIND_OPCODE_SET_DYLIB_SPECIAL_IMM = 0x30 # noqa: E221
-BIND_OPCODE_SET_SYMBOL_TRAILING_FLAGS_IMM = 0x40 # noqa: E221
-BIND_OPCODE_SET_TYPE_IMM = 0x50 # noqa: E221
-BIND_OPCODE_SET_ADDEND_SLEB = 0x60 # noqa: E221
-BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB = 0x70 # noqa: E221
-BIND_OPCODE_ADD_ADDR_ULEB = 0x80 # noqa: E221
-BIND_OPCODE_DO_BIND = 0x90 # noqa: E221
-BIND_OPCODE_DO_BIND_ADD_ADDR_ULEB = 0xA0 # noqa: E221
-BIND_OPCODE_DO_BIND_ADD_ADDR_IMM_SCALED = 0xB0 # noqa: E221
-BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB = 0xC0 # noqa: E221
-
-EXPORT_SYMBOL_FLAGS_KIND_MASK = 0x03 # noqa: E221
-EXPORT_SYMBOL_FLAGS_KIND_REGULAR = 0x00 # noqa: E221
-EXPORT_SYMBOL_FLAGS_KIND_THREAD_LOCAL = 0x01 # noqa: E221
-EXPORT_SYMBOL_FLAGS_WEAK_DEFINITION = 0x04 # noqa: E221
-EXPORT_SYMBOL_FLAGS_REEXPORT = 0x08 # noqa: E221
-EXPORT_SYMBOL_FLAGS_STUB_AND_RESOLVER = 0x10 # noqa: E221
-
-PLATFORM_MACOS = 1
-PLATFORM_IOS = 2
-PLATFORM_TVOS = 3
-PLATFORM_WATCHOS = 4
-PLATFORM_BRIDGEOS = 5
-PLATFORM_IOSMAC = 6
-PLATFORM_MACCATALYST = 6
-PLATFORM_IOSSIMULATOR = 7
-PLATFORM_TVOSSIMULATOR = 8
-PLATFORM_WATCHOSSIMULATOR = 9
-
-PLATFORM_NAMES = {
- PLATFORM_MACOS: "macOS",
- PLATFORM_IOS: "iOS",
- PLATFORM_TVOS: "tvOS",
- PLATFORM_WATCHOS: "watchOS",
- PLATFORM_BRIDGEOS: "bridgeOS",
- PLATFORM_MACCATALYST: "catalyst",
- PLATFORM_IOSSIMULATOR: "iOS simulator",
- PLATFORM_TVOSSIMULATOR: "tvOS simulator",
- PLATFORM_WATCHOSSIMULATOR: "watchOS simulator",
-}
-
-TOOL_CLANG = 1
-TOOL_SWIFT = 2
-TOOL_LD = 3
-
-TOOL_NAMES = {TOOL_CLANG: "clang", TOOL_SWIFT: "swift", TOOL_LD: "ld"}
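
The tables above are plain dictionaries, so turning a header's raw CPU fields into names is just two lookups. A sketch with hand-picked values (an x86_64 header):

    from macholib.mach_o import CPU_TYPE_NAMES, _CPU_ARCH_ABI64, get_cpu_subtype

    cputype = _CPU_ARCH_ABI64 | 7  # x86_64
    cpusubtype = 3                 # CPU_SUBTYPE_X86_64_ALL

    print(CPU_TYPE_NAMES.get(cputype, cputype))  # -> x86_64
    print(get_cpu_subtype(cputype, cpusubtype))  # -> CPU_SUBTYPE_X86_64_ALL
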
diff --git a/lib/spack/external/macholib/macho_dump.py b/lib/spack/external/macholib/macho_dump.py
deleted file mode 100644
index bca5d777ae..0000000000
--- a/lib/spack/external/macholib/macho_dump.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env python
-
-from __future__ import print_function
-
-import sys
-
-from macholib._cmdline import main as _main
-from macholib.mach_o import CPU_TYPE_NAMES, MH_CIGAM_64, MH_MAGIC_64, get_cpu_subtype
-from macholib.MachO import MachO
-
-ARCH_MAP = {
- ("<", "64-bit"): "x86_64",
- ("<", "32-bit"): "i386",
- (">", "64-bit"): "ppc64",
- (">", "32-bit"): "ppc",
-}
-
-
-def print_file(fp, path):
- print(path, file=fp)
- m = MachO(path)
- for header in m.headers:
- seen = set()
-
- if header.MH_MAGIC == MH_MAGIC_64 or header.MH_MAGIC == MH_CIGAM_64:
- sz = "64-bit"
- else:
- sz = "32-bit"
-
- arch = CPU_TYPE_NAMES.get(header.header.cputype, header.header.cputype)
-
- subarch = get_cpu_subtype(header.header.cputype, header.header.cpusubtype)
-
- print(
- " [%s endian=%r size=%r arch=%r subarch=%r]"
- % (header.__class__.__name__, header.endian, sz, arch, subarch),
- file=fp,
- )
- for _idx, _name, other in header.walkRelocatables():
- if other not in seen:
- seen.add(other)
- print("\t" + other, file=fp)
- print("", file=fp)
-
-
-def main():
- print(
- "WARNING: 'macho_dump' is deprecated, use 'python -mmacholib dump' " "instead"
- )
- _main(print_file)
-
-
-if __name__ == "__main__":
- try:
- sys.exit(main())
- except KeyboardInterrupt:
- pass
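
print_file() above produces the same per-architecture report as `python -mmacholib dump`; it can also be called directly. A sketch, assuming a macOS host (the binary path is illustrative — parsing a non-Mach-O file raises an error):

    import sys

    from macholib.macho_dump import print_file

    # Prints each architecture header plus the dylibs it links against.
    print_file(sys.stdout, "/bin/ls")
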
diff --git a/lib/spack/external/macholib/macho_find.py b/lib/spack/external/macholib/macho_find.py
deleted file mode 100644
index a963c36dcc..0000000000
--- a/lib/spack/external/macholib/macho_find.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-from __future__ import print_function
-
-from macholib._cmdline import main as _main
-
-
-def print_file(fp, path):
- print(path, file=fp)
-
-
-def main():
- print(
- "WARNING: 'macho_find' is deprecated, " "use 'python -mmacholib dump' instead"
- )
- _main(print_file)
-
-
-if __name__ == "__main__":
- try:
- main()
- except KeyboardInterrupt:
- pass
diff --git a/lib/spack/external/macholib/macho_standalone.py b/lib/spack/external/macholib/macho_standalone.py
deleted file mode 100644
index 0bb29e802f..0000000000
--- a/lib/spack/external/macholib/macho_standalone.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-
-import os
-import sys
-
-from macholib.MachOStandalone import MachOStandalone
-from macholib.util import strip_files
-
-
-def standaloneApp(path):
- if not (os.path.isdir(path) and os.path.exists(os.path.join(path, "Contents"))):
- print("%s: %s does not look like an app bundle" % (sys.argv[0], path))
- sys.exit(1)
- files = MachOStandalone(path).run()
- strip_files(files)
-
-
-def main():
- print(
- "WARNING: 'macho_standalone' is deprecated, use "
- "'python -mmacholib standalone' instead"
- )
- if not sys.argv[1:]:
- raise SystemExit("usage: %s [appbundle ...]" % (sys.argv[0],))
- for fn in sys.argv[1:]:
- standaloneApp(fn)
-
-
-if __name__ == "__main__":
- main()
diff --git a/lib/spack/external/macholib/ptypes.py b/lib/spack/external/macholib/ptypes.py
deleted file mode 100644
index 248b5cb2a3..0000000000
--- a/lib/spack/external/macholib/ptypes.py
+++ /dev/null
@@ -1,334 +0,0 @@
-"""
-This module defines packable types, that is types than can be easily
-converted to a binary format as used in MachO headers.
-"""
-import struct
-import sys
-from itertools import chain, starmap
-
-try:
- from itertools import imap, izip
-except ImportError:
- izip, imap = zip, map
-
-__all__ = """
-sizeof
-BasePackable
-Structure
-pypackable
-p_char
-p_byte
-p_ubyte
-p_short
-p_ushort
-p_int
-p_uint
-p_long
-p_ulong
-p_longlong
-p_ulonglong
-p_int8
-p_uint8
-p_int16
-p_uint16
-p_int32
-p_uint32
-p_int64
-p_uint64
-p_float
-p_double
-""".split()
-
-
-def sizeof(s):
- """
- Return the size of an object when packed
- """
- if hasattr(s, "_size_"):
- return s._size_
-
- elif isinstance(s, bytes):
- return len(s)
-
- raise ValueError(s)
-
-
-class MetaPackable(type):
- """
- Fixed size struct.unpack-able types use from_tuple as their designated
- initializer
- """
-
- def from_mmap(cls, mm, ptr, **kw):
- return cls.from_str(mm[ptr : ptr + cls._size_], **kw) # noqa: E203
-
- def from_fileobj(cls, f, **kw):
- return cls.from_str(f.read(cls._size_), **kw)
-
- def from_str(cls, s, **kw):
- endian = kw.get("_endian_", cls._endian_)
- return cls.from_tuple(struct.unpack(endian + cls._format_, s), **kw)
-
- def from_tuple(cls, tpl, **kw):
- return cls(tpl[0], **kw)
-
-
-class BasePackable(object):
- _endian_ = ">"
-
- def to_str(self):
- raise NotImplementedError
-
- def to_fileobj(self, f):
- f.write(self.to_str())
-
- def to_mmap(self, mm, ptr):
- mm[ptr : ptr + self._size_] = self.to_str() # noqa: E203
-
-
-# This defines a class with a custom metaclass, we'd normally
-# use "class Packable(BasePackable, metaclass=MetaPackage)",
-# but that syntax is not valid in Python 2 (and likewise the
-# python 2 syntax is not valid in Python 3)
-def _make():
- def to_str(self):
- cls = type(self)
- endian = getattr(self, "_endian_", cls._endian_)
- return struct.pack(endian + cls._format_, self)
-
- return MetaPackable("Packable", (BasePackable,), {"to_str": to_str})
-
-
-Packable = _make()
-del _make
-
-
-def pypackable(name, pytype, format):
- """
- Create a "mix-in" class with a python type and a
- Packable with the given struct format
- """
- size, items = _formatinfo(format)
-
- def __new__(cls, *args, **kwds):
- if "_endian_" in kwds:
- _endian_ = kwds.pop("_endian_")
- else:
- _endian_ = cls._endian_
-
- result = pytype.__new__(cls, *args, **kwds)
- result._endian_ = _endian_
- return result
-
- return type(Packable)(
- name,
- (pytype, Packable),
- {"_format_": format, "_size_": size, "_items_": items, "__new__": __new__},
- )
-
-
-def _formatinfo(format):
- """
- Calculate the size and number of items in a struct format.
- """
- size = struct.calcsize(format)
- return size, len(struct.unpack(format, b"\x00" * size))
-
-
-class MetaStructure(MetaPackable):
- """
- The metaclass of Structure objects that does all the magic.
-
- Since we can assume that all Structures have a fixed size,
- we can do a bunch of calculations up front and pack or
- unpack the whole thing in one struct call.
- """
-
- def __new__(cls, clsname, bases, dct):
- fields = dct["_fields_"]
- names = []
- types = []
- structmarks = []
- format = ""
- items = 0
- size = 0
-
- def struct_property(name, typ):
- def _get(self):
- return self._objects_[name]
-
- def _set(self, obj):
- if type(obj) is not typ:
- obj = typ(obj)
- self._objects_[name] = obj
-
- return property(_get, _set, typ.__name__)
-
- for name, typ in fields:
- dct[name] = struct_property(name, typ)
- names.append(name)
- types.append(typ)
- format += typ._format_
- size += typ._size_
- if typ._items_ > 1:
- structmarks.append((items, typ._items_, typ))
- items += typ._items_
-
- dct["_structmarks_"] = structmarks
- dct["_names_"] = names
- dct["_types_"] = types
- dct["_size_"] = size
- dct["_items_"] = items
- dct["_format_"] = format
- return super(MetaStructure, cls).__new__(cls, clsname, bases, dct)
-
- def from_tuple(cls, tpl, **kw):
- values = []
- current = 0
- for begin, length, typ in cls._structmarks_:
- if begin > current:
- values.extend(tpl[current:begin])
- current = begin + length
- values.append(typ.from_tuple(tpl[begin:current], **kw))
- values.extend(tpl[current:])
- return cls(*values, **kw)
-
-
-# See metaclass discussion earlier in this file
-def _make():
- class_dict = {}
- class_dict["_fields_"] = ()
-
- def as_method(function):
- class_dict[function.__name__] = function
-
- @as_method
- def __init__(self, *args, **kwargs):
- if len(args) == 1 and not kwargs and type(args[0]) is type(self):
- kwargs = args[0]._objects_
- args = ()
- self._objects_ = {}
- iargs = chain(izip(self._names_, args), kwargs.items())
- for key, value in iargs:
- if key not in self._names_ and key != "_endian_":
- raise TypeError
- setattr(self, key, value)
- for key, typ in izip(self._names_, self._types_):
- if key not in self._objects_:
- self._objects_[key] = typ()
-
- @as_method
- def _get_packables(self):
- for obj in imap(self._objects_.__getitem__, self._names_):
- if hasattr(obj, "_get_packables"):
- for obj in obj._get_packables():
- yield obj
-
- else:
- yield obj
-
- @as_method
- def to_str(self):
- return struct.pack(self._endian_ + self._format_, *self._get_packables())
-
- @as_method
- def __cmp__(self, other):
- if type(other) is not type(self):
- raise TypeError(
- "Cannot compare objects of type %r to objects of type %r"
- % (type(other), type(self))
- )
- if sys.version_info[0] == 2:
- _cmp = cmp # noqa: F821
- else:
-
- def _cmp(a, b):
- if a < b:
- return -1
- elif a > b:
- return 1
- elif a == b:
- return 0
- else:
- raise TypeError()
-
- for cmpval in starmap(
- _cmp, izip(self._get_packables(), other._get_packables())
- ):
- if cmpval != 0:
- return cmpval
- return 0
-
- @as_method
- def __eq__(self, other):
- r = self.__cmp__(other)
- return r == 0
-
- @as_method
- def __ne__(self, other):
- r = self.__cmp__(other)
- return r != 0
-
- @as_method
- def __lt__(self, other):
- r = self.__cmp__(other)
- return r < 0
-
- @as_method
- def __le__(self, other):
- r = self.__cmp__(other)
- return r <= 0
-
- @as_method
- def __gt__(self, other):
- r = self.__cmp__(other)
- return r > 0
-
- @as_method
- def __ge__(self, other):
- r = self.__cmp__(other)
- return r >= 0
-
- @as_method
- def __repr__(self):
- result = []
- result.append("<")
- result.append(type(self).__name__)
- for nm in self._names_:
- result.append(" %s=%r" % (nm, getattr(self, nm)))
- result.append(">")
- return "".join(result)
-
- return MetaStructure("Structure", (BasePackable,), class_dict)
-
-
-Structure = _make()
-del _make
-
-try:
- long
-except NameError:
- long = int
-
-# export common packables with predictable names
-p_char = pypackable("p_char", bytes, "c")
-p_int8 = pypackable("p_int8", int, "b")
-p_uint8 = pypackable("p_uint8", int, "B")
-p_int16 = pypackable("p_int16", int, "h")
-p_uint16 = pypackable("p_uint16", int, "H")
-p_int32 = pypackable("p_int32", int, "i")
-p_uint32 = pypackable("p_uint32", long, "I")
-p_int64 = pypackable("p_int64", long, "q")
-p_uint64 = pypackable("p_uint64", long, "Q")
-p_float = pypackable("p_float", float, "f")
-p_double = pypackable("p_double", float, "d")
-
-# Deprecated names; a trick would be needed to emit a deprecation warning.
-p_byte = p_int8
-p_ubyte = p_uint8
-p_short = p_int16
-p_ushort = p_uint16
-p_int = p_long = p_int32
-p_uint = p_ulong = p_uint32
-p_longlong = p_int64
-p_ulonglong = p_uint64
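The module deleted above boils down to packing and unpacking fixed-size, endian-aware records in a single struct call, with MetaStructure precomputing _format_ and _size_ up front. A minimal standalone sketch of that round-trip, kept outside the diff; the format and field names are illustrative, not macholib's API:

import struct

# Two big-endian uint32 fields, the same shape two p_uint32 fields produce.
FORMAT = ">II"
SIZE = struct.calcsize(FORMAT)  # what MetaStructure accumulates into _size_

def to_str(magic, nfat_arch):
    # Pack the whole record in one call, as Structure.to_str() does.
    return struct.pack(FORMAT, magic, nfat_arch)

def from_str(data):
    # Unpack in one call and return the tuple, mirroring
    # MetaPackable.from_str feeding from_tuple.
    return struct.unpack(FORMAT, data[:SIZE])

assert from_str(to_str(0xCAFEBABE, 2)) == (0xCAFEBABE, 2)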
diff --git a/lib/spack/external/macholib/util.py b/lib/spack/external/macholib/util.py
deleted file mode 100644
index d5ab33544a..0000000000
--- a/lib/spack/external/macholib/util.py
+++ /dev/null
@@ -1,262 +0,0 @@
-import os
-import shutil
-import stat
-import struct
-import sys
-
-from macholib import mach_o
-
-MAGIC = [
- struct.pack("!L", getattr(mach_o, "MH_" + _))
- for _ in ["MAGIC", "CIGAM", "MAGIC_64", "CIGAM_64"]
-]
-FAT_MAGIC_BYTES = struct.pack("!L", mach_o.FAT_MAGIC)
-MAGIC_LEN = 4
-STRIPCMD = ["/usr/bin/strip", "-x", "-S", "-"]
-
-try:
- unicode
-except NameError:
- unicode = str
-
-
-def fsencoding(s, encoding=sys.getfilesystemencoding()): # noqa: M511,B008
- """
- Ensure the given argument is in filesystem encoding (not unicode)
- """
- if isinstance(s, unicode):
- s = s.encode(encoding)
- return s
-
-
-def move(src, dst):
- """
- move that ensures filesystem encoding of paths
- """
- shutil.move(fsencoding(src), fsencoding(dst))
-
-
-def copy2(src, dst):
- """
- copy2 that ensures filesystem encoding of paths
- """
- shutil.copy2(fsencoding(src), fsencoding(dst))
-
-
-def flipwritable(fn, mode=None):
- """
- Flip the writability of a file and return the old mode. Returns None
- if the file is already writable.
- """
- if os.access(fn, os.W_OK):
- return None
- old_mode = os.stat(fn).st_mode
- os.chmod(fn, stat.S_IWRITE | old_mode)
- return old_mode
-
-
-class fileview(object):
- """
- A proxy for file-like objects that exposes a given view of a file
- """
-
- def __init__(self, fileobj, start, size):
- self._fileobj = fileobj
- self._start = start
- self._end = start + size
-
- def __repr__(self):
- return "<fileview [%d, %d] %r>" % (self._start, self._end, self._fileobj)
-
- def tell(self):
- return self._fileobj.tell() - self._start
-
- def _checkwindow(self, seekto, op):
- if not (self._start <= seekto <= self._end):
- raise IOError(
- "%s to offset %d is outside window [%d, %d]"
- % (op, seekto, self._start, self._end)
- )
-
- def seek(self, offset, whence=0):
- seekto = offset
- if whence == 0:
- seekto += self._start
- elif whence == 1:
- seekto += self._fileobj.tell()
- elif whence == 2:
- seekto += self._end
- else:
- raise IOError("Invalid whence argument to seek: %r" % (whence,))
- self._checkwindow(seekto, "seek")
- self._fileobj.seek(seekto)
-
- def write(self, bytes):
- here = self._fileobj.tell()
- self._checkwindow(here, "write")
- self._checkwindow(here + len(bytes), "write")
- self._fileobj.write(bytes)
-
- def read(self, size=sys.maxsize):
- if size < 0:
- raise ValueError(
- "Invalid size %s while reading from %s", size, self._fileobj
- )
- here = self._fileobj.tell()
- self._checkwindow(here, "read")
- bytes = min(size, self._end - here)
- return self._fileobj.read(bytes)
-
-
-def mergecopy(src, dest):
- """
- copy2, but only if the destination isn't up to date
- """
- if os.path.exists(dest) and os.stat(dest).st_mtime >= os.stat(src).st_mtime:
- return
-
- copy2(src, dest)
-
-
-def mergetree(src, dst, condition=None, copyfn=mergecopy, srcbase=None):
- """
- Recursively merge a directory tree using mergecopy().
- """
- src = fsencoding(src)
- dst = fsencoding(dst)
- if srcbase is None:
- srcbase = src
- names = map(fsencoding, os.listdir(src))
- try:
- os.makedirs(dst)
- except OSError:
- pass
- errors = []
- for name in names:
- srcname = os.path.join(src, name)
- dstname = os.path.join(dst, name)
- if condition is not None and not condition(srcname):
- continue
- try:
- if os.path.islink(srcname):
- realsrc = os.readlink(srcname)
- os.symlink(realsrc, dstname)
- elif os.path.isdir(srcname):
- mergetree(
- srcname,
- dstname,
- condition=condition,
- copyfn=copyfn,
- srcbase=srcbase,
- )
- else:
- copyfn(srcname, dstname)
- except (IOError, os.error) as why:
- errors.append((srcname, dstname, why))
- if errors:
- raise IOError(errors)
-
-
-def sdk_normalize(filename):
- """
- Normalize a path to strip out the SDK portion, normally so that it
- can be decided whether it is in a system path or not.
- """
- if filename.startswith("/Developer/SDKs/"):
- pathcomp = filename.split("/")
- del pathcomp[1:4]
- filename = "/".join(pathcomp)
- return filename
-
-
-NOT_SYSTEM_FILES = []
-
-
-def in_system_path(filename):
- """
- Return True if the file is in a system path
- """
- fn = sdk_normalize(os.path.realpath(filename))
- if fn.startswith("/usr/local/"):
- return False
- elif fn.startswith("/System/") or fn.startswith("/usr/"):
- if fn in NOT_SYSTEM_FILES:
- return False
- return True
- else:
- return False
-
-
-def has_filename_filter(module):
- """
- Return False if the module does not have a filename attribute
- """
- return getattr(module, "filename", None) is not None
-
-
-def get_magic():
- """
- Get a list of valid Mach-O header signatures, not including the fat header
- """
- return MAGIC
-
-
-def is_platform_file(path):
- """
- Return True if the file is Mach-O
- """
- if not os.path.exists(path) or os.path.islink(path):
- return False
- # If the header is fat, we need to read into the first arch
- with open(path, "rb") as fileobj:
- bytes = fileobj.read(MAGIC_LEN)
- if bytes == FAT_MAGIC_BYTES:
- # Read in the fat header
- fileobj.seek(0)
- header = mach_o.fat_header.from_fileobj(fileobj, _endian_=">")
- if header.nfat_arch < 1:
- return False
- # Read in the first fat arch header
- arch = mach_o.fat_arch.from_fileobj(fileobj, _endian_=">")
- fileobj.seek(arch.offset)
- # Read magic off the first header
- bytes = fileobj.read(MAGIC_LEN)
- for magic in MAGIC:
- if bytes == magic:
- return True
- return False
-
-
-def iter_platform_files(dst):
- """
- Walk a directory and yield each full path that is a Mach-O file
- """
- for root, _dirs, files in os.walk(dst):
- for fn in files:
- fn = os.path.join(root, fn)
- if is_platform_file(fn):
- yield fn
-
-
-def strip_files(files, argv_max=(256 * 1024)):
- """
- Strip a list of files
- """
- tostrip = [(fn, flipwritable(fn)) for fn in files]
- while tostrip:
- cmd = list(STRIPCMD)
- flips = []
- pathlen = sum(len(s) + 1 for s in cmd)
- while pathlen < argv_max:
- if not tostrip:
- break
- added, flip = tostrip.pop()
- pathlen += len(added) + 1
- cmd.append(added)
- flips.append((added, flip))
- else:
- cmd.pop()
- tostrip.append(flips.pop())
- os.spawnv(os.P_WAIT, cmd[0], cmd)
- for args in flips:
- flipwritable(*args)
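For context on the file just removed: is_platform_file() reduces to comparing the first four bytes of a file against the known Mach-O magic numbers, plus an extra hop into the first architecture of a fat binary. A standalone sketch of the simple, non-fat case; the magic constants are the standard Mach-O values, and the function name is illustrative, not macholib's API:

import struct

MH_MAGIC, MH_CIGAM = 0xFEEDFACE, 0xCEFAEDFE        # 32-bit, both endians
MH_MAGIC_64, MH_CIGAM_64 = 0xFEEDFACF, 0xCFFAEDFE  # 64-bit, both endians
MAGICS = {struct.pack("!L", m)
          for m in (MH_MAGIC, MH_CIGAM, MH_MAGIC_64, MH_CIGAM_64)}

def looks_like_macho(path):
    # Mirrors the non-fat branch of is_platform_file() above.
    with open(path, "rb") as fileobj:
        return fileobj.read(4) in MAGICS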
diff --git a/lib/spack/external/markupsafe/LICENSE.rst b/lib/spack/external/markupsafe/LICENSE.rst
deleted file mode 100644
index 9d227a0cc4..0000000000
--- a/lib/spack/external/markupsafe/LICENSE.rst
+++ /dev/null
@@ -1,28 +0,0 @@
-Copyright 2010 Pallets
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-1. Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
-
-3. Neither the name of the copyright holder nor the names of its
- contributors may be used to endorse or promote products derived from
- this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
-TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
-PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
-LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
-NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/lib/spack/external/markupsafe/README.rst b/lib/spack/external/markupsafe/README.rst
deleted file mode 100644
index 3548b8d1f7..0000000000
--- a/lib/spack/external/markupsafe/README.rst
+++ /dev/null
@@ -1,69 +0,0 @@
-MarkupSafe
-==========
-
-MarkupSafe implements a text object that escapes characters so it is
-safe to use in HTML and XML. Characters that have special meanings are
-replaced so that they display as the actual characters. This mitigates
-injection attacks, meaning untrusted user input can safely be displayed
-on a page.
-
-
-Installing
-----------
-
-Install and update using `pip`_:
-
-.. code-block:: text
-
- pip install -U MarkupSafe
-
-.. _pip: https://pip.pypa.io/en/stable/quickstart/
-
-
-Examples
---------
-
-.. code-block:: pycon
-
- >>> from markupsafe import Markup, escape
- >>> # escape replaces special characters and wraps in Markup
- >>> escape('<script>alert(document.cookie);</script>')
- Markup(u'&lt;script&gt;alert(document.cookie);&lt;/script&gt;')
- >>> # wrap in Markup to mark text "safe" and prevent escaping
-    >>> Markup('<strong>Hello</strong>')
-    Markup('<strong>Hello</strong>')
-    >>> escape(Markup('<strong>Hello</strong>'))
-    Markup('<strong>Hello</strong>')
- >>> # Markup is a text subclass (str on Python 3, unicode on Python 2)
- >>> # methods and operators escape their arguments
- >>> template = Markup("Hello <em>%s</em>")
- >>> template % '"World"'
- Markup('Hello <em>&#34;World&#34;</em>')
-
-
-Donate
-------
-
-The Pallets organization develops and supports MarkupSafe and other
-libraries that use it. In order to grow the community of contributors
-and users, and allow the maintainers to devote more time to the
-projects, `please donate today`_.
-
-.. _please donate today: https://palletsprojects.com/donate
-
-
-Links
------
-
-* Website: https://palletsprojects.com/p/markupsafe/
-* Documentation: https://markupsafe.palletsprojects.com/
-* License: `BSD-3-Clause <https://github.com/pallets/markupsafe/blob/master/LICENSE.rst>`_
-* Releases: https://pypi.org/project/MarkupSafe/
-* Code: https://github.com/pallets/markupsafe
-* Issue tracker: https://github.com/pallets/markupsafe/issues
-* Test status:
-
- * Linux, Mac: https://travis-ci.org/pallets/markupsafe
- * Windows: https://ci.appveyor.com/project/pallets/markupsafe
-
-* Test coverage: https://codecov.io/gh/pallets/markupsafe
diff --git a/lib/spack/external/markupsafe/__init__.py b/lib/spack/external/markupsafe/__init__.py
deleted file mode 100644
index da05ed328a..0000000000
--- a/lib/spack/external/markupsafe/__init__.py
+++ /dev/null
@@ -1,327 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-markupsafe
-~~~~~~~~~~
-
-Implements an escape function and a Markup string to replace HTML
-special characters with safe representations.
-
-:copyright: 2010 Pallets
-:license: BSD-3-Clause
-"""
-import re
-import string
-
-from ._compat import int_types
-from ._compat import iteritems
-from ._compat import Mapping
-from ._compat import PY2
-from ._compat import string_types
-from ._compat import text_type
-from ._compat import unichr
-
-__version__ = "1.1.1"
-
-__all__ = ["Markup", "soft_unicode", "escape", "escape_silent"]
-
-_striptags_re = re.compile(r"(<!--.*?-->|<[^>]*>)")
-_entity_re = re.compile(r"&([^& ;]+);")
-
-
-class Markup(text_type):
- """A string that is ready to be safely inserted into an HTML or XML
- document, either because it was escaped or because it was marked
- safe.
-
- Passing an object to the constructor converts it to text and wraps
- it to mark it safe without escaping. To escape the text, use the
- :meth:`escape` class method instead.
-
- >>> Markup('Hello, <em>World</em>!')
- Markup('Hello, <em>World</em>!')
- >>> Markup(42)
- Markup('42')
- >>> Markup.escape('Hello, <em>World</em>!')
-    Markup('Hello, &lt;em&gt;World&lt;/em&gt;!')
-
- This implements the ``__html__()`` interface that some frameworks
- use. Passing an object that implements ``__html__()`` will wrap the
- output of that method, marking it safe.
-
- >>> class Foo:
- ... def __html__(self):
- ... return '<a href="/foo">foo</a>'
- ...
- >>> Markup(Foo())
- Markup('<a href="/foo">foo</a>')
-
- This is a subclass of the text type (``str`` in Python 3,
- ``unicode`` in Python 2). It has the same methods as that type, but
- all methods escape their arguments and return a ``Markup`` instance.
-
- >>> Markup('<em>%s</em>') % 'foo & bar'
- Markup('<em>foo &amp; bar</em>')
- >>> Markup('<em>Hello</em> ') + '<foo>'
- Markup('<em>Hello</em> &lt;foo&gt;')
- """
-
- __slots__ = ()
-
- def __new__(cls, base=u"", encoding=None, errors="strict"):
- if hasattr(base, "__html__"):
- base = base.__html__()
- if encoding is None:
- return text_type.__new__(cls, base)
- return text_type.__new__(cls, base, encoding, errors)
-
- def __html__(self):
- return self
-
- def __add__(self, other):
- if isinstance(other, string_types) or hasattr(other, "__html__"):
- return self.__class__(super(Markup, self).__add__(self.escape(other)))
- return NotImplemented
-
- def __radd__(self, other):
- if hasattr(other, "__html__") or isinstance(other, string_types):
- return self.escape(other).__add__(self)
- return NotImplemented
-
- def __mul__(self, num):
- if isinstance(num, int_types):
- return self.__class__(text_type.__mul__(self, num))
- return NotImplemented
-
- __rmul__ = __mul__
-
- def __mod__(self, arg):
- if isinstance(arg, tuple):
- arg = tuple(_MarkupEscapeHelper(x, self.escape) for x in arg)
- else:
- arg = _MarkupEscapeHelper(arg, self.escape)
- return self.__class__(text_type.__mod__(self, arg))
-
- def __repr__(self):
- return "%s(%s)" % (self.__class__.__name__, text_type.__repr__(self))
-
- def join(self, seq):
- return self.__class__(text_type.join(self, map(self.escape, seq)))
-
- join.__doc__ = text_type.join.__doc__
-
- def split(self, *args, **kwargs):
- return list(map(self.__class__, text_type.split(self, *args, **kwargs)))
-
- split.__doc__ = text_type.split.__doc__
-
- def rsplit(self, *args, **kwargs):
- return list(map(self.__class__, text_type.rsplit(self, *args, **kwargs)))
-
- rsplit.__doc__ = text_type.rsplit.__doc__
-
- def splitlines(self, *args, **kwargs):
- return list(map(self.__class__, text_type.splitlines(self, *args, **kwargs)))
-
- splitlines.__doc__ = text_type.splitlines.__doc__
-
- def unescape(self):
- """Convert escaped markup back into a text string. This replaces
- HTML entities with the characters they represent.
-
- >>> Markup('Main &raquo; <em>About</em>').unescape()
- 'Main » <em>About</em>'
- """
- from ._constants import HTML_ENTITIES
-
- def handle_match(m):
- name = m.group(1)
- if name in HTML_ENTITIES:
- return unichr(HTML_ENTITIES[name])
- try:
- if name[:2] in ("#x", "#X"):
- return unichr(int(name[2:], 16))
- elif name.startswith("#"):
- return unichr(int(name[1:]))
- except ValueError:
- pass
- # Don't modify unexpected input.
- return m.group()
-
- return _entity_re.sub(handle_match, text_type(self))
-
- def striptags(self):
- """:meth:`unescape` the markup, remove tags, and normalize
- whitespace to single spaces.
-
- >>> Markup('Main &raquo;\t<em>About</em>').striptags()
- 'Main » About'
- """
- stripped = u" ".join(_striptags_re.sub("", self).split())
- return Markup(stripped).unescape()
-
- @classmethod
- def escape(cls, s):
- """Escape a string. Calls :func:`escape` and ensures that for
- subclasses the correct type is returned.
- """
- rv = escape(s)
- if rv.__class__ is not cls:
- return cls(rv)
- return rv
-
- def make_simple_escaping_wrapper(name): # noqa: B902
- orig = getattr(text_type, name)
-
- def func(self, *args, **kwargs):
- args = _escape_argspec(list(args), enumerate(args), self.escape)
- _escape_argspec(kwargs, iteritems(kwargs), self.escape)
- return self.__class__(orig(self, *args, **kwargs))
-
- func.__name__ = orig.__name__
- func.__doc__ = orig.__doc__
- return func
-
- for method in (
- "__getitem__",
- "capitalize",
- "title",
- "lower",
- "upper",
- "replace",
- "ljust",
- "rjust",
- "lstrip",
- "rstrip",
- "center",
- "strip",
- "translate",
- "expandtabs",
- "swapcase",
- "zfill",
- ):
- locals()[method] = make_simple_escaping_wrapper(method)
-
- def partition(self, sep):
- return tuple(map(self.__class__, text_type.partition(self, self.escape(sep))))
-
- def rpartition(self, sep):
- return tuple(map(self.__class__, text_type.rpartition(self, self.escape(sep))))
-
- def format(self, *args, **kwargs):
- formatter = EscapeFormatter(self.escape)
- kwargs = _MagicFormatMapping(args, kwargs)
- return self.__class__(formatter.vformat(self, args, kwargs))
-
- def __html_format__(self, format_spec):
- if format_spec:
- raise ValueError("Unsupported format specification " "for Markup.")
- return self
-
- # not in python 3
- if hasattr(text_type, "__getslice__"):
- __getslice__ = make_simple_escaping_wrapper("__getslice__")
-
- del method, make_simple_escaping_wrapper
-
-
-class _MagicFormatMapping(Mapping):
- """This class implements a dummy wrapper to fix a bug in the Python
- standard library for string formatting.
-
- See http://bugs.python.org/issue13598 for information about why
- this is necessary.
- """
-
- def __init__(self, args, kwargs):
- self._args = args
- self._kwargs = kwargs
- self._last_index = 0
-
- def __getitem__(self, key):
- if key == "":
- idx = self._last_index
- self._last_index += 1
- try:
- return self._args[idx]
- except LookupError:
- pass
- key = str(idx)
- return self._kwargs[key]
-
- def __iter__(self):
- return iter(self._kwargs)
-
- def __len__(self):
- return len(self._kwargs)
-
-
-if hasattr(text_type, "format"):
-
- class EscapeFormatter(string.Formatter):
- def __init__(self, escape):
- self.escape = escape
-
- def format_field(self, value, format_spec):
- if hasattr(value, "__html_format__"):
- rv = value.__html_format__(format_spec)
- elif hasattr(value, "__html__"):
- if format_spec:
- raise ValueError(
- "Format specifier {0} given, but {1} does not"
- " define __html_format__. A class that defines"
- " __html__ must define __html_format__ to work"
- " with format specifiers.".format(format_spec, type(value))
- )
- rv = value.__html__()
- else:
- # We need to make sure the format spec is unicode here as
- # otherwise the wrong callback methods are invoked. For
- # instance a byte string there would invoke __str__ and
- # not __unicode__.
- rv = string.Formatter.format_field(self, value, text_type(format_spec))
- return text_type(self.escape(rv))
-
-
-def _escape_argspec(obj, iterable, escape):
- """Helper for various string-wrapped functions."""
- for key, value in iterable:
- if hasattr(value, "__html__") or isinstance(value, string_types):
- obj[key] = escape(value)
- return obj
-
-
-class _MarkupEscapeHelper(object):
- """Helper for Markup.__mod__"""
-
- def __init__(self, obj, escape):
- self.obj = obj
- self.escape = escape
-
- def __getitem__(self, item):
- return _MarkupEscapeHelper(self.obj[item], self.escape)
-
- def __str__(self):
- return text_type(self.escape(self.obj))
-
- __unicode__ = __str__
-
- def __repr__(self):
- return str(self.escape(repr(self.obj)))
-
- def __int__(self):
- return int(self.obj)
-
- def __float__(self):
- return float(self.obj)
-
-
-# We have to import these down here because the speedups and native
-# modules import the Markup type, which is defined above.
-try:
- from ._speedups import escape, escape_silent, soft_unicode
-except ImportError:
- from ._native import escape, escape_silent, soft_unicode
-
-if not PY2:
- soft_str = soft_unicode
- __all__.append("soft_str")
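As a standalone illustration of the contract the deleted module implements: operations on a safe string escape their arguments, unless the other operand itself advertises __html__. The class and helper names below are illustrative, not markupsafe's API, and the escaping is trimmed to three characters for brevity:

class Safe(str):
    def __html__(self):
        return self  # marks this string as already safe

    def __mod__(self, arg):
        # Formatting escapes the argument first, as Markup.__mod__ does.
        return Safe(str.__mod__(self, sketch_escape(arg)))

def sketch_escape(s):
    if hasattr(s, "__html__"):
        return s.__html__()  # already marked safe, pass through
    return (str(s).replace("&", "&amp;")
                  .replace("<", "&lt;")
                  .replace(">", "&gt;"))

template = Safe("Hello <em>%s</em>")
assert template % "a & b" == "Hello <em>a &amp; b</em>"
assert template % Safe("<b>x</b>") == "Hello <em><b>x</b></em>"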
diff --git a/lib/spack/external/markupsafe/_compat.py b/lib/spack/external/markupsafe/_compat.py
deleted file mode 100644
index bc05090f9e..0000000000
--- a/lib/spack/external/markupsafe/_compat.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-markupsafe._compat
-~~~~~~~~~~~~~~~~~~
-
-:copyright: 2010 Pallets
-:license: BSD-3-Clause
-"""
-import sys
-
-PY2 = sys.version_info[0] == 2
-
-if not PY2:
- text_type = str
- string_types = (str,)
- unichr = chr
- int_types = (int,)
-
- def iteritems(x):
- return iter(x.items())
-
- from collections.abc import Mapping
-
-else:
- text_type = unicode
- string_types = (str, unicode)
- unichr = unichr
- int_types = (int, long)
-
- def iteritems(x):
- return x.iteritems()
-
- from collections import Mapping
diff --git a/lib/spack/external/markupsafe/_constants.py b/lib/spack/external/markupsafe/_constants.py
deleted file mode 100644
index 7c57c2d294..0000000000
--- a/lib/spack/external/markupsafe/_constants.py
+++ /dev/null
@@ -1,264 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-markupsafe._constants
-~~~~~~~~~~~~~~~~~~~~~
-
-:copyright: 2010 Pallets
-:license: BSD-3-Clause
-"""
-
-HTML_ENTITIES = {
- "AElig": 198,
- "Aacute": 193,
- "Acirc": 194,
- "Agrave": 192,
- "Alpha": 913,
- "Aring": 197,
- "Atilde": 195,
- "Auml": 196,
- "Beta": 914,
- "Ccedil": 199,
- "Chi": 935,
- "Dagger": 8225,
- "Delta": 916,
- "ETH": 208,
- "Eacute": 201,
- "Ecirc": 202,
- "Egrave": 200,
- "Epsilon": 917,
- "Eta": 919,
- "Euml": 203,
- "Gamma": 915,
- "Iacute": 205,
- "Icirc": 206,
- "Igrave": 204,
- "Iota": 921,
- "Iuml": 207,
- "Kappa": 922,
- "Lambda": 923,
- "Mu": 924,
- "Ntilde": 209,
- "Nu": 925,
- "OElig": 338,
- "Oacute": 211,
- "Ocirc": 212,
- "Ograve": 210,
- "Omega": 937,
- "Omicron": 927,
- "Oslash": 216,
- "Otilde": 213,
- "Ouml": 214,
- "Phi": 934,
- "Pi": 928,
- "Prime": 8243,
- "Psi": 936,
- "Rho": 929,
- "Scaron": 352,
- "Sigma": 931,
- "THORN": 222,
- "Tau": 932,
- "Theta": 920,
- "Uacute": 218,
- "Ucirc": 219,
- "Ugrave": 217,
- "Upsilon": 933,
- "Uuml": 220,
- "Xi": 926,
- "Yacute": 221,
- "Yuml": 376,
- "Zeta": 918,
- "aacute": 225,
- "acirc": 226,
- "acute": 180,
- "aelig": 230,
- "agrave": 224,
- "alefsym": 8501,
- "alpha": 945,
- "amp": 38,
- "and": 8743,
- "ang": 8736,
- "apos": 39,
- "aring": 229,
- "asymp": 8776,
- "atilde": 227,
- "auml": 228,
- "bdquo": 8222,
- "beta": 946,
- "brvbar": 166,
- "bull": 8226,
- "cap": 8745,
- "ccedil": 231,
- "cedil": 184,
- "cent": 162,
- "chi": 967,
- "circ": 710,
- "clubs": 9827,
- "cong": 8773,
- "copy": 169,
- "crarr": 8629,
- "cup": 8746,
- "curren": 164,
- "dArr": 8659,
- "dagger": 8224,
- "darr": 8595,
- "deg": 176,
- "delta": 948,
- "diams": 9830,
- "divide": 247,
- "eacute": 233,
- "ecirc": 234,
- "egrave": 232,
- "empty": 8709,
- "emsp": 8195,
- "ensp": 8194,
- "epsilon": 949,
- "equiv": 8801,
- "eta": 951,
- "eth": 240,
- "euml": 235,
- "euro": 8364,
- "exist": 8707,
- "fnof": 402,
- "forall": 8704,
- "frac12": 189,
- "frac14": 188,
- "frac34": 190,
- "frasl": 8260,
- "gamma": 947,
- "ge": 8805,
- "gt": 62,
- "hArr": 8660,
- "harr": 8596,
- "hearts": 9829,
- "hellip": 8230,
- "iacute": 237,
- "icirc": 238,
- "iexcl": 161,
- "igrave": 236,
- "image": 8465,
- "infin": 8734,
- "int": 8747,
- "iota": 953,
- "iquest": 191,
- "isin": 8712,
- "iuml": 239,
- "kappa": 954,
- "lArr": 8656,
- "lambda": 955,
- "lang": 9001,
- "laquo": 171,
- "larr": 8592,
- "lceil": 8968,
- "ldquo": 8220,
- "le": 8804,
- "lfloor": 8970,
- "lowast": 8727,
- "loz": 9674,
- "lrm": 8206,
- "lsaquo": 8249,
- "lsquo": 8216,
- "lt": 60,
- "macr": 175,
- "mdash": 8212,
- "micro": 181,
- "middot": 183,
- "minus": 8722,
- "mu": 956,
- "nabla": 8711,
- "nbsp": 160,
- "ndash": 8211,
- "ne": 8800,
- "ni": 8715,
- "not": 172,
- "notin": 8713,
- "nsub": 8836,
- "ntilde": 241,
- "nu": 957,
- "oacute": 243,
- "ocirc": 244,
- "oelig": 339,
- "ograve": 242,
- "oline": 8254,
- "omega": 969,
- "omicron": 959,
- "oplus": 8853,
- "or": 8744,
- "ordf": 170,
- "ordm": 186,
- "oslash": 248,
- "otilde": 245,
- "otimes": 8855,
- "ouml": 246,
- "para": 182,
- "part": 8706,
- "permil": 8240,
- "perp": 8869,
- "phi": 966,
- "pi": 960,
- "piv": 982,
- "plusmn": 177,
- "pound": 163,
- "prime": 8242,
- "prod": 8719,
- "prop": 8733,
- "psi": 968,
- "quot": 34,
- "rArr": 8658,
- "radic": 8730,
- "rang": 9002,
- "raquo": 187,
- "rarr": 8594,
- "rceil": 8969,
- "rdquo": 8221,
- "real": 8476,
- "reg": 174,
- "rfloor": 8971,
- "rho": 961,
- "rlm": 8207,
- "rsaquo": 8250,
- "rsquo": 8217,
- "sbquo": 8218,
- "scaron": 353,
- "sdot": 8901,
- "sect": 167,
- "shy": 173,
- "sigma": 963,
- "sigmaf": 962,
- "sim": 8764,
- "spades": 9824,
- "sub": 8834,
- "sube": 8838,
- "sum": 8721,
- "sup": 8835,
- "sup1": 185,
- "sup2": 178,
- "sup3": 179,
- "supe": 8839,
- "szlig": 223,
- "tau": 964,
- "there4": 8756,
- "theta": 952,
- "thetasym": 977,
- "thinsp": 8201,
- "thorn": 254,
- "tilde": 732,
- "times": 215,
- "trade": 8482,
- "uArr": 8657,
- "uacute": 250,
- "uarr": 8593,
- "ucirc": 251,
- "ugrave": 249,
- "uml": 168,
- "upsih": 978,
- "upsilon": 965,
- "uuml": 252,
- "weierp": 8472,
- "xi": 958,
- "yacute": 253,
- "yen": 165,
- "yuml": 255,
- "zeta": 950,
- "zwj": 8205,
- "zwnj": 8204,
-}
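The table above exists to drive Markup.unescape() in __init__.py. A standalone sketch of that substitution, using the stdlib's entity table in place of the vendored one; the function name is illustrative:

import re
from html.entities import name2codepoint

_entity_re = re.compile(r"&([^& ;]+);")

def unescape(text):
    def handle_match(m):
        name = m.group(1)
        if name in name2codepoint:
            return chr(name2codepoint[name])  # named entity via the table
        try:
            if name[:2] in ("#x", "#X"):
                return chr(int(name[2:], 16))  # hex numeric entity
            elif name.startswith("#"):
                return chr(int(name[1:]))      # decimal numeric entity
        except ValueError:
            pass
        return m.group()  # leave unexpected input unchanged

    return _entity_re.sub(handle_match, text)

assert unescape("Main &raquo; &#169;") == "Main » ©"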
diff --git a/lib/spack/external/markupsafe/_native.py b/lib/spack/external/markupsafe/_native.py
deleted file mode 100644
index cd08752cd8..0000000000
--- a/lib/spack/external/markupsafe/_native.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-markupsafe._native
-~~~~~~~~~~~~~~~~~~
-
-Native Python implementation used when the C module is not compiled.
-
-:copyright: 2010 Pallets
-:license: BSD-3-Clause
-"""
-from . import Markup
-from ._compat import text_type
-
-
-def escape(s):
- """Replace the characters ``&``, ``<``, ``>``, ``'``, and ``"`` in
- the string with HTML-safe sequences. Use this if you need to display
- text that might contain such characters in HTML.
-
- If the object has an ``__html__`` method, it is called and the
- return value is assumed to already be safe for HTML.
-
- :param s: An object to be converted to a string and escaped.
- :return: A :class:`Markup` string with the escaped text.
- """
- if hasattr(s, "__html__"):
- return Markup(s.__html__())
- return Markup(
- text_type(s)
- .replace("&", "&amp;")
- .replace(">", "&gt;")
- .replace("<", "&lt;")
- .replace("'", "&#39;")
- .replace('"', "&#34;")
- )
-
-
-def escape_silent(s):
- """Like :func:`escape` but treats ``None`` as the empty string.
- Useful with optional values, as otherwise you get the string
- ``'None'`` when the value is ``None``.
-
- >>> escape(None)
- Markup('None')
- >>> escape_silent(None)
- Markup('')
- """
- if s is None:
- return Markup()
- return escape(s)
-
-
-def soft_unicode(s):
- """Convert an object to a string if it isn't already. This preserves
- a :class:`Markup` string rather than converting it back to a basic
- string, so it will still be marked as safe and won't be escaped
- again.
-
- >>> value = escape('<User 1>')
- >>> value
- Markup('&lt;User 1&gt;')
- >>> escape(str(value))
- Markup('&amp;lt;User 1&amp;gt;')
- >>> escape(soft_unicode(value))
- Markup('&lt;User 1&gt;')
- """
- if not isinstance(s, text_type):
- s = text_type(s)
- return s
diff --git a/lib/spack/external/pyrsistent/LICENSE b/lib/spack/external/pyrsistent/LICENSE
deleted file mode 100644
index 6609e4c05a..0000000000
--- a/lib/spack/external/pyrsistent/LICENSE
+++ /dev/null
@@ -1,22 +0,0 @@
-Copyright (c) 2019 Tobias Gustafsson
-
-Permission is hereby granted, free of charge, to any person
-obtaining a copy of this software and associated documentation
-files (the "Software"), to deal in the Software without
-restriction, including without limitation the rights to use,
-copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file
diff --git a/lib/spack/external/pyrsistent/__init__.py b/lib/spack/external/pyrsistent/__init__.py
deleted file mode 100644
index 6e610c1ddb..0000000000
--- a/lib/spack/external/pyrsistent/__init__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyrsistent._pmap import pmap
-
-
-__all__ = ('pmap',)
diff --git a/lib/spack/external/pyrsistent/_compat.py b/lib/spack/external/pyrsistent/_compat.py
deleted file mode 100644
index e728586afe..0000000000
--- a/lib/spack/external/pyrsistent/_compat.py
+++ /dev/null
@@ -1,31 +0,0 @@
-from six import string_types
-
-
-# enum compat
-try:
- from enum import Enum
-except ImportError:
- class Enum(object): pass
- # no objects will be instances of this class
-
-# collections compat
-try:
- from collections.abc import (
- Container,
- Hashable,
- Iterable,
- Mapping,
- Sequence,
- Set,
- Sized,
- )
-except ImportError:
- from collections import (
- Container,
- Hashable,
- Iterable,
- Mapping,
- Sequence,
- Set,
- Sized,
- )
diff --git a/lib/spack/external/pyrsistent/_pmap.py b/lib/spack/external/pyrsistent/_pmap.py
deleted file mode 100644
index e8a0ec53f8..0000000000
--- a/lib/spack/external/pyrsistent/_pmap.py
+++ /dev/null
@@ -1,460 +0,0 @@
-from ._compat import Mapping, Hashable
-from itertools import chain
-import six
-from pyrsistent._pvector import pvector
-from pyrsistent._transformations import transform
-
-
-class PMap(object):
- """
-    Persistent map/dict. Tries to follow the same naming conventions as the built-in dict where feasible.
-
- Do not instantiate directly, instead use the factory functions :py:func:`m` or :py:func:`pmap` to
- create an instance.
-
-    Was originally written as a very close copy of the Clojure equivalent but was later rewritten to more closely
-    resemble the Python dict. This means that a sparse vector (a PVector) of buckets is used. The keys are
-    hashed and the elements inserted at position hash % len(bucket_vector). Whenever the bucket vector's size
-    falls below 2/3 of the map size, the map is reallocated to a vector of double the size. This is done to avoid
-    excessive hash collisions.
-
-    This structure corresponds most closely to the built-in dict type and is intended as a replacement. Where the
-    semantics are the same (more or less) the same function names have been used, but in some cases this was not
-    possible, for example for assignment and deletion of values.
-
-    PMap implements the Mapping protocol and is Hashable. It also supports dot-notation for
-    element access.
-
-    Random access and insert are O(log32(n)) where n is the size of the map.
-
- The following are examples of some common operations on persistent maps
-
- >>> m1 = m(a=1, b=3)
- >>> m2 = m1.set('c', 3)
- >>> m3 = m2.remove('a')
- >>> m1
- pmap({'b': 3, 'a': 1})
- >>> m2
- pmap({'c': 3, 'b': 3, 'a': 1})
- >>> m3
- pmap({'c': 3, 'b': 3})
- >>> m3['c']
- 3
- >>> m3.c
- 3
- """
- __slots__ = ('_size', '_buckets', '__weakref__', '_cached_hash')
-
- def __new__(cls, size, buckets):
- self = super(PMap, cls).__new__(cls)
- self._size = size
- self._buckets = buckets
- return self
-
- @staticmethod
- def _get_bucket(buckets, key):
- index = hash(key) % len(buckets)
- bucket = buckets[index]
- return index, bucket
-
- @staticmethod
- def _getitem(buckets, key):
- _, bucket = PMap._get_bucket(buckets, key)
- if bucket:
- for k, v in bucket:
- if k == key:
- return v
-
- raise KeyError(key)
-
- def __getitem__(self, key):
- return PMap._getitem(self._buckets, key)
-
- @staticmethod
- def _contains(buckets, key):
- _, bucket = PMap._get_bucket(buckets, key)
- if bucket:
- for k, _ in bucket:
- if k == key:
- return True
-
- return False
-
- return False
-
- def __contains__(self, key):
- return self._contains(self._buckets, key)
-
- get = Mapping.get
-
- def __iter__(self):
- return self.iterkeys()
-
- def __getattr__(self, key):
- try:
- return self[key]
- except KeyError:
- raise AttributeError(
- "{0} has no attribute '{1}'".format(type(self).__name__, key)
- )
-
- def iterkeys(self):
- for k, _ in self.iteritems():
- yield k
-
- # These are more efficient implementations compared to the original
-    # methods, which are based on the keys iterator and then call the
-    # accessor functions to access the value for the corresponding key.
- def itervalues(self):
- for _, v in self.iteritems():
- yield v
-
- def iteritems(self):
- for bucket in self._buckets:
- if bucket:
- for k, v in bucket:
- yield k, v
-
- def values(self):
- return pvector(self.itervalues())
-
- def keys(self):
- return pvector(self.iterkeys())
-
- def items(self):
- return pvector(self.iteritems())
-
- def __len__(self):
- return self._size
-
- def __repr__(self):
- return 'pmap({0})'.format(str(dict(self)))
-
- def __eq__(self, other):
- if self is other:
- return True
- if not isinstance(other, Mapping):
- return NotImplemented
- if len(self) != len(other):
- return False
- if isinstance(other, PMap):
- if (hasattr(self, '_cached_hash') and hasattr(other, '_cached_hash')
- and self._cached_hash != other._cached_hash):
- return False
- if self._buckets == other._buckets:
- return True
- return dict(self.iteritems()) == dict(other.iteritems())
- elif isinstance(other, dict):
- return dict(self.iteritems()) == other
- return dict(self.iteritems()) == dict(six.iteritems(other))
-
- __ne__ = Mapping.__ne__
-
- def __lt__(self, other):
- raise TypeError('PMaps are not orderable')
-
- __le__ = __lt__
- __gt__ = __lt__
- __ge__ = __lt__
-
- def __str__(self):
- return self.__repr__()
-
- def __hash__(self):
- if not hasattr(self, '_cached_hash'):
- self._cached_hash = hash(frozenset(self.iteritems()))
- return self._cached_hash
-
- def set(self, key, val):
- """
- Return a new PMap with key and val inserted.
-
- >>> m1 = m(a=1, b=2)
- >>> m2 = m1.set('a', 3)
-        >>> m3 = m1.set('c', 4)
- >>> m1
- pmap({'b': 2, 'a': 1})
- >>> m2
- pmap({'b': 2, 'a': 3})
- >>> m3
- pmap({'c': 4, 'b': 2, 'a': 1})
- """
- return self.evolver().set(key, val).persistent()
-
- def remove(self, key):
- """
- Return a new PMap without the element specified by key. Raises KeyError if the element
- is not present.
-
- >>> m1 = m(a=1, b=2)
- >>> m1.remove('a')
- pmap({'b': 2})
- """
- return self.evolver().remove(key).persistent()
-
- def discard(self, key):
- """
- Return a new PMap without the element specified by key. Returns reference to itself
- if element is not present.
-
- >>> m1 = m(a=1, b=2)
- >>> m1.discard('a')
- pmap({'b': 2})
- >>> m1 is m1.discard('c')
- True
- """
- try:
- return self.remove(key)
- except KeyError:
- return self
-
- def update(self, *maps):
- """
- Return a new PMap with the items in Mappings inserted. If the same key is present in multiple
- maps the rightmost (last) value is inserted.
-
- >>> m1 = m(a=1, b=2)
- >>> m1.update(m(a=2, c=3), {'a': 17, 'd': 35})
- pmap({'c': 3, 'b': 2, 'a': 17, 'd': 35})
- """
- return self.update_with(lambda l, r: r, *maps)
-
- def update_with(self, update_fn, *maps):
- """
-        Return a new PMap with the items in the given Mappings inserted. If the same key is present in multiple
-        maps the values will be merged using update_fn going from left to right.
-
- >>> from operator import add
- >>> m1 = m(a=1, b=2)
- >>> m1.update_with(add, m(a=2))
- pmap({'b': 2, 'a': 3})
-
- The reverse behaviour of the regular merge. Keep the leftmost element instead of the rightmost.
-
- >>> m1 = m(a=1)
- >>> m1.update_with(lambda l, r: l, m(a=2), {'a':3})
- pmap({'a': 1})
- """
- evolver = self.evolver()
- for map in maps:
- for key, value in map.items():
- evolver.set(key, update_fn(evolver[key], value) if key in evolver else value)
-
- return evolver.persistent()
-
- def __add__(self, other):
- return self.update(other)
-
- def __reduce__(self):
- # Pickling support
- return pmap, (dict(self),)
-
- def transform(self, *transformations):
- """
- Transform arbitrarily complex combinations of PVectors and PMaps. A transformation
- consists of two parts. One match expression that specifies which elements to transform
- and one transformation function that performs the actual transformation.
-
- >>> from pyrsistent import freeze, ny
- >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'},
- ... {'author': 'Steve', 'content': 'A slightly longer article'}],
- ... 'weather': {'temperature': '11C', 'wind': '5m/s'}})
- >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c)
- >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c)
- >>> very_short_news.articles[0].content
- 'A short article'
- >>> very_short_news.articles[1].content
- 'A slightly long...'
-
- When nothing has been transformed the original data structure is kept
-
- >>> short_news is news_paper
- True
- >>> very_short_news is news_paper
- False
- >>> very_short_news.articles[0] is news_paper.articles[0]
- True
- """
- return transform(self, transformations)
-
- def copy(self):
- return self
-
- class _Evolver(object):
- __slots__ = ('_buckets_evolver', '_size', '_original_pmap')
-
- def __init__(self, original_pmap):
- self._original_pmap = original_pmap
- self._buckets_evolver = original_pmap._buckets.evolver()
- self._size = original_pmap._size
-
- def __getitem__(self, key):
- return PMap._getitem(self._buckets_evolver, key)
-
- def __setitem__(self, key, val):
- self.set(key, val)
-
- def set(self, key, val):
- if len(self._buckets_evolver) < 0.67 * self._size:
- self._reallocate(2 * len(self._buckets_evolver))
-
- kv = (key, val)
- index, bucket = PMap._get_bucket(self._buckets_evolver, key)
- if bucket:
- for k, v in bucket:
- if k == key:
- if v is not val:
- new_bucket = [(k2, v2) if k2 != k else (k2, val) for k2, v2 in bucket]
- self._buckets_evolver[index] = new_bucket
-
- return self
-
- new_bucket = [kv]
- new_bucket.extend(bucket)
- self._buckets_evolver[index] = new_bucket
- self._size += 1
- else:
- self._buckets_evolver[index] = [kv]
- self._size += 1
-
- return self
-
- def _reallocate(self, new_size):
- new_list = new_size * [None]
- buckets = self._buckets_evolver.persistent()
- for k, v in chain.from_iterable(x for x in buckets if x):
- index = hash(k) % new_size
- if new_list[index]:
- new_list[index].append((k, v))
- else:
- new_list[index] = [(k, v)]
-
- # A reallocation should always result in a dirty buckets evolver to avoid
- # possible loss of elements when doing the reallocation.
- self._buckets_evolver = pvector().evolver()
- self._buckets_evolver.extend(new_list)
-
- def is_dirty(self):
- return self._buckets_evolver.is_dirty()
-
- def persistent(self):
- if self.is_dirty():
- self._original_pmap = PMap(self._size, self._buckets_evolver.persistent())
-
- return self._original_pmap
-
- def __len__(self):
- return self._size
-
- def __contains__(self, key):
- return PMap._contains(self._buckets_evolver, key)
-
- def __delitem__(self, key):
- self.remove(key)
-
- def remove(self, key):
- index, bucket = PMap._get_bucket(self._buckets_evolver, key)
-
- if bucket:
- new_bucket = [(k, v) for (k, v) in bucket if k != key]
- if len(bucket) > len(new_bucket):
- self._buckets_evolver[index] = new_bucket if new_bucket else None
- self._size -= 1
- return self
-
- raise KeyError('{0}'.format(key))
-
- def evolver(self):
- """
- Create a new evolver for this pmap. For a discussion on evolvers in general see the
- documentation for the pvector evolver.
-
- Create the evolver and perform various mutating updates to it:
-
- >>> m1 = m(a=1, b=2)
- >>> e = m1.evolver()
- >>> e['c'] = 3
- >>> len(e)
- 3
- >>> del e['a']
-
- The underlying pmap remains the same:
-
- >>> m1
- pmap({'b': 2, 'a': 1})
-
- The changes are kept in the evolver. An updated pmap can be created using the
- persistent() function on the evolver.
-
- >>> m2 = e.persistent()
- >>> m2
- pmap({'c': 3, 'b': 2})
-
- The new pmap will share data with the original pmap in the same way that would have
- been done if only using operations on the pmap.
- """
- return self._Evolver(self)
-
-Mapping.register(PMap)
-Hashable.register(PMap)
-
-
-def _turbo_mapping(initial, pre_size):
- if pre_size:
- size = pre_size
- else:
- try:
- size = 2 * len(initial) or 8
- except Exception:
- # Guess we can't figure out the length. Give up on length hinting,
- # we can always reallocate later.
- size = 8
-
- buckets = size * [None]
-
- if not isinstance(initial, Mapping):
- # Make a dictionary of the initial data if it isn't already,
-        # that will save us some work further down since we can assume no
- # key collisions
- initial = dict(initial)
-
- for k, v in six.iteritems(initial):
- h = hash(k)
- index = h % size
- bucket = buckets[index]
-
- if bucket:
- bucket.append((k, v))
- else:
- buckets[index] = [(k, v)]
-
- return PMap(len(initial), pvector().extend(buckets))
-
-
-_EMPTY_PMAP = _turbo_mapping({}, 0)
-
-
-def pmap(initial={}, pre_size=0):
- """
-    Create a new persistent map, inserting all elements from initial into the newly created map.
- The optional argument pre_size may be used to specify an initial size of the underlying bucket vector. This
- may have a positive performance impact in the cases where you know beforehand that a large number of elements
- will be inserted into the map eventually since it will reduce the number of reallocations required.
-
- >>> pmap({'a': 13, 'b': 14})
- pmap({'b': 14, 'a': 13})
- """
- if not initial:
- return _EMPTY_PMAP
-
- return _turbo_mapping(initial, pre_size)
-
-
-def m(**kwargs):
- """
-    Creates a new persistent map. Inserts all key-value arguments into the newly created map.
-
- >>> m(a=13, b=14)
- pmap({'b': 14, 'a': 13})
- """
- return pmap(kwargs)
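To make the layout described in the PMap docstring concrete, here is a standalone sketch of the bucket mechanics: open hashing into a vector of buckets, with the same resize trigger as _Evolver.set() above. Plain lists stand in for the persistent vector, and all names are illustrative, not pyrsistent's API:

def lookup(buckets, key):
    # Buckets are indexed by hash(key) % len(buckets), as in PMap._get_bucket.
    for k, v in buckets[hash(key) % len(buckets)] or ():
        if k == key:
            return v
    raise KeyError(key)

def insert(buckets, size, key, val):
    # Same trigger as _Evolver.set(): grow once buckets < 0.67 * entry count.
    if len(buckets) < 0.67 * (size + 1):
        buckets = reallocate(buckets, 2 * len(buckets))
    index = hash(key) % len(buckets)
    kept = [(k, v) for k, v in buckets[index] or () if k != key]
    buckets[index] = [(key, val)] + kept
    return buckets

def reallocate(buckets, new_size):
    # Rehash every entry into a fresh, larger bucket vector.
    new = [None] * new_size
    for bucket in buckets:
        for k, v in bucket or ():
            i = hash(k) % new_size
            new[i] = (new[i] or []) + [(k, v)]
    return new

buckets = insert([None] * 8, 0, "a", 1)
assert lookup(buckets, "a") == 1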
diff --git a/lib/spack/external/pyrsistent/_pvector.py b/lib/spack/external/pyrsistent/_pvector.py
deleted file mode 100644
index 82232782b7..0000000000
--- a/lib/spack/external/pyrsistent/_pvector.py
+++ /dev/null
@@ -1,713 +0,0 @@
-from abc import abstractmethod, ABCMeta
-from ._compat import Sequence, Hashable
-from numbers import Integral
-import operator
-import six
-from pyrsistent._transformations import transform
-
-
-def _bitcount(val):
- return bin(val).count("1")
-
-BRANCH_FACTOR = 32
-BIT_MASK = BRANCH_FACTOR - 1
-SHIFT = _bitcount(BIT_MASK)
-
-
-def compare_pvector(v, other, operator):
- return operator(v.tolist(), other.tolist() if isinstance(other, PVector) else other)
-
-
-def _index_or_slice(index, stop):
- if stop is None:
- return index
-
- return slice(index, stop)
-
-
-class PythonPVector(object):
- """
- Support structure for PVector that implements structural sharing for vectors using a trie.
- """
- __slots__ = ('_count', '_shift', '_root', '_tail', '_tail_offset', '__weakref__')
-
- def __new__(cls, count, shift, root, tail):
- self = super(PythonPVector, cls).__new__(cls)
- self._count = count
- self._shift = shift
- self._root = root
- self._tail = tail
-
- # Derived attribute stored for performance
- self._tail_offset = self._count - len(self._tail)
- return self
-
- def __len__(self):
- return self._count
-
- def __getitem__(self, index):
- if isinstance(index, slice):
- # There are more conditions than the below where it would be OK to
- # return ourselves, implement those...
- if index.start is None and index.stop is None and index.step is None:
- return self
-
- # This is a bit nasty realizing the whole structure as a list before
- # slicing it but it is the fastest way I've found to date, and it's easy :-)
- return _EMPTY_PVECTOR.extend(self.tolist()[index])
-
- if index < 0:
- index += self._count
-
- return PythonPVector._node_for(self, index)[index & BIT_MASK]
-
- def __add__(self, other):
- return self.extend(other)
-
- def __repr__(self):
- return 'pvector({0})'.format(str(self.tolist()))
-
- def __str__(self):
- return self.__repr__()
-
- def __iter__(self):
-        # This is kind of lazy and will produce some memory overhead but it is the fastest method
-        # by far of those tried since it uses the speed of the built-in Python list directly.
- return iter(self.tolist())
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
- def __eq__(self, other):
- return self is other or (hasattr(other, '__len__') and self._count == len(other)) and compare_pvector(self, other, operator.eq)
-
- def __gt__(self, other):
- return compare_pvector(self, other, operator.gt)
-
- def __lt__(self, other):
- return compare_pvector(self, other, operator.lt)
-
- def __ge__(self, other):
- return compare_pvector(self, other, operator.ge)
-
- def __le__(self, other):
- return compare_pvector(self, other, operator.le)
-
- def __mul__(self, times):
- if times <= 0 or self is _EMPTY_PVECTOR:
- return _EMPTY_PVECTOR
-
- if times == 1:
- return self
-
- return _EMPTY_PVECTOR.extend(times * self.tolist())
-
- __rmul__ = __mul__
-
- def _fill_list(self, node, shift, the_list):
- if shift:
- shift -= SHIFT
- for n in node:
- self._fill_list(n, shift, the_list)
- else:
- the_list.extend(node)
-
- def tolist(self):
- """
- The fastest way to convert the vector into a python list.
- """
- the_list = []
- self._fill_list(self._root, self._shift, the_list)
- the_list.extend(self._tail)
- return the_list
-
- def _totuple(self):
- """
- Returns the content as a python tuple.
- """
- return tuple(self.tolist())
-
- def __hash__(self):
- # Taking the easy way out again...
- return hash(self._totuple())
-
- def transform(self, *transformations):
- return transform(self, transformations)
-
- def __reduce__(self):
- # Pickling support
- return pvector, (self.tolist(),)
-
- def mset(self, *args):
- if len(args) % 2:
- raise TypeError("mset expected an even number of arguments")
-
- evolver = self.evolver()
- for i in range(0, len(args), 2):
- evolver[args[i]] = args[i+1]
-
- return evolver.persistent()
-
- class Evolver(object):
- __slots__ = ('_count', '_shift', '_root', '_tail', '_tail_offset', '_dirty_nodes',
- '_extra_tail', '_cached_leafs', '_orig_pvector')
-
- def __init__(self, v):
- self._reset(v)
-
- def __getitem__(self, index):
- if not isinstance(index, Integral):
- raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)
-
- if index < 0:
- index += self._count + len(self._extra_tail)
-
- if self._count <= index < self._count + len(self._extra_tail):
- return self._extra_tail[index - self._count]
-
- return PythonPVector._node_for(self, index)[index & BIT_MASK]
-
- def _reset(self, v):
- self._count = v._count
- self._shift = v._shift
- self._root = v._root
- self._tail = v._tail
- self._tail_offset = v._tail_offset
- self._dirty_nodes = {}
- self._cached_leafs = {}
- self._extra_tail = []
- self._orig_pvector = v
-
- def append(self, element):
- self._extra_tail.append(element)
- return self
-
- def extend(self, iterable):
- self._extra_tail.extend(iterable)
- return self
-
- def set(self, index, val):
- self[index] = val
- return self
-
- def __setitem__(self, index, val):
- if not isinstance(index, Integral):
- raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)
-
- if index < 0:
- index += self._count + len(self._extra_tail)
-
- if 0 <= index < self._count:
- node = self._cached_leafs.get(index >> SHIFT)
- if node:
- node[index & BIT_MASK] = val
- elif index >= self._tail_offset:
- if id(self._tail) not in self._dirty_nodes:
- self._tail = list(self._tail)
- self._dirty_nodes[id(self._tail)] = True
- self._cached_leafs[index >> SHIFT] = self._tail
- self._tail[index & BIT_MASK] = val
- else:
- self._root = self._do_set(self._shift, self._root, index, val)
- elif self._count <= index < self._count + len(self._extra_tail):
- self._extra_tail[index - self._count] = val
- elif index == self._count + len(self._extra_tail):
- self._extra_tail.append(val)
- else:
- raise IndexError("Index out of range: %s" % (index,))
-
- def _do_set(self, level, node, i, val):
- if id(node) in self._dirty_nodes:
- ret = node
- else:
- ret = list(node)
- self._dirty_nodes[id(ret)] = True
-
- if level == 0:
- ret[i & BIT_MASK] = val
- self._cached_leafs[i >> SHIFT] = ret
- else:
- sub_index = (i >> level) & BIT_MASK # >>>
- ret[sub_index] = self._do_set(level - SHIFT, node[sub_index], i, val)
-
- return ret
-
- def delete(self, index):
- del self[index]
- return self
-
- def __delitem__(self, key):
- if self._orig_pvector:
- # All structural sharing bets are off, base evolver on _extra_tail only
- l = PythonPVector(self._count, self._shift, self._root, self._tail).tolist()
- l.extend(self._extra_tail)
- self._reset(_EMPTY_PVECTOR)
- self._extra_tail = l
-
- del self._extra_tail[key]
-
- def persistent(self):
- result = self._orig_pvector
- if self.is_dirty():
- result = PythonPVector(self._count, self._shift, self._root, self._tail).extend(self._extra_tail)
- self._reset(result)
-
- return result
-
- def __len__(self):
- return self._count + len(self._extra_tail)
-
- def is_dirty(self):
- return bool(self._dirty_nodes or self._extra_tail)
-
- def evolver(self):
- return PythonPVector.Evolver(self)
-
- def set(self, i, val):
- # This method could be implemented by a call to mset() but doing so would cause
- # a ~5 X performance penalty on PyPy (considered the primary platform for this implementation
- # of PVector) so we're keeping this implementation for now.
-
- if not isinstance(i, Integral):
- raise TypeError("'%s' object cannot be interpreted as an index" % type(i).__name__)
-
- if i < 0:
- i += self._count
-
- if 0 <= i < self._count:
- if i >= self._tail_offset:
- new_tail = list(self._tail)
- new_tail[i & BIT_MASK] = val
- return PythonPVector(self._count, self._shift, self._root, new_tail)
-
- return PythonPVector(self._count, self._shift, self._do_set(self._shift, self._root, i, val), self._tail)
-
- if i == self._count:
- return self.append(val)
-
- raise IndexError("Index out of range: %s" % (i,))
-
- def _do_set(self, level, node, i, val):
- ret = list(node)
- if level == 0:
- ret[i & BIT_MASK] = val
- else:
- sub_index = (i >> level) & BIT_MASK # >>>
- ret[sub_index] = self._do_set(level - SHIFT, node[sub_index], i, val)
-
- return ret
-
- @staticmethod
- def _node_for(pvector_like, i):
- if 0 <= i < pvector_like._count:
- if i >= pvector_like._tail_offset:
- return pvector_like._tail
-
- node = pvector_like._root
- for level in range(pvector_like._shift, 0, -SHIFT):
- node = node[(i >> level) & BIT_MASK] # >>>
-
- return node
-
- raise IndexError("Index out of range: %s" % (i,))
-
- def _create_new_root(self):
- new_shift = self._shift
-
- # Overflow root?
- if (self._count >> SHIFT) > (1 << self._shift): # >>>
- new_root = [self._root, self._new_path(self._shift, self._tail)]
- new_shift += SHIFT
- else:
- new_root = self._push_tail(self._shift, self._root, self._tail)
-
- return new_root, new_shift
-
- def append(self, val):
- if len(self._tail) < BRANCH_FACTOR:
- new_tail = list(self._tail)
- new_tail.append(val)
- return PythonPVector(self._count + 1, self._shift, self._root, new_tail)
-
- # Full tail, push into tree
- new_root, new_shift = self._create_new_root()
- return PythonPVector(self._count + 1, new_shift, new_root, [val])
-
- def _new_path(self, level, node):
- if level == 0:
- return node
-
- return [self._new_path(level - SHIFT, node)]
-
- def _mutating_insert_tail(self):
- self._root, self._shift = self._create_new_root()
- self._tail = []
-
- def _mutating_fill_tail(self, offset, sequence):
- max_delta_len = BRANCH_FACTOR - len(self._tail)
- delta = sequence[offset:offset + max_delta_len]
- self._tail.extend(delta)
- delta_len = len(delta)
- self._count += delta_len
- return offset + delta_len
-
- def _mutating_extend(self, sequence):
- offset = 0
- sequence_len = len(sequence)
- while offset < sequence_len:
- offset = self._mutating_fill_tail(offset, sequence)
- if len(self._tail) == BRANCH_FACTOR:
- self._mutating_insert_tail()
-
- self._tail_offset = self._count - len(self._tail)
-
- def extend(self, obj):
-        # Mutates the new vector directly for efficiency, but that's only an
-        # implementation detail; once it is returned it should be considered immutable
- l = obj.tolist() if isinstance(obj, PythonPVector) else list(obj)
- if l:
- new_vector = self.append(l[0])
- new_vector._mutating_extend(l[1:])
- return new_vector
-
- return self
-
- def _push_tail(self, level, parent, tail_node):
-        """
-        If the parent is a leaf, insert the node. Otherwise, if the index
-        maps to an existing child, push the node one more level down;
-        else allocate a new path.
-
-        Return node_to_insert placed in a copy of the parent.
-        """
- ret = list(parent)
-
- if level == SHIFT:
- ret.append(tail_node)
- return ret
-
- sub_index = ((self._count - 1) >> level) & BIT_MASK # >>>
- if len(parent) > sub_index:
- ret[sub_index] = self._push_tail(level - SHIFT, parent[sub_index], tail_node)
- return ret
-
- ret.append(self._new_path(level - SHIFT, tail_node))
- return ret
-
- def index(self, value, *args, **kwargs):
- return self.tolist().index(value, *args, **kwargs)
-
- def count(self, value):
- return self.tolist().count(value)
-
- def delete(self, index, stop=None):
- l = self.tolist()
- del l[_index_or_slice(index, stop)]
- return _EMPTY_PVECTOR.extend(l)
-
- def remove(self, value):
- l = self.tolist()
- l.remove(value)
- return _EMPTY_PVECTOR.extend(l)
-
-@six.add_metaclass(ABCMeta)
-class PVector(object):
- """
- Persistent vector implementation. Meant as a replacement for the cases where you would normally
- use a Python list.
-
- Do not instantiate directly, instead use the factory functions :py:func:`v` and :py:func:`pvector` to
- create an instance.
-
- Heavily influenced by the persistent vector available in Clojure. Initially this was more or
- less just a port of the Java code for the Clojure vector. It has since been modified and to
- some extent optimized for usage in Python.
-
-    The vector is organized as a trie; any mutating method returns a new vector that contains the changes. No
-    updates are done to the original vector. Structural sharing between vectors is applied where possible to save
-    space and to avoid making complete copies.
-
-    This structure corresponds most closely to the built-in list type and is intended as a replacement. Where the
-    semantics are (more or less) the same, the same method names have been used, but in some cases, such as
-    assignment, this is not possible.
-
- The PVector implements the Sequence protocol and is Hashable.
-
-    Inserts are amortized O(1). Random access is O(log32(n)), where n is the size of the vector.
-
- The following are examples of some common operations on persistent vectors:
-
- >>> p = v(1, 2, 3)
- >>> p2 = p.append(4)
- >>> p3 = p2.extend([5, 6, 7])
- >>> p
- pvector([1, 2, 3])
- >>> p2
- pvector([1, 2, 3, 4])
- >>> p3
- pvector([1, 2, 3, 4, 5, 6, 7])
- >>> p3[5]
- 6
- >>> p.set(1, 99)
- pvector([1, 99, 3])
- >>>
- """
-
- @abstractmethod
- def __len__(self):
- """
- >>> len(v(1, 2, 3))
- 3
- """
-
- @abstractmethod
- def __getitem__(self, index):
- """
- Get value at index. Full slicing support.
-
- >>> v1 = v(5, 6, 7, 8)
- >>> v1[2]
- 7
- >>> v1[1:3]
- pvector([6, 7])
- """
-
- @abstractmethod
- def __add__(self, other):
- """
- >>> v1 = v(1, 2)
- >>> v2 = v(3, 4)
- >>> v1 + v2
- pvector([1, 2, 3, 4])
- """
-
- @abstractmethod
- def __mul__(self, times):
- """
- >>> v1 = v(1, 2)
- >>> 3 * v1
- pvector([1, 2, 1, 2, 1, 2])
- """
-
- @abstractmethod
- def __hash__(self):
- """
- >>> v1 = v(1, 2, 3)
- >>> v2 = v(1, 2, 3)
- >>> hash(v1) == hash(v2)
- True
- """
-
- @abstractmethod
- def evolver(self):
- """
-        Create a new evolver for this pvector. The evolver acts as a mutable view of the vector
-        with "transaction like" semantics. No part of the underlying vector is updated, it is still
-        fully immutable. Furthermore, multiple evolvers created from the same pvector do not
-        interfere with each other.
-
-        You may want to use an evolver instead of working directly with the pvector in the
-        following cases:
-
-        * Multiple updates are done to the same vector and the intermediate results are of no
-          interest. In this case, using an evolver may be more efficient and easier to work with.
-        * You need to pass a vector into a legacy function, or a function that you have no control
-          over, which performs in-place mutations of lists. In this case, pass an evolver instance
-          instead and then create a new pvector from the evolver once the function returns.
-
-        The following example illustrates a typical workflow when working with evolvers. It also
-        displays most of the API (which is kept small by design; you should not be tempted to
-        use evolvers in excess ;-)).
-
- Create the evolver and perform various mutating updates to it:
-
- >>> v1 = v(1, 2, 3, 4, 5)
- >>> e = v1.evolver()
- >>> e[1] = 22
- >>> _ = e.append(6)
- >>> _ = e.extend([7, 8, 9])
- >>> e[8] += 1
- >>> len(e)
- 9
-
- The underlying pvector remains the same:
-
- >>> v1
- pvector([1, 2, 3, 4, 5])
-
- The changes are kept in the evolver. An updated pvector can be created using the
- persistent() function on the evolver.
-
- >>> v2 = e.persistent()
- >>> v2
- pvector([1, 22, 3, 4, 5, 6, 7, 8, 10])
-
- The new pvector will share data with the original pvector in the same way that would have
- been done if only using operations on the pvector.
- """
-
- @abstractmethod
- def mset(self, *args):
- """
- Return a new vector with elements in specified positions replaced by values (multi set).
-
- Elements on even positions in the argument list are interpreted as indexes while
- elements on odd positions are considered values.
-
- >>> v1 = v(1, 2, 3)
- >>> v1.mset(0, 11, 2, 33)
- pvector([11, 2, 33])
- """
-
- @abstractmethod
- def set(self, i, val):
- """
- Return a new vector with element at position i replaced with val. The original vector remains unchanged.
-
- Setting a value one step beyond the end of the vector is equal to appending. Setting beyond that will
- result in an IndexError.
-
- >>> v1 = v(1, 2, 3)
- >>> v1.set(1, 4)
- pvector([1, 4, 3])
- >>> v1.set(3, 4)
- pvector([1, 2, 3, 4])
- >>> v1.set(-1, 4)
- pvector([1, 2, 4])
- """
-
- @abstractmethod
- def append(self, val):
- """
- Return a new vector with val appended.
-
- >>> v1 = v(1, 2)
- >>> v1.append(3)
- pvector([1, 2, 3])
- """
-
- @abstractmethod
- def extend(self, obj):
- """
- Return a new vector with all values in obj appended to it. Obj may be another
- PVector or any other Iterable.
-
- >>> v1 = v(1, 2, 3)
- >>> v1.extend([4, 5])
- pvector([1, 2, 3, 4, 5])
- """
-
- @abstractmethod
- def index(self, value, *args, **kwargs):
- """
- Return first index of value. Additional indexes may be supplied to limit the search to a
- sub range of the vector.
-
- >>> v1 = v(1, 2, 3, 4, 3)
- >>> v1.index(3)
- 2
- >>> v1.index(3, 3, 5)
- 4
- """
-
- @abstractmethod
- def count(self, value):
- """
- Return the number of times that value appears in the vector.
-
- >>> v1 = v(1, 4, 3, 4)
- >>> v1.count(4)
- 2
- """
-
- @abstractmethod
- def transform(self, *transformations):
- """
- Transform arbitrarily complex combinations of PVectors and PMaps. A transformation
- consists of two parts. One match expression that specifies which elements to transform
- and one transformation function that performs the actual transformation.
-
- >>> from pyrsistent import freeze, ny
- >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'},
- ... {'author': 'Steve', 'content': 'A slightly longer article'}],
- ... 'weather': {'temperature': '11C', 'wind': '5m/s'}})
- >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c)
- >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c)
- >>> very_short_news.articles[0].content
- 'A short article'
- >>> very_short_news.articles[1].content
- 'A slightly long...'
-
-        When nothing has been transformed, the original data structure is kept:
-
- >>> short_news is news_paper
- True
- >>> very_short_news is news_paper
- False
- >>> very_short_news.articles[0] is news_paper.articles[0]
- True
- """
-
- @abstractmethod
- def delete(self, index, stop=None):
- """
- Delete a portion of the vector by index or range.
-
- >>> v1 = v(1, 2, 3, 4, 5)
- >>> v1.delete(1)
- pvector([1, 3, 4, 5])
- >>> v1.delete(1, 3)
- pvector([1, 4, 5])
- """
-
- @abstractmethod
- def remove(self, value):
- """
- Remove the first occurrence of a value from the vector.
-
- >>> v1 = v(1, 2, 3, 2, 1)
- >>> v2 = v1.remove(1)
- >>> v2
- pvector([2, 3, 2, 1])
- >>> v2.remove(1)
- pvector([2, 3, 2])
- """
-
-
-_EMPTY_PVECTOR = PythonPVector(0, SHIFT, [], [])
-PVector.register(PythonPVector)
-Sequence.register(PVector)
-Hashable.register(PVector)
-
-def python_pvector(iterable=()):
- """
- Create a new persistent vector containing the elements in iterable.
-
- >>> v1 = pvector([1, 2, 3])
- >>> v1
- pvector([1, 2, 3])
- """
- return _EMPTY_PVECTOR.extend(iterable)
-
-try:
- # Use the C extension as underlying trie implementation if it is available
- import os
- if os.environ.get('PYRSISTENT_NO_C_EXTENSION'):
- pvector = python_pvector
- else:
- from pvectorc import pvector
- PVector.register(type(pvector()))
-except ImportError:
- pvector = python_pvector
-
-
-def v(*elements):
- """
- Create a new persistent vector containing all parameters to this function.
-
- >>> v1 = v(1, 2, 3)
- >>> v1
- pvector([1, 2, 3])
- """
- return pvector(elements)
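
The semantics above are easiest to see end-to-end in a short, standalone sketch
(a minimal sketch assuming the upstream pyrsistent package is importable; `v`,
`pvector`, and the evolver API are as documented in the docstrings above):

    from pyrsistent import pvector, v

    base = v(1, 2, 3)
    updated = base.set(1, 99)   # only the changed path of the trie is copied
    appended = base.append(4)   # amortized O(1); `base` itself is untouched
    assert updated == pvector([1, 99, 3])
    assert appended == pvector([1, 2, 3, 4])

    # Batch several updates through an evolver, then freeze the result.
    e = base.evolver()
    e.extend([4, 5, 6])
    e[0] = 0
    assert e.persistent() == pvector([0, 2, 3, 4, 5, 6])
    assert base == pvector([1, 2, 3])   # the original never changes
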
diff --git a/lib/spack/external/pyrsistent/_transformations.py b/lib/spack/external/pyrsistent/_transformations.py
deleted file mode 100644
index 612098969b..0000000000
--- a/lib/spack/external/pyrsistent/_transformations.py
+++ /dev/null
@@ -1,143 +0,0 @@
-import re
-import six
-try:
- from inspect import Parameter, signature
-except ImportError:
- signature = None
- try:
- from inspect import getfullargspec as getargspec
- except ImportError:
- from inspect import getargspec
-
-
-_EMPTY_SENTINEL = object()
-
-
-def inc(x):
- """ Add one to the current value """
- return x + 1
-
-
-def dec(x):
- """ Subtract one from the current value """
- return x - 1
-
-
-def discard(evolver, key):
-    """ Discard the element and return a structure without the discarded elements """
- try:
- del evolver[key]
- except KeyError:
- pass
-
-
-# Matchers
-def rex(expr):
- """ Regular expression matcher to use together with transform functions """
- r = re.compile(expr)
- return lambda key: isinstance(key, six.string_types) and r.match(key)
-
-
-def ny(_):
- """ Matcher that matches any value """
- return True
-
-
-# Support functions
-def _chunks(l, n):
- for i in range(0, len(l), n):
- yield l[i:i + n]
-
-
-def transform(structure, transformations):
- r = structure
- for path, command in _chunks(transformations, 2):
- r = _do_to_path(r, path, command)
- return r
-
-
-def _do_to_path(structure, path, command):
- if not path:
- return command(structure) if callable(command) else command
-
- kvs = _get_keys_and_values(structure, path[0])
- return _update_structure(structure, kvs, path[1:], command)
-
-
-def _items(structure):
- try:
- return structure.items()
- except AttributeError:
- # Support wider range of structures by adding a transform_items() or similar?
- return list(enumerate(structure))
-
-
-def _get(structure, key, default):
- try:
- if hasattr(structure, '__getitem__'):
- return structure[key]
-
- return getattr(structure, key)
-
- except (IndexError, KeyError):
- return default
-
-
-def _get_keys_and_values(structure, key_spec):
- if callable(key_spec):
- # Support predicates as callable objects in the path
- arity = _get_arity(key_spec)
- if arity == 1:
- # Unary predicates are called with the "key" of the path
- # - eg a key in a mapping, an index in a sequence.
- return [(k, v) for k, v in _items(structure) if key_spec(k)]
- elif arity == 2:
- # Binary predicates are called with the key and the corresponding
- # value.
- return [(k, v) for k, v in _items(structure) if key_spec(k, v)]
- else:
- # Other arities are an error.
- raise ValueError(
- "callable in transform path must take 1 or 2 arguments"
- )
-
- # Non-callables are used as-is as a key.
- return [(key_spec, _get(structure, key_spec, _EMPTY_SENTINEL))]
-
-
-if signature is None:
- def _get_arity(f):
- argspec = getargspec(f)
- return len(argspec.args) - len(argspec.defaults or ())
-else:
- def _get_arity(f):
- return sum(
- 1
- for p
- in signature(f).parameters.values()
- if p.default is Parameter.empty
- and p.kind in (Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD)
- )
-
-
-def _update_structure(structure, kvs, path, command):
- from pyrsistent._pmap import pmap
- e = structure.evolver()
- if not path and command is discard:
- # Do this in reverse to avoid index problems with vectors. See #92.
- for k, v in reversed(kvs):
- discard(e, k)
- else:
- for k, v in kvs:
- is_empty = False
- if v is _EMPTY_SENTINEL:
- # Allow expansion of structure but make sure to cover the case
- # when an empty pmap is added as leaf node. See #154.
- is_empty = True
- v = pmap()
-
- result = _do_to_path(v, path, command)
- if result is not v or is_empty:
- e[k] = result
-
- return e.persistent()
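
As _chunks() above shows, transform() consumes alternating (path, command)
pairs. A minimal sketch against the upstream pyrsistent package, using its
exported freeze, thaw, ny, and discard helpers:

    from pyrsistent import discard, freeze, ny, thaw

    data = freeze({'articles': [{'author': 'Sara', 'views': 10},
                                {'author': 'Steve', 'views': 3}]})

    # `ny` matches any key/index at that path segment.
    bumped = data.transform(['articles', ny, 'views'], lambda n: n + 1)
    assert thaw(bumped)['articles'][0]['views'] == 11

    # `discard` as the command deletes the matched element (handled by the
    # reversed iteration in _update_structure above).
    trimmed = data.transform(['articles', 0], discard)
    assert len(trimmed['articles']) == 1
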
diff --git a/lib/spack/external/pytest-fallback/_pytest/LICENSE b/lib/spack/external/pytest-fallback/_pytest/LICENSE
deleted file mode 100644
index 629df45ac4..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2004-2017 Holger Krekel and others
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-of the Software, and to permit persons to whom the Software is furnished to do
-so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/lib/spack/external/pytest-fallback/_pytest/__init__.py b/lib/spack/external/pytest-fallback/_pytest/__init__.py
deleted file mode 100644
index 6e41f0504e..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-__all__ = ['__version__']
-
-try:
- from ._version import version as __version__
-except ImportError:
- # broken installation, we don't even try
- # unknown only works because we do poor mans version compare
- __version__ = 'unknown'
diff --git a/lib/spack/external/pytest-fallback/_pytest/_argcomplete.py b/lib/spack/external/pytest-fallback/_pytest/_argcomplete.py
deleted file mode 100644
index 965ec79513..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/_argcomplete.py
+++ /dev/null
@@ -1,106 +0,0 @@
-
-"""allow bash-completion for argparse with argcomplete if installed
-needs argcomplete>=0.5.6 for python 3.2/3.3 (older versions fail
-to find the magic string, so _ARGCOMPLETE env. var is never set, and
-this does not need special code.
-
-argcomplete does not support python 2.5 (although the changes for that
-are minor).
-
-Function try_argcomplete(parser) should be called directly before
-the call to ArgumentParser.parse_args().
-
-The filescompleter is what you normally would use on the positional
-arguments specification, in order to get "dirname/" after "dirn<TAB>"
-instead of the default "dirname ":
-
- optparser.add_argument(Config._file_or_dir, nargs='*'
- ).completer=filescompleter
-
-Other, application specific, completers should go in the file
-doing the add_argument calls as they need to be specified as .completer
-attributes as well. (If argcomplete is not installed, the function the
-attribute points to will not be used).
-
-SPEEDUP
-=======
-The generic argcomplete script for bash-completion
-(/etc/bash_completion.d/python-argcomplete.sh )
-uses a python program to determine whether the startup script was generated by pip.
-You can speed up completion somewhat by changing this script to include
- # PYTHON_ARGCOMPLETE_OK
-so that the python-argcomplete-check-easy-install-script does not
-need to be called to find the entry point of the code and see if that is
-marked with PYTHON_ARGCOMPLETE_OK.
-
-INSTALL/DEBUGGING
-=================
-To include this support in another application that has setup.py generated
-scripts:
-- add the line:
- # PYTHON_ARGCOMPLETE_OK
- near the top of the main python entry point
-- include in the file calling parse_args():
- from _argcomplete import try_argcomplete, filescompleter
- , call try_argcomplete just before parse_args(), and optionally add
- filescompleter to the positional arguments' add_argument()
-If things do not work right away:
-- switch on argcomplete debugging with (also helpful when doing custom
- completers):
- export _ARC_DEBUG=1
-- run:
- python-argcomplete-check-easy-install-script $(which appname)
- echo $?
- will echo 0 if the magic line has been found, 1 if not
-- sometimes it helps to find early on errors using:
- _ARGCOMPLETE=1 _ARC_DEBUG=1 appname
- which should throw a KeyError: 'COMPLINE' (which is properly set by the
- global argcomplete script).
-"""
-from __future__ import absolute_import, division, print_function
-import sys
-import os
-from glob import glob
-
-
-class FastFilesCompleter:
- 'Fast file completer class'
-
- def __init__(self, directories=True):
- self.directories = directories
-
- def __call__(self, prefix, **kwargs):
-        """only called on non-option completions"""
- if os.path.sep in prefix[1:]:
- prefix_dir = len(os.path.dirname(prefix) + os.path.sep)
- else:
- prefix_dir = 0
- completion = []
- globbed = []
- if '*' not in prefix and '?' not in prefix:
- # we are on unix, otherwise no bash
- if not prefix or prefix[-1] == os.path.sep:
- globbed.extend(glob(prefix + '.*'))
- prefix += '*'
- globbed.extend(glob(prefix))
- for x in sorted(globbed):
- if os.path.isdir(x):
- x += '/'
- # append stripping the prefix (like bash, not like compgen)
- completion.append(x[prefix_dir:])
- return completion
-
-
-if os.environ.get('_ARGCOMPLETE'):
- try:
- import argcomplete.completers
- except ImportError:
- sys.exit(-1)
- filescompleter = FastFilesCompleter()
-
- def try_argcomplete(parser):
- argcomplete.autocomplete(parser, always_complete_options=False)
-else:
- def try_argcomplete(parser):
- pass
- filescompleter = None
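
Stripped of the pytest specifics, the wiring described in the docstring
amounts to the following (a sketch assuming argcomplete is installed; the
marker comment and the autocomplete() call are the two required pieces):

    #!/usr/bin/env python
    # PYTHON_ARGCOMPLETE_OK  (the magic marker the bash hook greps for)
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('path', nargs='*')

    try:
        import argcomplete
        argcomplete.autocomplete(parser, always_complete_options=False)
    except ImportError:
        pass  # completion simply stays disabled

    args = parser.parse_args()
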
diff --git a/lib/spack/external/pytest-fallback/_pytest/_code/__init__.py b/lib/spack/external/pytest-fallback/_pytest/_code/__init__.py
deleted file mode 100644
index 815c13b42c..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/_code/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-""" python inspection/code generation API """
-from __future__ import absolute_import, division, print_function
-from .code import Code # noqa
-from .code import ExceptionInfo # noqa
-from .code import Frame # noqa
-from .code import Traceback # noqa
-from .code import getrawcode # noqa
-from .source import Source # noqa
-from .source import compile_ as compile # noqa
-from .source import getfslineno # noqa
diff --git a/lib/spack/external/pytest-fallback/_pytest/_code/_py2traceback.py b/lib/spack/external/pytest-fallback/_pytest/_code/_py2traceback.py
deleted file mode 100644
index 5aacf0a428..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/_code/_py2traceback.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# copied from python-2.7.3's traceback.py
-# CHANGES:
-# - some_str is replaced, trying to create unicode strings
-#
-from __future__ import absolute_import, division, print_function
-import types
-
-
-def format_exception_only(etype, value):
- """Format the exception part of a traceback.
-
- The arguments are the exception type and value such as given by
- sys.last_type and sys.last_value. The return value is a list of
- strings, each ending in a newline.
-
- Normally, the list contains a single string; however, for
- SyntaxError exceptions, it contains several lines that (when
- printed) display detailed information about where the syntax
- error occurred.
-
- The message indicating which exception occurred is always the last
- string in the list.
-
- """
-
- # An instance should not have a meaningful value parameter, but
- # sometimes does, particularly for string exceptions, such as
- # >>> raise string1, string2 # deprecated
- #
-    # Clear these out first because issubclass(string1, SyntaxError)
-    # would throw another exception and mask the original problem.
- if (isinstance(etype, BaseException) or
- isinstance(etype, types.InstanceType) or
- etype is None or type(etype) is str):
- return [_format_final_exc_line(etype, value)]
-
- stype = etype.__name__
-
- if not issubclass(etype, SyntaxError):
- return [_format_final_exc_line(stype, value)]
-
- # It was a syntax error; show exactly where the problem was found.
- lines = []
- try:
- msg, (filename, lineno, offset, badline) = value.args
- except Exception:
- pass
- else:
- filename = filename or "<string>"
- lines.append(' File "%s", line %d\n' % (filename, lineno))
- if badline is not None:
- if isinstance(badline, bytes): # python 2 only
- badline = badline.decode('utf-8', 'replace')
- lines.append(u' %s\n' % badline.strip())
- if offset is not None:
- caretspace = badline.rstrip('\n')[:offset].lstrip()
-                # non-space whitespace (like tabs) must be kept for alignment
- caretspace = ((c.isspace() and c or ' ') for c in caretspace)
- # only three spaces to account for offset1 == pos 0
- lines.append(' %s^\n' % ''.join(caretspace))
- value = msg
-
- lines.append(_format_final_exc_line(stype, value))
- return lines
-
-
-def _format_final_exc_line(etype, value):
-    """Return a single line -- the normal case for format_exception_only"""
- valuestr = _some_str(value)
- if value is None or not valuestr:
- line = "%s\n" % etype
- else:
- line = "%s: %s\n" % (etype, valuestr)
- return line
-
-
-def _some_str(value):
- try:
- return unicode(value)
- except Exception:
- try:
- return str(value)
- except Exception:
- pass
- return '<unprintable %s object>' % type(value).__name__
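
On Python 3 the same contract is provided directly by the standard library's
traceback module; a quick sketch of the behavior this backport emulates:

    import traceback

    try:
        compile("def broken(:", "<demo>", "exec")
    except SyntaxError as exc:
        # A list of strings, each ending in a newline; SyntaxError gets
        # the multi-line treatment with a caret marking the offset.
        lines = traceback.format_exception_only(type(exc), exc)
        print(''.join(lines), end='')
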
diff --git a/lib/spack/external/pytest-fallback/_pytest/_code/code.py b/lib/spack/external/pytest-fallback/_pytest/_code/code.py
deleted file mode 100644
index f3b7eedfce..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/_code/code.py
+++ /dev/null
@@ -1,908 +0,0 @@
-from __future__ import absolute_import, division, print_function
-import sys
-from inspect import CO_VARARGS, CO_VARKEYWORDS
-import re
-from weakref import ref
-from _pytest.compat import _PY2, _PY3, PY35, safe_str
-
-import py
-builtin_repr = repr
-
-reprlib = py.builtin._tryimport('repr', 'reprlib')
-
-if _PY3:
- from traceback import format_exception_only
-else:
- from ._py2traceback import format_exception_only
-
-
-class Code(object):
- """ wrapper around Python code objects """
-
- def __init__(self, rawcode):
- if not hasattr(rawcode, "co_filename"):
- rawcode = getrawcode(rawcode)
- try:
- self.filename = rawcode.co_filename
- self.firstlineno = rawcode.co_firstlineno - 1
- self.name = rawcode.co_name
- except AttributeError:
- raise TypeError("not a code object: %r" % (rawcode,))
- self.raw = rawcode
-
- def __eq__(self, other):
- return self.raw == other.raw
-
- __hash__ = None
-
- def __ne__(self, other):
- return not self == other
-
- @property
- def path(self):
- """ return a path object pointing to source code (note that it
- might not point to an actually existing file). """
- try:
- p = py.path.local(self.raw.co_filename)
- # maybe don't try this checking
- if not p.check():
- raise OSError("py.path check failed.")
- except OSError:
- # XXX maybe try harder like the weird logic
- # in the standard lib [linecache.updatecache] does?
- p = self.raw.co_filename
-
- return p
-
- @property
- def fullsource(self):
- """ return a _pytest._code.Source object for the full source file of the code
- """
- from _pytest._code import source
- full, _ = source.findsource(self.raw)
- return full
-
- def source(self):
- """ return a _pytest._code.Source object for the code object's source only
- """
- # return source only for that part of code
- import _pytest._code
- return _pytest._code.Source(self.raw)
-
- def getargs(self, var=False):
- """ return a tuple with the argument names for the code object
-
- if 'var' is set True also return the names of the variable and
- keyword arguments when present
- """
-        # handy shortcut for getting args
- raw = self.raw
- argcount = raw.co_argcount
- if var:
- argcount += raw.co_flags & CO_VARARGS
- argcount += raw.co_flags & CO_VARKEYWORDS
- return raw.co_varnames[:argcount]
-
-
-class Frame(object):
- """Wrapper around a Python frame holding f_locals and f_globals
- in which expressions can be evaluated."""
-
- def __init__(self, frame):
- self.lineno = frame.f_lineno - 1
- self.f_globals = frame.f_globals
- self.f_locals = frame.f_locals
- self.raw = frame
- self.code = Code(frame.f_code)
-
- @property
- def statement(self):
- """ statement this frame is at """
- import _pytest._code
- if self.code.fullsource is None:
- return _pytest._code.Source("")
- return self.code.fullsource.getstatement(self.lineno)
-
- def eval(self, code, **vars):
- """ evaluate 'code' in the frame
-
- 'vars' are optional additional local variables
-
- returns the result of the evaluation
- """
- f_locals = self.f_locals.copy()
- f_locals.update(vars)
- return eval(code, self.f_globals, f_locals)
-
- def exec_(self, code, **vars):
- """ exec 'code' in the frame
-
-        'vars' are optional additional local variables
- """
- f_locals = self.f_locals.copy()
- f_locals.update(vars)
- py.builtin.exec_(code, self.f_globals, f_locals)
-
- def repr(self, object):
- """ return a 'safe' (non-recursive, one-line) string repr for 'object'
- """
- return py.io.saferepr(object)
-
- def is_true(self, object):
- return object
-
- def getargs(self, var=False):
- """ return a list of tuples (name, value) for all arguments
-
- if 'var' is set True also include the variable and keyword
- arguments when present
- """
- retval = []
- for arg in self.code.getargs(var):
- try:
- retval.append((arg, self.f_locals[arg]))
- except KeyError:
- pass # this can occur when using Psyco
- return retval
-
-
-class TracebackEntry(object):
- """ a single entry in a traceback """
-
- _repr_style = None
- exprinfo = None
-
- def __init__(self, rawentry, excinfo=None):
- self._excinfo = excinfo
- self._rawentry = rawentry
- self.lineno = rawentry.tb_lineno - 1
-
- def set_repr_style(self, mode):
- assert mode in ("short", "long")
- self._repr_style = mode
-
- @property
- def frame(self):
- import _pytest._code
- return _pytest._code.Frame(self._rawentry.tb_frame)
-
- @property
- def relline(self):
- return self.lineno - self.frame.code.firstlineno
-
- def __repr__(self):
- return "<TracebackEntry %s:%d>" % (self.frame.code.path, self.lineno + 1)
-
- @property
- def statement(self):
- """ _pytest._code.Source object for the current statement """
- source = self.frame.code.fullsource
- return source.getstatement(self.lineno)
-
- @property
- def path(self):
- """ path to the source code """
- return self.frame.code.path
-
- def getlocals(self):
- return self.frame.f_locals
-    locals = property(getlocals, None, None, "locals of underlying frame")
-
- def getfirstlinesource(self):
- # on Jython this firstlineno can be -1 apparently
- return max(self.frame.code.firstlineno, 0)
-
- def getsource(self, astcache=None):
- """ return failing source code. """
-        # we use the passed-in astcache to avoid reparsing AST trees
-        # within exception info printing
- from _pytest._code.source import getstatementrange_ast
- source = self.frame.code.fullsource
- if source is None:
- return None
- key = astnode = None
- if astcache is not None:
- key = self.frame.code.path
- if key is not None:
- astnode = astcache.get(key, None)
- start = self.getfirstlinesource()
- try:
- astnode, _, end = getstatementrange_ast(self.lineno, source,
- astnode=astnode)
- except SyntaxError:
- end = self.lineno + 1
- else:
- if key is not None:
- astcache[key] = astnode
- return source[start:end]
-
- source = property(getsource)
-
- def ishidden(self):
- """ return True if the current frame has a var __tracebackhide__
- resolving to True
-
- If __tracebackhide__ is a callable, it gets called with the
- ExceptionInfo instance and can decide whether to hide the traceback.
-
- mostly for internal use
- """
- try:
- tbh = self.frame.f_locals['__tracebackhide__']
- except KeyError:
- try:
- tbh = self.frame.f_globals['__tracebackhide__']
- except KeyError:
- return False
-
- if py.builtin.callable(tbh):
- return tbh(None if self._excinfo is None else self._excinfo())
- else:
- return tbh
-
- def __str__(self):
- try:
- fn = str(self.path)
- except py.error.Error:
- fn = '???'
- name = self.frame.code.name
- try:
- line = str(self.statement).lstrip()
- except KeyboardInterrupt:
- raise
- except: # noqa
- line = "???"
- return " File %r:%d in %s\n %s\n" % (fn, self.lineno + 1, name, line)
-
- def name(self):
- return self.frame.code.raw.co_name
-    name = property(name, None, None, "co_name of underlying code")
-
-
-class Traceback(list):
- """ Traceback objects encapsulate and offer higher level
- access to Traceback entries.
- """
- Entry = TracebackEntry
-
- def __init__(self, tb, excinfo=None):
- """ initialize from given python traceback object and ExceptionInfo """
- self._excinfo = excinfo
- if hasattr(tb, 'tb_next'):
- def f(cur):
- while cur is not None:
- yield self.Entry(cur, excinfo=excinfo)
- cur = cur.tb_next
- list.__init__(self, f(tb))
- else:
- list.__init__(self, tb)
-
- def cut(self, path=None, lineno=None, firstlineno=None, excludepath=None):
- """ return a Traceback instance wrapping part of this Traceback
-
-        by providing any combination of path, lineno and firstlineno, the
- first frame to start the to-be-returned traceback is determined
-
- this allows cutting the first part of a Traceback instance e.g.
- for formatting reasons (removing some uninteresting bits that deal
- with handling of the exception/traceback)
- """
- for x in self:
- code = x.frame.code
- codepath = code.path
- if ((path is None or codepath == path) and
- (excludepath is None or not hasattr(codepath, 'relto') or
- not codepath.relto(excludepath)) and
- (lineno is None or x.lineno == lineno) and
- (firstlineno is None or x.frame.code.firstlineno == firstlineno)):
- return Traceback(x._rawentry, self._excinfo)
- return self
-
- def __getitem__(self, key):
- val = super(Traceback, self).__getitem__(key)
- if isinstance(key, type(slice(0))):
- val = self.__class__(val)
- return val
-
- def filter(self, fn=lambda x: not x.ishidden()):
- """ return a Traceback instance with certain items removed
-
- fn is a function that gets a single argument, a TracebackEntry
- instance, and should return True when the item should be added
- to the Traceback, False when not
-
- by default this removes all the TracebackEntries which are hidden
- (see ishidden() above)
- """
- return Traceback(filter(fn, self), self._excinfo)
-
- def getcrashentry(self):
-        """ return the last non-hidden traceback entry that led
-        to the exception of a traceback.
- """
- for i in range(-1, -len(self) - 1, -1):
- entry = self[i]
- if not entry.ishidden():
- return entry
- return self[-1]
-
- def recursionindex(self):
- """ return the index of the frame/TracebackEntry where recursion
- originates if appropriate, None if no recursion occurred
- """
- cache = {}
- for i, entry in enumerate(self):
- # id for the code.raw is needed to work around
- # the strange metaprogramming in the decorator lib from pypi
- # which generates code objects that have hash/value equality
- # XXX needs a test
- key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno
- # print "checking for recursion at", key
- values = cache.setdefault(key, [])
- if values:
- f = entry.frame
- loc = f.f_locals
- for otherloc in values:
- if f.is_true(f.eval(co_equal,
- __recursioncache_locals_1=loc,
- __recursioncache_locals_2=otherloc)):
- return i
- values.append(entry.frame.f_locals)
- return None
-
-
-co_equal = compile('__recursioncache_locals_1 == __recursioncache_locals_2',
- '?', 'eval')
-
-
-class ExceptionInfo(object):
- """ wraps sys.exc_info() objects and offers
- help for navigating the traceback.
- """
- _striptext = ''
- _assert_start_repr = "AssertionError(u\'assert " if _PY2 else "AssertionError(\'assert "
-
- def __init__(self, tup=None, exprinfo=None):
- import _pytest._code
- if tup is None:
- tup = sys.exc_info()
- if exprinfo is None and isinstance(tup[1], AssertionError):
- exprinfo = getattr(tup[1], 'msg', None)
- if exprinfo is None:
- exprinfo = py.io.saferepr(tup[1])
- if exprinfo and exprinfo.startswith(self._assert_start_repr):
- self._striptext = 'AssertionError: '
- self._excinfo = tup
- #: the exception class
- self.type = tup[0]
- #: the exception instance
- self.value = tup[1]
- #: the exception raw traceback
- self.tb = tup[2]
- #: the exception type name
- self.typename = self.type.__name__
- #: the exception traceback (_pytest._code.Traceback instance)
- self.traceback = _pytest._code.Traceback(self.tb, excinfo=ref(self))
-
- def __repr__(self):
- return "<ExceptionInfo %s tblen=%d>" % (self.typename, len(self.traceback))
-
- def exconly(self, tryshort=False):
- """ return the exception as a string
-
- when 'tryshort' resolves to True, and the exception is a
- _pytest._code._AssertionError, only the actual exception part of
- the exception representation is returned (so 'AssertionError: ' is
- removed from the beginning)
- """
- lines = format_exception_only(self.type, self.value)
- text = ''.join(lines)
- text = text.rstrip()
- if tryshort:
- if text.startswith(self._striptext):
- text = text[len(self._striptext):]
- return text
-
- def errisinstance(self, exc):
- """ return True if the exception is an instance of exc """
- return isinstance(self.value, exc)
-
- def _getreprcrash(self):
- exconly = self.exconly(tryshort=True)
- entry = self.traceback.getcrashentry()
- path, lineno = entry.frame.code.raw.co_filename, entry.lineno
- return ReprFileLocation(path, lineno + 1, exconly)
-
- def getrepr(self, showlocals=False, style="long",
- abspath=False, tbfilter=True, funcargs=False):
- """ return str()able representation of this exception info.
- showlocals: show locals per traceback entry
- style: long|short|no|native traceback style
- tbfilter: hide entries (where __tracebackhide__ is true)
-
- in case of style==native, tbfilter and showlocals is ignored.
- """
- if style == 'native':
- return ReprExceptionInfo(ReprTracebackNative(
- py.std.traceback.format_exception(
- self.type,
- self.value,
- self.traceback[0]._rawentry,
- )), self._getreprcrash())
-
- fmt = FormattedExcinfo(showlocals=showlocals, style=style,
- abspath=abspath, tbfilter=tbfilter, funcargs=funcargs)
- return fmt.repr_excinfo(self)
-
- def __str__(self):
- entry = self.traceback[-1]
- loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
- return str(loc)
-
- def __unicode__(self):
- entry = self.traceback[-1]
- loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
- return unicode(loc)
-
- def match(self, regexp):
- """
- Match the regular expression 'regexp' on the string representation of
- the exception. If it matches then True is returned (so that it is
-        possible to write 'assert excinfo.match()'). If it doesn't match, an
-        AssertionError is raised.
- """
- __tracebackhide__ = True
- if not re.search(regexp, str(self.value)):
- assert 0, "Pattern '{0!s}' not found in '{1!s}'".format(
- regexp, self.value)
- return True
-
-
-class FormattedExcinfo(object):
- """ presenting information about failing Functions and Generators. """
- # for traceback entries
- flow_marker = ">"
- fail_marker = "E"
-
- def __init__(self, showlocals=False, style="long", abspath=True, tbfilter=True, funcargs=False):
- self.showlocals = showlocals
- self.style = style
- self.tbfilter = tbfilter
- self.funcargs = funcargs
- self.abspath = abspath
- self.astcache = {}
-
- def _getindent(self, source):
- # figure out indent for given source
- try:
- s = str(source.getstatement(len(source) - 1))
- except KeyboardInterrupt:
- raise
- except: # noqa
- try:
- s = str(source[-1])
- except KeyboardInterrupt:
- raise
- except: # noqa
- return 0
- return 4 + (len(s) - len(s.lstrip()))
-
- def _getentrysource(self, entry):
- source = entry.getsource(self.astcache)
- if source is not None:
- source = source.deindent()
- return source
-
- def _saferepr(self, obj):
- return py.io.saferepr(obj)
-
- def repr_args(self, entry):
- if self.funcargs:
- args = []
- for argname, argvalue in entry.frame.getargs(var=True):
- args.append((argname, self._saferepr(argvalue)))
- return ReprFuncArgs(args)
-
- def get_source(self, source, line_index=-1, excinfo=None, short=False):
- """ return formatted and marked up source lines. """
- import _pytest._code
- lines = []
- if source is None or line_index >= len(source.lines):
- source = _pytest._code.Source("???")
- line_index = 0
- if line_index < 0:
- line_index += len(source)
- space_prefix = " "
- if short:
- lines.append(space_prefix + source.lines[line_index].strip())
- else:
- for line in source.lines[:line_index]:
- lines.append(space_prefix + line)
- lines.append(self.flow_marker + " " + source.lines[line_index])
- for line in source.lines[line_index + 1:]:
- lines.append(space_prefix + line)
- if excinfo is not None:
- indent = 4 if short else self._getindent(source)
- lines.extend(self.get_exconly(excinfo, indent=indent, markall=True))
- return lines
-
- def get_exconly(self, excinfo, indent=4, markall=False):
- lines = []
- indent = " " * indent
- # get the real exception information out
- exlines = excinfo.exconly(tryshort=True).split('\n')
- failindent = self.fail_marker + indent[1:]
- for line in exlines:
- lines.append(failindent + line)
- if not markall:
- failindent = indent
- return lines
-
- def repr_locals(self, locals):
- if self.showlocals:
- lines = []
- keys = [loc for loc in locals if loc[0] != "@"]
- keys.sort()
- for name in keys:
- value = locals[name]
- if name == '__builtins__':
- lines.append("__builtins__ = <builtins>")
- else:
- # This formatting could all be handled by the
- # _repr() function, which is only reprlib.Repr in
- # disguise, so is very configurable.
- str_repr = self._saferepr(value)
- # if len(str_repr) < 70 or not isinstance(value,
- # (list, tuple, dict)):
- lines.append("%-10s = %s" % (name, str_repr))
- # else:
- # self._line("%-10s =\\" % (name,))
- # # XXX
- # py.std.pprint.pprint(value, stream=self.excinfowriter)
- return ReprLocals(lines)
-
- def repr_traceback_entry(self, entry, excinfo=None):
- import _pytest._code
- source = self._getentrysource(entry)
- if source is None:
- source = _pytest._code.Source("???")
- line_index = 0
- else:
- # entry.getfirstlinesource() can be -1, should be 0 on jython
- line_index = entry.lineno - max(entry.getfirstlinesource(), 0)
-
- lines = []
- style = entry._repr_style
- if style is None:
- style = self.style
- if style in ("short", "long"):
- short = style == "short"
- reprargs = self.repr_args(entry) if not short else None
- s = self.get_source(source, line_index, excinfo, short=short)
- lines.extend(s)
- if short:
- message = "in %s" % (entry.name)
- else:
- message = excinfo and excinfo.typename or ""
- path = self._makepath(entry.path)
- filelocrepr = ReprFileLocation(path, entry.lineno + 1, message)
- localsrepr = None
- if not short:
- localsrepr = self.repr_locals(entry.locals)
- return ReprEntry(lines, reprargs, localsrepr, filelocrepr, style)
- if excinfo:
- lines.extend(self.get_exconly(excinfo, indent=4))
- return ReprEntry(lines, None, None, None, style)
-
- def _makepath(self, path):
- if not self.abspath:
- try:
- np = py.path.local().bestrelpath(path)
- except OSError:
- return path
- if len(np) < len(str(path)):
- path = np
- return path
-
- def repr_traceback(self, excinfo):
- traceback = excinfo.traceback
- if self.tbfilter:
- traceback = traceback.filter()
-
- if is_recursion_error(excinfo):
- traceback, extraline = self._truncate_recursive_traceback(traceback)
- else:
- extraline = None
-
- last = traceback[-1]
- entries = []
- for index, entry in enumerate(traceback):
- einfo = (last == entry) and excinfo or None
- reprentry = self.repr_traceback_entry(entry, einfo)
- entries.append(reprentry)
- return ReprTraceback(entries, extraline, style=self.style)
-
- def _truncate_recursive_traceback(self, traceback):
- """
- Truncate the given recursive traceback trying to find the starting point
- of the recursion.
-
- The detection is done by going through each traceback entry and finding the
-        point in which the locals of the frame are equal to the locals of a previous frame (see ``recursionindex()``).
-
- Handle the situation where the recursion process might raise an exception (for example
- comparing numpy arrays using equality raises a TypeError), in which case we do our best to
- warn the user of the error and show a limited traceback.
- """
- try:
- recursionindex = traceback.recursionindex()
- except Exception as e:
- max_frames = 10
- extraline = (
- '!!! Recursion error detected, but an error occurred locating the origin of recursion.\n'
- ' The following exception happened when comparing locals in the stack frame:\n'
- ' {exc_type}: {exc_msg}\n'
- ' Displaying first and last {max_frames} stack frames out of {total}.'
- ).format(exc_type=type(e).__name__, exc_msg=safe_str(e), max_frames=max_frames, total=len(traceback))
- traceback = traceback[:max_frames] + traceback[-max_frames:]
- else:
- if recursionindex is not None:
- extraline = "!!! Recursion detected (same locals & position)"
- traceback = traceback[:recursionindex + 1]
- else:
- extraline = None
-
- return traceback, extraline
-
- def repr_excinfo(self, excinfo):
- if _PY2:
- reprtraceback = self.repr_traceback(excinfo)
- reprcrash = excinfo._getreprcrash()
-
- return ReprExceptionInfo(reprtraceback, reprcrash)
- else:
- repr_chain = []
- e = excinfo.value
- descr = None
- while e is not None:
- if excinfo:
- reprtraceback = self.repr_traceback(excinfo)
- reprcrash = excinfo._getreprcrash()
- else:
- # fallback to native repr if the exception doesn't have a traceback:
- # ExceptionInfo objects require a full traceback to work
- reprtraceback = ReprTracebackNative(py.std.traceback.format_exception(type(e), e, None))
- reprcrash = None
-
- repr_chain += [(reprtraceback, reprcrash, descr)]
- if e.__cause__ is not None:
- e = e.__cause__
- excinfo = ExceptionInfo((type(e), e, e.__traceback__)) if e.__traceback__ else None
- descr = 'The above exception was the direct cause of the following exception:'
- elif (e.__context__ is not None and not e.__suppress_context__):
- e = e.__context__
- excinfo = ExceptionInfo((type(e), e, e.__traceback__)) if e.__traceback__ else None
- descr = 'During handling of the above exception, another exception occurred:'
- else:
- e = None
- repr_chain.reverse()
- return ExceptionChainRepr(repr_chain)
-
-
-class TerminalRepr(object):
- def __str__(self):
- s = self.__unicode__()
- if _PY2:
- s = s.encode('utf-8')
- return s
-
- def __unicode__(self):
- # FYI this is called from pytest-xdist's serialization of exception
- # information.
- io = py.io.TextIO()
- tw = py.io.TerminalWriter(file=io)
- self.toterminal(tw)
- return io.getvalue().strip()
-
- def __repr__(self):
- return "<%s instance at %0x>" % (self.__class__, id(self))
-
-
-class ExceptionRepr(TerminalRepr):
- def __init__(self):
- self.sections = []
-
- def addsection(self, name, content, sep="-"):
- self.sections.append((name, content, sep))
-
- def toterminal(self, tw):
- for name, content, sep in self.sections:
- tw.sep(sep, name)
- tw.line(content)
-
-
-class ExceptionChainRepr(ExceptionRepr):
- def __init__(self, chain):
- super(ExceptionChainRepr, self).__init__()
- self.chain = chain
- # reprcrash and reprtraceback of the outermost (the newest) exception
- # in the chain
- self.reprtraceback = chain[-1][0]
- self.reprcrash = chain[-1][1]
-
- def toterminal(self, tw):
- for element in self.chain:
- element[0].toterminal(tw)
- if element[2] is not None:
- tw.line("")
- tw.line(element[2], yellow=True)
- super(ExceptionChainRepr, self).toterminal(tw)
-
-
-class ReprExceptionInfo(ExceptionRepr):
- def __init__(self, reprtraceback, reprcrash):
- super(ReprExceptionInfo, self).__init__()
- self.reprtraceback = reprtraceback
- self.reprcrash = reprcrash
-
- def toterminal(self, tw):
- self.reprtraceback.toterminal(tw)
- super(ReprExceptionInfo, self).toterminal(tw)
-
-
-class ReprTraceback(TerminalRepr):
- entrysep = "_ "
-
- def __init__(self, reprentries, extraline, style):
- self.reprentries = reprentries
- self.extraline = extraline
- self.style = style
-
- def toterminal(self, tw):
- # the entries might have different styles
- for i, entry in enumerate(self.reprentries):
- if entry.style == "long":
- tw.line("")
- entry.toterminal(tw)
- if i < len(self.reprentries) - 1:
- next_entry = self.reprentries[i + 1]
- if entry.style == "long" or \
- entry.style == "short" and next_entry.style == "long":
- tw.sep(self.entrysep)
-
- if self.extraline:
- tw.line(self.extraline)
-
-
-class ReprTracebackNative(ReprTraceback):
- def __init__(self, tblines):
- self.style = "native"
- self.reprentries = [ReprEntryNative(tblines)]
- self.extraline = None
-
-
-class ReprEntryNative(TerminalRepr):
- style = "native"
-
- def __init__(self, tblines):
- self.lines = tblines
-
- def toterminal(self, tw):
- tw.write("".join(self.lines))
-
-
-class ReprEntry(TerminalRepr):
- localssep = "_ "
-
- def __init__(self, lines, reprfuncargs, reprlocals, filelocrepr, style):
- self.lines = lines
- self.reprfuncargs = reprfuncargs
- self.reprlocals = reprlocals
- self.reprfileloc = filelocrepr
- self.style = style
-
- def toterminal(self, tw):
- if self.style == "short":
- self.reprfileloc.toterminal(tw)
- for line in self.lines:
- red = line.startswith("E ")
- tw.line(line, bold=True, red=red)
- # tw.line("")
- return
- if self.reprfuncargs:
- self.reprfuncargs.toterminal(tw)
- for line in self.lines:
- red = line.startswith("E ")
- tw.line(line, bold=True, red=red)
- if self.reprlocals:
- # tw.sep(self.localssep, "Locals")
- tw.line("")
- self.reprlocals.toterminal(tw)
- if self.reprfileloc:
- if self.lines:
- tw.line("")
- self.reprfileloc.toterminal(tw)
-
- def __str__(self):
- return "%s\n%s\n%s" % ("\n".join(self.lines),
- self.reprlocals,
- self.reprfileloc)
-
-
-class ReprFileLocation(TerminalRepr):
- def __init__(self, path, lineno, message):
- self.path = str(path)
- self.lineno = lineno
- self.message = message
-
- def toterminal(self, tw):
- # filename and lineno output for each entry,
-        # using an output format that most editors understand
- msg = self.message
- i = msg.find("\n")
- if i != -1:
- msg = msg[:i]
- tw.write(self.path, bold=True, red=True)
- tw.line(":%s: %s" % (self.lineno, msg))
-
-
-class ReprLocals(TerminalRepr):
- def __init__(self, lines):
- self.lines = lines
-
- def toterminal(self, tw):
- for line in self.lines:
- tw.line(line)
-
-
-class ReprFuncArgs(TerminalRepr):
- def __init__(self, args):
- self.args = args
-
- def toterminal(self, tw):
- if self.args:
- linesofar = ""
- for name, value in self.args:
- ns = "%s = %s" % (safe_str(name), safe_str(value))
- if len(ns) + len(linesofar) + 2 > tw.fullwidth:
- if linesofar:
- tw.line(linesofar)
- linesofar = ns
- else:
- if linesofar:
- linesofar += ", " + ns
- else:
- linesofar = ns
- if linesofar:
- tw.line(linesofar)
- tw.line("")
-
-
-def getrawcode(obj, trycall=True):
- """ return code object for given function. """
- try:
- return obj.__code__
- except AttributeError:
- obj = getattr(obj, 'im_func', obj)
- obj = getattr(obj, 'func_code', obj)
- obj = getattr(obj, 'f_code', obj)
- obj = getattr(obj, '__code__', obj)
- if trycall and not hasattr(obj, 'co_firstlineno'):
- if hasattr(obj, '__call__') and not py.std.inspect.isclass(obj):
- x = getrawcode(obj.__call__, trycall=False)
- if hasattr(x, 'co_firstlineno'):
- return x
- return obj
-
-
-if PY35: # RecursionError introduced in 3.5
- def is_recursion_error(excinfo):
- return excinfo.errisinstance(RecursionError) # noqa
-else:
- def is_recursion_error(excinfo):
- if not excinfo.errisinstance(RuntimeError):
- return False
- try:
- return "maximum recursion depth exceeded" in str(excinfo.value)
- except UnicodeError:
- return False
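
The chain handling in repr_excinfo() follows the standard
__cause__/__context__ links; a standalone sketch of that walk (plain Python,
no pytest internals assumed):

    def iter_exception_chain(exc):
        """Yield (exception, bridge text) pairs, oldest exception first,
        mirroring the loop in FormattedExcinfo.repr_excinfo above."""
        chain = []
        descr = None
        while exc is not None:
            chain.append((exc, descr))
            if exc.__cause__ is not None:
                descr = 'The above exception was the direct cause of the following exception:'
                exc = exc.__cause__
            elif exc.__context__ is not None and not exc.__suppress_context__:
                descr = 'During handling of the above exception, another exception occurred:'
                exc = exc.__context__
            else:
                exc = None
        chain.reverse()
        return chain

    try:
        try:
            raise KeyError('inner')
        except KeyError as e:
            raise ValueError('outer') from e
    except ValueError as caught:
        for exc, descr in iter_exception_chain(caught):
            print(repr(exc))
            if descr:
                print(descr)   # the bridge line printed between tracebacks
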
diff --git a/lib/spack/external/pytest-fallback/_pytest/_code/source.py b/lib/spack/external/pytest-fallback/_pytest/_code/source.py
deleted file mode 100644
index fc41712649..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/_code/source.py
+++ /dev/null
@@ -1,416 +0,0 @@
-from __future__ import absolute_import, division, generators, print_function
-
-from bisect import bisect_right
-import sys
-import inspect
-import tokenize
-import py
-cpy_compile = compile
-
-try:
- import _ast
- from _ast import PyCF_ONLY_AST as _AST_FLAG
-except ImportError:
- _AST_FLAG = 0
- _ast = None
-
-
-class Source(object):
-    """ an immutable object holding a source code fragment,
- possibly deindenting it.
- """
- _compilecounter = 0
-
- def __init__(self, *parts, **kwargs):
- self.lines = lines = []
- de = kwargs.get('deindent', True)
- rstrip = kwargs.get('rstrip', True)
- for part in parts:
- if not part:
- partlines = []
- if isinstance(part, Source):
- partlines = part.lines
- elif isinstance(part, (tuple, list)):
- partlines = [x.rstrip("\n") for x in part]
- elif isinstance(part, py.builtin._basestring):
- partlines = part.split('\n')
- if rstrip:
- while partlines:
- if partlines[-1].strip():
- break
- partlines.pop()
- else:
- partlines = getsource(part, deindent=de).lines
- if de:
- partlines = deindent(partlines)
- lines.extend(partlines)
-
- def __eq__(self, other):
- try:
- return self.lines == other.lines
- except AttributeError:
- if isinstance(other, str):
- return str(self) == other
- return False
-
- __hash__ = None
-
- def __getitem__(self, key):
- if isinstance(key, int):
- return self.lines[key]
- else:
- if key.step not in (None, 1):
- raise IndexError("cannot slice a Source with a step")
- newsource = Source()
- newsource.lines = self.lines[key.start:key.stop]
- return newsource
-
- def __len__(self):
- return len(self.lines)
-
- def strip(self):
- """ return new source object with trailing
- and leading blank lines removed.
- """
- start, end = 0, len(self)
- while start < end and not self.lines[start].strip():
- start += 1
- while end > start and not self.lines[end - 1].strip():
- end -= 1
- source = Source()
- source.lines[:] = self.lines[start:end]
- return source
-
- def putaround(self, before='', after='', indent=' ' * 4):
- """ return a copy of the source object with
- 'before' and 'after' wrapped around it.
- """
- before = Source(before)
- after = Source(after)
- newsource = Source()
- lines = [(indent + line) for line in self.lines]
- newsource.lines = before.lines + lines + after.lines
- return newsource
-
- def indent(self, indent=' ' * 4):
- """ return a copy of the source object with
- all lines indented by the given indent-string.
- """
- newsource = Source()
- newsource.lines = [(indent + line) for line in self.lines]
- return newsource
-
- def getstatement(self, lineno, assertion=False):
- """ return Source statement which contains the
- given linenumber (counted from 0).
- """
- start, end = self.getstatementrange(lineno, assertion)
- return self[start:end]
-
- def getstatementrange(self, lineno, assertion=False):
-        """ return a (start, end) tuple which spans the minimal
-        statement region containing the given lineno.
- """
- if not (0 <= lineno < len(self)):
- raise IndexError("lineno out of range")
- ast, start, end = getstatementrange_ast(lineno, self)
- return start, end
-
- def deindent(self, offset=None):
- """ return a new source object deindented by offset.
- If offset is None then guess an indentation offset from
- the first non-blank line. Subsequent lines which have a
- lower indentation offset will be copied verbatim as
- they are assumed to be part of multilines.
- """
-        # XXX maybe use the tokenizer to properly handle multiline
-        # strings etc.?
- newsource = Source()
- newsource.lines[:] = deindent(self.lines, offset)
- return newsource
-
- def isparseable(self, deindent=True):
- """ return True if source is parseable, heuristically
- deindenting it by default.
- """
- try:
- import parser
- except ImportError:
- def syntax_checker(x):
- return compile(x, 'asd', 'exec')
- else:
- syntax_checker = parser.suite
-
- if deindent:
- source = str(self.deindent())
- else:
- source = str(self)
- try:
- # compile(source+'\n', "x", "exec")
- syntax_checker(source + '\n')
- except KeyboardInterrupt:
- raise
- except Exception:
- return False
- else:
- return True
-
- def __str__(self):
- return "\n".join(self.lines)
-
- def compile(self, filename=None, mode='exec',
- flag=generators.compiler_flag,
- dont_inherit=0, _genframe=None):
- """ return compiled code object. if filename is None
- invent an artificial filename which displays
- the source/line position of the caller frame.
- """
- if not filename or py.path.local(filename).check(file=0):
- if _genframe is None:
- _genframe = sys._getframe(1) # the caller
- fn, lineno = _genframe.f_code.co_filename, _genframe.f_lineno
- base = "<%d-codegen " % self._compilecounter
- self.__class__._compilecounter += 1
- if not filename:
- filename = base + '%s:%d>' % (fn, lineno)
- else:
- filename = base + '%r %s:%d>' % (filename, fn, lineno)
- source = "\n".join(self.lines) + '\n'
- try:
- co = cpy_compile(source, filename, mode, flag)
- except SyntaxError:
- ex = sys.exc_info()[1]
- # re-represent syntax errors from parsing python strings
- msglines = self.lines[:ex.lineno]
- if ex.offset:
- msglines.append(" " * ex.offset + '^')
- msglines.append("(code was compiled probably from here: %s)" % filename)
- newex = SyntaxError('\n'.join(msglines))
- newex.offset = ex.offset
- newex.lineno = ex.lineno
- newex.text = ex.text
- raise newex
- else:
- if flag & _AST_FLAG:
- return co
- lines = [(x + "\n") for x in self.lines]
- py.std.linecache.cache[filename] = (1, None, lines, filename)
- return co
-
-#
-# public API shortcut functions
-#
-
-
-def compile_(source, filename=None, mode='exec', flags=generators.compiler_flag, dont_inherit=0):
- """ compile the given source to a raw code object,
- and maintain an internal cache which allows later
- retrieval of the source code for the code object
- and any recursively created code objects.
- """
- if _ast is not None and isinstance(source, _ast.AST):
- # XXX should Source support having AST?
- return cpy_compile(source, filename, mode, flags, dont_inherit)
- _genframe = sys._getframe(1) # the caller
- s = Source(source)
- co = s.compile(filename, mode, flags, _genframe=_genframe)
- return co
-
-
-def getfslineno(obj):
- """ Return source location (path, lineno) for the given object.
- If the source cannot be determined return ("", -1)
- """
- import _pytest._code
- try:
- code = _pytest._code.Code(obj)
- except TypeError:
- try:
- fn = (py.std.inspect.getsourcefile(obj) or
- py.std.inspect.getfile(obj))
- except TypeError:
- return "", -1
-
- fspath = fn and py.path.local(fn) or None
- lineno = -1
- if fspath:
- try:
- _, lineno = findsource(obj)
- except IOError:
- pass
- else:
- fspath = code.path
- lineno = code.firstlineno
- assert isinstance(lineno, int)
- return fspath, lineno
-
-#
-# helper functions
-#
-
-
-def findsource(obj):
- try:
- sourcelines, lineno = py.std.inspect.findsource(obj)
- except py.builtin._sysex:
- raise
- except: # noqa
- return None, -1
- source = Source()
- source.lines = [line.rstrip() for line in sourcelines]
- return source, lineno
-
-
-def getsource(obj, **kwargs):
- import _pytest._code
- obj = _pytest._code.getrawcode(obj)
- try:
- strsrc = inspect.getsource(obj)
- except IndentationError:
- strsrc = "\"Buggy python version consider upgrading, cannot get source\""
- assert isinstance(strsrc, str)
- return Source(strsrc, **kwargs)
-
-
-def deindent(lines, offset=None):
- if offset is None:
- for line in lines:
- line = line.expandtabs()
- s = line.lstrip()
- if s:
- offset = len(line) - len(s)
- break
- else:
- offset = 0
- if offset == 0:
- return list(lines)
- newlines = []
-
- def readline_generator(lines):
- for line in lines:
- yield line + '\n'
- while True:
- yield ''
-
- it = readline_generator(lines)
-
- try:
- for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)):
- if sline > len(lines):
- break # End of input reached
- if sline > len(newlines):
- line = lines[sline - 1].expandtabs()
- if line.lstrip() and line[:offset].isspace():
- line = line[offset:] # Deindent
- newlines.append(line)
-
- for i in range(sline, eline):
- # Don't deindent continuing lines of
- # multiline tokens (i.e. multiline strings)
- newlines.append(lines[i])
- except (IndentationError, tokenize.TokenError):
- pass
- # Add any lines we didn't see. E.g. if an exception was raised.
- newlines.extend(lines[len(newlines):])
- return newlines
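
textwrap.dedent offers a rough stdlib analog of deindent for the simple case (unlike the tokenize-based version above, it does not protect the continuation lines of multiline string tokens); a sketch:

    import textwrap

    text = "    if x:\n        return x\n"
    print(textwrap.dedent(text))
    # if x:
    #     return x
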
-
-
-def get_statement_startend2(lineno, node):
- import ast
- # flatten all statements and except handlers into one lineno-list
- # AST's line numbers start indexing at 1
- values = []
- for x in ast.walk(node):
- if isinstance(x, _ast.stmt) or isinstance(x, _ast.ExceptHandler):
- values.append(x.lineno - 1)
- for name in "finalbody", "orelse":
- val = getattr(x, name, None)
- if val:
- # treat the finally/orelse part as its own statement;
- # -1 for 0-based indexing, -1 more for the "finally:"/"else:" line
- values.append(val[0].lineno - 1 - 1)
- values.sort()
- insert_index = bisect_right(values, lineno)
- start = values[insert_index - 1]
- if insert_index >= len(values):
- end = None
- else:
- end = values[insert_index]
- return start, end
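
The bisect lookup above, in isolation (values are 0-based statement start lines, already sorted):

    from bisect import bisect_right

    starts = [0, 2, 5]            # statements begin on source lines 1, 3 and 6
    lineno = 3                    # 0-based line being located
    i = bisect_right(starts, lineno)              # -> 2
    start = starts[i - 1]                         # -> 2, enclosing statement
    end = starts[i] if i < len(starts) else None  # -> 5, None after the last
    print(start, end)
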
-
-
-def getstatementrange_ast(lineno, source, assertion=False, astnode=None):
- if astnode is None:
- content = str(source)
- if sys.version_info < (2, 7):
- content += "\n"
- try:
- astnode = compile(content, "source", "exec", 1024) # 1024 for AST
- except ValueError:
- start, end = getstatementrange_old(lineno, source, assertion)
- return None, start, end
- start, end = get_statement_startend2(lineno, astnode)
- # we need to correct the end:
- # - ast-parsing strips comments
- # - there might be empty lines
- # - we might have lesser indented code blocks at the end
- if end is None:
- end = len(source.lines)
-
- if end > start + 1:
- # make sure we don't span differently indented code blocks
- # by using the BlockFinder helper that inspect.getsource() itself uses
- block_finder = inspect.BlockFinder()
- # if we start with an indented line, put blockfinder to "started" mode
- block_finder.started = source.lines[start][0].isspace()
- it = ((x + "\n") for x in source.lines[start:end])
- try:
- for tok in tokenize.generate_tokens(lambda: next(it)):
- block_finder.tokeneater(*tok)
- except (inspect.EndOfBlock, IndentationError):
- end = block_finder.last + start
- except Exception:
- pass
-
- # the end might still point to a comment or empty line, correct it
- while end:
- line = source.lines[end - 1].lstrip()
- if line.startswith("#") or not line:
- end -= 1
- else:
- break
- return astnode, start, end
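
The magic constant 1024 passed to compile() above is ast.PyCF_ONLY_AST; a quick sketch:

    import ast

    assert ast.PyCF_ONLY_AST == 1024
    tree = compile("x = 1\n", "source", "exec", ast.PyCF_ONLY_AST)
    print(type(tree).__name__)  # Module: an AST object, not a code object
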
-
-
-def getstatementrange_old(lineno, source, assertion=False):
- """ return (start, end) tuple which spans the minimal
- statement region which containing the given lineno.
- raise an IndexError if no such statementrange can be found.
- """
- # XXX this logic is only used on python2.4 and below
- # 1. find the start of the statement
- from codeop import compile_command
- for start in range(lineno, -1, -1):
- if assertion:
- line = source.lines[start]
- # the following lines are not fully tested, change with care
- if 'super' in line and 'self' in line and '__init__' in line:
- raise IndexError("likely a subclass")
- if "assert" not in line and "raise" not in line:
- continue
- trylines = source.lines[start:lineno + 1]
- # quick hack to prepare parsing an indented line with
- # compile_command() (which errors on "return" outside defs)
- trylines.insert(0, 'def xxx():')
- trysource = '\n '.join(trylines)
- # ^ space here
- try:
- compile_command(trysource)
- except (SyntaxError, OverflowError, ValueError):
- continue
-
- # 2. find the end of the statement
- for end in range(lineno + 1, len(source) + 1):
- trysource = source[start:end]
- if trysource.isparseable():
- return start, end
- raise SyntaxError("no valid source range around line %d " % (lineno,))
diff --git a/lib/spack/external/pytest-fallback/_pytest/_pluggy.py b/lib/spack/external/pytest-fallback/_pytest/_pluggy.py
deleted file mode 100644
index 6cc1d3d54a..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/_pluggy.py
+++ /dev/null
@@ -1,11 +0,0 @@
-"""
-imports symbols from vendored "pluggy" if available, otherwise
-falls back to importing "pluggy" from the default namespace.
-"""
-from __future__ import absolute_import, division, print_function
-try:
- from _pytest.vendored_packages.pluggy import * # noqa
- from _pytest.vendored_packages.pluggy import __version__ # noqa
-except ImportError:
- from pluggy import * # noqa
- from pluggy import __version__ # noqa
diff --git a/lib/spack/external/pytest-fallback/_pytest/_version.py b/lib/spack/external/pytest-fallback/_pytest/_version.py
deleted file mode 100644
index 3edb7da9ad..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/_version.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# coding: utf-8
-# file generated by setuptools_scm
-# don't change, don't track in version control
-version = '3.2.5'
diff --git a/lib/spack/external/pytest-fallback/_pytest/assertion/__init__.py b/lib/spack/external/pytest-fallback/_pytest/assertion/__init__.py
deleted file mode 100644
index b0ef667d56..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/assertion/__init__.py
+++ /dev/null
@@ -1,148 +0,0 @@
-"""
-support for presenting detailed information in failing assertions.
-"""
-from __future__ import absolute_import, division, print_function
-import py
-import sys
-
-from _pytest.assertion import util
-from _pytest.assertion import rewrite
-from _pytest.assertion import truncate
-
-
-def pytest_addoption(parser):
- group = parser.getgroup("debugconfig")
- group.addoption('--assert',
- action="store",
- dest="assertmode",
- choices=("rewrite", "plain",),
- default="rewrite",
- metavar="MODE",
- help="""Control assertion debugging tools. 'plain'
- performs no assertion debugging. 'rewrite'
- (the default) rewrites assert statements in
- test modules on import to provide assert
- expression information.""")
-
-
-def register_assert_rewrite(*names):
- """Register one or more module names to be rewritten on import.
-
- This function makes sure that the named module, or all modules inside
- the named package, get their assert statements rewritten.
- Thus you should make sure to call this before the module is
- actually imported, usually in your __init__.py if you are a plugin
- using a package.
-
- :raise TypeError: if the given module names are not strings.
- """
- for name in names:
- if not isinstance(name, str):
- msg = 'expected module names as *args, got {0} instead'
- raise TypeError(msg.format(repr(names)))
- for hook in sys.meta_path:
- if isinstance(hook, rewrite.AssertionRewritingHook):
- importhook = hook
- break
- else:
- importhook = DummyRewriteHook()
- importhook.mark_rewrite(*names)
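
A hypothetical usage sketch (package name invented for illustration): a plugin distributed as a package would register its helper module before importing it.

    # in myplugin/__init__.py (illustrative names only)
    import pytest

    pytest.register_assert_rewrite("myplugin.helpers")
    from myplugin import helpers  # noqa: E402  imported after registering
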
-
-
-class DummyRewriteHook(object):
- """A no-op import hook for when rewriting is disabled."""
-
- def mark_rewrite(self, *names):
- pass
-
-
-class AssertionState:
- """State for the assertion plugin."""
-
- def __init__(self, config, mode):
- self.mode = mode
- self.trace = config.trace.root.get("assertion")
- self.hook = None
-
-
-def install_importhook(config):
- """Try to install the rewrite hook, raise SystemError if it fails."""
- # Both Jython and CPython 2.6.0 have AST bugs that make the
- # assertion rewriting hook malfunction.
- if (sys.platform.startswith('java') or
- sys.version_info[:3] == (2, 6, 0)):
- raise SystemError('rewrite not supported')
-
- config._assertstate = AssertionState(config, 'rewrite')
- config._assertstate.hook = hook = rewrite.AssertionRewritingHook(config)
- sys.meta_path.insert(0, hook)
- config._assertstate.trace('installed rewrite import hook')
-
- def undo():
- hook = config._assertstate.hook
- if hook is not None and hook in sys.meta_path:
- sys.meta_path.remove(hook)
-
- config.add_cleanup(undo)
- return hook
-
-
-def pytest_collection(session):
- # this hook is only called when test modules are collected
- # so for example not in the master process of pytest-xdist
- # (which does not collect test modules)
- assertstate = getattr(session.config, '_assertstate', None)
- if assertstate:
- if assertstate.hook is not None:
- assertstate.hook.set_session(session)
-
-
-def pytest_runtest_setup(item):
- """Setup the pytest_assertrepr_compare hook
-
- The newinterpret and rewrite modules will use util._reprcompare if
- it exists to use custom reporting via the
- pytest_assertrepr_compare hook. This sets up this custom
- comparison for the test.
- """
- def callbinrepr(op, left, right):
- """Call the pytest_assertrepr_compare hook and prepare the result
-
- This uses the first result from the hook and then ensures the
- following:
- * Overly verbose explanations are truncated unless configured otherwise
- (eg. if running in verbose mode).
- * Embedded newlines are escaped to help util.format_explanation()
- later.
- * If the rewrite mode is used, embedded %-characters are replaced
- to protect later % formatting.
-
- The result can be formatted by util.format_explanation() for
- pretty printing.
- """
- hook_result = item.ihook.pytest_assertrepr_compare(
- config=item.config, op=op, left=left, right=right)
- for new_expl in hook_result:
- if new_expl:
- new_expl = truncate.truncate_if_required(new_expl, item)
- new_expl = [line.replace("\n", "\\n") for line in new_expl]
- res = py.builtin._totext("\n~").join(new_expl)
- if item.config.getvalue("assertmode") == "rewrite":
- res = res.replace("%", "%%")
- return res
- util._reprcompare = callbinrepr
-
-
-def pytest_runtest_teardown(item):
- util._reprcompare = None
-
-
-def pytest_sessionfinish(session):
- assertstate = getattr(session.config, '_assertstate', None)
- if assertstate:
- if assertstate.hook is not None:
- assertstate.hook.set_session(None)
-
-
-# Expose this plugin's implementation for the pytest_assertrepr_compare hook
-pytest_assertrepr_compare = util.assertrepr_compare
diff --git a/lib/spack/external/pytest-fallback/_pytest/assertion/rewrite.py b/lib/spack/external/pytest-fallback/_pytest/assertion/rewrite.py
deleted file mode 100644
index d48b6648fb..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/assertion/rewrite.py
+++ /dev/null
@@ -1,952 +0,0 @@
-"""Rewrite assertion AST to produce nice error messages"""
-from __future__ import absolute_import, division, print_function
-import ast
-import _ast
-import errno
-import itertools
-import imp
-import marshal
-import os
-import re
-import struct
-import sys
-import types
-
-import py
-from _pytest.assertion import util
-
-
-# pytest caches rewritten pycs in __pycache__.
-if hasattr(imp, "get_tag"):
- PYTEST_TAG = imp.get_tag() + "-PYTEST"
-else:
- if hasattr(sys, "pypy_version_info"):
- impl = "pypy"
- elif sys.platform == "java":
- impl = "jython"
- else:
- impl = "cpython"
- ver = sys.version_info
- PYTEST_TAG = "%s-%s%s-PYTEST" % (impl, ver[0], ver[1])
- del ver, impl
-
-PYC_EXT = ".py" + (__debug__ and "c" or "o")
-PYC_TAIL = "." + PYTEST_TAG + PYC_EXT
-
-REWRITE_NEWLINES = sys.version_info[:2] != (2, 7) and sys.version_info < (3, 2)
-ASCII_IS_DEFAULT_ENCODING = sys.version_info[0] < 3
-
-if sys.version_info >= (3, 5):
- ast_Call = ast.Call
-else:
- def ast_Call(a, b, c):
- return ast.Call(a, b, c, None, None)
-
-
-class AssertionRewritingHook(object):
- """PEP302 Import hook which rewrites asserts."""
-
- def __init__(self, config):
- self.config = config
- self.fnpats = config.getini("python_files")
- self.session = None
- self.modules = {}
- self._rewritten_names = set()
- self._register_with_pkg_resources()
- self._must_rewrite = set()
-
- def set_session(self, session):
- self.session = session
-
- def find_module(self, name, path=None):
- state = self.config._assertstate
- state.trace("find_module called for: %s" % name)
- names = name.rsplit(".", 1)
- lastname = names[-1]
- pth = None
- if path is not None:
- # Starting with Python 3.3, path is a _NamespacePath(), which
- # causes problems if not converted to list.
- path = list(path)
- if len(path) == 1:
- pth = path[0]
- if pth is None:
- try:
- fd, fn, desc = imp.find_module(lastname, path)
- except ImportError:
- return None
- if fd is not None:
- fd.close()
- tp = desc[2]
- if tp == imp.PY_COMPILED:
- if hasattr(imp, "source_from_cache"):
- try:
- fn = imp.source_from_cache(fn)
- except ValueError:
- # Python 3 doesn't like orphaned but still-importable
- # .pyc files.
- fn = fn[:-1]
- else:
- fn = fn[:-1]
- elif tp != imp.PY_SOURCE:
- # Don't know what this is.
- return None
- else:
- fn = os.path.join(pth, name.rpartition(".")[2] + ".py")
-
- fn_pypath = py.path.local(fn)
- if not self._should_rewrite(name, fn_pypath, state):
- return None
-
- self._rewritten_names.add(name)
-
- # The requested module looks like a test file, so rewrite it. This is
- # the most magical part of the process: load the source, rewrite the
- # asserts, and load the rewritten source. We also cache the rewritten
- # module code in a special pyc. We must be aware of the possibility of
- # concurrent pytest processes rewriting and loading pycs. To avoid
- # tricky race conditions, we maintain the following invariant: The
- # cached pyc is always a complete, valid pyc. Operations on it must be
- # atomic. POSIX's atomic rename comes in handy.
- write = not sys.dont_write_bytecode
- cache_dir = os.path.join(fn_pypath.dirname, "__pycache__")
- if write:
- try:
- os.mkdir(cache_dir)
- except OSError:
- e = sys.exc_info()[1].errno
- if e == errno.EEXIST:
- # Either the __pycache__ directory already exists (the
- # common case) or it's blocked by a non-dir node. In the
- # latter case, we'll ignore it in _write_pyc.
- pass
- elif e in [errno.ENOENT, errno.ENOTDIR]:
- # One of the path components was not a directory, likely
- # because we're in a zip file.
- write = False
- elif e in [errno.EACCES, errno.EROFS, errno.EPERM]:
- state.trace("read only directory: %r" % fn_pypath.dirname)
- write = False
- else:
- raise
- cache_name = fn_pypath.basename[:-3] + PYC_TAIL
- pyc = os.path.join(cache_dir, cache_name)
- # Notice that even if we're in a read-only directory, I'm going
- # to check for a cached pyc. This may not be optimal...
- co = _read_pyc(fn_pypath, pyc, state.trace)
- if co is None:
- state.trace("rewriting %r" % (fn,))
- source_stat, co = _rewrite_test(self.config, fn_pypath)
- if co is None:
- # Probably a SyntaxError in the test.
- return None
- if write:
- _make_rewritten_pyc(state, source_stat, pyc, co)
- else:
- state.trace("found cached rewritten pyc for %r" % (fn,))
- self.modules[name] = co, pyc
- return self
-
- def _should_rewrite(self, name, fn_pypath, state):
- # always rewrite conftest files
- fn = str(fn_pypath)
- if fn_pypath.basename == 'conftest.py':
- state.trace("rewriting conftest file: %r" % (fn,))
- return True
-
- if self.session is not None:
- if self.session.isinitpath(fn):
- state.trace("matched test file (was specified on cmdline): %r" %
- (fn,))
- return True
-
- # modules not passed explicitly on the command line are only
- # rewritten if they match the naming convention for test files
- for pat in self.fnpats:
- if fn_pypath.fnmatch(pat):
- state.trace("matched test file %r" % (fn,))
- return True
-
- for marked in self._must_rewrite:
- if name.startswith(marked):
- state.trace("matched marked file %r (from %r)" % (name, marked))
- return True
-
- return False
-
- def mark_rewrite(self, *names):
- """Mark import names as needing to be re-written.
-
- The named module or package as well as any nested modules will
- be re-written on import.
- """
- already_imported = set(names).intersection(set(sys.modules))
- if already_imported:
- for name in already_imported:
- if name not in self._rewritten_names:
- self._warn_already_imported(name)
- self._must_rewrite.update(names)
-
- def _warn_already_imported(self, name):
- self.config.warn(
- 'P1',
- 'Module already imported so cannot be re-written: %s' % name)
-
- def load_module(self, name):
- # If there is an existing module object named 'fullname' in
- # sys.modules, the loader must use that existing module. (Otherwise,
- # the reload() builtin will not work correctly.)
- if name in sys.modules:
- return sys.modules[name]
-
- co, pyc = self.modules.pop(name)
- # I wish I could just call imp.load_compiled here, but __file__ has to
- # be set properly. In Python 3.2+, this all would be handled correctly
- # by load_compiled.
- mod = sys.modules[name] = imp.new_module(name)
- try:
- mod.__file__ = co.co_filename
- # Normally, this attribute only exists on Python 3.2+.
- mod.__cached__ = pyc
- mod.__loader__ = self
- py.builtin.exec_(co, mod.__dict__)
- except: # noqa
- if name in sys.modules:
- del sys.modules[name]
- raise
- return sys.modules[name]
-
- def is_package(self, name):
- try:
- fd, fn, desc = imp.find_module(name)
- except ImportError:
- return False
- if fd is not None:
- fd.close()
- tp = desc[2]
- return tp == imp.PKG_DIRECTORY
-
- @classmethod
- def _register_with_pkg_resources(cls):
- """
- Ensure package resources can be loaded from this loader. May be called
- multiple times, as the operation is idempotent.
- """
- try:
- import pkg_resources
- # access an attribute in case a deferred importer is present
- pkg_resources.__name__
- except ImportError:
- return
-
- # Since pytest tests are always located in the file system, the
- # DefaultProvider is appropriate.
- pkg_resources.register_loader_type(cls, pkg_resources.DefaultProvider)
-
- def get_data(self, pathname):
- """Optional PEP302 get_data API.
- """
- with open(pathname, 'rb') as f:
- return f.read()
-
-
-def _write_pyc(state, co, source_stat, pyc):
- # Technically, we don't have to have the same pyc format as
- # (C)Python, since these "pycs" should never be seen by builtin
- # import. However, there's little reason to deviate, and I hope
- # sometime to be able to use imp.load_compiled to load them. (See
- # the comment in load_module above.)
- try:
- fp = open(pyc, "wb")
- except IOError:
- err = sys.exc_info()[1].errno
- state.trace("error writing pyc file at %s: errno=%s" % (pyc, err))
- # we ignore any failure to write the cache file
- # there are many reasons, permission-denied, __pycache__ being a
- # file etc.
- return False
- try:
- fp.write(imp.get_magic())
- mtime = int(source_stat.mtime)
- size = source_stat.size & 0xFFFFFFFF
- fp.write(struct.pack("<ll", mtime, size))
- marshal.dump(co, fp)
- finally:
- fp.close()
- return True
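
The 12-byte header written above (and checked by _read_pyc below) is the 4-byte import magic followed by mtime and size packed little-endian; in isolation:

    import struct

    mtime, size = 1668600000, 4096 & 0xFFFFFFFF
    payload = struct.pack("<ll", mtime, size)  # the 8 bytes after the magic
    assert struct.unpack("<ll", payload) == (mtime, size)
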
-
-
-RN = "\r\n".encode("utf-8")
-N = "\n".encode("utf-8")
-
-cookie_re = re.compile(r"^[ \t\f]*#.*coding[:=][ \t]*[-\w.]+")
-BOM_UTF8 = '\xef\xbb\xbf'
-
-
-def _rewrite_test(config, fn):
- """Try to read and rewrite *fn* and return the code object."""
- state = config._assertstate
- try:
- stat = fn.stat()
- source = fn.read("rb")
- except EnvironmentError:
- return None, None
- if ASCII_IS_DEFAULT_ENCODING:
- # ASCII is the default encoding in Python 2. Without a coding
- # declaration, Python 2 will complain about any bytes in the file
- # outside the ASCII range. Sadly, this behavior does not extend to
- # compile() or ast.parse(), which prefer to interpret the bytes as
- # latin-1. (At least they properly handle explicit coding cookies.) To
- # preserve this error behavior, we could force ast.parse() to use ASCII
- # as the encoding by inserting a coding cookie. Unfortunately, that
- # messes up line numbers. Thus, we have to check ourselves if anything
- # is outside the ASCII range in the case no encoding is explicitly
- # declared. For more context, see issue #269. Yay for Python 3 which
- # gets this right.
- end1 = source.find("\n")
- end2 = source.find("\n", end1 + 1)
- if (not source.startswith(BOM_UTF8) and
- cookie_re.match(source[0:end1]) is None and
- cookie_re.match(source[end1 + 1:end2]) is None):
- if hasattr(state, "_indecode"):
- # encodings imported us again, so don't rewrite.
- return None, None
- state._indecode = True
- try:
- try:
- source.decode("ascii")
- except UnicodeDecodeError:
- # Let it fail in real import.
- return None, None
- finally:
- del state._indecode
- # On Python versions other than 2.7 that are older than 3.2, the
- # parser expects *nix newlines.
- if REWRITE_NEWLINES:
- source = source.replace(RN, N) + N
- try:
- tree = ast.parse(source)
- except SyntaxError:
- # Let this pop up again in the real import.
- state.trace("failed to parse: %r" % (fn,))
- return None, None
- rewrite_asserts(tree, fn, config)
- try:
- co = compile(tree, fn.strpath, "exec", dont_inherit=True)
- except SyntaxError:
- # It's possible that this error is from some bug in the
- # assertion rewriting, but I don't know of a fast way to tell.
- state.trace("failed to compile: %r" % (fn,))
- return None, None
- return stat, co
-
-
-def _make_rewritten_pyc(state, source_stat, pyc, co):
- """Try to dump rewritten code to *pyc*."""
- if sys.platform.startswith("win"):
- # Windows grants exclusive access to open files and doesn't have atomic
- # rename, so just write into the final file.
- _write_pyc(state, co, source_stat, pyc)
- else:
- # When not on windows, assume rename is atomic. Dump the code object
- # into a file specific to this process and atomically replace it.
- proc_pyc = pyc + "." + str(os.getpid())
- if _write_pyc(state, co, source_stat, proc_pyc):
- os.rename(proc_pyc, pyc)
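
The write-then-rename idiom relied on above, in isolation: POSIX rename() atomically replaces the target, so readers never observe a half-written pyc.

    import os
    import tempfile

    fd, tmp = tempfile.mkstemp(dir=".")
    with os.fdopen(fd, "wb") as f:
        f.write(b"complete payload")
    os.rename(tmp, "target.bin")  # old or new contents, never a mix
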
-
-
-def _read_pyc(source, pyc, trace=lambda x: None):
- """Possibly read a pytest pyc containing rewritten code.
-
- Return rewritten code if successful or None if not.
- """
- try:
- fp = open(pyc, "rb")
- except IOError:
- return None
- with fp:
- try:
- mtime = int(source.mtime())
- size = source.size()
- data = fp.read(12)
- except EnvironmentError as e:
- trace('_read_pyc(%s): EnvironmentError %s' % (source, e))
- return None
- # Check for invalid or out of date pyc file.
- if (len(data) != 12 or data[:4] != imp.get_magic() or
- struct.unpack("<ll", data[4:]) != (mtime, size)):
- trace('_read_pyc(%s): invalid or out of date pyc' % source)
- return None
- try:
- co = marshal.load(fp)
- except Exception as e:
- trace('_read_pyc(%s): marshal.load error %s' % (source, e))
- return None
- if not isinstance(co, types.CodeType):
- trace('_read_pyc(%s): not a code object' % source)
- return None
- return co
-
-
-def rewrite_asserts(mod, module_path=None, config=None):
- """Rewrite the assert statements in mod."""
- AssertionRewriter(module_path, config).run(mod)
-
-
-def _saferepr(obj):
- """Get a safe repr of an object for assertion error messages.
-
- The assertion formatting (util.format_explanation()) requires
- newlines to be escaped since they are a special character for it.
- Normally assertion.util.format_explanation() does this, but a
- custom repr may itself contain one of the special escape
- sequences; in particular '\n{' and '\n}' are likely to be present
- in JSON reprs.
-
- """
- repr = py.io.saferepr(obj)
- if py.builtin._istext(repr):
- t = py.builtin.text
- else:
- t = py.builtin.bytes
- return repr.replace(t("\n"), t("\\n"))
-
-
-from _pytest.assertion.util import format_explanation as _format_explanation # noqa
-
-
-def _format_assertmsg(obj):
- """Format the custom assertion message given.
-
- For strings this simply replaces newlines with '\n~' so that
- util.format_explanation() will preserve them instead of escaping
- newlines. For other objects py.io.saferepr() is used first.
-
- """
- # reprlib appears to have a bug which means that if a string
- # contains a newline it gets escaped, whereas if an object has a
- # .__repr__() which contains newlines it does not get escaped.
- # In either case we want to preserve the newline.
- if py.builtin._istext(obj) or py.builtin._isbytes(obj):
- s = obj
- is_repr = False
- else:
- s = py.io.saferepr(obj)
- is_repr = True
- if py.builtin._istext(s):
- t = py.builtin.text
- else:
- t = py.builtin.bytes
- s = s.replace(t("\n"), t("\n~")).replace(t("%"), t("%%"))
- if is_repr:
- s = s.replace(t("\\n"), t("\n~"))
- return s
-
-
-def _should_repr_global_name(obj):
- return not hasattr(obj, "__name__") and not py.builtin.callable(obj)
-
-
-def _format_boolop(explanations, is_or):
- explanation = "(" + (is_or and " or " or " and ").join(explanations) + ")"
- if py.builtin._istext(explanation):
- t = py.builtin.text
- else:
- t = py.builtin.bytes
- return explanation.replace(t('%'), t('%%'))
-
-
-def _call_reprcompare(ops, results, expls, each_obj):
- for i, res, expl in zip(range(len(ops)), results, expls):
- try:
- done = not res
- except Exception:
- done = True
- if done:
- break
- if util._reprcompare is not None:
- custom = util._reprcompare(ops[i], each_obj[i], each_obj[i + 1])
- if custom is not None:
- return custom
- return expl
-
-
-unary_map = {
- ast.Not: "not %s",
- ast.Invert: "~%s",
- ast.USub: "-%s",
- ast.UAdd: "+%s"
-}
-
-binop_map = {
- ast.BitOr: "|",
- ast.BitXor: "^",
- ast.BitAnd: "&",
- ast.LShift: "<<",
- ast.RShift: ">>",
- ast.Add: "+",
- ast.Sub: "-",
- ast.Mult: "*",
- ast.Div: "/",
- ast.FloorDiv: "//",
- ast.Mod: "%%", # escaped for string formatting
- ast.Eq: "==",
- ast.NotEq: "!=",
- ast.Lt: "<",
- ast.LtE: "<=",
- ast.Gt: ">",
- ast.GtE: ">=",
- ast.Pow: "**",
- ast.Is: "is",
- ast.IsNot: "is not",
- ast.In: "in",
- ast.NotIn: "not in"
-}
-# Python 3.5+ compatibility
-try:
- binop_map[ast.MatMult] = "@"
-except AttributeError:
- pass
-
-# Python 3.4+ compatibility
-if hasattr(ast, "NameConstant"):
- _NameConstant = ast.NameConstant
-else:
- def _NameConstant(c):
- return ast.Name(str(c), ast.Load())
-
-
-def set_location(node, lineno, col_offset):
- """Set node location information recursively."""
- def _fix(node, lineno, col_offset):
- if "lineno" in node._attributes:
- node.lineno = lineno
- if "col_offset" in node._attributes:
- node.col_offset = col_offset
- for child in ast.iter_child_nodes(node):
- _fix(child, lineno, col_offset)
- _fix(node, lineno, col_offset)
- return node
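
set_location plays the same role as the stdlib's ast.fix_missing_locations when compiling synthesized nodes; a sketch:

    import ast

    tree = ast.Module(body=[ast.Expr(value=ast.Constant(value=1))],
                      type_ignores=[])
    ast.fix_missing_locations(tree)  # every node needs lineno/col_offset
    exec(compile(tree, "<demo>", "exec"))
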
-
-
-class AssertionRewriter(ast.NodeVisitor):
- """Assertion rewriting implementation.
-
- The main entrypoint is to call .run() with an ast.Module instance,
- this will then find all the assert statements and re-write them to
- provide intermediate values and a detailed assertion error. See
- http://pybites.blogspot.be/2011/07/behind-scenes-of-pytests-new-assertion.html
- for an overview of how this works.
-
- The entry point here is .run(), which iterates over all the
- statements in an ast.Module and, for each ast.Assert statement it
- finds, calls .visit() with it. Then .visit_Assert() takes over and
- is responsible for creating new ast statements to replace the
- original assert statement: it re-writes the test of an assertion
- to provide intermediate values and replace it with an if statement
- which raises an assertion error with a detailed explanation in
- case the expression is false.
-
- For this .visit_Assert() uses the visitor pattern to visit all the
- AST nodes of the ast.Assert.test field, each visit call returning
- an AST node and the corresponding explanation string. During this
- traversal, state is kept in several instance attributes:
-
- :statements: All the AST statements which will replace the assert
- statement.
-
- :variables: This is populated by .variable() with each variable
- used by the statements so that they can all be set to None at
- the end of the statements.
-
- :variable_counter: Counter to create new unique variables needed
- by statements. Variables are created using .variable() and
- have the form of "@py_assert0".
-
- :on_failure: The AST statements which will be executed if the
- assertion test fails. This is the code which constructs
- the failure message and raises the AssertionError.
-
- :explanation_specifiers: A dict filled by .explanation_param()
- with %-formatting placeholders and their corresponding
- expressions to use in the building of an assertion message.
- This is used by .pop_format_context() to build a message.
-
- :stack: A stack of the explanation_specifiers dicts maintained by
- .push_format_context() and .pop_format_context(), which makes it
- possible to build another %-formatted string while one is already
- being built.
-
- This state is reset on every new assert statement visited and used
- by the other visitors.
-
- """
-
- def __init__(self, module_path, config):
- super(AssertionRewriter, self).__init__()
- self.module_path = module_path
- self.config = config
-
- def run(self, mod):
- """Find all assert statements in *mod* and rewrite them."""
- if not mod.body:
- # Nothing to do.
- return
- # Insert some special imports at the top of the module but after any
- # docstrings and __future__ imports.
- aliases = [ast.alias(py.builtin.builtins.__name__, "@py_builtins"),
- ast.alias("_pytest.assertion.rewrite", "@pytest_ar")]
- doc = getattr(mod, "docstring", None)
- expect_docstring = doc is None
- if doc is not None and self.is_rewrite_disabled(doc):
- return
- pos = 0
- lineno = 1
- for item in mod.body:
- if (expect_docstring and isinstance(item, ast.Expr) and
- isinstance(item.value, ast.Str)):
- doc = item.value.s
- if self.is_rewrite_disabled(doc):
- return
- expect_docstring = False
- elif (not isinstance(item, ast.ImportFrom) or item.level > 0 or
- item.module != "__future__"):
- lineno = item.lineno
- break
- pos += 1
- else:
- lineno = item.lineno
- imports = [ast.Import([alias], lineno=lineno, col_offset=0)
- for alias in aliases]
- mod.body[pos:pos] = imports
- # Collect asserts.
- nodes = [mod]
- while nodes:
- node = nodes.pop()
- for name, field in ast.iter_fields(node):
- if isinstance(field, list):
- new = []
- for i, child in enumerate(field):
- if isinstance(child, ast.Assert):
- # Transform assert.
- new.extend(self.visit(child))
- else:
- new.append(child)
- if isinstance(child, ast.AST):
- nodes.append(child)
- setattr(node, name, new)
- elif (isinstance(field, ast.AST) and
- # Don't recurse into expressions as they can't contain
- # asserts.
- not isinstance(field, ast.expr)):
- nodes.append(field)
-
- def is_rewrite_disabled(self, docstring):
- return "PYTEST_DONT_REWRITE" in docstring
-
- def variable(self):
- """Get a new variable."""
- # Use a character invalid in python identifiers to avoid clashing.
- name = "@py_assert" + str(next(self.variable_counter))
- self.variables.append(name)
- return name
-
- def assign(self, expr):
- """Give *expr* a name."""
- name = self.variable()
- self.statements.append(ast.Assign([ast.Name(name, ast.Store())], expr))
- return ast.Name(name, ast.Load())
-
- def display(self, expr):
- """Call py.io.saferepr on the expression."""
- return self.helper("saferepr", expr)
-
- def helper(self, name, *args):
- """Call a helper in this module."""
- py_name = ast.Name("@pytest_ar", ast.Load())
- attr = ast.Attribute(py_name, "_" + name, ast.Load())
- return ast_Call(attr, list(args), [])
-
- def builtin(self, name):
- """Return the builtin called *name*."""
- builtin_name = ast.Name("@py_builtins", ast.Load())
- return ast.Attribute(builtin_name, name, ast.Load())
-
- def explanation_param(self, expr):
- """Return a new named %-formatting placeholder for expr.
-
- This creates a %-formatting placeholder for expr in the
- current formatting context, e.g. ``%(py0)s``. The placeholder
- and expr are placed in the current format context so that it
- can be used on the next call to .pop_format_context().
-
- """
- specifier = "py" + str(next(self.variable_counter))
- self.explanation_specifiers[specifier] = expr
- return "%(" + specifier + ")s"
-
- def push_format_context(self):
- """Create a new formatting context.
-
- The format context is used when an explanation wants to
- have a variable value formatted in the assertion message. In
- this case the value required can be added using
- .explanation_param(). Finally .pop_format_context() is used
- to format a string of %-formatted values as added by
- .explanation_param().
-
- """
- self.explanation_specifiers = {}
- self.stack.append(self.explanation_specifiers)
-
- def pop_format_context(self, expl_expr):
- """Format the %-formatted string with current format context.
-
- The expl_expr should be an ast.Str instance constructed from
- the %-placeholders created by .explanation_param(). This will
- add the required code to format said string to .on_failure and
- return the ast.Name instance of the formatted string.
-
- """
- current = self.stack.pop()
- if self.stack:
- self.explanation_specifiers = self.stack[-1]
- keys = [ast.Str(key) for key in current.keys()]
- format_dict = ast.Dict(keys, list(current.values()))
- form = ast.BinOp(expl_expr, ast.Mod(), format_dict)
- name = "@py_format" + str(next(self.variable_counter))
- self.on_failure.append(ast.Assign([ast.Name(name, ast.Store())], form))
- return ast.Name(name, ast.Load())
-
- def generic_visit(self, node):
- """Handle expressions we don't have custom code for."""
- assert isinstance(node, ast.expr)
- res = self.assign(node)
- return res, self.explanation_param(self.display(res))
-
- def visit_Assert(self, assert_):
- """Return the AST statements to replace the ast.Assert instance.
-
- This re-writes the test of an assertion to provide
- intermediate values and replace it with an if statement which
- raises an assertion error with a detailed explanation in case
- the expression is false.
-
- """
- if isinstance(assert_.test, ast.Tuple) and self.config is not None:
- fslocation = (self.module_path, assert_.lineno)
- self.config.warn('R1', 'assertion is always true, perhaps '
- 'remove parentheses?', fslocation=fslocation)
- self.statements = []
- self.variables = []
- self.variable_counter = itertools.count()
- self.stack = []
- self.on_failure = []
- self.push_format_context()
- # Rewrite assert into a bunch of statements.
- top_condition, explanation = self.visit(assert_.test)
- # Create failure message.
- body = self.on_failure
- negation = ast.UnaryOp(ast.Not(), top_condition)
- self.statements.append(ast.If(negation, body, []))
- if assert_.msg:
- assertmsg = self.helper('format_assertmsg', assert_.msg)
- explanation = "\n>assert " + explanation
- else:
- assertmsg = ast.Str("")
- explanation = "assert " + explanation
- template = ast.BinOp(assertmsg, ast.Add(), ast.Str(explanation))
- msg = self.pop_format_context(template)
- fmt = self.helper("format_explanation", msg)
- err_name = ast.Name("AssertionError", ast.Load())
- exc = ast_Call(err_name, [fmt], [])
- if sys.version_info[0] >= 3:
- raise_ = ast.Raise(exc, None)
- else:
- raise_ = ast.Raise(exc, None, None)
- body.append(raise_)
- # Clear temporary variables by setting them to None.
- if self.variables:
- variables = [ast.Name(name, ast.Store())
- for name in self.variables]
- clear = ast.Assign(variables, _NameConstant(None))
- self.statements.append(clear)
- # Fix line numbers.
- for stmt in self.statements:
- set_location(stmt, assert_.lineno, assert_.col_offset)
- return self.statements
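
For orientation, the node being replaced and the rough shape of its replacement (identifiers simplified; the real rewriter uses "@"-prefixed names that are invalid in Python source):

    import ast

    print(ast.dump(ast.parse("assert x == y").body[0]))
    # The replacement is shaped like:
    #     py_assert0 = x == y
    #     if not py_assert0:
    #         raise AssertionError(<formatted explanation>)
    #     py_assert0 = None  # clear temporaries
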
-
- def visit_Name(self, name):
- # Display the repr of the name if it's a local variable or
- # _should_repr_global_name() thinks it's acceptable.
- locs = ast_Call(self.builtin("locals"), [], [])
- inlocs = ast.Compare(ast.Str(name.id), [ast.In()], [locs])
- dorepr = self.helper("should_repr_global_name", name)
- test = ast.BoolOp(ast.Or(), [inlocs, dorepr])
- expr = ast.IfExp(test, self.display(name), ast.Str(name.id))
- return name, self.explanation_param(expr)
-
- def visit_BoolOp(self, boolop):
- res_var = self.variable()
- expl_list = self.assign(ast.List([], ast.Load()))
- app = ast.Attribute(expl_list, "append", ast.Load())
- is_or = int(isinstance(boolop.op, ast.Or))
- body = save = self.statements
- fail_save = self.on_failure
- levels = len(boolop.values) - 1
- self.push_format_context()
- # Process each operand, short-circuiting if needed.
- for i, v in enumerate(boolop.values):
- if i:
- fail_inner = []
- # cond is set in a prior loop iteration below
- self.on_failure.append(ast.If(cond, fail_inner, [])) # noqa
- self.on_failure = fail_inner
- self.push_format_context()
- res, expl = self.visit(v)
- body.append(ast.Assign([ast.Name(res_var, ast.Store())], res))
- expl_format = self.pop_format_context(ast.Str(expl))
- call = ast_Call(app, [expl_format], [])
- self.on_failure.append(ast.Expr(call))
- if i < levels:
- cond = res
- if is_or:
- cond = ast.UnaryOp(ast.Not(), cond)
- inner = []
- self.statements.append(ast.If(cond, inner, []))
- self.statements = body = inner
- self.statements = save
- self.on_failure = fail_save
- expl_template = self.helper("format_boolop", expl_list, ast.Num(is_or))
- expl = self.pop_format_context(expl_template)
- return ast.Name(res_var, ast.Load()), self.explanation_param(expl)
-
- def visit_UnaryOp(self, unary):
- pattern = unary_map[unary.op.__class__]
- operand_res, operand_expl = self.visit(unary.operand)
- res = self.assign(ast.UnaryOp(unary.op, operand_res))
- return res, pattern % (operand_expl,)
-
- def visit_BinOp(self, binop):
- symbol = binop_map[binop.op.__class__]
- left_expr, left_expl = self.visit(binop.left)
- right_expr, right_expl = self.visit(binop.right)
- explanation = "(%s %s %s)" % (left_expl, symbol, right_expl)
- res = self.assign(ast.BinOp(left_expr, binop.op, right_expr))
- return res, explanation
-
- def visit_Call_35(self, call):
- """
- visit `ast.Call` nodes on Python 3.5 and after
- """
- new_func, func_expl = self.visit(call.func)
- arg_expls = []
- new_args = []
- new_kwargs = []
- for arg in call.args:
- res, expl = self.visit(arg)
- arg_expls.append(expl)
- new_args.append(res)
- for keyword in call.keywords:
- res, expl = self.visit(keyword.value)
- new_kwargs.append(ast.keyword(keyword.arg, res))
- if keyword.arg:
- arg_expls.append(keyword.arg + "=" + expl)
- else: # **kwargs arguments appear as keywords with an .arg of None
- arg_expls.append("**" + expl)
-
- expl = "%s(%s)" % (func_expl, ', '.join(arg_expls))
- new_call = ast.Call(new_func, new_args, new_kwargs)
- res = self.assign(new_call)
- res_expl = self.explanation_param(self.display(res))
- outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl)
- return res, outer_expl
-
- def visit_Starred(self, starred):
- # From Python 3.5, a Starred node can appear in a function call
- res, expl = self.visit(starred.value)
- return starred, '*' + expl
-
- def visit_Call_legacy(self, call):
- """
- visit `ast.Call` nodes on Python 3.4 and below
- """
- new_func, func_expl = self.visit(call.func)
- arg_expls = []
- new_args = []
- new_kwargs = []
- new_star = new_kwarg = None
- for arg in call.args:
- res, expl = self.visit(arg)
- new_args.append(res)
- arg_expls.append(expl)
- for keyword in call.keywords:
- res, expl = self.visit(keyword.value)
- new_kwargs.append(ast.keyword(keyword.arg, res))
- arg_expls.append(keyword.arg + "=" + expl)
- if call.starargs:
- new_star, expl = self.visit(call.starargs)
- arg_expls.append("*" + expl)
- if call.kwargs:
- new_kwarg, expl = self.visit(call.kwargs)
- arg_expls.append("**" + expl)
- expl = "%s(%s)" % (func_expl, ', '.join(arg_expls))
- new_call = ast.Call(new_func, new_args, new_kwargs,
- new_star, new_kwarg)
- res = self.assign(new_call)
- res_expl = self.explanation_param(self.display(res))
- outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl)
- return res, outer_expl
-
- # The ast.Call signature changed in 3.5, so conditionally choose
- # which method is named visit_Call depending on the Python version.
- if sys.version_info >= (3, 5):
- visit_Call = visit_Call_35
- else:
- visit_Call = visit_Call_legacy
-
- def visit_Attribute(self, attr):
- if not isinstance(attr.ctx, ast.Load):
- return self.generic_visit(attr)
- value, value_expl = self.visit(attr.value)
- res = self.assign(ast.Attribute(value, attr.attr, ast.Load()))
- res_expl = self.explanation_param(self.display(res))
- pat = "%s\n{%s = %s.%s\n}"
- expl = pat % (res_expl, res_expl, value_expl, attr.attr)
- return res, expl
-
- def visit_Compare(self, comp):
- self.push_format_context()
- left_res, left_expl = self.visit(comp.left)
- if isinstance(comp.left, (_ast.Compare, _ast.BoolOp)):
- left_expl = "({0})".format(left_expl)
- res_variables = [self.variable() for i in range(len(comp.ops))]
- load_names = [ast.Name(v, ast.Load()) for v in res_variables]
- store_names = [ast.Name(v, ast.Store()) for v in res_variables]
- it = zip(range(len(comp.ops)), comp.ops, comp.comparators)
- expls = []
- syms = []
- results = [left_res]
- for i, op, next_operand in it:
- next_res, next_expl = self.visit(next_operand)
- if isinstance(next_operand, (_ast.Compare, _ast.BoolOp)):
- next_expl = "({0})".format(next_expl)
- results.append(next_res)
- sym = binop_map[op.__class__]
- syms.append(ast.Str(sym))
- expl = "%s %s %s" % (left_expl, sym, next_expl)
- expls.append(ast.Str(expl))
- res_expr = ast.Compare(left_res, [op], [next_res])
- self.statements.append(ast.Assign([store_names[i]], res_expr))
- left_res, left_expl = next_res, next_expl
- # Use pytest.assertion.util._reprcompare if that's available.
- expl_call = self.helper("call_reprcompare",
- ast.Tuple(syms, ast.Load()),
- ast.Tuple(load_names, ast.Load()),
- ast.Tuple(expls, ast.Load()),
- ast.Tuple(results, ast.Load()))
- if len(comp.ops) > 1:
- res = ast.BoolOp(ast.And(), load_names)
- else:
- res = load_names[0]
- return res, self.explanation_param(self.pop_format_context(expl_call))
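
A hedged sketch of the decomposition visit_Compare performs on chained comparisons: each operator gets its own result variable, combined with `and`.

    a, b, c = 1, 2, 3
    res0 = a < b  # each step stored in its own variable
    res1 = b < c  # the left operand of each step is the previous comparator
    assert (a < b < c) == (res0 and res1)
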
diff --git a/lib/spack/external/pytest-fallback/_pytest/assertion/truncate.py b/lib/spack/external/pytest-fallback/_pytest/assertion/truncate.py
deleted file mode 100644
index 1e13063569..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/assertion/truncate.py
+++ /dev/null
@@ -1,102 +0,0 @@
-"""
-Utilities for truncating assertion output.
-
-Current default behaviour is to truncate assertion explanations at
-~8 terminal lines, unless running in "-vv" mode or running on CI.
-"""
-from __future__ import absolute_import, division, print_function
-import os
-
-import py
-
-
-DEFAULT_MAX_LINES = 8
-DEFAULT_MAX_CHARS = 8 * 80
-USAGE_MSG = "use '-vv' to show"
-
-
-def truncate_if_required(explanation, item, max_length=None):
- """
- Truncate this assertion explanation if the given test item is eligible.
- """
- if _should_truncate_item(item):
- return _truncate_explanation(explanation)
- return explanation
-
-
-def _should_truncate_item(item):
- """
- Whether or not this test item is eligible for truncation.
- """
- verbose = item.config.option.verbose
- return verbose < 2 and not _running_on_ci()
-
-
-def _running_on_ci():
- """Check if we're currently running on a CI system."""
- env_vars = ['CI', 'BUILD_NUMBER']
- return any(var in os.environ for var in env_vars)
-
-
-def _truncate_explanation(input_lines, max_lines=None, max_chars=None):
- """
- Truncate given list of strings that makes up the assertion explanation.
-
- Truncates to either 8 lines or 640 characters, whichever limit the input
- reaches first. The remaining lines are replaced by a usage message.
- """
-
- if max_lines is None:
- max_lines = DEFAULT_MAX_LINES
- if max_chars is None:
- max_chars = DEFAULT_MAX_CHARS
-
- # Check if truncation required
- input_char_count = len("".join(input_lines))
- if len(input_lines) <= max_lines and input_char_count <= max_chars:
- return input_lines
-
- # Truncate first to max_lines, and then truncate to max_chars if max_chars
- # is exceeded.
- truncated_explanation = input_lines[:max_lines]
- truncated_explanation = _truncate_by_char_count(truncated_explanation, max_chars)
-
- # Add ellipsis to final line
- truncated_explanation[-1] = truncated_explanation[-1] + "..."
-
- # Append useful message to explanation
- truncated_line_count = len(input_lines) - len(truncated_explanation)
- truncated_line_count += 1 # Account for the part-truncated final line
- msg = '...Full output truncated'
- if truncated_line_count == 1:
- msg += ' ({0} line hidden)'.format(truncated_line_count)
- else:
- msg += ' ({0} lines hidden)'.format(truncated_line_count)
- msg += ", {0}" .format(USAGE_MSG)
- truncated_explanation.extend([
- py.builtin._totext(""),
- py.builtin._totext(msg),
- ])
- return truncated_explanation
-
-
-def _truncate_by_char_count(input_lines, max_chars):
- # Check if truncation required
- if len("".join(input_lines)) <= max_chars:
- return input_lines
-
- # Find point at which input length exceeds total allowed length
- iterated_char_count = 0
- for iterated_index, input_line in enumerate(input_lines):
- if iterated_char_count + len(input_line) > max_chars:
- break
- iterated_char_count += len(input_line)
-
- # Create truncated explanation with modified final line
- truncated_result = input_lines[:iterated_index]
- final_line = input_lines[iterated_index]
- if final_line:
- final_line_truncate_point = max_chars - iterated_char_count
- final_line = final_line[:final_line_truncate_point]
- truncated_result.append(final_line)
- return truncated_result
diff --git a/lib/spack/external/pytest-fallback/_pytest/assertion/util.py b/lib/spack/external/pytest-fallback/_pytest/assertion/util.py
deleted file mode 100644
index c09eff06b0..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/assertion/util.py
+++ /dev/null
@@ -1,310 +0,0 @@
-"""Utilities for assertion debugging"""
-from __future__ import absolute_import, division, print_function
-import pprint
-
-import _pytest._code
-import py
-try:
- from collections.abc import Sequence
-except ImportError:
- try:
- from collections import Sequence
- except ImportError:
- Sequence = list
-
-
-u = py.builtin._totext
-
-# The _reprcompare attribute on the util module is used by the new assertion
-# interpretation code and assertion rewriter to detect this plugin was
-# loaded and in turn call the hooks defined here as part of the
-# DebugInterpreter.
-_reprcompare = None
-
-
-# the re-encoding is needed for python2 repr
-# with non-ascii characters (see issue 877 and 1379)
-def ecu(s):
- try:
- return u(s, 'utf-8', 'replace')
- except TypeError:
- return s
-
-
-def format_explanation(explanation):
- """This formats an explanation
-
- Normally all embedded newlines are escaped; however, there are
- three exceptions: \n{, \n} and \n~. The first two are intended to
- cover nested explanations; see the function and attribute explanations
- for examples (.visit_Call(), .visit_Attribute()). The last one is
- for when one explanation needs to span multiple lines, e.g. when
- displaying diffs.
- """
- explanation = ecu(explanation)
- lines = _split_explanation(explanation)
- result = _format_lines(lines)
- return u('\n').join(result)
-
-
-def _split_explanation(explanation):
- """Return a list of individual lines in the explanation
-
- This will return a list of lines split on '\n{', '\n}' and '\n~'.
- Any other newlines will be escaped and appear in the line as the
- literal '\n' characters.
- """
- raw_lines = (explanation or u('')).split('\n')
- lines = [raw_lines[0]]
- for values in raw_lines[1:]:
- if values and values[0] in ['{', '}', '~', '>']:
- lines.append(values)
- else:
- lines[-1] += '\\n' + values
- return lines
-
-
-def _format_lines(lines):
- """Format the individual lines
-
- This will replace the '{', '}' and '~' characters of our mini
- formatting language with the proper 'where ...', 'and ...' and ' +
- ...' text, taking care of indentation along the way.
-
- Return a list of formatted lines.
- """
- result = lines[:1]
- stack = [0]
- stackcnt = [0]
- for line in lines[1:]:
- if line.startswith('{'):
- if stackcnt[-1]:
- s = u('and ')
- else:
- s = u('where ')
- stack.append(len(result))
- stackcnt[-1] += 1
- stackcnt.append(0)
- result.append(u(' +') + u(' ') * (len(stack) - 1) + s + line[1:])
- elif line.startswith('}'):
- stack.pop()
- stackcnt.pop()
- result[stack[-1]] += line[1:]
- else:
- assert line[0] in ['~', '>']
- stack[-1] += 1
- indent = len(stack) if line.startswith('~') else len(stack) - 1
- result.append(u(' ') * indent + line[1:])
- assert len(stack) == 1
- return result
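
A hedged example of the mini formatting language these helpers consume: '\n{' opens a nested explanation, '\n}' closes it, and '\n~' continues at the current indent.

    explanation = "assert f(x)\n{f(x) = 42\n}"
    # format_explanation(explanation) renders roughly as:
    #   assert f(x)
    #    + where f(x) = 42
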
-
-
-# Provide basestring in python3
-try:
- basestring = basestring
-except NameError:
- basestring = str
-
-
-def assertrepr_compare(config, op, left, right):
- """Return specialised explanations for some operators/operands"""
- width = 80 - 15 - len(op) - 2 # 15 chars indentation, 2 for the spaces around op
- left_repr = py.io.saferepr(left, maxsize=int(width // 2))
- right_repr = py.io.saferepr(right, maxsize=width - len(left_repr))
-
- summary = u('%s %s %s') % (ecu(left_repr), op, ecu(right_repr))
-
- def issequence(x):
- return (isinstance(x, (list, tuple, Sequence)) and not isinstance(x, basestring))
-
- def istext(x):
- return isinstance(x, basestring)
-
- def isdict(x):
- return isinstance(x, dict)
-
- def isset(x):
- return isinstance(x, (set, frozenset))
-
- def isiterable(obj):
- try:
- iter(obj)
- return not istext(obj)
- except TypeError:
- return False
-
- verbose = config.getoption('verbose')
- explanation = None
- try:
- if op == '==':
- if istext(left) and istext(right):
- explanation = _diff_text(left, right, verbose)
- else:
- if issequence(left) and issequence(right):
- explanation = _compare_eq_sequence(left, right, verbose)
- elif isset(left) and isset(right):
- explanation = _compare_eq_set(left, right, verbose)
- elif isdict(left) and isdict(right):
- explanation = _compare_eq_dict(left, right, verbose)
- if isiterable(left) and isiterable(right):
- expl = _compare_eq_iterable(left, right, verbose)
- if explanation is not None:
- explanation.extend(expl)
- else:
- explanation = expl
- elif op == 'not in':
- if istext(left) and istext(right):
- explanation = _notin_text(left, right, verbose)
- except Exception:
- explanation = [
- u('(pytest_assertion plugin: representation of details failed. '
- 'Probably an object has a faulty __repr__.)'),
- u(_pytest._code.ExceptionInfo())]
-
- if not explanation:
- return None
-
- return [summary] + explanation
-
-
-def _diff_text(left, right, verbose=False):
- """Return the explanation for the diff between text or bytes
-
- Unless --verbose is used this will skip leading and trailing
- characters which are identical to keep the diff minimal.
-
- If the input are bytes they will be safely converted to text.
- """
- from difflib import ndiff
- explanation = []
- if isinstance(left, py.builtin.bytes):
- left = u(repr(left)[1:-1]).replace(r'\n', '\n')
- if isinstance(right, py.builtin.bytes):
- right = u(repr(right)[1:-1]).replace(r'\n', '\n')
- if not verbose:
- i = 0 # just in case left or right has zero length
- for i in range(min(len(left), len(right))):
- if left[i] != right[i]:
- break
- if i > 42:
- i -= 10 # Provide some context
- explanation = [u('Skipping %s identical leading '
- 'characters in diff, use -v to show') % i]
- left = left[i:]
- right = right[i:]
- if len(left) == len(right):
- for i in range(len(left)):
- if left[-i] != right[-i]:
- break
- if i > 42:
- i -= 10 # Provide some context
- explanation += [u('Skipping %s identical trailing '
- 'characters in diff, use -v to show') % i]
- left = left[:-i]
- right = right[:-i]
- keepends = True
- explanation += [line.strip('\n')
- for line in ndiff(left.splitlines(keepends),
- right.splitlines(keepends))]
- return explanation
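
The ndiff call at the core of _diff_text, in isolation:

    from difflib import ndiff

    left = "spam\neggs\n".splitlines(True)
    right = "spam\nham\n".splitlines(True)
    print("".join(ndiff(left, right)))
    #   spam
    # - eggs
    # + ham
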
-
-
-def _compare_eq_iterable(left, right, verbose=False):
- if not verbose:
- return [u('Use -v to get the full diff')]
- # dynamic import to speedup pytest
- import difflib
-
- try:
- left_formatting = pprint.pformat(left).splitlines()
- right_formatting = pprint.pformat(right).splitlines()
- explanation = [u('Full diff:')]
- except Exception:
- # hack: PrettyPrinter.pformat() in Python 2 fails when formatting items that can't be sorted(),
- # i.e. calling sorted() on such a list would raise. See issue #718.
- # As a workaround, the full diff is generated by using the repr() string of each item of each container.
- left_formatting = sorted(repr(x) for x in left)
- right_formatting = sorted(repr(x) for x in right)
- explanation = [u('Full diff (fallback to calling repr on each item):')]
- explanation.extend(line.strip() for line in difflib.ndiff(left_formatting, right_formatting))
- return explanation
-
-
-def _compare_eq_sequence(left, right, verbose=False):
- explanation = []
- for i in range(min(len(left), len(right))):
- if left[i] != right[i]:
- explanation += [u('At index %s diff: %r != %r')
- % (i, left[i], right[i])]
- break
- if len(left) > len(right):
- explanation += [u('Left contains more items, first extra item: %s')
- % py.io.saferepr(left[len(right)],)]
- elif len(left) < len(right):
- explanation += [
- u('Right contains more items, first extra item: %s') %
- py.io.saferepr(right[len(left)],)]
- return explanation
-
-
-def _compare_eq_set(left, right, verbose=False):
- explanation = []
- diff_left = left - right
- diff_right = right - left
- if diff_left:
- explanation.append(u('Extra items in the left set:'))
- for item in diff_left:
- explanation.append(py.io.saferepr(item))
- if diff_right:
- explanation.append(u('Extra items in the right set:'))
- for item in diff_right:
- explanation.append(py.io.saferepr(item))
- return explanation
-
-
-def _compare_eq_dict(left, right, verbose=False):
- explanation = []
- common = set(left).intersection(set(right))
- same = dict((k, left[k]) for k in common if left[k] == right[k])
- if same and verbose < 2:
- explanation += [u('Omitting %s identical items, use -vv to show') %
- len(same)]
- elif same:
- explanation += [u('Common items:')]
- explanation += pprint.pformat(same).splitlines()
- diff = set(k for k in common if left[k] != right[k])
- if diff:
- explanation += [u('Differing items:')]
- for k in diff:
- explanation += [py.io.saferepr({k: left[k]}) + ' != ' +
- py.io.saferepr({k: right[k]})]
- extra_left = set(left) - set(right)
- if extra_left:
- explanation.append(u('Left contains more items:'))
- explanation.extend(pprint.pformat(
- dict((k, left[k]) for k in extra_left)).splitlines())
- extra_right = set(right) - set(left)
- if extra_right:
- explanation.append(u('Right contains more items:'))
- explanation.extend(pprint.pformat(
- dict((k, right[k]) for k in extra_right)).splitlines())
- return explanation
-
-
-def _notin_text(term, text, verbose=False):
- index = text.find(term)
- head = text[:index]
- tail = text[index + len(term):]
- correct_text = head + tail
- diff = _diff_text(correct_text, text, verbose)
- newdiff = [u('%s is contained here:') % py.io.saferepr(term, maxsize=42)]
- for line in diff:
- if line.startswith(u('Skipping')):
- continue
- if line.startswith(u('- ')):
- continue
- if line.startswith(u('+ ')):
- newdiff.append(u(' ') + line[2:])
- else:
- newdiff.append(line)
- return newdiff
diff --git a/lib/spack/external/pytest-fallback/_pytest/cacheprovider.py b/lib/spack/external/pytest-fallback/_pytest/cacheprovider.py
deleted file mode 100755
index c537c14472..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/cacheprovider.py
+++ /dev/null
@@ -1,260 +0,0 @@
-"""
-merged implementation of the cache provider
-
-the name "cache" was deliberately not chosen, to ensure pluggy
-automatically ignores the external pytest-cache plugin
-"""
-from __future__ import absolute_import, division, print_function
-import py
-import pytest
-import json
-import os
-from os.path import sep as _sep, altsep as _altsep
-
-
-class Cache(object):
- def __init__(self, config):
- self.config = config
- self._cachedir = Cache.cache_dir_from_config(config)
- self.trace = config.trace.root.get("cache")
- if config.getvalue("cacheclear"):
- self.trace("clearing cachedir")
- if self._cachedir.check():
- self._cachedir.remove()
- self._cachedir.mkdir()
-
- @staticmethod
- def cache_dir_from_config(config):
- cache_dir = config.getini("cache_dir")
- cache_dir = os.path.expanduser(cache_dir)
- cache_dir = os.path.expandvars(cache_dir)
- if os.path.isabs(cache_dir):
- return py.path.local(cache_dir)
- else:
- return config.rootdir.join(cache_dir)
-
- def makedir(self, name):
- """ return a directory path object with the given name. If the
- directory does not yet exist, it will be created. You can use it
- to manage files, e.g. to store/retrieve database
- dumps across test sessions.
-
- :param name: must be a string not containing a ``/`` separator.
- Make sure the name contains your plugin or application
- identifiers to prevent clashes with other cache users.
- """
- if _sep in name or _altsep is not None and _altsep in name:
- raise ValueError("name is not allowed to contain path separators")
- return self._cachedir.ensure_dir("d", name)
-
- def _getvaluepath(self, key):
- return self._cachedir.join('v', *key.split('/'))
-
- def get(self, key, default):
- """ return cached value for the given key. If no value
- was yet cached or the value cannot be read, the specified
- default is returned.
-
- :param key: must be a ``/`` separated value. Usually the first
- name is the name of your plugin or your application.
- :param default: must be provided in case of a cache-miss or
- invalid cache values.
-
- """
- path = self._getvaluepath(key)
- if path.check():
- try:
- with path.open("r") as f:
- return json.load(f)
- except ValueError:
- self.trace("cache-invalid at %s" % (path,))
- return default
-
- def set(self, key, value):
- """ save value for the given key.
-
- :param key: must be a ``/`` separated value. Usually the first
- name is the name of your plugin or your application.
-        :param value: must be any combination of basic
-               python types, including nested types
-               like lists of dictionaries.
- """
- path = self._getvaluepath(key)
- try:
- path.dirpath().ensure_dir()
- except (py.error.EEXIST, py.error.EACCES):
- self.config.warn(
- code='I9', message='could not create cache path %s' % (path,)
- )
- return
- try:
- f = path.open('w')
- except py.error.ENOTDIR:
- self.config.warn(
- code='I9', message='cache could not write path %s' % (path,))
- else:
- with f:
- self.trace("cache-write %s: %r" % (key, value,))
- json.dump(value, f, indent=2, sort_keys=True)
-
-
-class LFPlugin:
- """ Plugin which implements the --lf (run last-failing) option """
-
- def __init__(self, config):
- self.config = config
- active_keys = 'lf', 'failedfirst'
- self.active = any(config.getvalue(key) for key in active_keys)
- self.lastfailed = config.cache.get("cache/lastfailed", {})
- self._previously_failed_count = None
-
- def pytest_report_collectionfinish(self):
- if self.active:
- if not self._previously_failed_count:
- mode = "run all (no recorded failures)"
- else:
- noun = 'failure' if self._previously_failed_count == 1 else 'failures'
- suffix = " first" if self.config.getvalue("failedfirst") else ""
- mode = "rerun previous {count} {noun}{suffix}".format(
- count=self._previously_failed_count, suffix=suffix, noun=noun
- )
- return "run-last-failure: %s" % mode
-
- def pytest_runtest_logreport(self, report):
- if (report.when == 'call' and report.passed) or report.skipped:
- self.lastfailed.pop(report.nodeid, None)
- elif report.failed:
- self.lastfailed[report.nodeid] = True
-
- def pytest_collectreport(self, report):
- passed = report.outcome in ('passed', 'skipped')
- if passed:
- if report.nodeid in self.lastfailed:
- self.lastfailed.pop(report.nodeid)
- self.lastfailed.update(
- (item.nodeid, True)
- for item in report.result)
- else:
- self.lastfailed[report.nodeid] = True
-
- def pytest_collection_modifyitems(self, session, config, items):
- if self.active and self.lastfailed:
- previously_failed = []
- previously_passed = []
- for item in items:
- if item.nodeid in self.lastfailed:
- previously_failed.append(item)
- else:
- previously_passed.append(item)
- self._previously_failed_count = len(previously_failed)
- if not previously_failed:
- # running a subset of all tests with recorded failures outside
- # of the set of tests currently executing
- return
- if self.config.getvalue("lf"):
- items[:] = previously_failed
- config.hook.pytest_deselected(items=previously_passed)
- else:
- items[:] = previously_failed + previously_passed
-
- def pytest_sessionfinish(self, session):
- config = self.config
- if config.getvalue("cacheshow") or hasattr(config, "slaveinput"):
- return
-
- saved_lastfailed = config.cache.get("cache/lastfailed", {})
- if saved_lastfailed != self.lastfailed:
- config.cache.set("cache/lastfailed", self.lastfailed)
-
-
-def pytest_addoption(parser):
- group = parser.getgroup("general")
- group.addoption(
- '--lf', '--last-failed', action='store_true', dest="lf",
- help="rerun only the tests that failed "
- "at the last run (or all if none failed)")
- group.addoption(
- '--ff', '--failed-first', action='store_true', dest="failedfirst",
- help="run all tests but run the last failures first. "
- "This may re-order tests and thus lead to "
- "repeated fixture setup/teardown")
- group.addoption(
- '--cache-show', action='store_true', dest="cacheshow",
- help="show cache contents, don't perform collection or tests")
- group.addoption(
- '--cache-clear', action='store_true', dest="cacheclear",
- help="remove all cache contents at start of test run.")
- parser.addini(
- "cache_dir", default='.cache',
- help="cache directory path.")
-
-
-def pytest_cmdline_main(config):
- if config.option.cacheshow:
- from _pytest.main import wrap_session
- return wrap_session(config, cacheshow)
-
-
-@pytest.hookimpl(tryfirst=True)
-def pytest_configure(config):
- config.cache = Cache(config)
- config.pluginmanager.register(LFPlugin(config), "lfplugin")
-
-
-@pytest.fixture
-def cache(request):
- """
- Return a cache object that can persist state between testing sessions.
-
- cache.get(key, default)
- cache.set(key, value)
-
-    Each key must be a ``/``-separated value, where the first part is usually the
- name of your plugin or application to avoid clashes with other cache users.
-
- Values can be any object handled by the json stdlib module.
- """
- return request.config.cache
-
-
-def pytest_report_header(config):
- if config.option.verbose:
- relpath = py.path.local().bestrelpath(config.cache._cachedir)
- return "cachedir: %s" % relpath
-
-
-def cacheshow(config, session):
- from pprint import pprint
- tw = py.io.TerminalWriter()
- tw.line("cachedir: " + str(config.cache._cachedir))
- if not config.cache._cachedir.check():
- tw.line("cache is empty")
- return 0
- dummy = object()
- basedir = config.cache._cachedir
- vdir = basedir.join("v")
- tw.sep("-", "cache values")
- for valpath in sorted(vdir.visit(lambda x: x.isfile())):
- key = valpath.relto(vdir).replace(valpath.sep, "/")
- val = config.cache.get(key, dummy)
- if val is dummy:
- tw.line("%s contains unreadable content, "
- "will be ignored" % key)
- else:
- tw.line("%s contains:" % key)
- stream = py.io.TextIO()
- pprint(val, stream=stream)
- for line in stream.getvalue().splitlines():
- tw.line(" " + line)
-
- ddir = basedir.join("d")
- if ddir.isdir() and ddir.listdir():
- tw.sep("-", "cache directories")
- for p in sorted(basedir.join("d").visit()):
- # if p.check(dir=1):
- # print("%s/" % p.relto(basedir))
- if p.isfile():
- key = p.relto(basedir)
- tw.line("%s is a file of length %d" % (
- key, p.size()))
- return 0
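For reference, a minimal sketch (an assumed test file, not part of this commit) of the ``cache`` fixture and the command-line flags the deleted plugin provided; keys are ``/``-separated and values must be JSON-serializable:

    def test_expensive_answer(cache):
        value = cache.get("myplugin/answer", None)
        if value is None:
            value = 42  # stand-in for an expensive computation
            cache.set("myplugin/answer", value)
        assert value == 42

    # pytest --lf            rerun only the tests that failed last time
    # pytest --ff            run all tests, previous failures first
    # pytest --cache-show    dump cache contents without running tests
    # pytest --cache-clear   wipe the cache at the start of the run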
diff --git a/lib/spack/external/pytest-fallback/_pytest/capture.py b/lib/spack/external/pytest-fallback/_pytest/capture.py
deleted file mode 100644
index cb5af6fcb3..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/capture.py
+++ /dev/null
@@ -1,577 +0,0 @@
-"""
-per-test stdout/stderr capturing mechanism.
-
-"""
-from __future__ import absolute_import, division, print_function
-
-import contextlib
-import sys
-import os
-import io
-from io import UnsupportedOperation
-from tempfile import TemporaryFile
-
-import py
-import pytest
-from _pytest.compat import CaptureIO
-
-unicode = py.builtin.text
-
-patchsysdict = {0: 'stdin', 1: 'stdout', 2: 'stderr'}
-
-
-def pytest_addoption(parser):
- group = parser.getgroup("general")
- group._addoption(
- '--capture', action="store",
- default="fd" if hasattr(os, "dup") else "sys",
- metavar="method", choices=['fd', 'sys', 'no'],
- help="per-test capturing method: one of fd|sys|no.")
- group._addoption(
- '-s', action="store_const", const="no", dest="capture",
- help="shortcut for --capture=no.")
-
-
-@pytest.hookimpl(hookwrapper=True)
-def pytest_load_initial_conftests(early_config, parser, args):
- ns = early_config.known_args_namespace
- if ns.capture == "fd":
- _py36_windowsconsoleio_workaround(sys.stdout)
- _colorama_workaround()
- _readline_workaround()
- pluginmanager = early_config.pluginmanager
- capman = CaptureManager(ns.capture)
- pluginmanager.register(capman, "capturemanager")
-
- # make sure that capturemanager is properly reset at final shutdown
- early_config.add_cleanup(capman.reset_capturings)
-
- # make sure logging does not raise exceptions at the end
- def silence_logging_at_shutdown():
- if "logging" in sys.modules:
- sys.modules["logging"].raiseExceptions = False
- early_config.add_cleanup(silence_logging_at_shutdown)
-
- # finally trigger conftest loading but while capturing (issue93)
- capman.init_capturings()
- outcome = yield
- out, err = capman.suspendcapture()
- if outcome.excinfo is not None:
- sys.stdout.write(out)
- sys.stderr.write(err)
-
-
-class CaptureManager:
- def __init__(self, method):
- self._method = method
-
- def _getcapture(self, method):
- if method == "fd":
- return MultiCapture(out=True, err=True, Capture=FDCapture)
- elif method == "sys":
- return MultiCapture(out=True, err=True, Capture=SysCapture)
- elif method == "no":
- return MultiCapture(out=False, err=False, in_=False)
- else:
- raise ValueError("unknown capturing method: %r" % method)
-
- def init_capturings(self):
- assert not hasattr(self, "_capturing")
- self._capturing = self._getcapture(self._method)
- self._capturing.start_capturing()
-
- def reset_capturings(self):
- cap = self.__dict__.pop("_capturing", None)
- if cap is not None:
- cap.pop_outerr_to_orig()
- cap.stop_capturing()
-
- def resumecapture(self):
- self._capturing.resume_capturing()
-
- def suspendcapture(self, in_=False):
- self.deactivate_funcargs()
- cap = getattr(self, "_capturing", None)
- if cap is not None:
- try:
- outerr = cap.readouterr()
- finally:
- cap.suspend_capturing(in_=in_)
- return outerr
-
- def activate_funcargs(self, pyfuncitem):
- capfuncarg = pyfuncitem.__dict__.pop("_capfuncarg", None)
- if capfuncarg is not None:
- capfuncarg._start()
- self._capfuncarg = capfuncarg
-
- def deactivate_funcargs(self):
- capfuncarg = self.__dict__.pop("_capfuncarg", None)
- if capfuncarg is not None:
- capfuncarg.close()
-
- @pytest.hookimpl(hookwrapper=True)
- def pytest_make_collect_report(self, collector):
- if isinstance(collector, pytest.File):
- self.resumecapture()
- outcome = yield
- out, err = self.suspendcapture()
- rep = outcome.get_result()
- if out:
- rep.sections.append(("Captured stdout", out))
- if err:
- rep.sections.append(("Captured stderr", err))
- else:
- yield
-
- @pytest.hookimpl(hookwrapper=True)
- def pytest_runtest_setup(self, item):
- self.resumecapture()
- yield
- self.suspendcapture_item(item, "setup")
-
- @pytest.hookimpl(hookwrapper=True)
- def pytest_runtest_call(self, item):
- self.resumecapture()
- self.activate_funcargs(item)
- yield
- # self.deactivate_funcargs() called from suspendcapture()
- self.suspendcapture_item(item, "call")
-
- @pytest.hookimpl(hookwrapper=True)
- def pytest_runtest_teardown(self, item):
- self.resumecapture()
- yield
- self.suspendcapture_item(item, "teardown")
-
- @pytest.hookimpl(tryfirst=True)
- def pytest_keyboard_interrupt(self, excinfo):
- self.reset_capturings()
-
- @pytest.hookimpl(tryfirst=True)
- def pytest_internalerror(self, excinfo):
- self.reset_capturings()
-
- def suspendcapture_item(self, item, when, in_=False):
- out, err = self.suspendcapture(in_=in_)
- item.add_report_section(when, "stdout", out)
- item.add_report_section(when, "stderr", err)
-
-
-error_capsysfderror = "cannot use capsys and capfd at the same time"
-
-
-@pytest.fixture
-def capsys(request):
- """Enable capturing of writes to sys.stdout/sys.stderr and make
- captured output available via ``capsys.readouterr()`` method calls
- which return a ``(out, err)`` tuple.
- """
- if "capfd" in request.fixturenames:
- raise request.raiseerror(error_capsysfderror)
- request.node._capfuncarg = c = CaptureFixture(SysCapture, request)
- return c
-
-
-@pytest.fixture
-def capfd(request):
- """Enable capturing of writes to file descriptors 1 and 2 and make
- captured output available via ``capfd.readouterr()`` method calls
- which return a ``(out, err)`` tuple.
- """
- if "capsys" in request.fixturenames:
- request.raiseerror(error_capsysfderror)
- if not hasattr(os, 'dup'):
- pytest.skip("capfd funcarg needs os.dup")
- request.node._capfuncarg = c = CaptureFixture(FDCapture, request)
- return c
-
-
-class CaptureFixture:
- def __init__(self, captureclass, request):
- self.captureclass = captureclass
- self.request = request
-
- def _start(self):
- self._capture = MultiCapture(out=True, err=True, in_=False,
- Capture=self.captureclass)
- self._capture.start_capturing()
-
- def close(self):
- cap = self.__dict__.pop("_capture", None)
- if cap is not None:
- self._outerr = cap.pop_outerr_to_orig()
- cap.stop_capturing()
-
- def readouterr(self):
- try:
- return self._capture.readouterr()
- except AttributeError:
- return self._outerr
-
- @contextlib.contextmanager
- def disabled(self):
- capmanager = self.request.config.pluginmanager.getplugin('capturemanager')
- capmanager.suspendcapture_item(self.request.node, "call", in_=True)
- try:
- yield
- finally:
- capmanager.resumecapture()
-
-
-def safe_text_dupfile(f, mode, default_encoding="UTF8"):
-    """ return an open text file object that's a duplicate of f on the
- FD-level if possible.
- """
- encoding = getattr(f, "encoding", None)
- try:
- fd = f.fileno()
- except Exception:
- if "b" not in getattr(f, "mode", "") and hasattr(f, "encoding"):
- # we seem to have a text stream, let's just use it
- return f
- else:
- newfd = os.dup(fd)
- if "b" not in mode:
- mode += "b"
- f = os.fdopen(newfd, mode, 0) # no buffering
- return EncodedFile(f, encoding or default_encoding)
-
-
-class EncodedFile(object):
- errors = "strict" # possibly needed by py3 code (issue555)
-
- def __init__(self, buffer, encoding):
- self.buffer = buffer
- self.encoding = encoding
-
- def write(self, obj):
- if isinstance(obj, unicode):
- obj = obj.encode(self.encoding, "replace")
- self.buffer.write(obj)
-
- def writelines(self, linelist):
- data = ''.join(linelist)
- self.write(data)
-
- @property
- def name(self):
- """Ensure that file.name is a string."""
- return repr(self.buffer)
-
- def __getattr__(self, name):
- return getattr(object.__getattribute__(self, "buffer"), name)
-
-
-class MultiCapture(object):
- out = err = in_ = None
-
- def __init__(self, out=True, err=True, in_=True, Capture=None):
- if in_:
- self.in_ = Capture(0)
- if out:
- self.out = Capture(1)
- if err:
- self.err = Capture(2)
-
- def start_capturing(self):
- if self.in_:
- self.in_.start()
- if self.out:
- self.out.start()
- if self.err:
- self.err.start()
-
- def pop_outerr_to_orig(self):
- """ pop current snapshot out/err capture and flush to orig streams. """
- out, err = self.readouterr()
- if out:
- self.out.writeorg(out)
- if err:
- self.err.writeorg(err)
- return out, err
-
- def suspend_capturing(self, in_=False):
- if self.out:
- self.out.suspend()
- if self.err:
- self.err.suspend()
- if in_ and self.in_:
- self.in_.suspend()
- self._in_suspended = True
-
- def resume_capturing(self):
- if self.out:
- self.out.resume()
- if self.err:
- self.err.resume()
- if hasattr(self, "_in_suspended"):
- self.in_.resume()
- del self._in_suspended
-
- def stop_capturing(self):
- """ stop capturing and reset capturing streams """
- if hasattr(self, '_reset'):
- raise ValueError("was already stopped")
- self._reset = True
- if self.out:
- self.out.done()
- if self.err:
- self.err.done()
- if self.in_:
- self.in_.done()
-
- def readouterr(self):
- """ return snapshot unicode value of stdout/stderr capturings. """
- return (self.out.snap() if self.out is not None else "",
- self.err.snap() if self.err is not None else "")
-
-
-class NoCapture:
- __init__ = start = done = suspend = resume = lambda *args: None
-
-
-class FDCapture:
-    """ Capture IO to/from a given os-level file descriptor. """
-
- def __init__(self, targetfd, tmpfile=None):
- self.targetfd = targetfd
- try:
- self.targetfd_save = os.dup(self.targetfd)
- except OSError:
- self.start = lambda: None
- self.done = lambda: None
- else:
- if targetfd == 0:
- assert not tmpfile, "cannot set tmpfile with stdin"
- tmpfile = open(os.devnull, "r")
- self.syscapture = SysCapture(targetfd)
- else:
- if tmpfile is None:
- f = TemporaryFile()
- with f:
- tmpfile = safe_text_dupfile(f, mode="wb+")
- if targetfd in patchsysdict:
- self.syscapture = SysCapture(targetfd, tmpfile)
- else:
- self.syscapture = NoCapture()
- self.tmpfile = tmpfile
- self.tmpfile_fd = tmpfile.fileno()
-
- def __repr__(self):
- return "<FDCapture %s oldfd=%s>" % (self.targetfd, self.targetfd_save)
-
- def start(self):
- """ Start capturing on targetfd using memorized tmpfile. """
- try:
- os.fstat(self.targetfd_save)
- except (AttributeError, OSError):
- raise ValueError("saved filedescriptor not valid anymore")
- os.dup2(self.tmpfile_fd, self.targetfd)
- self.syscapture.start()
-
- def snap(self):
- f = self.tmpfile
- f.seek(0)
- res = f.read()
- if res:
- enc = getattr(f, "encoding", None)
- if enc and isinstance(res, bytes):
- res = py.builtin._totext(res, enc, "replace")
- f.truncate(0)
- f.seek(0)
- return res
- return ''
-
- def done(self):
-        """ stop capturing, restore streams, and return the original capture
-        file, rewound to position zero. """
- targetfd_save = self.__dict__.pop("targetfd_save")
- os.dup2(targetfd_save, self.targetfd)
- os.close(targetfd_save)
- self.syscapture.done()
- self.tmpfile.close()
-
- def suspend(self):
- self.syscapture.suspend()
- os.dup2(self.targetfd_save, self.targetfd)
-
- def resume(self):
- self.syscapture.resume()
- os.dup2(self.tmpfile_fd, self.targetfd)
-
- def writeorg(self, data):
- """ write to original file descriptor. """
- if py.builtin._istext(data):
- data = data.encode("utf8") # XXX use encoding of original stream
- os.write(self.targetfd_save, data)
-
-
-class SysCapture:
- def __init__(self, fd, tmpfile=None):
- name = patchsysdict[fd]
- self._old = getattr(sys, name)
- self.name = name
- if tmpfile is None:
- if name == "stdin":
- tmpfile = DontReadFromInput()
- else:
- tmpfile = CaptureIO()
- self.tmpfile = tmpfile
-
- def start(self):
- setattr(sys, self.name, self.tmpfile)
-
- def snap(self):
- f = self.tmpfile
- res = f.getvalue()
- f.truncate(0)
- f.seek(0)
- return res
-
- def done(self):
- setattr(sys, self.name, self._old)
- del self._old
- self.tmpfile.close()
-
- def suspend(self):
- setattr(sys, self.name, self._old)
-
- def resume(self):
- setattr(sys, self.name, self.tmpfile)
-
- def writeorg(self, data):
- self._old.write(data)
- self._old.flush()
-
-
-class DontReadFromInput:
- """Temporary stub class. Ideally when stdin is accessed, the
- capturing should be turned off, with possibly all data captured
- so far sent to the screen. This should be configurable, though,
- because in automated test runs it is better to crash than
- hang indefinitely.
- """
-
- encoding = None
-
- def read(self, *args):
- raise IOError("reading from stdin while output is captured")
- readline = read
- readlines = read
- __iter__ = read
-
- def fileno(self):
- raise UnsupportedOperation("redirected stdin is pseudofile, "
- "has no fileno()")
-
- def isatty(self):
- return False
-
- def close(self):
- pass
-
- @property
- def buffer(self):
- if sys.version_info >= (3, 0):
- return self
- else:
- raise AttributeError('redirected stdin has no attribute buffer')
-
-
-def _colorama_workaround():
- """
- Ensure colorama is imported so that it attaches to the correct stdio
- handles on Windows.
-
-    colorama accesses the terminal at import time. So if the first import
-    of colorama happens while I/O capture is active, colorama will
-    fail in various ways.
- """
-
- if not sys.platform.startswith('win32'):
- return
- try:
- import colorama # noqa
- except ImportError:
- pass
-
-
-def _readline_workaround():
- """
- Ensure readline is imported so that it attaches to the correct stdio
- handles on Windows.
-
- Pdb uses readline support where available--when not running from the Python
- prompt, the readline module is not imported until running the pdb REPL. If
- running pytest with the --pdb option this means the readline module is not
- imported until after I/O capture has been started.
-
- This is a problem for pyreadline, which is often used to implement readline
- support on Windows, as it does not attach to the correct handles for stdout
- and/or stdin if they have been redirected by the FDCapture mechanism. This
-    workaround ensures that readline is imported before I/O capture is set up so
- that it can attach to the actual stdin/out for the console.
-
- See https://github.com/pytest-dev/pytest/pull/1281
- """
-
- if not sys.platform.startswith('win32'):
- return
- try:
- import readline # noqa
- except ImportError:
- pass
-
-
-def _py36_windowsconsoleio_workaround(stream):
- """
- Python 3.6 implemented unicode console handling for Windows. This works
- by reading/writing to the raw console handle using
- ``{Read,Write}ConsoleW``.
-
- The problem is that we are going to ``dup2`` over the stdio file
- descriptors when doing ``FDCapture`` and this will ``CloseHandle`` the
- handles used by Python to write to the console. Though there is still some
- weirdness and the console handle seems to only be closed randomly and not
- on the first call to ``CloseHandle``, or maybe it gets reopened with the
- same handle value when we suspend capturing.
-
- The workaround in this case will reopen stdio with a different fd which
- also means a different handle by replicating the logic in
-    "pylifecycle.c:initstdio/create_stdio".
-
- :param stream: in practice ``sys.stdout`` or ``sys.stderr``, but given
- here as parameter for unittesting purposes.
-
- See https://github.com/pytest-dev/py/issues/103
- """
- if not sys.platform.startswith('win32') or sys.version_info[:2] < (3, 6):
- return
-
- # bail out if ``stream`` doesn't seem like a proper ``io`` stream (#2666)
- if not hasattr(stream, 'buffer'):
- return
-
- buffered = hasattr(stream.buffer, 'raw')
- raw_stdout = stream.buffer.raw if buffered else stream.buffer
-
- if not isinstance(raw_stdout, io._WindowsConsoleIO):
- return
-
- def _reopen_stdio(f, mode):
- if not buffered and mode[0] == 'w':
- buffering = 0
- else:
- buffering = -1
-
- return io.TextIOWrapper(
- open(os.dup(f.fileno()), mode, buffering),
- f.encoding,
- f.errors,
- f.newlines,
- f.line_buffering)
-
- sys.__stdin__ = sys.stdin = _reopen_stdio(sys.stdin, 'rb')
- sys.__stdout__ = sys.stdout = _reopen_stdio(sys.stdout, 'wb')
- sys.__stderr__ = sys.stderr = _reopen_stdio(sys.stderr, 'wb')
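For reference, a minimal sketch (an assumed test file, not part of this commit) of the ``capsys`` fixture defined above; ``capfd`` behaves the same but captures at the file-descriptor level and therefore needs ``os.dup``:

    import sys

    def test_output(capsys):
        print("hello")
        sys.stderr.write("world\n")
        out, err = capsys.readouterr()  # snapshot and reset captured streams
        assert out == "hello\n"
        assert err == "world\n"
        with capsys.disabled():  # temporarily write to the real stdout
            print("shown live in the terminal")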
diff --git a/lib/spack/external/pytest-fallback/_pytest/compat.py b/lib/spack/external/pytest-fallback/_pytest/compat.py
deleted file mode 100644
index 255f69ce0d..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/compat.py
+++ /dev/null
@@ -1,326 +0,0 @@
-"""
-python version compatibility code
-"""
-from __future__ import absolute_import, division, print_function
-import sys
-import inspect
-import types
-import re
-import functools
-
-import py
-
-import _pytest
-from _pytest.outcomes import TEST_OUTCOME
-
-
-try:
- import enum
-except ImportError: # pragma: no cover
- # Only available in Python 3.4+ or as a backport
- enum = None
-
-
-_PY3 = sys.version_info > (3, 0)
-_PY2 = not _PY3
-
-
-NoneType = type(None)
-NOTSET = object()
-
-PY35 = sys.version_info[:2] >= (3, 5)
-PY36 = sys.version_info[:2] >= (3, 6)
-MODULE_NOT_FOUND_ERROR = 'ModuleNotFoundError' if PY36 else 'ImportError'
-
-if hasattr(inspect, 'signature'):
- def _format_args(func):
- return str(inspect.signature(func))
-else:
- def _format_args(func):
- return inspect.formatargspec(*inspect.getargspec(func))
-
-isfunction = inspect.isfunction
-isclass = inspect.isclass
-# used to work around a python2 exception info leak
-exc_clear = getattr(sys, 'exc_clear', lambda: None)
-# The type of re.compile objects is not exposed in Python.
-REGEX_TYPE = type(re.compile(''))
-
-
-def is_generator(func):
- genfunc = inspect.isgeneratorfunction(func)
- return genfunc and not iscoroutinefunction(func)
-
-
-def iscoroutinefunction(func):
- """Return True if func is a decorated coroutine function.
-
-    Note: copied and modified from Python 3.5's builtin coroutines.py to avoid importing asyncio directly,
-    which in turn also initializes the "logging" module as a side effect (see issue #8).
- """
- return (getattr(func, '_is_coroutine', False) or
- (hasattr(inspect, 'iscoroutinefunction') and inspect.iscoroutinefunction(func)))
-
-
-def getlocation(function, curdir):
- import inspect
- fn = py.path.local(inspect.getfile(function))
- lineno = py.builtin._getcode(function).co_firstlineno
- if fn.relto(curdir):
- fn = fn.relto(curdir)
- return "%s:%d" % (fn, lineno + 1)
-
-
-def num_mock_patch_args(function):
- """ return number of arguments used up by mock arguments (if any) """
- patchings = getattr(function, "patchings", None)
- if not patchings:
- return 0
- mock = sys.modules.get("mock", sys.modules.get("unittest.mock", None))
- if mock is not None:
- return len([p for p in patchings
- if not p.attribute_name and p.new is mock.DEFAULT])
- return len(patchings)
-
-
-def getfuncargnames(function, startindex=None, cls=None):
- """
- @RonnyPfannschmidt: This function should be refactored when we revisit fixtures. The
-    fixture mechanism should ask the node for the fixture names, and not try to obtain
-    them directly from the function object well after collection has occurred.
- """
- if startindex is None and cls is not None:
- is_staticmethod = isinstance(cls.__dict__.get(function.__name__, None), staticmethod)
- startindex = 0 if is_staticmethod else 1
- # XXX merge with main.py's varnames
- # assert not isclass(function)
- realfunction = function
- while hasattr(realfunction, "__wrapped__"):
- realfunction = realfunction.__wrapped__
- if startindex is None:
- startindex = inspect.ismethod(function) and 1 or 0
- if realfunction != function:
- startindex += num_mock_patch_args(function)
- function = realfunction
- if isinstance(function, functools.partial):
- argnames = inspect.getargs(_pytest._code.getrawcode(function.func))[0]
- partial = function
- argnames = argnames[len(partial.args):]
- if partial.keywords:
- for kw in partial.keywords:
- argnames.remove(kw)
- else:
- argnames = inspect.getargs(_pytest._code.getrawcode(function))[0]
- defaults = getattr(function, 'func_defaults',
- getattr(function, '__defaults__', None)) or ()
- numdefaults = len(defaults)
- if numdefaults:
- return tuple(argnames[startindex:-numdefaults])
- return tuple(argnames[startindex:])
-
-
-if sys.version_info[:2] == (2, 6):
- def isclass(object):
- """ Return true if the object is a class. Overrides inspect.isclass for
- python 2.6 because it will return True for objects which always return
- something on __getattr__ calls (see #1035).
- Backport of https://hg.python.org/cpython/rev/35bf8f7a8edc
- """
- return isinstance(object, (type, types.ClassType))
-
-
-if _PY3:
- import codecs
- imap = map
- izip = zip
- STRING_TYPES = bytes, str
- UNICODE_TYPES = str,
-
- def _ascii_escaped(val):
- """If val is pure ascii, returns it as a str(). Otherwise, escapes
- bytes objects into a sequence of escaped bytes:
-
- b'\xc3\xb4\xc5\xd6' -> u'\\xc3\\xb4\\xc5\\xd6'
-
- and escapes unicode objects into a sequence of escaped unicode
- ids, e.g.:
-
- '4\\nV\\U00043efa\\x0eMXWB\\x1e\\u3028\\u15fd\\xcd\\U0007d944'
-
- note:
-           the obvious "val.decode('unicode-escape')" would decode valid
-           utf-8 sequences found in the bytes, but we want to return
-           escaped bytes for every byte, even those that happen to form
-           a valid utf-8 string.
-
- """
- if isinstance(val, bytes):
- if val:
- # source: http://goo.gl/bGsnwC
- encoded_bytes, _ = codecs.escape_encode(val)
- return encoded_bytes.decode('ascii')
- else:
- # empty bytes crashes codecs.escape_encode (#1087)
- return ''
- else:
- return val.encode('unicode_escape').decode('ascii')
-else:
- STRING_TYPES = bytes, str, unicode
- UNICODE_TYPES = unicode,
-
- from itertools import imap, izip # NOQA
-
- def _ascii_escaped(val):
-        """In py2 bytes and str are the same type, so if val is a bytes
-        object, return it unchanged if it is a full ascii string,
-        otherwise escape it into its binary form.
-
- If it's a unicode string, change the unicode characters into
- unicode escapes.
-
- """
- if isinstance(val, bytes):
- try:
- return val.encode('ascii')
- except UnicodeDecodeError:
- return val.encode('string-escape')
- else:
- return val.encode('unicode-escape')
-
-
-def get_real_func(obj):
- """ gets the real function object of the (possibly) wrapped object by
- functools.wraps or functools.partial.
- """
- start_obj = obj
- for i in range(100):
- new_obj = getattr(obj, '__wrapped__', None)
- if new_obj is None:
- break
- obj = new_obj
- else:
- raise ValueError(
- ("could not find real function of {start}"
- "\nstopped at {current}").format(
- start=py.io.saferepr(start_obj),
- current=py.io.saferepr(obj)))
- if isinstance(obj, functools.partial):
- obj = obj.func
- return obj
-
-
-def getfslineno(obj):
- # xxx let decorators etc specify a sane ordering
- obj = get_real_func(obj)
- if hasattr(obj, 'place_as'):
- obj = obj.place_as
- fslineno = _pytest._code.getfslineno(obj)
- assert isinstance(fslineno[1], int), obj
- return fslineno
-
-
-def getimfunc(func):
- try:
- return func.__func__
- except AttributeError:
- try:
- return func.im_func
- except AttributeError:
- return func
-
-
-def safe_getattr(object, name, default):
- """ Like getattr but return default upon any Exception or any OutcomeException.
-
- Attribute access can potentially fail for 'evil' Python objects.
- See issue #214.
-    It catches OutcomeException because of #2490 (issue #580): new outcomes are derived from BaseException
-    instead of Exception (for more details see #2707).
- """
- try:
- return getattr(object, name, default)
- except TEST_OUTCOME:
- return default
-
-
-def _is_unittest_unexpected_success_a_failure():
-    """Return True if the test suite should fail when an @expectedFailure unittest test PASSES.
-
- From https://docs.python.org/3/library/unittest.html?highlight=unittest#unittest.TestResult.wasSuccessful:
- Changed in version 3.4: Returns False if there were any
- unexpectedSuccesses from tests marked with the expectedFailure() decorator.
- """
- return sys.version_info >= (3, 4)
-
-
-if _PY3:
- def safe_str(v):
- """returns v as string"""
- return str(v)
-else:
- def safe_str(v):
- """returns v as string, converting to ascii if necessary"""
- try:
- return str(v)
- except UnicodeError:
- if not isinstance(v, unicode):
- v = unicode(v)
- errors = 'replace'
- return v.encode('utf-8', errors)
-
-
-COLLECT_FAKEMODULE_ATTRIBUTES = (
- 'Collector',
- 'Module',
- 'Generator',
- 'Function',
- 'Instance',
- 'Session',
- 'Item',
- 'Class',
- 'File',
- '_fillfuncargs',
-)
-
-
-def _setup_collect_fakemodule():
- from types import ModuleType
- import pytest
- pytest.collect = ModuleType('pytest.collect')
- pytest.collect.__all__ = [] # used for setns
- for attr in COLLECT_FAKEMODULE_ATTRIBUTES:
- setattr(pytest.collect, attr, getattr(pytest, attr))
-
-
-if _PY2:
-    # Without this, test_dupfile_on_textio would fail; otherwise CaptureIO could inherit directly from StringIO.
- from py.io import TextIO
-
- class CaptureIO(TextIO):
-
- @property
- def encoding(self):
- return getattr(self, '_encoding', 'UTF-8')
-
-else:
- import io
-
- class CaptureIO(io.TextIOWrapper):
- def __init__(self):
- super(CaptureIO, self).__init__(
- io.BytesIO(),
- encoding='UTF-8', newline='', write_through=True,
- )
-
- def getvalue(self):
- return self.buffer.getvalue().decode('UTF-8')
-
-
-class FuncargnamesCompatAttr(object):
- """ helper class so that Metafunc, Function and FixtureRequest
- don't need to each define the "funcargnames" compatibility attribute.
- """
- @property
- def funcargnames(self):
- """ alias attribute for ``fixturenames`` for pre-2.3 compatibility"""
- return self.fixturenames
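For reference, a minimal sketch (hypothetical, not part of this commit) of ``get_real_func`` from the compat module deleted above, which unwraps layers added by functools.wraps and functools.partial:

    import functools
    from _pytest.compat import get_real_func

    def original():
        return "original"

    @functools.wraps(original)  # sets wrapper.__wrapped__ = original (Python 3)
    def wrapper():
        return original()

    assert get_real_func(wrapper) is original
    assert get_real_func(functools.partial(original)) is original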
diff --git a/lib/spack/external/pytest-fallback/_pytest/config.py b/lib/spack/external/pytest-fallback/_pytest/config.py
deleted file mode 100644
index 513478a972..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/config.py
+++ /dev/null
@@ -1,1398 +0,0 @@
-""" command line options, ini-file and conftest.py processing. """
-from __future__ import absolute_import, division, print_function
-import argparse
-import shlex
-import traceback
-import types
-import warnings
-
-import py
-# DON't import pytest here because it causes import cycle troubles
-import sys
-import os
-import _pytest._code
-import _pytest.hookspec # the extension point definitions
-import _pytest.assertion
-from _pytest._pluggy import PluginManager, HookimplMarker, HookspecMarker
-from _pytest.compat import safe_str
-
-hookimpl = HookimplMarker("pytest")
-hookspec = HookspecMarker("pytest")
-
-# pytest startup
-#
-
-
-class ConftestImportFailure(Exception):
- def __init__(self, path, excinfo):
- Exception.__init__(self, path, excinfo)
- self.path = path
- self.excinfo = excinfo
-
- def __str__(self):
- etype, evalue, etb = self.excinfo
- formatted = traceback.format_tb(etb)
- # The level of the tracebacks we want to print is hand crafted :(
- return repr(evalue) + '\n' + ''.join(formatted[2:])
-
-
-def main(args=None, plugins=None):
- """ return exit code, after performing an in-process test run.
-
- :arg args: list of command line arguments.
-
- :arg plugins: list of plugin objects to be auto-registered during
- initialization.
- """
- try:
- try:
- config = _prepareconfig(args, plugins)
- except ConftestImportFailure as e:
- tw = py.io.TerminalWriter(sys.stderr)
- for line in traceback.format_exception(*e.excinfo):
- tw.line(line.rstrip(), red=True)
- tw.line("ERROR: could not load %s\n" % (e.path), red=True)
- return 4
- else:
- try:
- return config.hook.pytest_cmdline_main(config=config)
- finally:
- config._ensure_unconfigure()
- except UsageError as e:
- for msg in e.args:
- sys.stderr.write("ERROR: %s\n" % (msg,))
- return 4
-
-
-class cmdline: # compatibility namespace
- main = staticmethod(main)
-
-
-class UsageError(Exception):
- """ error in pytest usage or invocation"""
-
-
-class PrintHelp(Exception):
-    """Raised when pytest should print its help and skip the rest of the
-    argument parsing and validation."""
- pass
-
-
-def filename_arg(path, optname):
- """ Argparse type validator for filename arguments.
-
- :path: path of filename
- :optname: name of the option
- """
- if os.path.isdir(path):
- raise UsageError("{0} must be a filename, given: {1}".format(optname, path))
- return path
-
-
-def directory_arg(path, optname):
- """Argparse type validator for directory arguments.
-
- :path: path of directory
- :optname: name of the option
- """
- if not os.path.isdir(path):
- raise UsageError("{0} must be a directory, given: {1}".format(optname, path))
- return path
-
-
-_preinit = []
-
-default_plugins = (
- "mark main terminal runner python fixtures debugging unittest capture skipping "
- "tmpdir monkeypatch recwarn pastebin helpconfig nose assertion "
- "junitxml resultlog doctest cacheprovider freeze_support "
- "setuponly setupplan warnings").split()
-
-
-builtin_plugins = set(default_plugins)
-builtin_plugins.add("pytester")
-
-
-def _preloadplugins():
- assert not _preinit
- _preinit.append(get_config())
-
-
-def get_config():
- if _preinit:
- return _preinit.pop(0)
- # subsequent calls to main will create a fresh instance
- pluginmanager = PytestPluginManager()
- config = Config(pluginmanager)
- for spec in default_plugins:
- pluginmanager.import_plugin(spec)
- return config
-
-
-def get_plugin_manager():
- """
- Obtain a new instance of the
- :py:class:`_pytest.config.PytestPluginManager`, with default plugins
- already loaded.
-
-    This function can be used for integration with other tools, like hooking
-    into pytest to run tests from an IDE.
- """
- return get_config().pluginmanager
-
-
-def _prepareconfig(args=None, plugins=None):
- warning = None
- if args is None:
- args = sys.argv[1:]
- elif isinstance(args, py.path.local):
- args = [str(args)]
- elif not isinstance(args, (tuple, list)):
- if not isinstance(args, str):
- raise ValueError("not a string or argument list: %r" % (args,))
- args = shlex.split(args, posix=sys.platform != "win32")
- from _pytest import deprecated
- warning = deprecated.MAIN_STR_ARGS
- config = get_config()
- pluginmanager = config.pluginmanager
- try:
- if plugins:
- for plugin in plugins:
- if isinstance(plugin, py.builtin._basestring):
- pluginmanager.consider_pluginarg(plugin)
- else:
- pluginmanager.register(plugin)
- if warning:
- config.warn('C1', warning)
- return pluginmanager.hook.pytest_cmdline_parse(
- pluginmanager=pluginmanager, args=args)
- except BaseException:
- config._ensure_unconfigure()
- raise
-
-
-class PytestPluginManager(PluginManager):
- """
- Overwrites :py:class:`pluggy.PluginManager <_pytest.vendored_packages.pluggy.PluginManager>` to add pytest-specific
- functionality:
-
-    * loading plugins from the command line, ``PYTEST_PLUGINS`` env variable and
-      ``pytest_plugins`` global variables found in plugins being loaded;
- * ``conftest.py`` loading during start-up;
- """
-
- def __init__(self):
- super(PytestPluginManager, self).__init__("pytest", implprefix="pytest_")
- self._conftest_plugins = set()
-
- # state related to local conftest plugins
- self._path2confmods = {}
- self._conftestpath2mod = {}
- self._confcutdir = None
- self._noconftest = False
- self._duplicatepaths = set()
-
- self.add_hookspecs(_pytest.hookspec)
- self.register(self)
- if os.environ.get('PYTEST_DEBUG'):
- err = sys.stderr
- encoding = getattr(err, 'encoding', 'utf8')
- try:
- err = py.io.dupfile(err, encoding=encoding)
- except Exception:
- pass
- self.trace.root.setwriter(err.write)
- self.enable_tracing()
-
- # Config._consider_importhook will set a real object if required.
- self.rewrite_hook = _pytest.assertion.DummyRewriteHook()
-
- def addhooks(self, module_or_class):
- """
- .. deprecated:: 2.8
-
- Use :py:meth:`pluggy.PluginManager.add_hookspecs <_pytest.vendored_packages.pluggy.PluginManager.add_hookspecs>`
- instead.
- """
- warning = dict(code="I2",
- fslocation=_pytest._code.getfslineno(sys._getframe(1)),
- nodeid=None,
- message="use pluginmanager.add_hookspecs instead of "
- "deprecated addhooks() method.")
- self._warn(warning)
- return self.add_hookspecs(module_or_class)
-
- def parse_hookimpl_opts(self, plugin, name):
- # pytest hooks are always prefixed with pytest_
- # so we avoid accessing possibly non-readable attributes
- # (see issue #1073)
- if not name.startswith("pytest_"):
- return
- # ignore some historic special names which can not be hooks anyway
- if name == "pytest_plugins" or name.startswith("pytest_funcarg__"):
- return
-
- method = getattr(plugin, name)
- opts = super(PytestPluginManager, self).parse_hookimpl_opts(plugin, name)
- if opts is not None:
- for name in ("tryfirst", "trylast", "optionalhook", "hookwrapper"):
- opts.setdefault(name, hasattr(method, name))
- return opts
-
- def parse_hookspec_opts(self, module_or_class, name):
- opts = super(PytestPluginManager, self).parse_hookspec_opts(
- module_or_class, name)
- if opts is None:
- method = getattr(module_or_class, name)
- if name.startswith("pytest_"):
- opts = {"firstresult": hasattr(method, "firstresult"),
- "historic": hasattr(method, "historic")}
- return opts
-
- def _verify_hook(self, hook, hookmethod):
- super(PytestPluginManager, self)._verify_hook(hook, hookmethod)
- if "__multicall__" in hookmethod.argnames:
- fslineno = _pytest._code.getfslineno(hookmethod.function)
- warning = dict(code="I1",
- fslocation=fslineno,
- nodeid=None,
- message="%r hook uses deprecated __multicall__ "
- "argument" % (hook.name))
- self._warn(warning)
-
- def register(self, plugin, name=None):
- ret = super(PytestPluginManager, self).register(plugin, name)
- if ret:
- self.hook.pytest_plugin_registered.call_historic(
- kwargs=dict(plugin=plugin, manager=self))
-
- if isinstance(plugin, types.ModuleType):
- self.consider_module(plugin)
- return ret
-
- def getplugin(self, name):
- # support deprecated naming because plugins (xdist e.g.) use it
- return self.get_plugin(name)
-
- def hasplugin(self, name):
- """Return True if the plugin with the given name is registered."""
- return bool(self.get_plugin(name))
-
- def pytest_configure(self, config):
- # XXX now that the pluginmanager exposes hookimpl(tryfirst...)
- # we should remove tryfirst/trylast as markers
- config.addinivalue_line("markers",
- "tryfirst: mark a hook implementation function such that the "
- "plugin machinery will try to call it first/as early as possible.")
- config.addinivalue_line("markers",
- "trylast: mark a hook implementation function such that the "
- "plugin machinery will try to call it last/as late as possible.")
-
- def _warn(self, message):
- kwargs = message if isinstance(message, dict) else {
- 'code': 'I1',
- 'message': message,
- 'fslocation': None,
- 'nodeid': None,
- }
- self.hook.pytest_logwarning.call_historic(kwargs=kwargs)
-
- #
- # internal API for local conftest plugin handling
- #
- def _set_initial_conftests(self, namespace):
- """ load initial conftest files given a preparsed "namespace".
- As conftest files may add their own command line options
- which have arguments ('--my-opt somepath') we might get some
- false positives. All builtin and 3rd party plugins will have
- been loaded, however, so common options will not confuse our logic
- here.
- """
- current = py.path.local()
- self._confcutdir = current.join(namespace.confcutdir, abs=True) \
- if namespace.confcutdir else None
- self._noconftest = namespace.noconftest
- testpaths = namespace.file_or_dir
- foundanchor = False
- for path in testpaths:
- path = str(path)
- # remove node-id syntax
- i = path.find("::")
- if i != -1:
- path = path[:i]
- anchor = current.join(path, abs=1)
- if exists(anchor): # we found some file object
- self._try_load_conftest(anchor)
- foundanchor = True
- if not foundanchor:
- self._try_load_conftest(current)
-
- def _try_load_conftest(self, anchor):
- self._getconftestmodules(anchor)
- # let's also consider test* subdirs
- if anchor.check(dir=1):
- for x in anchor.listdir("test*"):
- if x.check(dir=1):
- self._getconftestmodules(x)
-
- def _getconftestmodules(self, path):
- if self._noconftest:
- return []
- try:
- return self._path2confmods[path]
- except KeyError:
- if path.isfile():
- clist = self._getconftestmodules(path.dirpath())
- else:
- # XXX these days we may rather want to use config.rootdir
- # and allow users to opt into looking into the rootdir parent
- # directories instead of requiring to specify confcutdir
- clist = []
- for parent in path.parts():
- if self._confcutdir and self._confcutdir.relto(parent):
- continue
- conftestpath = parent.join("conftest.py")
- if conftestpath.isfile():
- mod = self._importconftest(conftestpath)
- clist.append(mod)
-
- self._path2confmods[path] = clist
- return clist
-
- def _rget_with_confmod(self, name, path):
- modules = self._getconftestmodules(path)
- for mod in reversed(modules):
- try:
- return mod, getattr(mod, name)
- except AttributeError:
- continue
- raise KeyError(name)
-
- def _importconftest(self, conftestpath):
- try:
- return self._conftestpath2mod[conftestpath]
- except KeyError:
- pkgpath = conftestpath.pypkgpath()
- if pkgpath is None:
- _ensure_removed_sysmodule(conftestpath.purebasename)
- try:
- mod = conftestpath.pyimport()
- except Exception:
- raise ConftestImportFailure(conftestpath, sys.exc_info())
-
- self._conftest_plugins.add(mod)
- self._conftestpath2mod[conftestpath] = mod
- dirpath = conftestpath.dirpath()
- if dirpath in self._path2confmods:
- for path, mods in self._path2confmods.items():
- if path and path.relto(dirpath) or path == dirpath:
- assert mod not in mods
- mods.append(mod)
- self.trace("loaded conftestmodule %r" % (mod))
- self.consider_conftest(mod)
- return mod
-
- #
- # API for bootstrapping plugin loading
- #
- #
-
- def consider_preparse(self, args):
- for opt1, opt2 in zip(args, args[1:]):
- if opt1 == "-p":
- self.consider_pluginarg(opt2)
-
- def consider_pluginarg(self, arg):
- if arg.startswith("no:"):
- name = arg[3:]
- self.set_blocked(name)
- if not name.startswith("pytest_"):
- self.set_blocked("pytest_" + name)
- else:
- self.import_plugin(arg)
-
- def consider_conftest(self, conftestmodule):
- self.register(conftestmodule, name=conftestmodule.__file__)
-
- def consider_env(self):
- self._import_plugin_specs(os.environ.get("PYTEST_PLUGINS"))
-
- def consider_module(self, mod):
- self._import_plugin_specs(getattr(mod, 'pytest_plugins', []))
-
- def _import_plugin_specs(self, spec):
- plugins = _get_plugin_specs_as_list(spec)
- for import_spec in plugins:
- self.import_plugin(import_spec)
-
- def import_plugin(self, modname):
- # most often modname refers to builtin modules, e.g. "pytester",
- # "terminal" or "capture". Those plugins are registered under their
- # basename for historic purposes but must be imported with the
- # _pytest prefix.
- assert isinstance(modname, (py.builtin.text, str)), "module name as text required, got %r" % modname
- modname = str(modname)
- if self.get_plugin(modname) is not None:
- return
- if modname in builtin_plugins:
- importspec = "_pytest." + modname
- else:
- importspec = modname
- self.rewrite_hook.mark_rewrite(importspec)
- try:
- __import__(importspec)
- except ImportError as e:
- new_exc = ImportError('Error importing plugin "%s": %s' % (modname, safe_str(e.args[0])))
- # copy over name and path attributes
- for attr in ('name', 'path'):
- if hasattr(e, attr):
- setattr(new_exc, attr, getattr(e, attr))
- raise new_exc
- except Exception as e:
- import pytest
- if not hasattr(pytest, 'skip') or not isinstance(e, pytest.skip.Exception):
- raise
- self._warn("skipped plugin %r: %s" % ((modname, e.msg)))
- else:
- mod = sys.modules[importspec]
- self.register(mod, modname)
-
-
-def _get_plugin_specs_as_list(specs):
- """
- Parses a list of "plugin specs" and returns a list of plugin names.
-
-    Plugin specs can be given as a string of plugin names separated by "," or already as a list/tuple, in
-    which case it is returned as a list. Specs can also be `None`, in which case an
-    empty list is returned.
- """
- if specs is not None:
- if isinstance(specs, str):
- specs = specs.split(',') if specs else []
- if not isinstance(specs, (list, tuple)):
- raise UsageError("Plugin specs must be a ','-separated string or a "
- "list/tuple of strings for plugin names. Given: %r" % specs)
- return list(specs)
- return []
-
-
-class Parser:
- """ Parser for command line arguments and ini-file values.
-
- :ivar extra_info: dict of generic param -> value to display in case
- there's an error processing the command line arguments.
- """
-
- def __init__(self, usage=None, processopt=None):
- self._anonymous = OptionGroup("custom options", parser=self)
- self._groups = []
- self._processopt = processopt
- self._usage = usage
- self._inidict = {}
- self._ininames = []
- self.extra_info = {}
-
- def processoption(self, option):
- if self._processopt:
- if option.dest:
- self._processopt(option)
-
- def getgroup(self, name, description="", after=None):
- """ get (or create) a named option Group.
-
- :name: name of the option group.
- :description: long description for --help output.
- :after: name of other group, used for ordering --help output.
-
- The returned group object has an ``addoption`` method with the same
- signature as :py:func:`parser.addoption
- <_pytest.config.Parser.addoption>` but will be shown in the
-        respective group in the output of ``pytest --help``.
- """
- for group in self._groups:
- if group.name == name:
- return group
- group = OptionGroup(name, description, parser=self)
- i = 0
- for i, grp in enumerate(self._groups):
- if grp.name == after:
- break
- self._groups.insert(i + 1, group)
- return group
-
- def addoption(self, *opts, **attrs):
- """ register a command line option.
-
- :opts: option names, can be short or long options.
- :attrs: same attributes which the ``add_option()`` function of the
- `argparse library
- <http://docs.python.org/2/library/argparse.html>`_
- accepts.
-
- After command line parsing options are available on the pytest config
- object via ``config.option.NAME`` where ``NAME`` is usually set
- by passing a ``dest`` attribute, for example
- ``addoption("--long", dest="NAME", ...)``.
- """
- self._anonymous.addoption(*opts, **attrs)
-
- def parse(self, args, namespace=None):
- from _pytest._argcomplete import try_argcomplete
- self.optparser = self._getparser()
- try_argcomplete(self.optparser)
- return self.optparser.parse_args([str(x) for x in args], namespace=namespace)
-
- def _getparser(self):
- from _pytest._argcomplete import filescompleter
- optparser = MyOptionParser(self, self.extra_info)
- groups = self._groups + [self._anonymous]
- for group in groups:
- if group.options:
- desc = group.description or group.name
- arggroup = optparser.add_argument_group(desc)
- for option in group.options:
- n = option.names()
- a = option.attrs()
- arggroup.add_argument(*n, **a)
- # bash like autocompletion for dirs (appending '/')
- optparser.add_argument(FILE_OR_DIR, nargs='*').completer = filescompleter
- return optparser
-
- def parse_setoption(self, args, option, namespace=None):
- parsedoption = self.parse(args, namespace=namespace)
- for name, value in parsedoption.__dict__.items():
- setattr(option, name, value)
- return getattr(parsedoption, FILE_OR_DIR)
-
- def parse_known_args(self, args, namespace=None):
- """parses and returns a namespace object with known arguments at this
- point.
- """
- return self.parse_known_and_unknown_args(args, namespace=namespace)[0]
-
- def parse_known_and_unknown_args(self, args, namespace=None):
- """parses and returns a namespace object with known arguments, and
- the remaining arguments unknown at this point.
- """
- optparser = self._getparser()
- args = [str(x) for x in args]
- return optparser.parse_known_args(args, namespace=namespace)
-
- def addini(self, name, help, type=None, default=None):
- """ register an ini-file option.
-
- :name: name of the ini-variable
- :type: type of the variable, can be ``pathlist``, ``args``, ``linelist``
- or ``bool``.
- :default: default value if no ini-file option exists but is queried.
-
- The value of ini-variables can be retrieved via a call to
- :py:func:`config.getini(name) <_pytest.config.Config.getini>`.
- """
- assert type in (None, "pathlist", "args", "linelist", "bool")
- self._inidict[name] = (help, type, default)
- self._ininames.append(name)
-
-
-class ArgumentError(Exception):
- """
- Raised if an Argument instance is created with invalid or
- inconsistent arguments.
- """
-
- def __init__(self, msg, option):
- self.msg = msg
- self.option_id = str(option)
-
- def __str__(self):
- if self.option_id:
- return "option %s: %s" % (self.option_id, self.msg)
- else:
- return self.msg
-
-
-class Argument:
- """class that mimics the necessary behaviour of optparse.Option
-
-    it's currently a least-effort implementation
-    that ignores choices and integer prefixes
- https://docs.python.org/3/library/optparse.html#optparse-standard-option-types
- """
- _typ_map = {
- 'int': int,
- 'string': str,
- 'float': float,
- 'complex': complex,
- }
-
- def __init__(self, *names, **attrs):
-        """store params in private vars for use in add_argument"""
- self._attrs = attrs
- self._short_opts = []
- self._long_opts = []
- self.dest = attrs.get('dest')
- if '%default' in (attrs.get('help') or ''):
- warnings.warn(
- 'pytest now uses argparse. "%default" should be'
- ' changed to "%(default)s" ',
- DeprecationWarning,
- stacklevel=3)
- try:
- typ = attrs['type']
- except KeyError:
- pass
- else:
- # this might raise a keyerror as well, don't want to catch that
- if isinstance(typ, py.builtin._basestring):
- if typ == 'choice':
- warnings.warn(
- 'type argument to addoption() is a string %r.'
-                        ' For argparse this is optional and when supplied'
- ' should be a type.'
- ' (options: %s)' % (typ, names),
- DeprecationWarning,
- stacklevel=3)
- # argparse expects a type here take it from
- # the type of the first element
- attrs['type'] = type(attrs['choices'][0])
- else:
- warnings.warn(
- 'type argument to addoption() is a string %r.'
-                        ' For argparse this should be a type.'
- ' (options: %s)' % (typ, names),
- DeprecationWarning,
- stacklevel=3)
- attrs['type'] = Argument._typ_map[typ]
- # used in test_parseopt -> test_parse_defaultgetter
- self.type = attrs['type']
- else:
- self.type = typ
- try:
- # attribute existence is tested in Config._processopt
- self.default = attrs['default']
- except KeyError:
- pass
- self._set_opt_strings(names)
- if not self.dest:
- if self._long_opts:
- self.dest = self._long_opts[0][2:].replace('-', '_')
- else:
- try:
- self.dest = self._short_opts[0][1:]
- except IndexError:
- raise ArgumentError(
- 'need a long or short option', self)
-
- def names(self):
- return self._short_opts + self._long_opts
-
- def attrs(self):
- # update any attributes set by processopt
- attrs = 'default dest help'.split()
- if self.dest:
- attrs.append(self.dest)
- for attr in attrs:
- try:
- self._attrs[attr] = getattr(self, attr)
- except AttributeError:
- pass
- if self._attrs.get('help'):
- a = self._attrs['help']
- a = a.replace('%default', '%(default)s')
- # a = a.replace('%prog', '%(prog)s')
- self._attrs['help'] = a
- return self._attrs
-
- def _set_opt_strings(self, opts):
- """directly from optparse
-
- might not be necessary as this is passed to argparse later on"""
- for opt in opts:
- if len(opt) < 2:
- raise ArgumentError(
- "invalid option string %r: "
- "must be at least two characters long" % opt, self)
- elif len(opt) == 2:
- if not (opt[0] == "-" and opt[1] != "-"):
- raise ArgumentError(
- "invalid short option string %r: "
- "must be of the form -x, (x any non-dash char)" % opt,
- self)
- self._short_opts.append(opt)
- else:
- if not (opt[0:2] == "--" and opt[2] != "-"):
- raise ArgumentError(
- "invalid long option string %r: "
- "must start with --, followed by non-dash" % opt,
- self)
- self._long_opts.append(opt)
-
- def __repr__(self):
- args = []
- if self._short_opts:
- args += ['_short_opts: ' + repr(self._short_opts)]
- if self._long_opts:
- args += ['_long_opts: ' + repr(self._long_opts)]
- args += ['dest: ' + repr(self.dest)]
- if hasattr(self, 'type'):
- args += ['type: ' + repr(self.type)]
- if hasattr(self, 'default'):
- args += ['default: ' + repr(self.default)]
- return 'Argument({0})'.format(', '.join(args))
-
-
-class OptionGroup:
- def __init__(self, name, description="", parser=None):
- self.name = name
- self.description = description
- self.options = []
- self.parser = parser
-
- def addoption(self, *optnames, **attrs):
- """ add an option to this group.
-
- if a shortened version of a long option is specified it will
- be suppressed in the help. addoption('--twowords', '--two-words')
- results in help showing '--two-words' only, but --twowords gets
- accepted **and** the automatic destination is in args.twowords
- """
- conflict = set(optnames).intersection(
- name for opt in self.options for name in opt.names())
- if conflict:
- raise ValueError("option names %s already added" % conflict)
- option = Argument(*optnames, **attrs)
- self._addoption_instance(option, shortupper=False)
-
- def _addoption(self, *optnames, **attrs):
- option = Argument(*optnames, **attrs)
- self._addoption_instance(option, shortupper=True)
-
- def _addoption_instance(self, option, shortupper=False):
- if not shortupper:
- for opt in option._short_opts:
- if opt[0] == '-' and opt[1].islower():
- raise ValueError("lowercase shortoptions reserved")
- if self.parser:
- self.parser.processoption(option)
- self.options.append(option)
-
-
-class MyOptionParser(argparse.ArgumentParser):
- def __init__(self, parser, extra_info=None):
- if not extra_info:
- extra_info = {}
- self._parser = parser
- argparse.ArgumentParser.__init__(self, usage=parser._usage,
- add_help=False, formatter_class=DropShorterLongHelpFormatter)
- # extra_info is a dict of (param -> value) to display if there's
-        # a usage error, to provide more contextual information to the user
- self.extra_info = extra_info
-
- def parse_args(self, args=None, namespace=None):
- """allow splitting of positional arguments"""
- args, argv = self.parse_known_args(args, namespace)
- if argv:
- for arg in argv:
- if arg and arg[0] == '-':
- lines = ['unrecognized arguments: %s' % (' '.join(argv))]
- for k, v in sorted(self.extra_info.items()):
- lines.append(' %s: %s' % (k, v))
- self.error('\n'.join(lines))
- getattr(args, FILE_OR_DIR).extend(argv)
- return args
-
-
-class DropShorterLongHelpFormatter(argparse.HelpFormatter):
- """shorten help for long options that differ only in extra hyphens
-
- - collapse **long** options that are the same except for extra hyphens
-    - special action attribute map_long_option allows suppressing additional
-      long options
- - shortcut if there are only two options and one of them is a short one
- - cache result on action object as this is called at least 2 times
- """
-
- def _format_action_invocation(self, action):
- orgstr = argparse.HelpFormatter._format_action_invocation(self, action)
- if orgstr and orgstr[0] != '-': # only optional arguments
- return orgstr
- res = getattr(action, '_formatted_action_invocation', None)
- if res:
- return res
- options = orgstr.split(', ')
- if len(options) == 2 and (len(options[0]) == 2 or len(options[1]) == 2):
- # a shortcut for '-h, --help' or '--abc', '-a'
- action._formatted_action_invocation = orgstr
- return orgstr
- return_list = []
- option_map = getattr(action, 'map_long_option', {})
- if option_map is None:
- option_map = {}
- short_long = {}
- for option in options:
- if len(option) == 2 or option[2] == ' ':
- continue
- if not option.startswith('--'):
- raise ArgumentError('long optional argument without "--": [%s]'
- % (option), self)
- xxoption = option[2:]
- if xxoption.split()[0] not in option_map:
- shortened = xxoption.replace('-', '')
- if shortened not in short_long or \
- len(short_long[shortened]) < len(xxoption):
- short_long[shortened] = xxoption
- # short_long now maps each collapsed name to its longest dashed
- # spelling, **and** we keep the right option ordering from add_argument
- for option in options:
- if len(option) == 2 or option[2] == ' ':
- return_list.append(option)
- if option[2:] == short_long.get(option.replace('-', '')):
- return_list.append(option.replace(' ', '=', 1))
- action._formatted_action_invocation = ', '.join(return_list)
- return action._formatted_action_invocation
-
-
-def _ensure_removed_sysmodule(modname):
- try:
- del sys.modules[modname]
- except KeyError:
- pass
-
-
-class CmdOptions(object):
- """ holds cmdline options as attributes."""
-
- def __init__(self, values=()):
- self.__dict__.update(values)
-
- def __repr__(self):
- return "<CmdOptions %r>" % (self.__dict__,)
-
- def copy(self):
- return CmdOptions(self.__dict__)
-
-
-class Notset:
- def __repr__(self):
- return "<NOTSET>"
-
-
-notset = Notset()
-FILE_OR_DIR = 'file_or_dir'
-
-
-def _iter_rewritable_modules(package_files):
- for fn in package_files:
- is_simple_module = '/' not in fn and fn.endswith('.py')
- is_package = fn.count('/') == 1 and fn.endswith('__init__.py')
- if is_simple_module:
- module_name, _ = os.path.splitext(fn)
- yield module_name
- elif is_package:
- package_name = os.path.dirname(fn)
- yield package_name
-
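-
-# Illustrative sketch: what the generator above yields for a made-up
-# file listing. Top-level modules yield their stem, top-level packages
-# their directory name, and deeper nesting is skipped.
-def _example_iter_rewritable_modules():
-    files = ['helper.py', 'mypkg/__init__.py', 'mypkg/sub/mod.py']
-    assert list(_iter_rewritable_modules(files)) == ['helper', 'mypkg']
-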
-
-class Config(object):
- """ access to configuration values, pluginmanager and plugin hooks. """
-
- def __init__(self, pluginmanager):
- #: access to command line options as attributes.
- #: (deprecated) use :py:func:`getoption() <_pytest.config.Config.getoption>` instead
- self.option = CmdOptions()
- _a = FILE_OR_DIR
- self._parser = Parser(
- usage="%%(prog)s [options] [%s] [%s] [...]" % (_a, _a),
- processopt=self._processopt,
- )
- #: a pluginmanager instance
- self.pluginmanager = pluginmanager
- self.trace = self.pluginmanager.trace.root.get("config")
- self.hook = self.pluginmanager.hook
- self._inicache = {}
- self._override_ini = ()
- self._opt2dest = {}
- self._cleanup = []
- self._warn = self.pluginmanager._warn
- self.pluginmanager.register(self, "pytestconfig")
- self._configured = False
-
- def do_setns(dic):
- import pytest
- setns(pytest, dic)
-
- self.hook.pytest_namespace.call_historic(do_setns, {})
- self.hook.pytest_addoption.call_historic(kwargs=dict(parser=self._parser))
-
- def add_cleanup(self, func):
- """ Add a function to be called when the config object gets out of
- use (usually coninciding with pytest_unconfigure)."""
- self._cleanup.append(func)
-
- def _do_configure(self):
- assert not self._configured
- self._configured = True
- self.hook.pytest_configure.call_historic(kwargs=dict(config=self))
-
- def _ensure_unconfigure(self):
- if self._configured:
- self._configured = False
- self.hook.pytest_unconfigure(config=self)
- self.hook.pytest_configure._call_history = []
- while self._cleanup:
- fin = self._cleanup.pop()
- fin()
-
- def warn(self, code, message, fslocation=None, nodeid=None):
- """ generate a warning for this test session. """
- self.hook.pytest_logwarning.call_historic(kwargs=dict(
- code=code, message=message,
- fslocation=fslocation, nodeid=nodeid))
-
- def get_terminal_writer(self):
- return self.pluginmanager.get_plugin("terminalreporter")._tw
-
- def pytest_cmdline_parse(self, pluginmanager, args):
- # REF1 assert self == pluginmanager.config, (self, pluginmanager.config)
- self.parse(args)
- return self
-
- def notify_exception(self, excinfo, option=None):
- if option and option.fulltrace:
- style = "long"
- else:
- style = "native"
- excrepr = excinfo.getrepr(funcargs=True,
- showlocals=getattr(option, 'showlocals', False),
- style=style,
- )
- res = self.hook.pytest_internalerror(excrepr=excrepr,
- excinfo=excinfo)
- if not py.builtin.any(res):
- for line in str(excrepr).split("\n"):
- sys.stderr.write("INTERNALERROR> %s\n" % line)
- sys.stderr.flush()
-
- def cwd_relative_nodeid(self, nodeid):
- # nodeids are relative to the rootdir; compute them relative to cwd
- if self.invocation_dir != self.rootdir:
- fullpath = self.rootdir.join(nodeid)
- nodeid = self.invocation_dir.bestrelpath(fullpath)
- return nodeid
-
- @classmethod
- def fromdictargs(cls, option_dict, args):
- """ constructor useable for subprocesses. """
- config = get_config()
- config.option.__dict__.update(option_dict)
- config.parse(args, addopts=False)
- for x in config.option.plugins:
- config.pluginmanager.consider_pluginarg(x)
- return config
-
- def _processopt(self, opt):
- for name in opt._short_opts + opt._long_opts:
- self._opt2dest[name] = opt.dest
-
- if hasattr(opt, 'default') and opt.dest:
- if not hasattr(self.option, opt.dest):
- setattr(self.option, opt.dest, opt.default)
-
- @hookimpl(trylast=True)
- def pytest_load_initial_conftests(self, early_config):
- self.pluginmanager._set_initial_conftests(early_config.known_args_namespace)
-
- def _initini(self, args):
- ns, unknown_args = self._parser.parse_known_and_unknown_args(args, namespace=self.option.copy())
- r = determine_setup(ns.inifilename, ns.file_or_dir + unknown_args, warnfunc=self.warn)
- self.rootdir, self.inifile, self.inicfg = r
- self._parser.extra_info['rootdir'] = self.rootdir
- self._parser.extra_info['inifile'] = self.inifile
- self.invocation_dir = py.path.local()
- self._parser.addini('addopts', 'extra command line options', 'args')
- self._parser.addini('minversion', 'minimally required pytest version')
- self._override_ini = ns.override_ini or ()
-
- def _consider_importhook(self, args):
- """Install the PEP 302 import hook if using assertion re-writing.
-
- Needs to parse the --assert=<mode> option from the commandline
- and find all the installed plugins to mark them for re-writing
- by the importhook.
- """
- ns, unknown_args = self._parser.parse_known_and_unknown_args(args)
- mode = ns.assertmode
- if mode == 'rewrite':
- try:
- hook = _pytest.assertion.install_importhook(self)
- except SystemError:
- mode = 'plain'
- else:
- # REMOVED FOR SPACK: This routine imports `pkg_resources` from
- # `setuptools`, but we do not need it for Spack. We have removed
- # it from Spack to avoid a dependency on setuptools.
- # self._mark_plugins_for_rewrite(hook)
- pass
- self._warn_about_missing_assertion(mode)
-
- def _warn_about_missing_assertion(self, mode):
- try:
- assert False
- except AssertionError:
- pass
- else:
- if mode == 'plain':
- sys.stderr.write("WARNING: ASSERTIONS ARE NOT EXECUTED"
- " and FAILING TESTS WILL PASS. Are you"
- " using python -O?")
- else:
- sys.stderr.write("WARNING: assertions not in test modules or"
- " plugins will be ignored"
- " because assert statements are not executed "
- "by the underlying Python interpreter "
- "(are you using python -O?)\n")
-
- def _preparse(self, args, addopts=True):
- self._initini(args)
- if addopts:
- args[:] = shlex.split(os.environ.get('PYTEST_ADDOPTS', '')) + args
- args[:] = self.getini("addopts") + args
- self._checkversion()
- self._consider_importhook(args)
- self.pluginmanager.consider_preparse(args)
-
- # REMOVED FOR SPACK: This routine imports `pkg_resources` from
- # `setuptools`, but we do not need it for Spack. We have removed
- # it from Spack to avoid a dependency on setuptools.
- # self.pluginmanager.load_setuptools_entrypoints('pytest11')
-
- self.pluginmanager.consider_env()
- self.known_args_namespace = ns = self._parser.parse_known_args(args, namespace=self.option.copy())
- if self.known_args_namespace.confcutdir is None and self.inifile:
- confcutdir = py.path.local(self.inifile).dirname
- self.known_args_namespace.confcutdir = confcutdir
- try:
- self.hook.pytest_load_initial_conftests(early_config=self,
- args=args, parser=self._parser)
- except ConftestImportFailure:
- e = sys.exc_info()[1]
- if ns.help or ns.version:
- # we don't want to prevent --help/--version from working,
- # so just let it pass and print a warning at the end
- self._warn("could not load initial conftests (%s)\n" % e.path)
- else:
- raise
-
- def _checkversion(self):
- import pytest
- minver = self.inicfg.get('minversion', None)
- if minver:
- ver = minver.split(".")
- myver = pytest.__version__.split(".")
- if myver < ver:
- raise pytest.UsageError(
- "%s:%d: requires pytest-%s, actual pytest-%s'" % (
- self.inicfg.config.path, self.inicfg.lineof('minversion'),
- minver, pytest.__version__))
-
- def parse(self, args, addopts=True):
- # parse given cmdline arguments into this config object.
- assert not hasattr(self, 'args'), (
- "can only parse cmdline args at most once per Config object")
- self._origargs = args
- self.hook.pytest_addhooks.call_historic(
- kwargs=dict(pluginmanager=self.pluginmanager))
- self._preparse(args, addopts=addopts)
- # XXX deprecated hook:
- self.hook.pytest_cmdline_preparse(config=self, args=args)
- self._parser.after_preparse = True
- try:
- args = self._parser.parse_setoption(args, self.option, namespace=self.option)
- if not args:
- cwd = os.getcwd()
- if cwd == self.rootdir:
- args = self.getini('testpaths')
- if not args:
- args = [cwd]
- self.args = args
- except PrintHelp:
- pass
-
- def addinivalue_line(self, name, line):
- """ add a line to an ini-file option. The option must have been
- declared but might not yet be set in which case the line becomes the
- the first line in its value. """
- x = self.getini(name)
- assert isinstance(x, list)
- x.append(line) # modifies the cached list inline
-
- def getini(self, name):
- """ return configuration value from an :ref:`ini file <inifiles>`. If the
- specified name hasn't been registered through a prior
- :py:func:`parser.addini <_pytest.config.Parser.addini>`
- call (usually from a plugin), a ValueError is raised. """
- try:
- return self._inicache[name]
- except KeyError:
- self._inicache[name] = val = self._getini(name)
- return val
-
- def _getini(self, name):
- try:
- description, type, default = self._parser._inidict[name]
- except KeyError:
- raise ValueError("unknown configuration value: %r" % (name,))
- value = self._get_override_ini_value(name)
- if value is None:
- try:
- value = self.inicfg[name]
- except KeyError:
- if default is not None:
- return default
- if type is None:
- return ''
- return []
- if type == "pathlist":
- dp = py.path.local(self.inicfg.config.path).dirpath()
- values = []
- for relpath in shlex.split(value):
- values.append(dp.join(relpath, abs=True))
- return values
- elif type == "args":
- return shlex.split(value)
- elif type == "linelist":
- return [x.strip() for x in value.split("\n") if x.strip()]
- elif type == "bool":
- return bool(_strtobool(value.strip()))
- else:
- assert type is None
- return value
-
- def _getconftest_pathlist(self, name, path):
- try:
- mod, relroots = self.pluginmanager._rget_with_confmod(name, path)
- except KeyError:
- return None
- modpath = py.path.local(mod.__file__).dirpath()
- values = []
- for relroot in relroots:
- if not isinstance(relroot, py.path.local):
- relroot = relroot.replace("/", py.path.local.sep)
- relroot = modpath.join(relroot, abs=True)
- values.append(relroot)
- return values
-
- def _get_override_ini_value(self, name):
- value = None
- # override_ini is a list of lists, to support both
- # -o foo1=bar1 foo2=bar2 and -o foo1=bar1 -o foo2=bar2 options;
- # always use the last item if multiple values are set for the same
- # ini-name, e.g. -o foo=bar1 -o foo=bar2 will set foo to bar2
- # (see the sketch after this class)
- for ini_config_list in self._override_ini:
- for ini_config in ini_config_list:
- try:
- (key, user_ini_value) = ini_config.split("=", 1)
- except ValueError:
- raise UsageError("-o/--override-ini expects option=value style.")
- if key == name:
- value = user_ini_value
- return value
-
- def getoption(self, name, default=notset, skip=False):
- """ return command line option value.
-
- :arg name: name of the option. You may also specify
- the literal ``--OPT`` option instead of the "dest" option name.
- :arg default: default value if no option of that name exists.
- :arg skip: if True raise pytest.skip if option does not exists
- or has a None value.
- """
- name = self._opt2dest.get(name, name)
- try:
- val = getattr(self.option, name)
- if val is None and skip:
- raise AttributeError(name)
- return val
- except AttributeError:
- if default is not notset:
- return default
- if skip:
- import pytest
- pytest.skip("no %r option found" % (name,))
- raise ValueError("no option named %r" % (name,))
-
- def getvalue(self, name, path=None):
- """ (deprecated, use getoption()) """
- return self.getoption(name)
-
- def getvalueorskip(self, name, path=None):
- """ (deprecated, use getoption(skip=True)) """
- return self.getoption(name, skip=True)
-
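-
-# Illustrative sketch of Config._get_override_ini_value(): -o values
-# accumulate as a list of lists and the last assignment for a name
-# wins. The `config` argument is assumed to be a Config instance.
-def _example_override_ini(config):
-    config._override_ini = [['foo=bar1'], ['foo=bar2', 'x=1']]
-    assert config._get_override_ini_value('foo') == 'bar2'
-    assert config._get_override_ini_value('x') == '1'
-    assert config._get_override_ini_value('missing') is None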
-
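-
-# Illustrative sketch of the Config.getoption() fallback chain on an
-# assumed Config instance: an unknown dest falls back to the given
-# default, and raises ValueError when no default is supplied.
-def _example_getoption(config):
-    assert config.getoption('no_such_option', default=42) == 42
-    try:
-        config.getoption('no_such_option')
-    except ValueError:
-        pass
-
-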
-def exists(path, ignore=EnvironmentError):
- try:
- return path.check()
- except ignore:
- return False
-
-
-def getcfg(args, warnfunc=None):
- """
- Search the list of arguments for a valid ini-file for pytest,
- and return a tuple of (rootdir, inifile, cfg-dict).
-
- note: warnfunc is an optional function used to warn
- about ini-files that use deprecated features.
- This parameter should be removed when pytest
- adopts standard deprecation warnings (#1804).
- """
- from _pytest.deprecated import SETUP_CFG_PYTEST
- inibasenames = ["pytest.ini", "tox.ini", "setup.cfg"]
- args = [x for x in args if not str(x).startswith("-")]
- if not args:
- args = [py.path.local()]
- for arg in args:
- arg = py.path.local(arg)
- for base in arg.parts(reverse=True):
- for inibasename in inibasenames:
- p = base.join(inibasename)
- if exists(p):
- iniconfig = py.iniconfig.IniConfig(p)
- if 'pytest' in iniconfig.sections:
- if inibasename == 'setup.cfg' and warnfunc:
- warnfunc('C1', SETUP_CFG_PYTEST)
- return base, p, iniconfig['pytest']
- if inibasename == 'setup.cfg' and 'tool:pytest' in iniconfig.sections:
- return base, p, iniconfig['tool:pytest']
- elif inibasename == "pytest.ini":
- # allowed to be empty
- return base, p, {}
- return None, None, None
-
-
-def get_common_ancestor(paths):
- common_ancestor = None
- for path in paths:
- if not path.exists():
- continue
- if common_ancestor is None:
- common_ancestor = path
- else:
- if path.relto(common_ancestor) or path == common_ancestor:
- continue
- elif common_ancestor.relto(path):
- common_ancestor = path
- else:
- shared = path.common(common_ancestor)
- if shared is not None:
- common_ancestor = shared
- if common_ancestor is None:
- common_ancestor = py.path.local()
- elif common_ancestor.isfile():
- common_ancestor = common_ancestor.dirpath()
- return common_ancestor
-
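-
-# Illustrative sketch: non-existing paths are skipped by
-# get_common_ancestor(), so a made-up child collapses to its existing
-# parent directory.
-def _example_get_common_ancestor():
-    here = py.path.local()  # current directory, guaranteed to exist
-    child = here.join('no-such-subdir')
-    assert get_common_ancestor([child, here]) == here
-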
-
-def get_dirs_from_args(args):
- def is_option(x):
- return str(x).startswith('-')
-
- def get_file_part_from_node_id(x):
- return str(x).split('::')[0]
-
- def get_dir_from_path(path):
- if path.isdir():
- return path
- return py.path.local(path.dirname)
-
- # These look like paths but may not exist
- possible_paths = (
- py.path.local(get_file_part_from_node_id(arg))
- for arg in args
- if not is_option(arg)
- )
-
- return [
- get_dir_from_path(path)
- for path in possible_paths
- if path.exists()
- ]
-
-
-def determine_setup(inifile, args, warnfunc=None):
- dirs = get_dirs_from_args(args)
- if inifile:
- iniconfig = py.iniconfig.IniConfig(inifile)
- try:
- inicfg = iniconfig["pytest"]
- except KeyError:
- inicfg = None
- rootdir = get_common_ancestor(dirs)
- else:
- ancestor = get_common_ancestor(dirs)
- rootdir, inifile, inicfg = getcfg([ancestor], warnfunc=warnfunc)
- if rootdir is None:
- for rootdir in ancestor.parts(reverse=True):
- if rootdir.join("setup.py").exists():
- break
- else:
- rootdir, inifile, inicfg = getcfg(dirs, warnfunc=warnfunc)
- if rootdir is None:
- rootdir = get_common_ancestor([py.path.local(), ancestor])
- is_fs_root = os.path.splitdrive(str(rootdir))[1] == '/'
- if is_fs_root:
- rootdir = ancestor
- return rootdir, inifile, inicfg or {}
-
-
-def setns(obj, dic):
- import pytest
- for name, value in dic.items():
- if isinstance(value, dict):
- mod = getattr(obj, name, None)
- if mod is None:
- modname = "pytest.%s" % name
- mod = types.ModuleType(modname)
- sys.modules[modname] = mod
- mod.__all__ = []
- setattr(obj, name, mod)
- obj.__all__.append(name)
- setns(mod, value)
- else:
- setattr(obj, name, value)
- obj.__all__.append(name)
- # if obj != pytest:
- # pytest.__all__.append(name)
- setattr(pytest, name, value)
-
-
-def create_terminal_writer(config, *args, **kwargs):
- """Create a TerminalWriter instance configured according to the options
- in the config object. Any code that requires a TerminalWriter object
- and has access to a config object should use this function.
- """
- tw = py.io.TerminalWriter(*args, **kwargs)
- if config.option.color == 'yes':
- tw.hasmarkup = True
- elif config.option.color == 'no':
- tw.hasmarkup = False
- return tw
-
-
-def _strtobool(val):
- """Convert a string representation of truth to true (1) or false (0).
-
- True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
- are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if
- 'val' is anything else.
-
- .. note:: copied from distutils.util
- """
- val = val.lower()
- if val in ('y', 'yes', 't', 'true', 'on', '1'):
- return 1
- elif val in ('n', 'no', 'f', 'false', 'off', '0'):
- return 0
- else:
- raise ValueError("invalid truth value %r" % (val,))
diff --git a/lib/spack/external/pytest-fallback/_pytest/debugging.py b/lib/spack/external/pytest-fallback/_pytest/debugging.py
deleted file mode 100644
index aa9c9a3863..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/debugging.py
+++ /dev/null
@@ -1,123 +0,0 @@
-""" interactive debugging with PDB, the Python Debugger. """
-from __future__ import absolute_import, division, print_function
-import pdb
-import sys
-
-
-def pytest_addoption(parser):
- group = parser.getgroup("general")
- group._addoption(
- '--pdb', dest="usepdb", action="store_true",
- help="start the interactive Python debugger on errors.")
- group._addoption(
- '--pdbcls', dest="usepdb_cls", metavar="modulename:classname",
- help="start a custom interactive Python debugger on errors. "
- "For example: --pdbcls=IPython.terminal.debugger:TerminalPdb")
-
-
-def pytest_configure(config):
- if config.getvalue("usepdb_cls"):
- modname, classname = config.getvalue("usepdb_cls").split(":")
- __import__(modname)
- pdb_cls = getattr(sys.modules[modname], classname)
- else:
- pdb_cls = pdb.Pdb
-
- if config.getvalue("usepdb"):
- config.pluginmanager.register(PdbInvoke(), 'pdbinvoke')
-
- old = (pdb.set_trace, pytestPDB._pluginmanager)
-
- def fin():
- pdb.set_trace, pytestPDB._pluginmanager = old
- pytestPDB._config = None
- pytestPDB._pdb_cls = pdb.Pdb
-
- pdb.set_trace = pytestPDB.set_trace
- pytestPDB._pluginmanager = config.pluginmanager
- pytestPDB._config = config
- pytestPDB._pdb_cls = pdb_cls
- config._cleanup.append(fin)
-
-
-class pytestPDB:
- """ Pseudo PDB that defers to the real pdb. """
- _pluginmanager = None
- _config = None
- _pdb_cls = pdb.Pdb
-
- @classmethod
- def set_trace(cls):
- """ invoke PDB set_trace debugging, dropping any IO capturing. """
- import _pytest.config
- frame = sys._getframe().f_back
- if cls._pluginmanager is not None:
- capman = cls._pluginmanager.getplugin("capturemanager")
- if capman:
- capman.suspendcapture(in_=True)
- tw = _pytest.config.create_terminal_writer(cls._config)
- tw.line()
- tw.sep(">", "PDB set_trace (IO-capturing turned off)")
- cls._pluginmanager.hook.pytest_enter_pdb(config=cls._config)
- cls._pdb_cls().set_trace(frame)
-
-
-class PdbInvoke:
- def pytest_exception_interact(self, node, call, report):
- capman = node.config.pluginmanager.getplugin("capturemanager")
- if capman:
- out, err = capman.suspendcapture(in_=True)
- sys.stdout.write(out)
- sys.stdout.write(err)
- _enter_pdb(node, call.excinfo, report)
-
- def pytest_internalerror(self, excrepr, excinfo):
- for line in str(excrepr).split("\n"):
- sys.stderr.write("INTERNALERROR> %s\n" % line)
- sys.stderr.flush()
- tb = _postmortem_traceback(excinfo)
- post_mortem(tb)
-
-
-def _enter_pdb(node, excinfo, rep):
- # XXX we re-use the TerminalReporter's terminalwriter
- # because this seems to avoid some encoding related troubles
- # for not completely clear reasons.
- tw = node.config.pluginmanager.getplugin("terminalreporter")._tw
- tw.line()
- tw.sep(">", "traceback")
- rep.toterminal(tw)
- tw.sep(">", "entering PDB")
- tb = _postmortem_traceback(excinfo)
- post_mortem(tb)
- rep._pdbshown = True
- return rep
-
-
-def _postmortem_traceback(excinfo):
- # A doctest.UnexpectedException is not useful for post_mortem.
- # Use the underlying exception instead:
- from doctest import UnexpectedException
- if isinstance(excinfo.value, UnexpectedException):
- return excinfo.value.exc_info[2]
- else:
- return excinfo._excinfo[2]
-
-
-def _find_last_non_hidden_frame(stack):
- i = max(0, len(stack) - 1)
- while i and stack[i][0].f_locals.get("__tracebackhide__", False):
- i -= 1
- return i
-
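-
-# Illustrative sketch: frames whose locals set __tracebackhide__ are
-# skipped from the end of the stack. The stand-in class below is
-# hypothetical, mimicking pdb-style (frame, lineno) entries.
-def _example_find_last_non_hidden_frame():
-    class Frame(object):
-        def __init__(self, hidden):
-            self.f_locals = {'__tracebackhide__': hidden}
-    stack = [(Frame(False), 1), (Frame(False), 2), (Frame(True), 3)]
-    assert _find_last_non_hidden_frame(stack) == 1
-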
-
-def post_mortem(t):
- class Pdb(pytestPDB._pdb_cls):
- def get_stack(self, f, t):
- stack, i = pdb.Pdb.get_stack(self, f, t)
- if f is None:
- i = _find_last_non_hidden_frame(stack)
- return stack, i
- p = Pdb()
- p.reset()
- p.interaction(None, t)
diff --git a/lib/spack/external/pytest-fallback/_pytest/deprecated.py b/lib/spack/external/pytest-fallback/_pytest/deprecated.py
deleted file mode 100644
index 38e9496778..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/deprecated.py
+++ /dev/null
@@ -1,42 +0,0 @@
-"""
-This module contains deprecation messages and bits of code used elsewhere in the codebase
-that are planned to be removed in the next pytest release.
-
-Keeping it in a central location makes it easy to track what is deprecated and should
-be removed when the time comes.
-"""
-from __future__ import absolute_import, division, print_function
-
-
-class RemovedInPytest4Warning(DeprecationWarning):
- """warning class for features removed in pytest 4.0"""
-
-
-MAIN_STR_ARGS = 'passing a string to pytest.main() is deprecated, ' \
- 'pass a list of arguments instead.'
-
-YIELD_TESTS = 'yield tests are deprecated, and scheduled to be removed in pytest 4.0'
-
-FUNCARG_PREFIX = (
- '{name}: declaring fixtures using "pytest_funcarg__" prefix is deprecated '
- 'and scheduled to be removed in pytest 4.0. '
- 'Please remove the prefix and use the @pytest.fixture decorator instead.')
-
-SETUP_CFG_PYTEST = '[pytest] section in setup.cfg files is deprecated, use [tool:pytest] instead.'
-
-GETFUNCARGVALUE = "use of getfuncargvalue is deprecated, use getfixturevalue"
-
-RESULT_LOG = (
- '--result-log is deprecated and scheduled for removal in pytest 4.0.\n'
- 'See https://docs.pytest.org/en/latest/usage.html#creating-resultlog-format-files for more information.'
-)
-
-MARK_INFO_ATTRIBUTE = RemovedInPytest4Warning(
- "MarkInfo objects are deprecated as they contain the merged marks"
-)
-
-MARK_PARAMETERSET_UNPACKING = RemovedInPytest4Warning(
- "Applying marks directly to parameters is deprecated,"
- " please use pytest.param(..., marks=...) instead.\n"
- "For more details, see: https://docs.pytest.org/en/latest/parametrize.html"
-)
diff --git a/lib/spack/external/pytest-fallback/_pytest/doctest.py b/lib/spack/external/pytest-fallback/_pytest/doctest.py
deleted file mode 100644
index 4c05acddf7..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/doctest.py
+++ /dev/null
@@ -1,362 +0,0 @@
-""" discover and run doctests in modules and test files."""
-from __future__ import absolute_import, division, print_function
-
-import traceback
-
-import pytest
-from _pytest._code.code import ExceptionInfo, ReprFileLocation, TerminalRepr
-from _pytest.fixtures import FixtureRequest
-
-
-DOCTEST_REPORT_CHOICE_NONE = 'none'
-DOCTEST_REPORT_CHOICE_CDIFF = 'cdiff'
-DOCTEST_REPORT_CHOICE_NDIFF = 'ndiff'
-DOCTEST_REPORT_CHOICE_UDIFF = 'udiff'
-DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE = 'only_first_failure'
-
-DOCTEST_REPORT_CHOICES = (
- DOCTEST_REPORT_CHOICE_NONE,
- DOCTEST_REPORT_CHOICE_CDIFF,
- DOCTEST_REPORT_CHOICE_NDIFF,
- DOCTEST_REPORT_CHOICE_UDIFF,
- DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE,
-)
-
-
-def pytest_addoption(parser):
- parser.addini('doctest_optionflags', 'option flags for doctests',
- type="args", default=["ELLIPSIS"])
- parser.addini("doctest_encoding", 'encoding used for doctest files', default="utf-8")
- group = parser.getgroup("collect")
- group.addoption("--doctest-modules",
- action="store_true", default=False,
- help="run doctests in all .py modules",
- dest="doctestmodules")
- group.addoption("--doctest-report",
- type=str.lower, default="udiff",
- help="choose another output format for diffs on doctest failure",
- choices=DOCTEST_REPORT_CHOICES,
- dest="doctestreport")
- group.addoption("--doctest-glob",
- action="append", default=[], metavar="pat",
- help="doctests file matching pattern, default: test*.txt",
- dest="doctestglob")
- group.addoption("--doctest-ignore-import-errors",
- action="store_true", default=False,
- help="ignore doctest ImportErrors",
- dest="doctest_ignore_import_errors")
-
-
-def pytest_collect_file(path, parent):
- config = parent.config
- if path.ext == ".py":
- if config.option.doctestmodules:
- return DoctestModule(path, parent)
- elif _is_doctest(config, path, parent):
- return DoctestTextfile(path, parent)
-
-
-def _is_doctest(config, path, parent):
- if path.ext in ('.txt', '.rst') and parent.session.isinitpath(path):
- return True
- globs = config.getoption("doctestglob") or ['test*.txt']
- for glob in globs:
- if path.check(fnmatch=glob):
- return True
- return False
-
-
-class ReprFailDoctest(TerminalRepr):
-
- def __init__(self, reprlocation, lines):
- self.reprlocation = reprlocation
- self.lines = lines
-
- def toterminal(self, tw):
- for line in self.lines:
- tw.line(line)
- self.reprlocation.toterminal(tw)
-
-
-class DoctestItem(pytest.Item):
- def __init__(self, name, parent, runner=None, dtest=None):
- super(DoctestItem, self).__init__(name, parent)
- self.runner = runner
- self.dtest = dtest
- self.obj = None
- self.fixture_request = None
-
- def setup(self):
- if self.dtest is not None:
- self.fixture_request = _setup_fixtures(self)
- globs = dict(getfixture=self.fixture_request.getfixturevalue)
- for name, value in self.fixture_request.getfixturevalue('doctest_namespace').items():
- globs[name] = value
- self.dtest.globs.update(globs)
-
- def runtest(self):
- _check_all_skipped(self.dtest)
- self.runner.run(self.dtest)
-
- def repr_failure(self, excinfo):
- import doctest
- if excinfo.errisinstance((doctest.DocTestFailure,
- doctest.UnexpectedException)):
- doctestfailure = excinfo.value
- example = doctestfailure.example
- test = doctestfailure.test
- filename = test.filename
- if test.lineno is None:
- lineno = None
- else:
- lineno = test.lineno + example.lineno + 1
- message = excinfo.type.__name__
- reprlocation = ReprFileLocation(filename, lineno, message)
- checker = _get_checker()
- report_choice = _get_report_choice(self.config.getoption("doctestreport"))
- if lineno is not None:
- lines = doctestfailure.test.docstring.splitlines(False)
- # add line numbers to the left of the error message
- lines = ["%03d %s" % (i + test.lineno + 1, x)
- for (i, x) in enumerate(lines)]
- # trim docstring error lines to 10
- lines = lines[max(example.lineno - 9, 0):example.lineno + 1]
- else:
- lines = ['EXAMPLE LOCATION UNKNOWN, not showing all tests of that example']
- indent = '>>>'
- for line in example.source.splitlines():
- lines.append('??? %s %s' % (indent, line))
- indent = '...'
- if excinfo.errisinstance(doctest.DocTestFailure):
- lines += checker.output_difference(example,
- doctestfailure.got, report_choice).split("\n")
- else:
- inner_excinfo = ExceptionInfo(excinfo.value.exc_info)
- lines += ["UNEXPECTED EXCEPTION: %s" %
- repr(inner_excinfo.value)]
- lines += traceback.format_exception(*excinfo.value.exc_info)
- return ReprFailDoctest(reprlocation, lines)
- else:
- return super(DoctestItem, self).repr_failure(excinfo)
-
- def reportinfo(self):
- return self.fspath, self.dtest.lineno, "[doctest] %s" % self.name
-
-
-def _get_flag_lookup():
- import doctest
- return dict(DONT_ACCEPT_TRUE_FOR_1=doctest.DONT_ACCEPT_TRUE_FOR_1,
- DONT_ACCEPT_BLANKLINE=doctest.DONT_ACCEPT_BLANKLINE,
- NORMALIZE_WHITESPACE=doctest.NORMALIZE_WHITESPACE,
- ELLIPSIS=doctest.ELLIPSIS,
- IGNORE_EXCEPTION_DETAIL=doctest.IGNORE_EXCEPTION_DETAIL,
- COMPARISON_FLAGS=doctest.COMPARISON_FLAGS,
- ALLOW_UNICODE=_get_allow_unicode_flag(),
- ALLOW_BYTES=_get_allow_bytes_flag(),
- )
-
-
-def get_optionflags(parent):
- optionflags_str = parent.config.getini("doctest_optionflags")
- flag_lookup_table = _get_flag_lookup()
- flag_acc = 0
- for flag in optionflags_str:
- flag_acc |= flag_lookup_table[flag]
- return flag_acc
-
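-
-# Illustrative sketch: get_optionflags() ORs the named doctest flags
-# into a single flag word, as reproduced here for two standard names.
-def _example_optionflags():
-    import doctest
-    lookup = _get_flag_lookup()
-    acc = 0
-    for name in ('ELLIPSIS', 'NORMALIZE_WHITESPACE'):
-        acc |= lookup[name]
-    assert acc == doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE
-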
-
-class DoctestTextfile(pytest.Module):
- obj = None
-
- def collect(self):
- import doctest
-
- # inspired by doctest.testfile; ideally we would use it directly,
- # but it doesn't support passing a custom checker
- encoding = self.config.getini("doctest_encoding")
- text = self.fspath.read_text(encoding)
- filename = str(self.fspath)
- name = self.fspath.basename
- globs = {'__name__': '__main__'}
-
- optionflags = get_optionflags(self)
- runner = doctest.DebugRunner(verbose=0, optionflags=optionflags,
- checker=_get_checker())
- _fix_spoof_python2(runner, encoding)
-
- parser = doctest.DocTestParser()
- test = parser.get_doctest(text, globs, name, filename, 0)
- if test.examples:
- yield DoctestItem(test.name, self, runner, test)
-
-
-def _check_all_skipped(test):
- """raises pytest.skip() if all examples in the given DocTest have the SKIP
- option set.
- """
- import doctest
- all_skipped = all(x.options.get(doctest.SKIP, False) for x in test.examples)
- if all_skipped:
- pytest.skip('all tests skipped by +SKIP option')
-
-
-class DoctestModule(pytest.Module):
- def collect(self):
- import doctest
- if self.fspath.basename == "conftest.py":
- module = self.config.pluginmanager._importconftest(self.fspath)
- else:
- try:
- module = self.fspath.pyimport()
- except ImportError:
- if self.config.getvalue('doctest_ignore_import_errors'):
- pytest.skip('unable to import module %r' % self.fspath)
- else:
- raise
- # uses internal doctest module parsing mechanism
- finder = doctest.DocTestFinder()
- optionflags = get_optionflags(self)
- runner = doctest.DebugRunner(verbose=0, optionflags=optionflags,
- checker=_get_checker())
-
- for test in finder.find(module, module.__name__):
- if test.examples: # skip empty doctests
- yield DoctestItem(test.name, self, runner, test)
-
-
-def _setup_fixtures(doctest_item):
- """
- Used by DoctestTextfile and DoctestItem to set up fixture information.
- """
- def func():
- pass
-
- doctest_item.funcargs = {}
- fm = doctest_item.session._fixturemanager
- doctest_item._fixtureinfo = fm.getfixtureinfo(node=doctest_item, func=func,
- cls=None, funcargs=False)
- fixture_request = FixtureRequest(doctest_item)
- fixture_request._fillfixtures()
- return fixture_request
-
-
-def _get_checker():
- """
- Returns a doctest.OutputChecker subclass that takes into account the
- ALLOW_UNICODE option to ignore u'' prefixes in strings and ALLOW_BYTES
- to strip b'' prefixes.
- Useful when the same doctest should run in Python 2 and Python 3.
-
- An inner class is used to avoid importing "doctest" at the module
- level.
- """
- if hasattr(_get_checker, 'LiteralsOutputChecker'):
- return _get_checker.LiteralsOutputChecker()
-
- import doctest
- import re
-
- class LiteralsOutputChecker(doctest.OutputChecker):
- """
- Copied from doctest_nose_plugin.py from the nltk project:
- https://github.com/nltk/nltk
-
- Further extended to also support byte literals.
- """
-
- _unicode_literal_re = re.compile(r"(\W|^)[uU]([rR]?[\'\"])", re.UNICODE)
- _bytes_literal_re = re.compile(r"(\W|^)[bB]([rR]?[\'\"])", re.UNICODE)
-
- def check_output(self, want, got, optionflags):
- res = doctest.OutputChecker.check_output(self, want, got,
- optionflags)
- if res:
- return True
-
- allow_unicode = optionflags & _get_allow_unicode_flag()
- allow_bytes = optionflags & _get_allow_bytes_flag()
- if not allow_unicode and not allow_bytes:
- return False
-
- else: # pragma: no cover
- def remove_prefixes(regex, txt):
- return re.sub(regex, r'\1\2', txt)
-
- if allow_unicode:
- want = remove_prefixes(self._unicode_literal_re, want)
- got = remove_prefixes(self._unicode_literal_re, got)
- if allow_bytes:
- want = remove_prefixes(self._bytes_literal_re, want)
- got = remove_prefixes(self._bytes_literal_re, got)
- res = doctest.OutputChecker.check_output(self, want, got,
- optionflags)
- return res
-
- _get_checker.LiteralsOutputChecker = LiteralsOutputChecker
- return _get_checker.LiteralsOutputChecker()
-
-
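-# Illustrative sketch: with the ALLOW_UNICODE flag set, the checker
-# above treats u'' prefixes in the expected output as optional.
-def _example_allow_unicode_checker():
-    checker = _get_checker()
-    flags = _get_allow_unicode_flag()
-    assert checker.check_output("u'x'\n", "'x'\n", flags)
-
-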
-def _get_allow_unicode_flag():
- """
- Registers and returns the ALLOW_UNICODE flag.
- """
- import doctest
- return doctest.register_optionflag('ALLOW_UNICODE')
-
-
-def _get_allow_bytes_flag():
- """
- Registers and returns the ALLOW_BYTES flag.
- """
- import doctest
- return doctest.register_optionflag('ALLOW_BYTES')
-
-
-def _get_report_choice(key):
- """
- This function returns the actual `doctest` module flag value; we want to do it as late
- as possible to avoid importing `doctest` and all its dependencies when parsing options,
- as it adds overhead and breaks tests.
- """
- import doctest
-
- return {
- DOCTEST_REPORT_CHOICE_UDIFF: doctest.REPORT_UDIFF,
- DOCTEST_REPORT_CHOICE_CDIFF: doctest.REPORT_CDIFF,
- DOCTEST_REPORT_CHOICE_NDIFF: doctest.REPORT_NDIFF,
- DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE: doctest.REPORT_ONLY_FIRST_FAILURE,
- DOCTEST_REPORT_CHOICE_NONE: 0,
- }[key]
-
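-
-# Illustrative sketch: the mapping above resolves --doctest-report
-# choices lazily; 'none' deliberately maps to 0, i.e. no REPORT_* flag.
-def _example_report_choice():
-    import doctest
-    assert _get_report_choice('udiff') == doctest.REPORT_UDIFF
-    assert _get_report_choice('none') == 0
-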
-
-def _fix_spoof_python2(runner, encoding):
- """
- Installs a "SpoofOut" into the given DebugRunner so it properly deals with unicode output. This
- should patch only doctests for text files because they don't have a way to declare their
- encoding. Doctests in docstrings from Python modules don't have the same problem given that
- Python already decoded the strings.
-
- This fixes the problem reported in issue #2434.
- """
- from _pytest.compat import _PY2
- if not _PY2:
- return
-
- from doctest import _SpoofOut
-
- class UnicodeSpoof(_SpoofOut):
-
- def getvalue(self):
- result = _SpoofOut.getvalue(self)
- if encoding:
- result = result.decode(encoding)
- return result
-
- runner._fakeout = UnicodeSpoof()
-
-
-@pytest.fixture(scope='session')
-def doctest_namespace():
- """
- Inject names into the doctest namespace.
- """
- return dict()
diff --git a/lib/spack/external/pytest-fallback/_pytest/fixtures.py b/lib/spack/external/pytest-fallback/_pytest/fixtures.py
deleted file mode 100644
index 7ad495615e..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/fixtures.py
+++ /dev/null
@@ -1,1135 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-import collections
-import inspect
-import sys
-import warnings
-
-import py
-from py._code.code import FormattedExcinfo
-
-import _pytest
-from _pytest import nodes
-from _pytest._code.code import TerminalRepr
-from _pytest.compat import (
- NOTSET, exc_clear, _format_args,
- getfslineno, get_real_func,
- is_generator, isclass, getimfunc,
- getlocation, getfuncargnames,
- safe_getattr,
- FuncargnamesCompatAttr,
-)
-from _pytest.outcomes import fail, TEST_OUTCOME
-
-
-def pytest_sessionstart(session):
- import _pytest.python
- scopename2class.update({
- 'class': _pytest.python.Class,
- 'module': _pytest.python.Module,
- 'function': _pytest.main.Item,
- })
- session._fixturemanager = FixtureManager(session)
-
-
-scopename2class = {}
-
-
-scope2props = dict(session=())
-scope2props["module"] = ("fspath", "module")
-scope2props["class"] = scope2props["module"] + ("cls",)
-scope2props["instance"] = scope2props["class"] + ("instance", )
-scope2props["function"] = scope2props["instance"] + ("function", "keywords")
-
-
-def scopeproperty(name=None, doc=None):
- def decoratescope(func):
- scopename = name or func.__name__
-
- def provide(self):
- if func.__name__ in scope2props[self.scope]:
- return func(self)
- raise AttributeError("%s not available in %s-scoped context" % (
- scopename, self.scope))
-
- return property(provide, None, None, func.__doc__)
- return decoratescope
-
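-
-# Illustrative sketch: each scope inherits the broader scope's
-# properties and adds its own, so "cls" is available on function-scoped
-# requests but not on module-scoped ones.
-def _example_scope2props():
-    assert 'module' in scope2props['module']
-    assert 'cls' not in scope2props['module']
-    assert 'cls' in scope2props['function']
-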
-
-def get_scope_node(node, scope):
- cls = scopename2class.get(scope)
- if cls is None:
- if scope == "session":
- return node.session
- raise ValueError("unknown scope")
- return node.getparent(cls)
-
-
-def add_funcarg_pseudo_fixture_def(collector, metafunc, fixturemanager):
- # this function will transform all collected calls to functions
- # if they use direct funcargs (i.e. direct parametrization)
- # because we want later test execution to be able to rely on
- # an existing FixtureDef structure for all arguments.
- # XXX we can probably avoid this algorithm if we modify CallSpec2
- # to directly care for creating the fixturedefs within its methods.
- if not metafunc._calls[0].funcargs:
- return # this function call does not have direct parametrization
- # collect funcargs of all callspecs into a list of values
- arg2params = {}
- arg2scope = {}
- for callspec in metafunc._calls:
- for argname, argvalue in callspec.funcargs.items():
- assert argname not in callspec.params
- callspec.params[argname] = argvalue
- arg2params_list = arg2params.setdefault(argname, [])
- callspec.indices[argname] = len(arg2params_list)
- arg2params_list.append(argvalue)
- if argname not in arg2scope:
- scopenum = callspec._arg2scopenum.get(argname,
- scopenum_function)
- arg2scope[argname] = scopes[scopenum]
- callspec.funcargs.clear()
-
- # register artificial FixtureDef's so that later at test execution
- # time we can rely on a proper FixtureDef to exist for fixture setup.
- arg2fixturedefs = metafunc._arg2fixturedefs
- for argname, valuelist in arg2params.items():
- # if we have a scope that is higher than function we need
- # to make sure we only ever create an according fixturedef on
- # a per-scope basis. We thus store and cache the fixturedef on the
- # node related to the scope.
- scope = arg2scope[argname]
- node = None
- if scope != "function":
- node = get_scope_node(collector, scope)
- if node is None:
- assert scope == "class" and isinstance(collector, _pytest.python.Module)
- # use module-level collector for class-scope (for now)
- node = collector
- if node and argname in node._name2pseudofixturedef:
- arg2fixturedefs[argname] = [node._name2pseudofixturedef[argname]]
- else:
- fixturedef = FixtureDef(fixturemanager, '', argname,
- get_direct_param_fixture_func,
- arg2scope[argname],
- valuelist, False, False)
- arg2fixturedefs[argname] = [fixturedef]
- if node is not None:
- node._name2pseudofixturedef[argname] = fixturedef
-
-
-def getfixturemarker(obj):
- """ return fixturemarker or None if it doesn't exist or raised
- exceptions."""
- try:
- return getattr(obj, "_pytestfixturefunction", None)
- except TEST_OUTCOME:
- # some objects raise errors like request (from flask import request)
- # we don't expect them to be fixture functions
- return None
-
-
-def get_parametrized_fixture_keys(item, scopenum):
- """ return list of keys for all parametrized arguments which match
- the specified scope. """
- assert scopenum < scopenum_function # function
- try:
- cs = item.callspec
- except AttributeError:
- pass
- else:
- # cs.indices.items() is in random order of argnames. Need to
- # sort this so that different calls to
- # get_parametrized_fixture_keys will be deterministic.
- for argname, param_index in sorted(cs.indices.items()):
- if cs._arg2scopenum[argname] != scopenum:
- continue
- if scopenum == 0: # session
- key = (argname, param_index)
- elif scopenum == 1: # module
- key = (argname, param_index, item.fspath)
- elif scopenum == 2: # class
- key = (argname, param_index, item.fspath, item.cls)
- yield key
-
-
-# algorithm for sorting on a per-parametrized resource setup basis
-# it is called for scopenum==0 (session) first and performs sorting
-# down to the lower scopes so as to minimize the number of "high scope"
-# setups and teardowns
-
-def reorder_items(items):
- argkeys_cache = {}
- for scopenum in range(0, scopenum_function):
- argkeys_cache[scopenum] = d = {}
- for item in items:
- keys = collections.OrderedDict.fromkeys(get_parametrized_fixture_keys(item, scopenum))
- if keys:
- d[item] = keys
- return reorder_items_atscope(items, set(), argkeys_cache, 0)
-
-
-def reorder_items_atscope(items, ignore, argkeys_cache, scopenum):
- if scopenum >= scopenum_function or len(items) < 3:
- return items
- items_done = []
- while 1:
- items_before, items_same, items_other, newignore = \
- slice_items(items, ignore, argkeys_cache[scopenum])
- items_before = reorder_items_atscope(
- items_before, ignore, argkeys_cache, scopenum + 1)
- if items_same is None:
- # nothing to reorder in this scope
- assert items_other is None
- return items_done + items_before
- items_done.extend(items_before)
- items = items_same + items_other
- ignore = newignore
-
-
-def slice_items(items, ignore, scoped_argkeys_cache):
- # we pick the first item which uses a fixture instance in the
- # requested scope and which we haven't seen yet. We slice the input
- # items list into a list of items_before, items_same and
- # items_other
- if scoped_argkeys_cache: # do we need to do work at all?
- it = iter(items)
- # first find a slicing key
- for i, item in enumerate(it):
- argkeys = scoped_argkeys_cache.get(item)
- if argkeys is not None:
- newargkeys = collections.OrderedDict.fromkeys(k for k in argkeys if k not in ignore)
- if newargkeys: # found a slicing key
- slicing_argkey, _ = newargkeys.popitem()
- items_before = items[:i]
- items_same = [item]
- items_other = []
- # now slice the remainder of the list
- for item in it:
- argkeys = scoped_argkeys_cache.get(item)
- if argkeys and slicing_argkey in argkeys and \
- slicing_argkey not in ignore:
- items_same.append(item)
- else:
- items_other.append(item)
- newignore = ignore.copy()
- newignore.add(slicing_argkey)
- return (items_before, items_same, items_other, newignore)
- return items, None, None, None
-
-
-def fillfixtures(function):
- """ fill missing funcargs for a test function. """
- try:
- request = function._request
- except AttributeError:
- # XXX this special code path is only expected to execute
- # with the oejskit plugin. It uses classes with funcargs
- # and we thus have to work a bit to allow this.
- fm = function.session._fixturemanager
- fi = fm.getfixtureinfo(function.parent, function.obj, None)
- function._fixtureinfo = fi
- request = function._request = FixtureRequest(function)
- request._fillfixtures()
- # prune out funcargs for jstests
- newfuncargs = {}
- for name in fi.argnames:
- newfuncargs[name] = function.funcargs[name]
- function.funcargs = newfuncargs
- else:
- request._fillfixtures()
-
-
-def get_direct_param_fixture_func(request):
- return request.param
-
-
-class FuncFixtureInfo:
- def __init__(self, argnames, names_closure, name2fixturedefs):
- self.argnames = argnames
- self.names_closure = names_closure
- self.name2fixturedefs = name2fixturedefs
-
-
-class FixtureRequest(FuncargnamesCompatAttr):
- """ A request for a fixture from a test or fixture function.
-
- A request object gives access to the requesting test context
- and has an optional ``param`` attribute in case
- the fixture is parametrized indirectly.
- """
-
- def __init__(self, pyfuncitem):
- self._pyfuncitem = pyfuncitem
- #: fixture for which this request is being performed
- self.fixturename = None
- #: Scope string, one of "function", "class", "module", "session"
- self.scope = "function"
- self._fixture_values = {} # argname -> fixture value
- self._fixture_defs = {} # argname -> FixtureDef
- fixtureinfo = pyfuncitem._fixtureinfo
- self._arg2fixturedefs = fixtureinfo.name2fixturedefs.copy()
- self._arg2index = {}
- self._fixturemanager = pyfuncitem.session._fixturemanager
-
- @property
- def fixturenames(self):
- # backward incompatible note: now a readonly property
- return list(self._pyfuncitem._fixtureinfo.names_closure)
-
- @property
- def node(self):
- """ underlying collection node (depends on current request scope)"""
- return self._getscopeitem(self.scope)
-
- def _getnextfixturedef(self, argname):
- fixturedefs = self._arg2fixturedefs.get(argname, None)
- if fixturedefs is None:
- # we arrive here because of a dynamic call to
- # getfixturevalue(argname) usage which was naturally
- # not known at parsing/collection time
- parentid = self._pyfuncitem.parent.nodeid
- fixturedefs = self._fixturemanager.getfixturedefs(argname, parentid)
- self._arg2fixturedefs[argname] = fixturedefs
- # fixturedefs list is immutable so we maintain a decreasing index
- index = self._arg2index.get(argname, 0) - 1
- if fixturedefs is None or (-index > len(fixturedefs)):
- raise FixtureLookupError(argname, self)
- self._arg2index[argname] = index
- return fixturedefs[index]
-
- @property
- def config(self):
- """ the pytest config object associated with this request. """
- return self._pyfuncitem.config
-
- @scopeproperty()
- def function(self):
- """ test function object if the request has a per-function scope. """
- return self._pyfuncitem.obj
-
- @scopeproperty("class")
- def cls(self):
- """ class (can be None) where the test function was collected. """
- clscol = self._pyfuncitem.getparent(_pytest.python.Class)
- if clscol:
- return clscol.obj
-
- @property
- def instance(self):
- """ instance (can be None) on which test function was collected. """
- # unittest support hack, see _pytest.unittest.TestCaseFunction
- try:
- return self._pyfuncitem._testcase
- except AttributeError:
- function = getattr(self, "function", None)
- if function is not None:
- return py.builtin._getimself(function)
-
- @scopeproperty()
- def module(self):
- """ python module object where the test function was collected. """
- return self._pyfuncitem.getparent(_pytest.python.Module).obj
-
- @scopeproperty()
- def fspath(self):
- """ the file system path of the test module which collected this test. """
- return self._pyfuncitem.fspath
-
- @property
- def keywords(self):
- """ keywords/markers dictionary for the underlying node. """
- return self.node.keywords
-
- @property
- def session(self):
- """ pytest session object. """
- return self._pyfuncitem.session
-
- def addfinalizer(self, finalizer):
- """ add finalizer/teardown function to be called after the
- last test within the requesting test context finished
- execution. """
- # XXX usually this method is shadowed by fixturedef specific ones
- self._addfinalizer(finalizer, scope=self.scope)
-
- def _addfinalizer(self, finalizer, scope):
- colitem = self._getscopeitem(scope)
- self._pyfuncitem.session._setupstate.addfinalizer(
- finalizer=finalizer, colitem=colitem)
-
- def applymarker(self, marker):
- """ Apply a marker to a single test function invocation.
- This method is useful if you don't want to have a keyword/marker
- on all function invocations.
-
- :arg marker: a :py:class:`_pytest.mark.MarkDecorator` object
- created by a call to ``pytest.mark.NAME(...)``.
- """
- try:
- self.node.keywords[marker.markname] = marker
- except AttributeError:
- raise ValueError(marker)
-
- def raiseerror(self, msg):
- """ raise a FixtureLookupError with the given message. """
- raise self._fixturemanager.FixtureLookupError(None, self, msg)
-
- def _fillfixtures(self):
- item = self._pyfuncitem
- fixturenames = getattr(item, "fixturenames", self.fixturenames)
- for argname in fixturenames:
- if argname not in item.funcargs:
- item.funcargs[argname] = self.getfixturevalue(argname)
-
- def cached_setup(self, setup, teardown=None, scope="module", extrakey=None):
- """ (deprecated) Return a testing resource managed by ``setup`` &
- ``teardown`` calls. ``scope`` and ``extrakey`` determine when the
- ``teardown`` function will be called so that subsequent calls to
- ``setup`` would recreate the resource. With pytest-2.3 you often
- do not need ``cached_setup()`` as you can directly declare a scope
- on a fixture function and register a finalizer through
- ``request.addfinalizer()``.
-
- :arg teardown: function receiving a previously setup resource.
- :arg setup: a no-argument function creating a resource.
- :arg scope: a string value out of ``function``, ``class``, ``module``
- or ``session`` indicating the caching lifecycle of the resource.
- :arg extrakey: added to internal caching key of (funcargname, scope).
- """
- if not hasattr(self.config, '_setupcache'):
- self.config._setupcache = {} # XXX weakref?
- cachekey = (self.fixturename, self._getscopeitem(scope), extrakey)
- cache = self.config._setupcache
- try:
- val = cache[cachekey]
- except KeyError:
- self._check_scope(self.fixturename, self.scope, scope)
- val = setup()
- cache[cachekey] = val
- if teardown is not None:
- def finalizer():
- del cache[cachekey]
- teardown(val)
- self._addfinalizer(finalizer, scope=scope)
- return val
-
- def getfixturevalue(self, argname):
- """ Dynamically run a named fixture function.
-
- Declaring fixtures via function argument is recommended where possible.
- But if you can only decide whether to use another fixture at test
- setup time, you may use this function to retrieve it inside a fixture
- or test function body.
- """
- return self._get_active_fixturedef(argname).cached_result[0]
-
- def getfuncargvalue(self, argname):
- """ Deprecated, use getfixturevalue. """
- from _pytest import deprecated
- warnings.warn(
- deprecated.GETFUNCARGVALUE,
- DeprecationWarning,
- stacklevel=2)
- return self.getfixturevalue(argname)
-
- def _get_active_fixturedef(self, argname):
- try:
- return self._fixture_defs[argname]
- except KeyError:
- try:
- fixturedef = self._getnextfixturedef(argname)
- except FixtureLookupError:
- if argname == "request":
- class PseudoFixtureDef:
- cached_result = (self, [0], None)
- scope = "function"
- return PseudoFixtureDef
- raise
- # remove indent to prevent the python3 exception
- # from leaking into the call
- result = self._getfixturevalue(fixturedef)
- self._fixture_values[argname] = result
- self._fixture_defs[argname] = fixturedef
- return fixturedef
-
- def _get_fixturestack(self):
- current = self
- values = []
- while 1:
- fixturedef = getattr(current, "_fixturedef", None)
- if fixturedef is None:
- values.reverse()
- return values
- values.append(fixturedef)
- current = current._parent_request
-
- def _getfixturevalue(self, fixturedef):
- # prepare a subrequest object before calling fixture function
- # (latter managed by fixturedef)
- argname = fixturedef.argname
- funcitem = self._pyfuncitem
- scope = fixturedef.scope
- try:
- param = funcitem.callspec.getparam(argname)
- except (AttributeError, ValueError):
- param = NOTSET
- param_index = 0
- if fixturedef.params is not None:
- frame = inspect.stack()[3]
- frameinfo = inspect.getframeinfo(frame[0])
- source_path = frameinfo.filename
- source_lineno = frameinfo.lineno
- source_path = py.path.local(source_path)
- if source_path.relto(funcitem.config.rootdir):
- source_path = source_path.relto(funcitem.config.rootdir)
- msg = (
- "The requested fixture has no parameter defined for the "
- "current test.\n\nRequested fixture '{0}' defined in:\n{1}"
- "\n\nRequested here:\n{2}:{3}".format(
- fixturedef.argname,
- getlocation(fixturedef.func, funcitem.config.rootdir),
- source_path,
- source_lineno,
- )
- )
- fail(msg)
- else:
- # indices might not be set if old-style metafunc.addcall() was used
- param_index = funcitem.callspec.indices.get(argname, 0)
- # if a parametrize invocation set a scope it will override
- # the static scope defined with the fixture function
- paramscopenum = funcitem.callspec._arg2scopenum.get(argname)
- if paramscopenum is not None:
- scope = scopes[paramscopenum]
-
- subrequest = SubRequest(self, scope, param, param_index, fixturedef)
-
- # check if a higher-level scoped fixture accesses a lower level one
- subrequest._check_scope(argname, self.scope, scope)
-
- # clear sys.exc_info before invoking the fixture (python bug?)
- # if it's not explicitly cleared it will leak into the call
- exc_clear()
- try:
- # call the fixture function
- val = fixturedef.execute(request=subrequest)
- finally:
- # if fixture function failed it might have registered finalizers
- self.session._setupstate.addfinalizer(fixturedef.finish,
- subrequest.node)
- return val
-
- def _check_scope(self, argname, invoking_scope, requested_scope):
- if argname == "request":
- return
- if scopemismatch(invoking_scope, requested_scope):
- # try to report something helpful
- lines = self._factorytraceback()
- fail("ScopeMismatch: You tried to access the %r scoped "
- "fixture %r with a %r scoped request object, "
- "involved factories\n%s" % (
- (requested_scope, argname, invoking_scope, "\n".join(lines))),
- pytrace=False)
-
- def _factorytraceback(self):
- lines = []
- for fixturedef in self._get_fixturestack():
- factory = fixturedef.func
- fs, lineno = getfslineno(factory)
- p = self._pyfuncitem.session.fspath.bestrelpath(fs)
- args = _format_args(factory)
- lines.append("%s:%d: def %s%s" % (
- p, lineno, factory.__name__, args))
- return lines
-
- def _getscopeitem(self, scope):
- if scope == "function":
- # this might also be a non-function Item despite its attribute name
- return self._pyfuncitem
- node = get_scope_node(self._pyfuncitem, scope)
- if node is None and scope == "class":
- # fallback to function item itself
- node = self._pyfuncitem
- assert node
- return node
-
- def __repr__(self):
- return "<FixtureRequest for %r>" % (self.node)
-
-
-class SubRequest(FixtureRequest):
- """ a sub request for handling getting a fixture from a
- test function/fixture. """
-
- def __init__(self, request, scope, param, param_index, fixturedef):
- self._parent_request = request
- self.fixturename = fixturedef.argname
- if param is not NOTSET:
- self.param = param
- self.param_index = param_index
- self.scope = scope
- self._fixturedef = fixturedef
- self._pyfuncitem = request._pyfuncitem
- self._fixture_values = request._fixture_values
- self._fixture_defs = request._fixture_defs
- self._arg2fixturedefs = request._arg2fixturedefs
- self._arg2index = request._arg2index
- self._fixturemanager = request._fixturemanager
-
- def __repr__(self):
- return "<SubRequest %r for %r>" % (self.fixturename, self._pyfuncitem)
-
- def addfinalizer(self, finalizer):
- self._fixturedef.addfinalizer(finalizer)
-
-
-class ScopeMismatchError(Exception):
- """ A fixture function tries to use a different fixture function which
- which has a lower scope (e.g. a Session one calls a function one)
- """
-
-
-scopes = "session module class function".split()
-scopenum_function = scopes.index("function")
-
-
-def scopemismatch(currentscope, newscope):
- return scopes.index(newscope) > scopes.index(currentscope)
-
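-
-# Illustrative sketch: a mismatch means the invoking scope is broader
-# than the fixture it tries to use; a session fixture may not depend on
-# a function-scoped one, while the reverse is fine.
-def _example_scopemismatch():
-    assert scopemismatch('session', 'function')
-    assert not scopemismatch('function', 'session')
-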
-
-def scope2index(scope, descr, where=None):
- """Look up the index of ``scope`` and raise a descriptive value error
- if not defined.
- """
- try:
- return scopes.index(scope)
- except ValueError:
- raise ValueError(
- "{0} {1}has an unsupported scope value '{2}'".format(
- descr, 'from {0} '.format(where) if where else '',
- scope)
- )
-
-
-class FixtureLookupError(LookupError):
- """ could not return a requested Fixture (missing or invalid). """
-
- def __init__(self, argname, request, msg=None):
- self.argname = argname
- self.request = request
- self.fixturestack = request._get_fixturestack()
- self.msg = msg
-
- def formatrepr(self):
- tblines = []
- addline = tblines.append
- stack = [self.request._pyfuncitem.obj]
- stack.extend(map(lambda x: x.func, self.fixturestack))
- msg = self.msg
- if msg is not None:
- # the last fixture raised an error; let's present
- # it at the requesting side
- stack = stack[:-1]
- for function in stack:
- fspath, lineno = getfslineno(function)
- try:
- lines, _ = inspect.getsourcelines(get_real_func(function))
- except (IOError, IndexError, TypeError):
- error_msg = "file %s, line %s: source code not available"
- addline(error_msg % (fspath, lineno + 1))
- else:
- addline("file %s, line %s" % (fspath, lineno + 1))
- for i, line in enumerate(lines):
- line = line.rstrip()
- addline(" " + line)
- if line.lstrip().startswith('def'):
- break
-
- if msg is None:
- fm = self.request._fixturemanager
- available = []
- parentid = self.request._pyfuncitem.parent.nodeid
- for name, fixturedefs in fm._arg2fixturedefs.items():
- faclist = list(fm._matchfactories(fixturedefs, parentid))
- if faclist and name not in available:
- available.append(name)
- msg = "fixture %r not found" % (self.argname,)
- msg += "\n available fixtures: %s" % (", ".join(sorted(available)),)
- msg += "\n use 'pytest --fixtures [testpath]' for help on them."
-
- return FixtureLookupErrorRepr(fspath, lineno, tblines, msg, self.argname)
-
-
-class FixtureLookupErrorRepr(TerminalRepr):
- def __init__(self, filename, firstlineno, tblines, errorstring, argname):
- self.tblines = tblines
- self.errorstring = errorstring
- self.filename = filename
- self.firstlineno = firstlineno
- self.argname = argname
-
- def toterminal(self, tw):
- # tw.line("FixtureLookupError: %s" %(self.argname), red=True)
- for tbline in self.tblines:
- tw.line(tbline.rstrip())
- lines = self.errorstring.split("\n")
- if lines:
- tw.line('{0} {1}'.format(FormattedExcinfo.fail_marker,
- lines[0].strip()), red=True)
- for line in lines[1:]:
- tw.line('{0} {1}'.format(FormattedExcinfo.flow_marker,
- line.strip()), red=True)
- tw.line()
- tw.line("%s:%d" % (self.filename, self.firstlineno + 1))
-
-
-def fail_fixturefunc(fixturefunc, msg):
- fs, lineno = getfslineno(fixturefunc)
- location = "%s:%s" % (fs, lineno + 1)
- source = _pytest._code.Source(fixturefunc)
- fail(msg + ":\n\n" + str(source.indent()) + "\n" + location,
- pytrace=False)
-
-
-def call_fixture_func(fixturefunc, request, kwargs):
- yieldctx = is_generator(fixturefunc)
- if yieldctx:
- it = fixturefunc(**kwargs)
- res = next(it)
-
- def teardown():
- try:
- next(it)
- except StopIteration:
- pass
- else:
- fail_fixturefunc(fixturefunc,
- "yield_fixture function has more than one 'yield'")
-
- request.addfinalizer(teardown)
- else:
- res = fixturefunc(**kwargs)
- return res
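
A standalone sketch of the same setup/yield/teardown protocol that call_fixture_func implements above (hypothetical names, no pytest machinery):

    import inspect

    def run_fixture(func, finalizers, **kwargs):
        """Run a fixture function; register teardown if it is a generator."""
        if inspect.isgeneratorfunction(func):
            it = func(**kwargs)
            value = next(it)        # execute up to the (single) yield
            def teardown():
                try:
                    next(it)        # resume after the yield: teardown code
                except StopIteration:
                    pass            # exactly one yield, as required
                else:
                    raise RuntimeError("fixture has more than one 'yield'")
            finalizers.append(teardown)
            return value
        return func(**kwargs)       # plain return-style fixture

    def sample():
        resource = {"open": True}
        yield resource              # value handed to the test
        resource["open"] = False    # runs during teardown

    finalizers = []
    res = run_fixture(sample, finalizers)
    assert res["open"]
    for fin in reversed(finalizers):
        fin()
    assert not res["open"]
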
-
-
-class FixtureDef:
- """ A container for a factory definition. """
-
- def __init__(self, fixturemanager, baseid, argname, func, scope, params,
- unittest=False, ids=None):
- self._fixturemanager = fixturemanager
- self.baseid = baseid or ''
- self.has_location = baseid is not None
- self.func = func
- self.argname = argname
- self.scope = scope
- self.scopenum = scope2index(
- scope or "function",
- descr='fixture {0}'.format(func.__name__),
- where=baseid
- )
- self.params = params
- startindex = unittest and 1 or None
- self.argnames = getfuncargnames(func, startindex=startindex)
- self.unittest = unittest
- self.ids = ids
- self._finalizer = []
-
- def addfinalizer(self, finalizer):
- self._finalizer.append(finalizer)
-
- def finish(self):
- exceptions = []
- try:
- while self._finalizer:
- try:
- func = self._finalizer.pop()
- func()
- except: # noqa
- exceptions.append(sys.exc_info())
- if exceptions:
- e = exceptions[0]
- del exceptions # ensure we don't keep all frames alive because of the traceback
- py.builtin._reraise(*e)
-
- finally:
- ihook = self._fixturemanager.session.ihook
- ihook.pytest_fixture_post_finalizer(fixturedef=self)
- # even if finalization fails, we invalidate
- # the cached fixture value
- if hasattr(self, "cached_result"):
- del self.cached_result
-
- def execute(self, request):
- # get required arguments and register our own finish()
- # with their finalization
- for argname in self.argnames:
- fixturedef = request._get_active_fixturedef(argname)
- if argname != "request":
- fixturedef.addfinalizer(self.finish)
-
- my_cache_key = request.param_index
- cached_result = getattr(self, "cached_result", None)
- if cached_result is not None:
- result, cache_key, err = cached_result
- if my_cache_key == cache_key:
- if err is not None:
- py.builtin._reraise(*err)
- else:
- return result
- # we have a previous but differently parametrized fixture instance
- # so we need to tear it down before creating a new one
- self.finish()
- assert not hasattr(self, "cached_result")
-
- ihook = self._fixturemanager.session.ihook
- return ihook.pytest_fixture_setup(fixturedef=self, request=request)
-
- def __repr__(self):
- return ("<FixtureDef name=%r scope=%r baseid=%r >" %
- (self.argname, self.scope, self.baseid))
-
-
-def pytest_fixture_setup(fixturedef, request):
- """ Execution of fixture setup. """
- kwargs = {}
- for argname in fixturedef.argnames:
- fixdef = request._get_active_fixturedef(argname)
- result, arg_cache_key, exc = fixdef.cached_result
- request._check_scope(argname, request.scope, fixdef.scope)
- kwargs[argname] = result
-
- fixturefunc = fixturedef.func
- if fixturedef.unittest:
- if request.instance is not None:
- # bind the unbound method to the TestCase instance
- fixturefunc = fixturedef.func.__get__(request.instance)
- else:
- # the fixture function needs to be bound to the actual
- # request.instance so that code working with "fixturedef" behaves
- # as expected.
- if request.instance is not None:
- fixturefunc = getimfunc(fixturedef.func)
- if fixturefunc != fixturedef.func:
- fixturefunc = fixturefunc.__get__(request.instance)
- my_cache_key = request.param_index
- try:
- result = call_fixture_func(fixturefunc, request, kwargs)
- except TEST_OUTCOME:
- fixturedef.cached_result = (None, my_cache_key, sys.exc_info())
- raise
- fixturedef.cached_result = (result, my_cache_key, None)
- return result
-
-
-class FixtureFunctionMarker:
- def __init__(self, scope, params, autouse=False, ids=None, name=None):
- self.scope = scope
- self.params = params
- self.autouse = autouse
- self.ids = ids
- self.name = name
-
- def __call__(self, function):
- if isclass(function):
- raise ValueError(
- "class fixtures not supported (may be in the future)")
- function._pytestfixturefunction = self
- return function
-
-
-def fixture(scope="function", params=None, autouse=False, ids=None, name=None):
- """ (return a) decorator to mark a fixture factory function.
-
- This decorator can be used (with or without parameters) to define a
- fixture function. The name of the fixture function can later be
- referenced to cause its invocation ahead of running tests: test
- modules or classes can use the pytest.mark.usefixtures(fixturename)
- marker. Test functions can directly use fixture names as input
- arguments in which case the fixture instance returned from the fixture
- function will be injected.
-
- :arg scope: the scope for which this fixture is shared, one of
- "function" (default), "class", "module" or "session".
-
- :arg params: an optional list of parameters which will cause multiple
- invocations of the fixture function and all of the tests
- using it.
-
- :arg autouse: if True, the fixture func is activated for all tests that
- can see it. If False (the default) then an explicit
- reference is needed to activate the fixture.
-
- :arg ids: list of string ids each corresponding to the params
- so that they are part of the test id. If no ids are provided
- they will be generated automatically from the params.
-
- :arg name: the name of the fixture. This defaults to the name of the
- decorated function. If a fixture is used in the same module in
- which it is defined, the function name of the fixture will be
- shadowed by the function arg that requests the fixture; one way
- to resolve this is to name the decorated function
- ``fixture_<fixturename>`` and then use
- ``@pytest.fixture(name='<fixturename>')``.
-
- Fixtures can optionally provide their values to test functions using a ``yield`` statement,
- instead of ``return``. In this case, the code block after the ``yield`` statement is executed
- as teardown code regardless of the test outcome. A fixture function must yield exactly once.
- """
- if callable(scope) and params is None and autouse is False:
- # direct decoration
- return FixtureFunctionMarker(
- "function", params, autouse, name=name)(scope)
- if params is not None and not isinstance(params, (list, tuple)):
- params = list(params)
- return FixtureFunctionMarker(scope, params, autouse, ids=ids, name=name)
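
A short usage example of the decorator documented above (names are illustrative; behavior follows the docstring):

    import pytest

    @pytest.fixture(scope="module", params=["sqlite", "postgres"],
                    ids=["lite", "pg"])
    def db(request):
        conn = {"backend": request.param, "open": True}  # stand-in connection
        yield conn              # everything after the yield is teardown
        conn["open"] = False

    def test_backend(db):
        # collected twice, as test_backend[lite] and test_backend[pg]
        assert db["open"]
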
-
-
-def yield_fixture(scope="function", params=None, autouse=False, ids=None, name=None):
- """ (return a) decorator to mark a yield-fixture factory function.
-
- .. deprecated:: 3.0
- Use :py:func:`pytest.fixture` directly instead.
- """
- if callable(scope) and params is None and not autouse:
- # direct decoration
- return FixtureFunctionMarker(
- "function", params, autouse, ids=ids, name=name)(scope)
- else:
- return FixtureFunctionMarker(scope, params, autouse, ids=ids, name=name)
-
-
-defaultfuncargprefixmarker = fixture()
-
-
-@fixture(scope="session")
-def pytestconfig(request):
- """ the pytest config object with access to command line opts."""
- return request.config
-
-
-class FixtureManager:
- """
-    pytest fixture definitions and information are stored and managed
-    by this class.
-
- During collection fm.parsefactories() is called multiple times to parse
- fixture function definitions into FixtureDef objects and internal
- data structures.
-
- During collection of test functions, metafunc-mechanics instantiate
- a FuncFixtureInfo object which is cached per node/func-name.
- This FuncFixtureInfo object is later retrieved by Function nodes
- which themselves offer a fixturenames attribute.
-
- The FuncFixtureInfo object holds information about fixtures and FixtureDefs
- relevant for a particular function. An initial list of fixtures is
- assembled like this:
-
- - ini-defined usefixtures
- - autouse-marked fixtures along the collection chain up from the function
- - usefixtures markers at module/class/function level
- - test function funcargs
-
-    Subsequently the funcfixtureinfo.fixturenames attribute is computed
-    as the closure of the fixtures needed to set up the initial fixtures,
-    i.e. fixtures needed by fixture functions themselves are appended
-    to the fixturenames list.
-
-    During the test-setup phase all fixturenames are instantiated, retrieved
-    by a lookup of their FuncFixtureInfo.
- """
-
- _argprefix = "pytest_funcarg__"
- FixtureLookupError = FixtureLookupError
- FixtureLookupErrorRepr = FixtureLookupErrorRepr
-
- def __init__(self, session):
- self.session = session
- self.config = session.config
- self._arg2fixturedefs = {}
- self._holderobjseen = set()
- self._arg2finish = {}
- self._nodeid_and_autousenames = [("", self.config.getini("usefixtures"))]
- session.config.pluginmanager.register(self, "funcmanage")
-
- def getfixtureinfo(self, node, func, cls, funcargs=True):
- if funcargs and not hasattr(node, "nofuncargs"):
- argnames = getfuncargnames(func, cls=cls)
- else:
- argnames = ()
- usefixtures = getattr(func, "usefixtures", None)
- initialnames = argnames
- if usefixtures is not None:
- initialnames = usefixtures.args + initialnames
- fm = node.session._fixturemanager
- names_closure, arg2fixturedefs = fm.getfixtureclosure(initialnames,
- node)
- return FuncFixtureInfo(argnames, names_closure, arg2fixturedefs)
-
- def pytest_plugin_registered(self, plugin):
- nodeid = None
- try:
- p = py.path.local(plugin.__file__)
- except AttributeError:
- pass
- else:
- # construct the base nodeid which is later used to check
- # what fixtures are visible for particular tests (as denoted
- # by their test id)
- if p.basename.startswith("conftest.py"):
- nodeid = p.dirpath().relto(self.config.rootdir)
- if p.sep != nodes.SEP:
- nodeid = nodeid.replace(p.sep, nodes.SEP)
- self.parsefactories(plugin, nodeid)
-
- def _getautousenames(self, nodeid):
-        """ return a list of fixture names to be used. """
- autousenames = []
- for baseid, basenames in self._nodeid_and_autousenames:
- if nodeid.startswith(baseid):
- if baseid:
- i = len(baseid)
- nextchar = nodeid[i:i + 1]
- if nextchar and nextchar not in ":/":
- continue
- autousenames.extend(basenames)
- # make sure autousenames are sorted by scope, scopenum 0 is session
- autousenames.sort(
- key=lambda x: self._arg2fixturedefs[x][-1].scopenum)
- return autousenames
-
- def getfixtureclosure(self, fixturenames, parentnode):
-        # collect the closure of all fixtures, starting with the given
-        # fixturenames as the initial set. As we have to visit all
-        # factory definitions anyway, we also return an arg2fixturedefs
-        # mapping so that the caller can reuse it and does not have
-        # to re-discover fixturedefs again for each fixturename
-        # (discovering matching fixtures for a given name/node is expensive)
-
- parentid = parentnode.nodeid
- fixturenames_closure = self._getautousenames(parentid)
-
- def merge(otherlist):
- for arg in otherlist:
- if arg not in fixturenames_closure:
- fixturenames_closure.append(arg)
-
- merge(fixturenames)
- arg2fixturedefs = {}
- lastlen = -1
- while lastlen != len(fixturenames_closure):
- lastlen = len(fixturenames_closure)
- for argname in fixturenames_closure:
- if argname in arg2fixturedefs:
- continue
- fixturedefs = self.getfixturedefs(argname, parentid)
- if fixturedefs:
- arg2fixturedefs[argname] = fixturedefs
- merge(fixturedefs[-1].argnames)
- return fixturenames_closure, arg2fixturedefs
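
The loop above is a fixed-point iteration: keep appending the argnames of every resolved fixture until a pass adds nothing new. The same idea in isolation, with a hypothetical dependency table:

    # name -> the fixture's own argument names (its direct dependencies)
    deps = {"client": ["db"], "db": ["config"], "config": ["tmpdir"], "tmpdir": []}

    def closure(initial):
        names = list(initial)
        lastlen = -1
        while lastlen != len(names):     # stop once a pass adds nothing
            lastlen = len(names)
            for name in list(names):
                for dep in deps.get(name, []):
                    if dep not in names:
                        names.append(dep)
        return names

    assert closure(["client"]) == ["client", "db", "config", "tmpdir"]
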
-
- def pytest_generate_tests(self, metafunc):
- for argname in metafunc.fixturenames:
- faclist = metafunc._arg2fixturedefs.get(argname)
- if faclist:
- fixturedef = faclist[-1]
- if fixturedef.params is not None:
- parametrize_func = getattr(metafunc.function, 'parametrize', None)
- func_params = getattr(parametrize_func, 'args', [[None]])
- func_kwargs = getattr(parametrize_func, 'kwargs', {})
- # skip directly parametrized arguments
- if "argnames" in func_kwargs:
- argnames = parametrize_func.kwargs["argnames"]
- else:
- argnames = func_params[0]
- if not isinstance(argnames, (tuple, list)):
- argnames = [x.strip() for x in argnames.split(",") if x.strip()]
- if argname not in func_params and argname not in argnames:
- metafunc.parametrize(argname, fixturedef.params,
- indirect=True, scope=fixturedef.scope,
- ids=fixturedef.ids)
- else:
- continue # will raise FixtureLookupError at setup time
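
Seen from the user side, the hook above is what makes fixture params fan out into multiple test invocations; an illustrative example:

    import pytest

    @pytest.fixture(params=[1, 2, 3])
    def number(request):
        return request.param * 10

    def test_number(number):
        # collected three times; behind the scenes the hook above calls
        # metafunc.parametrize("number", [1, 2, 3], indirect=True)
        assert number in (10, 20, 30)
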
-
- def pytest_collection_modifyitems(self, items):
- # separate parametrized setups
- items[:] = reorder_items(items)
-
- def parsefactories(self, node_or_obj, nodeid=NOTSET, unittest=False):
- if nodeid is not NOTSET:
- holderobj = node_or_obj
- else:
- holderobj = node_or_obj.obj
- nodeid = node_or_obj.nodeid
- if holderobj in self._holderobjseen:
- return
- self._holderobjseen.add(holderobj)
- autousenames = []
- for name in dir(holderobj):
-            # The attribute can be an arbitrary descriptor, so the attribute
-            # access below can raise. safe_getattr() ignores such exceptions.
- obj = safe_getattr(holderobj, name, None)
- # fixture functions have a pytest_funcarg__ prefix (pre-2.3 style)
- # or are "@pytest.fixture" marked
- marker = getfixturemarker(obj)
- if marker is None:
- if not name.startswith(self._argprefix):
- continue
- if not callable(obj):
- continue
- marker = defaultfuncargprefixmarker
- from _pytest import deprecated
- self.config.warn('C1', deprecated.FUNCARG_PREFIX.format(name=name), nodeid=nodeid)
- name = name[len(self._argprefix):]
- elif not isinstance(marker, FixtureFunctionMarker):
- # magic globals with __getattr__ might have got us a wrong
- # fixture attribute
- continue
- else:
- if marker.name:
- name = marker.name
- msg = 'fixtures cannot have "pytest_funcarg__" prefix ' \
- 'and be decorated with @pytest.fixture:\n%s' % name
- assert not name.startswith(self._argprefix), msg
-
- fixture_def = FixtureDef(self, nodeid, name, obj,
- marker.scope, marker.params,
- unittest=unittest, ids=marker.ids)
-
- faclist = self._arg2fixturedefs.setdefault(name, [])
- if fixture_def.has_location:
- faclist.append(fixture_def)
- else:
- # fixturedefs with no location are at the front
- # so this inserts the current fixturedef after the
- # existing fixturedefs from external plugins but
- # before the fixturedefs provided in conftests.
- i = len([f for f in faclist if not f.has_location])
- faclist.insert(i, fixture_def)
- if marker.autouse:
- autousenames.append(name)
-
- if autousenames:
- self._nodeid_and_autousenames.append((nodeid or '', autousenames))
-
- def getfixturedefs(self, argname, nodeid):
- """
- Gets a list of fixtures which are applicable to the given node id.
-
- :param str argname: name of the fixture to search for
- :param str nodeid: full node id of the requesting test.
-        :return: tuple of matching FixtureDef objects, or None if none match
- """
- try:
- fixturedefs = self._arg2fixturedefs[argname]
- except KeyError:
- return None
- else:
- return tuple(self._matchfactories(fixturedefs, nodeid))
-
- def _matchfactories(self, fixturedefs, nodeid):
- for fixturedef in fixturedefs:
- if nodes.ischildnode(fixturedef.baseid, nodeid):
- yield fixturedef
diff --git a/lib/spack/external/pytest-fallback/_pytest/freeze_support.py b/lib/spack/external/pytest-fallback/_pytest/freeze_support.py
deleted file mode 100644
index 97147a8825..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/freeze_support.py
+++ /dev/null
@@ -1,43 +0,0 @@
-"""
-Provides a function to report all internal pytest modules for use with
-freezing tools.
-"""
-from __future__ import absolute_import, division, print_function
-
-
-def freeze_includes():
- """
- Returns a list of module names used by py.test that should be
- included by cx_freeze.
- """
- import py
- import _pytest
- result = list(_iter_all_modules(py))
- result += list(_iter_all_modules(_pytest))
- return result
-
-
-def _iter_all_modules(package, prefix=''):
- """
- Iterates over the names of all modules that can be found in the given
- package, recursively.
- Example:
- _iter_all_modules(_pytest) ->
- ['_pytest.assertion.newinterpret',
- '_pytest.capture',
- '_pytest.core',
- ...
- ]
- """
- import os
- import pkgutil
- if type(package) is not str:
- path, prefix = package.__path__[0], package.__name__ + '.'
- else:
- path = package
- for _, name, is_package in pkgutil.iter_modules([path]):
- if is_package:
- for m in _iter_all_modules(os.path.join(path, name), prefix=name + '.'):
- yield prefix + m
- else:
- yield prefix + name
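
A sketch of how a freezing setup script might consume freeze_includes(), assuming cx_Freeze's usual setup()/Executable interface (the project names here are hypothetical):

    # setup.py
    from cx_Freeze import setup, Executable
    import pytest

    setup(
        name="app_with_selftests",
        executables=[Executable("runtests.py")],   # runs pytest.main() when frozen
        options={
            "build_exe": {
                # make pytest's internal modules visible to the freezer
                "includes": pytest.freeze_includes(),
            },
        },
    )
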
diff --git a/lib/spack/external/pytest-fallback/_pytest/helpconfig.py b/lib/spack/external/pytest-fallback/_pytest/helpconfig.py
deleted file mode 100644
index e744637f86..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/helpconfig.py
+++ /dev/null
@@ -1,184 +0,0 @@
-""" version info, help messages, tracing configuration. """
-from __future__ import absolute_import, division, print_function
-
-import py
-import pytest
-from _pytest.config import PrintHelp
-import os
-import sys
-from argparse import Action
-
-
-class HelpAction(Action):
- """This is an argparse Action that will raise an exception in
- order to skip the rest of the argument parsing when --help is passed.
- This prevents argparse from quitting due to missing required arguments
- when any are defined, for example by ``pytest_addoption``.
- This is similar to the way that the builtin argparse --help option is
- implemented by raising SystemExit.
- """
-
- def __init__(self,
- option_strings,
- dest=None,
- default=False,
- help=None):
- super(HelpAction, self).__init__(
- option_strings=option_strings,
- dest=dest,
- const=True,
- default=default,
- nargs=0,
- help=help)
-
- def __call__(self, parser, namespace, values, option_string=None):
- setattr(namespace, self.dest, self.const)
-
- # We should only skip the rest of the parsing after preparse is done
- if getattr(parser._parser, 'after_preparse', False):
- raise PrintHelp
-
-
-def pytest_addoption(parser):
- group = parser.getgroup('debugconfig')
- group.addoption('--version', action="store_true",
- help="display pytest lib version and import information.")
- group._addoption("-h", "--help", action=HelpAction, dest="help",
- help="show help message and configuration info")
- group._addoption('-p', action="append", dest="plugins", default=[],
- metavar="name",
- help="early-load given plugin (multi-allowed). "
- "To avoid loading of plugins, use the `no:` prefix, e.g. "
- "`no:doctest`.")
- group.addoption('--traceconfig', '--trace-config',
- action="store_true", default=False,
-                    help="trace considerations of conftest.py files.")
- group.addoption('--debug',
- action="store_true", dest="debug", default=False,
- help="store internal tracing debug information in 'pytestdebug.log'.")
- group._addoption(
- '-o', '--override-ini', nargs='*', dest="override_ini",
- action="append",
- help="override config option with option=value style, e.g. `-o xfail_strict=True`.")
-
-
-@pytest.hookimpl(hookwrapper=True)
-def pytest_cmdline_parse():
- outcome = yield
- config = outcome.get_result()
- if config.option.debug:
- path = os.path.abspath("pytestdebug.log")
- debugfile = open(path, 'w')
- debugfile.write("versions pytest-%s, py-%s, "
- "python-%s\ncwd=%s\nargs=%s\n\n" % (
- pytest.__version__, py.__version__,
- ".".join(map(str, sys.version_info)),
- os.getcwd(), config._origargs))
- config.trace.root.setwriter(debugfile.write)
- undo_tracing = config.pluginmanager.enable_tracing()
- sys.stderr.write("writing pytestdebug information to %s\n" % path)
-
- def unset_tracing():
- debugfile.close()
- sys.stderr.write("wrote pytestdebug information to %s\n" %
- debugfile.name)
- config.trace.root.setwriter(None)
- undo_tracing()
-
- config.add_cleanup(unset_tracing)
-
-
-def pytest_cmdline_main(config):
- if config.option.version:
- p = py.path.local(pytest.__file__)
- sys.stderr.write("This is pytest version %s, imported from %s\n" %
- (pytest.__version__, p))
- plugininfo = getpluginversioninfo(config)
- if plugininfo:
- for line in plugininfo:
- sys.stderr.write(line + "\n")
- return 0
- elif config.option.help:
- config._do_configure()
- showhelp(config)
- config._ensure_unconfigure()
- return 0
-
-
-def showhelp(config):
- reporter = config.pluginmanager.get_plugin('terminalreporter')
- tw = reporter._tw
- tw.write(config._parser.optparser.format_help())
- tw.line()
- tw.line()
- tw.line("[pytest] ini-options in the first "
- "pytest.ini|tox.ini|setup.cfg file found:")
- tw.line()
-
- for name in config._parser._ininames:
- help, type, default = config._parser._inidict[name]
- if type is None:
- type = "string"
- spec = "%s (%s)" % (name, type)
- line = " %-24s %s" % (spec, help)
- tw.line(line[:tw.fullwidth])
-
- tw.line()
- tw.line("environment variables:")
- vars = [
- ("PYTEST_ADDOPTS", "extra command line options"),
- ("PYTEST_PLUGINS", "comma-separated plugins to load during startup"),
- ("PYTEST_DEBUG", "set to enable debug tracing of pytest's internals")
- ]
- for name, help in vars:
- tw.line(" %-24s %s" % (name, help))
- tw.line()
- tw.line()
-
- tw.line("to see available markers type: pytest --markers")
- tw.line("to see available fixtures type: pytest --fixtures")
- tw.line("(shown according to specified file_or_dir or current dir "
- "if not specified)")
-
- for warningreport in reporter.stats.get('warnings', []):
- tw.line("warning : " + warningreport.message, red=True)
- return
-
-
-conftest_options = [
- ('pytest_plugins', 'list of plugin names to load'),
-]
-
-
-def getpluginversioninfo(config):
- lines = []
- plugininfo = config.pluginmanager.list_plugin_distinfo()
- if plugininfo:
- lines.append("setuptools registered plugins:")
- for plugin, dist in plugininfo:
- loc = getattr(plugin, '__file__', repr(plugin))
- content = "%s-%s at %s" % (dist.project_name, dist.version, loc)
- lines.append(" " + content)
- return lines
-
-
-def pytest_report_header(config):
- lines = []
- if config.option.debug or config.option.traceconfig:
- lines.append("using: pytest-%s pylib-%s" %
- (pytest.__version__, py.__version__))
-
- verinfo = getpluginversioninfo(config)
- if verinfo:
- lines.extend(verinfo)
-
- if config.option.traceconfig:
- lines.append("active plugins:")
- items = config.pluginmanager.list_name_plugin()
- for name, plugin in items:
- if hasattr(plugin, '__file__'):
- r = plugin.__file__
- else:
- r = repr(plugin)
- lines.append(" %-20s: %s" % (name, r))
- return lines
diff --git a/lib/spack/external/pytest-fallback/_pytest/hookspec.py b/lib/spack/external/pytest-fallback/_pytest/hookspec.py
deleted file mode 100644
index e5c966e58b..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/hookspec.py
+++ /dev/null
@@ -1,423 +0,0 @@
-""" hook specifications for pytest plugins, invoked from main.py and builtin plugins. """
-
-from _pytest._pluggy import HookspecMarker
-
-hookspec = HookspecMarker("pytest")
-
-# -------------------------------------------------------------------------
-# Initialization hooks called for every plugin
-# -------------------------------------------------------------------------
-
-
-@hookspec(historic=True)
-def pytest_addhooks(pluginmanager):
- """called at plugin registration time to allow adding new hooks via a call to
- pluginmanager.add_hookspecs(module_or_class, prefix)."""
-
-
-@hookspec(historic=True)
-def pytest_namespace():
- """
-    DEPRECATED: this hook causes direct monkeypatching on pytest; its use is
-    strongly discouraged.
-
-    Return a dict of name->object to be made globally available in
-    the pytest namespace. This hook is called at plugin registration
-    time.
- """
-
-
-@hookspec(historic=True)
-def pytest_plugin_registered(plugin, manager):
- """ a new pytest plugin got registered. """
-
-
-@hookspec(historic=True)
-def pytest_addoption(parser):
- """register argparse-style options and ini-style config values,
- called once at the beginning of a test run.
-
- .. note::
-
- This function should be implemented only in plugins or ``conftest.py``
- files situated at the tests root directory due to how pytest
- :ref:`discovers plugins during startup <pluginorder>`.
-
- :arg parser: To add command line options, call
- :py:func:`parser.addoption(...) <_pytest.config.Parser.addoption>`.
- To add ini-file values call :py:func:`parser.addini(...)
- <_pytest.config.Parser.addini>`.
-
- Options can later be accessed through the
- :py:class:`config <_pytest.config.Config>` object, respectively:
-
- - :py:func:`config.getoption(name) <_pytest.config.Config.getoption>` to
- retrieve the value of a command line option.
-
- - :py:func:`config.getini(name) <_pytest.config.Config.getini>` to retrieve
- a value read from an ini-style file.
-
- The config object is passed around on many internal objects via the ``.config``
- attribute or can be retrieved as the ``pytestconfig`` fixture or accessed
- via (deprecated) ``pytest.config``.
- """
-
-
-@hookspec(historic=True)
-def pytest_configure(config):
- """
- Allows plugins and conftest files to perform initial configuration.
-
- This hook is called for every plugin and initial conftest file
- after command line options have been parsed.
-
- After that, the hook is called for other conftest files as they are
- imported.
-
- :arg config: pytest config object
- :type config: _pytest.config.Config
- """
-
-# -------------------------------------------------------------------------
-# Bootstrapping hooks called for plugins registered early enough:
-# internal and 3rd party plugins as well as directly
-# discoverable conftest.py local plugins.
-# -------------------------------------------------------------------------
-
-
-@hookspec(firstresult=True)
-def pytest_cmdline_parse(pluginmanager, args):
- """return initialized config object, parsing the specified args.
-
- Stops at first non-None result, see :ref:`firstresult` """
-
-
-def pytest_cmdline_preparse(config, args):
- """(deprecated) modify command line arguments before option parsing. """
-
-
-@hookspec(firstresult=True)
-def pytest_cmdline_main(config):
- """ called for performing the main command line action. The default
- implementation will invoke the configure hooks and runtest_mainloop.
-
- Stops at first non-None result, see :ref:`firstresult` """
-
-
-def pytest_load_initial_conftests(early_config, parser, args):
- """ implements the loading of initial conftest files ahead
- of command line option parsing. """
-
-
-# -------------------------------------------------------------------------
-# collection hooks
-# -------------------------------------------------------------------------
-
-@hookspec(firstresult=True)
-def pytest_collection(session):
- """ perform the collection protocol for the given session.
-
- Stops at first non-None result, see :ref:`firstresult` """
-
-
-def pytest_collection_modifyitems(session, config, items):
- """ called after collection has been performed, may filter or re-order
- the items in-place."""
-
-
-def pytest_collection_finish(session):
- """ called after collection has been performed and modified. """
-
-
-@hookspec(firstresult=True)
-def pytest_ignore_collect(path, config):
- """ return True to prevent considering this path for collection.
- This hook is consulted for all files and directories prior to calling
- more specific hooks.
-
- Stops at first non-None result, see :ref:`firstresult`
- """
-
-
-@hookspec(firstresult=True)
-def pytest_collect_directory(path, parent):
- """ called before traversing a directory for collection files.
-
- Stops at first non-None result, see :ref:`firstresult` """
-
-
-def pytest_collect_file(path, parent):
- """ return collection Node or None for the given path. Any new node
- needs to have the specified ``parent`` as a parent."""
-
-# logging hooks for collection
-
-
-def pytest_collectstart(collector):
- """ collector starts collecting. """
-
-
-def pytest_itemcollected(item):
- """ we just collected a test item. """
-
-
-def pytest_collectreport(report):
- """ collector finished collecting. """
-
-
-def pytest_deselected(items):
- """ called for test items deselected by keyword. """
-
-
-@hookspec(firstresult=True)
-def pytest_make_collect_report(collector):
- """ perform ``collector.collect()`` and return a CollectReport.
-
- Stops at first non-None result, see :ref:`firstresult` """
-
-# -------------------------------------------------------------------------
-# Python test function related hooks
-# -------------------------------------------------------------------------
-
-
-@hookspec(firstresult=True)
-def pytest_pycollect_makemodule(path, parent):
- """ return a Module collector or None for the given path.
- This hook will be called for each matching test module path.
- The pytest_collect_file hook needs to be used if you want to
- create test modules for files that do not match as a test module.
-
- Stops at first non-None result, see :ref:`firstresult` """
-
-
-@hookspec(firstresult=True)
-def pytest_pycollect_makeitem(collector, name, obj):
- """ return custom item/collector for a python object in a module, or None.
-
- Stops at first non-None result, see :ref:`firstresult` """
-
-
-@hookspec(firstresult=True)
-def pytest_pyfunc_call(pyfuncitem):
- """ call underlying test function.
-
- Stops at first non-None result, see :ref:`firstresult` """
-
-
-def pytest_generate_tests(metafunc):
- """ generate (multiple) parametrized calls to a test function."""
-
-
-@hookspec(firstresult=True)
-def pytest_make_parametrize_id(config, val, argname):
- """Return a user-friendly string representation of the given ``val`` that will be used
- by @pytest.mark.parametrize calls. Return None if the hook doesn't know about ``val``.
- The parameter name is available as ``argname``, if required.
-
- Stops at first non-None result, see :ref:`firstresult` """
-
-# -------------------------------------------------------------------------
-# generic runtest related hooks
-# -------------------------------------------------------------------------
-
-
-@hookspec(firstresult=True)
-def pytest_runtestloop(session):
- """ called for performing the main runtest loop
- (after collection finished).
-
- Stops at first non-None result, see :ref:`firstresult` """
-
-
-def pytest_itemstart(item, node):
- """ (deprecated, use pytest_runtest_logstart). """
-
-
-@hookspec(firstresult=True)
-def pytest_runtest_protocol(item, nextitem):
- """ implements the runtest_setup/call/teardown protocol for
- the given test item, including capturing exceptions and calling
- reporting hooks.
-
- :arg item: test item for which the runtest protocol is performed.
-
- :arg nextitem: the scheduled-to-be-next test item (or None if this
- is the end my friend). This argument is passed on to
- :py:func:`pytest_runtest_teardown`.
-
- :return boolean: True if no further hook implementations should be invoked.
-
-
- Stops at first non-None result, see :ref:`firstresult` """
-
-
-def pytest_runtest_logstart(nodeid, location):
- """ signal the start of running a single test item. """
-
-
-def pytest_runtest_setup(item):
- """ called before ``pytest_runtest_call(item)``. """
-
-
-def pytest_runtest_call(item):
- """ called to execute the test ``item``. """
-
-
-def pytest_runtest_teardown(item, nextitem):
- """ called after ``pytest_runtest_call``.
-
- :arg nextitem: the scheduled-to-be-next test item (None if no further
- test item is scheduled). This argument can be used to
- perform exact teardowns, i.e. calling just enough finalizers
- so that nextitem only needs to call setup-functions.
- """
-
-
-@hookspec(firstresult=True)
-def pytest_runtest_makereport(item, call):
- """ return a :py:class:`_pytest.runner.TestReport` object
- for the given :py:class:`pytest.Item <_pytest.main.Item>` and
- :py:class:`_pytest.runner.CallInfo`.
-
- Stops at first non-None result, see :ref:`firstresult` """
-
-
-def pytest_runtest_logreport(report):
- """ process a test setup/call/teardown report relating to
- the respective phase of executing a test. """
-
-# -------------------------------------------------------------------------
-# Fixture related hooks
-# -------------------------------------------------------------------------
-
-
-@hookspec(firstresult=True)
-def pytest_fixture_setup(fixturedef, request):
- """ performs fixture setup execution.
-
- Stops at first non-None result, see :ref:`firstresult` """
-
-
-def pytest_fixture_post_finalizer(fixturedef):
- """ called after fixture teardown, but before the cache is cleared so
- the fixture result cache ``fixturedef.cached_result`` can
- still be accessed."""
-
-# -------------------------------------------------------------------------
-# test session related hooks
-# -------------------------------------------------------------------------
-
-
-def pytest_sessionstart(session):
- """ before session.main() is called. """
-
-
-def pytest_sessionfinish(session, exitstatus):
- """ whole test run finishes. """
-
-
-def pytest_unconfigure(config):
- """ called before test process is exited. """
-
-
-# -------------------------------------------------------------------------
-# hooks for customizing the assert methods
-# -------------------------------------------------------------------------
-
-def pytest_assertrepr_compare(config, op, left, right):
- """return explanation for comparisons in failing assert expressions.
-
- Return None for no custom explanation, otherwise return a list
- of strings. The strings will be joined by newlines but any newlines
- *in* a string will be escaped. Note that all but the first line will
-    be indented slightly; the intention is for the first line to be a summary.
- """
-
-# -------------------------------------------------------------------------
-# hooks for influencing reporting (invoked from _pytest_terminal)
-# -------------------------------------------------------------------------
-
-
-def pytest_report_header(config, startdir):
- """ return a string or list of strings to be displayed as header info for terminal reporting.
-
- :param config: the pytest config object.
- :param startdir: py.path object with the starting dir
-
- .. note::
-
- This function should be implemented only in plugins or ``conftest.py``
- files situated at the tests root directory due to how pytest
- :ref:`discovers plugins during startup <pluginorder>`.
- """
-
-
-def pytest_report_collectionfinish(config, startdir, items):
- """
- .. versionadded:: 3.2
-
- return a string or list of strings to be displayed after collection has finished successfully.
-
-    These strings will be displayed after the standard "collected X items" message.
-
- :param config: the pytest config object.
- :param startdir: py.path object with the starting dir
- :param items: list of pytest items that are going to be executed; this list should not be modified.
- """
-
-
-@hookspec(firstresult=True)
-def pytest_report_teststatus(report):
- """ return result-category, shortletter and verbose word for reporting.
-
- Stops at first non-None result, see :ref:`firstresult` """
-
-
-def pytest_terminal_summary(terminalreporter, exitstatus):
- """ add additional section in terminal summary reporting. """
-
-
-@hookspec(historic=True)
-def pytest_logwarning(message, code, nodeid, fslocation):
- """ process a warning specified by a message, a code string,
- a nodeid and fslocation (both of which may be None
-    if the warning is not tied to a particular node/location)."""
-
-# -------------------------------------------------------------------------
-# doctest hooks
-# -------------------------------------------------------------------------
-
-
-@hookspec(firstresult=True)
-def pytest_doctest_prepare_content(content):
- """ return processed content for a given doctest
-
- Stops at first non-None result, see :ref:`firstresult` """
-
-# -------------------------------------------------------------------------
-# error handling and internal debugging hooks
-# -------------------------------------------------------------------------
-
-
-def pytest_internalerror(excrepr, excinfo):
- """ called for internal errors. """
-
-
-def pytest_keyboard_interrupt(excinfo):
- """ called for keyboard interrupt. """
-
-
-def pytest_exception_interact(node, call, report):
- """called when an exception was raised which can potentially be
- interactively handled.
-
- This hook is only called if an exception was raised
- that is not an internal exception like ``skip.Exception``.
- """
-
-
-def pytest_enter_pdb(config):
- """ called upon pdb.set_trace(), can be used by plugins to take special
- action just before the python debugger enters in interactive mode.
-
- :arg config: pytest config object
- :type config: _pytest.config.Config
- """
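
Plugins and conftest.py files implement these specifications simply by defining functions of the same names; a small illustrative conftest implementing three of the hooks above:

    # conftest.py (illustrative)
    import pytest

    def pytest_addoption(parser):
        parser.addoption("--runslow", action="store_true", default=False,
                         help="also run tests marked slow")

    def pytest_collection_modifyitems(config, items):
        # may filter or re-order the items in place, per the spec above
        if config.getoption("--runslow"):
            return
        skip_slow = pytest.mark.skip(reason="need --runslow option to run")
        for item in items:
            if "slow" in item.keywords:
                item.add_marker(skip_slow)

    def pytest_report_header(config, startdir):
        return "example project test suite"
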
diff --git a/lib/spack/external/pytest-fallback/_pytest/junitxml.py b/lib/spack/external/pytest-fallback/_pytest/junitxml.py
deleted file mode 100644
index 7fb40dc354..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/junitxml.py
+++ /dev/null
@@ -1,453 +0,0 @@
-"""
- report test results in JUnit-XML format,
- for use with Jenkins and build integration servers.
-
-
-Based on initial code from Ross Lawley.
-
-Output conforms to https://github.com/jenkinsci/xunit-plugin/blob/master/
-src/main/resources/org/jenkinsci/plugins/xunit/types/model/xsd/junit-10.xsd
-"""
-from __future__ import absolute_import, division, print_function
-
-import functools
-import py
-import os
-import re
-import sys
-import time
-import pytest
-from _pytest import nodes
-from _pytest.config import filename_arg
-
-# Python 2.X and 3.X compatibility
-if sys.version_info[0] < 3:
- from codecs import open
-else:
- unichr = chr
- unicode = str
- long = int
-
-
-class Junit(py.xml.Namespace):
- pass
-
-
-# We need to get the subset of the invalid unicode ranges according to
-# XML 1.0 which are valid in this python build. Hence we calculate
-# this dynamically instead of hardcoding it. The spec range of valid
-# chars is: Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD]
-# | [#x10000-#x10FFFF]
-_legal_chars = (0x09, 0x0A, 0x0d)
-_legal_ranges = (
- (0x20, 0x7E), (0x80, 0xD7FF), (0xE000, 0xFFFD), (0x10000, 0x10FFFF),
-)
-_legal_xml_re = [
- unicode("%s-%s") % (unichr(low), unichr(high))
- for (low, high) in _legal_ranges if low < sys.maxunicode
-]
-_legal_xml_re = [unichr(x) for x in _legal_chars] + _legal_xml_re
-illegal_xml_re = re.compile(unicode('[^%s]') % unicode('').join(_legal_xml_re))
-del _legal_chars
-del _legal_ranges
-del _legal_xml_re
-
-_py_ext_re = re.compile(r"\.py$")
-
-
-def bin_xml_escape(arg):
- def repl(matchobj):
- i = ord(matchobj.group())
- if i <= 0xFF:
- return unicode('#x%02X') % i
- else:
- return unicode('#x%04X') % i
-
- return py.xml.raw(illegal_xml_re.sub(repl, py.xml.escape(arg)))
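
The same filtering idea in self-contained form: XML 1.0 permits only #x9, #xA, #xD and a few ranges from #x20 upward, so anything outside them is rewritten to a visible marker (simplified to the BMP here):

    import re

    illegal = re.compile(u"[^\u0009\u000A\u000D\u0020-\uD7FF\uE000-\uFFFD]")

    def escape_illegal(text):
        return illegal.sub(lambda m: "#x%02X" % ord(m.group()), text)

    # ESC (0x1B) is not valid in XML 1.0, so it is made visible instead:
    assert escape_illegal("ok\x1btext") == "ok#x1Btext"
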
-
-
-class _NodeReporter(object):
- def __init__(self, nodeid, xml):
-
- self.id = nodeid
- self.xml = xml
- self.add_stats = self.xml.add_stats
- self.duration = 0
- self.properties = []
- self.nodes = []
- self.testcase = None
- self.attrs = {}
-
- def append(self, node):
- self.xml.add_stats(type(node).__name__)
- self.nodes.append(node)
-
- def add_property(self, name, value):
- self.properties.append((str(name), bin_xml_escape(value)))
-
- def make_properties_node(self):
- """Return a Junit node containing custom properties, if any.
- """
- if self.properties:
- return Junit.properties([
- Junit.property(name=name, value=value)
- for name, value in self.properties
- ])
- return ''
-
- def record_testreport(self, testreport):
- assert not self.testcase
- names = mangle_test_address(testreport.nodeid)
- classnames = names[:-1]
- if self.xml.prefix:
- classnames.insert(0, self.xml.prefix)
- attrs = {
- "classname": ".".join(classnames),
- "name": bin_xml_escape(names[-1]),
- "file": testreport.location[0],
- }
- if testreport.location[1] is not None:
- attrs["line"] = testreport.location[1]
- if hasattr(testreport, "url"):
- attrs["url"] = testreport.url
- self.attrs = attrs
-
- def to_xml(self):
- testcase = Junit.testcase(time=self.duration, **self.attrs)
- testcase.append(self.make_properties_node())
- for node in self.nodes:
- testcase.append(node)
- return testcase
-
- def _add_simple(self, kind, message, data=None):
- data = bin_xml_escape(data)
- node = kind(data, message=message)
- self.append(node)
-
- def write_captured_output(self, report):
- for capname in ('out', 'err'):
- content = getattr(report, 'capstd' + capname)
- if content:
- tag = getattr(Junit, 'system-' + capname)
- self.append(tag(bin_xml_escape(content)))
-
- def append_pass(self, report):
- self.add_stats('passed')
-
- def append_failure(self, report):
- # msg = str(report.longrepr.reprtraceback.extraline)
- if hasattr(report, "wasxfail"):
- self._add_simple(
- Junit.skipped,
- "xfail-marked test passes unexpectedly")
- else:
- if hasattr(report.longrepr, "reprcrash"):
- message = report.longrepr.reprcrash.message
- elif isinstance(report.longrepr, (unicode, str)):
- message = report.longrepr
- else:
- message = str(report.longrepr)
- message = bin_xml_escape(message)
- fail = Junit.failure(message=message)
- fail.append(bin_xml_escape(report.longrepr))
- self.append(fail)
-
- def append_collect_error(self, report):
- # msg = str(report.longrepr.reprtraceback.extraline)
- self.append(Junit.error(bin_xml_escape(report.longrepr),
- message="collection failure"))
-
- def append_collect_skipped(self, report):
- self._add_simple(
- Junit.skipped, "collection skipped", report.longrepr)
-
- def append_error(self, report):
- if getattr(report, 'when', None) == 'teardown':
- msg = "test teardown failure"
- else:
- msg = "test setup failure"
- self._add_simple(
- Junit.error, msg, report.longrepr)
-
- def append_skipped(self, report):
- if hasattr(report, "wasxfail"):
- self._add_simple(
- Junit.skipped, "expected test failure", report.wasxfail
- )
- else:
- filename, lineno, skipreason = report.longrepr
- if skipreason.startswith("Skipped: "):
- skipreason = bin_xml_escape(skipreason[9:])
- self.append(
- Junit.skipped("%s:%s: %s" % (filename, lineno, skipreason),
- type="pytest.skip",
- message=skipreason))
- self.write_captured_output(report)
-
- def finalize(self):
- data = self.to_xml().unicode(indent=0)
- self.__dict__.clear()
- self.to_xml = lambda: py.xml.raw(data)
-
-
-@pytest.fixture
-def record_xml_property(request):
- """Add extra xml properties to the tag for the calling test.
- The fixture is callable with ``(name, value)``, with value being automatically
- xml-encoded.
- """
- request.node.warn(
- code='C3',
- message='record_xml_property is an experimental feature',
- )
- xml = getattr(request.config, "_xml", None)
- if xml is not None:
- node_reporter = xml.node_reporter(request.node.nodeid)
- return node_reporter.add_property
- else:
- def add_property_noop(name, value):
- pass
-
- return add_property_noop
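
From a test, the fixture is simply called with a name/value pair (do_login is a hypothetical helper, shown only for illustration):

    def test_login(record_xml_property):
        record_xml_property("requirement", "REQ-42")  # emitted as a <property> tag
        assert do_login() == "ok"                     # do_login: hypothetical
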
-
-
-def pytest_addoption(parser):
- group = parser.getgroup("terminal reporting")
- group.addoption(
- '--junitxml', '--junit-xml',
- action="store",
- dest="xmlpath",
- metavar="path",
- type=functools.partial(filename_arg, optname="--junitxml"),
- default=None,
- help="create junit-xml style report file at given path.")
- group.addoption(
- '--junitprefix', '--junit-prefix',
- action="store",
- metavar="str",
- default=None,
- help="prepend prefix to classnames in junit-xml output")
- parser.addini("junit_suite_name", "Test suite name for JUnit report", default="pytest")
-
-
-def pytest_configure(config):
- xmlpath = config.option.xmlpath
- # prevent opening xmllog on slave nodes (xdist)
- if xmlpath and not hasattr(config, 'slaveinput'):
- config._xml = LogXML(xmlpath, config.option.junitprefix, config.getini("junit_suite_name"))
- config.pluginmanager.register(config._xml)
-
-
-def pytest_unconfigure(config):
- xml = getattr(config, '_xml', None)
- if xml:
- del config._xml
- config.pluginmanager.unregister(xml)
-
-
-def mangle_test_address(address):
- path, possible_open_bracket, params = address.partition('[')
- names = path.split("::")
- try:
- names.remove('()')
- except ValueError:
- pass
- # convert file path to dotted path
- names[0] = names[0].replace(nodes.SEP, '.')
- names[0] = _py_ext_re.sub("", names[0])
- # put any params back
- names[-1] += possible_open_bracket + params
- return names
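
A condensed, runnable re-run of the same steps on a hypothetical nodeid:

    address = "tests/sub/test_mod.py::TestClass::()::test_it[param]"
    path, bracket, params = address.partition("[")
    names = [n for n in path.split("::") if n != "()"]
    names[0] = names[0].replace("/", ".").rsplit(".py", 1)[0]  # dotted module path
    names[-1] += bracket + params                              # re-attach params
    assert names == ["tests.sub.test_mod", "TestClass", "test_it[param]"]
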
-
-
-class LogXML(object):
- def __init__(self, logfile, prefix, suite_name="pytest"):
- logfile = os.path.expanduser(os.path.expandvars(logfile))
- self.logfile = os.path.normpath(os.path.abspath(logfile))
- self.prefix = prefix
- self.suite_name = suite_name
- self.stats = dict.fromkeys([
- 'error',
- 'passed',
- 'failure',
- 'skipped',
- ], 0)
- self.node_reporters = {} # nodeid -> _NodeReporter
- self.node_reporters_ordered = []
- self.global_properties = []
- # List of reports that failed on call but teardown is pending.
- self.open_reports = []
- self.cnt_double_fail_tests = 0
-
- def finalize(self, report):
- nodeid = getattr(report, 'nodeid', report)
- # local hack to handle xdist report order
- slavenode = getattr(report, 'node', None)
- reporter = self.node_reporters.pop((nodeid, slavenode))
- if reporter is not None:
- reporter.finalize()
-
- def node_reporter(self, report):
- nodeid = getattr(report, 'nodeid', report)
- # local hack to handle xdist report order
- slavenode = getattr(report, 'node', None)
-
- key = nodeid, slavenode
-
- if key in self.node_reporters:
-            # TODO: breaks for --dist=each
- return self.node_reporters[key]
-
- reporter = _NodeReporter(nodeid, self)
-
- self.node_reporters[key] = reporter
- self.node_reporters_ordered.append(reporter)
-
- return reporter
-
- def add_stats(self, key):
- if key in self.stats:
- self.stats[key] += 1
-
- def _opentestcase(self, report):
- reporter = self.node_reporter(report)
- reporter.record_testreport(report)
- return reporter
-
- def pytest_runtest_logreport(self, report):
- """handle a setup/call/teardown report, generating the appropriate
- xml tags as necessary.
-
- note: due to plugins like xdist, this hook may be called in interlaced
- order with reports from other nodes. for example:
-
- usual call order:
- -> setup node1
- -> call node1
- -> teardown node1
- -> setup node2
- -> call node2
- -> teardown node2
-
- possible call order in xdist:
- -> setup node1
- -> call node1
- -> setup node2
- -> call node2
- -> teardown node2
- -> teardown node1
- """
- close_report = None
- if report.passed:
- if report.when == "call": # ignore setup/teardown
- reporter = self._opentestcase(report)
- reporter.append_pass(report)
- elif report.failed:
- if report.when == "teardown":
- # The following vars are needed when xdist plugin is used
- report_wid = getattr(report, "worker_id", None)
- report_ii = getattr(report, "item_index", None)
- close_report = next(
- (rep for rep in self.open_reports
- if (rep.nodeid == report.nodeid and
- getattr(rep, "item_index", None) == report_ii and
- getattr(rep, "worker_id", None) == report_wid
- )
- ), None)
- if close_report:
- # We need to open new testcase in case we have failure in
- # call and error in teardown in order to follow junit
- # schema
- self.finalize(close_report)
- self.cnt_double_fail_tests += 1
- reporter = self._opentestcase(report)
- if report.when == "call":
- reporter.append_failure(report)
- self.open_reports.append(report)
- else:
- reporter.append_error(report)
- elif report.skipped:
- reporter = self._opentestcase(report)
- reporter.append_skipped(report)
- self.update_testcase_duration(report)
- if report.when == "teardown":
- reporter = self._opentestcase(report)
- reporter.write_captured_output(report)
- self.finalize(report)
- report_wid = getattr(report, "worker_id", None)
- report_ii = getattr(report, "item_index", None)
- close_report = next(
- (rep for rep in self.open_reports
- if (rep.nodeid == report.nodeid and
- getattr(rep, "item_index", None) == report_ii and
- getattr(rep, "worker_id", None) == report_wid
- )
- ), None)
- if close_report:
- self.open_reports.remove(close_report)
-
- def update_testcase_duration(self, report):
- """accumulates total duration for nodeid from given report and updates
- the Junit.testcase with the new total if already created.
- """
- reporter = self.node_reporter(report)
- reporter.duration += getattr(report, 'duration', 0.0)
-
- def pytest_collectreport(self, report):
- if not report.passed:
- reporter = self._opentestcase(report)
- if report.failed:
- reporter.append_collect_error(report)
- else:
- reporter.append_collect_skipped(report)
-
- def pytest_internalerror(self, excrepr):
- reporter = self.node_reporter('internal')
- reporter.attrs.update(classname="pytest", name='internal')
- reporter._add_simple(Junit.error, 'internal error', excrepr)
-
- def pytest_sessionstart(self):
- self.suite_start_time = time.time()
-
- def pytest_sessionfinish(self):
- dirname = os.path.dirname(os.path.abspath(self.logfile))
- if not os.path.isdir(dirname):
- os.makedirs(dirname)
- logfile = open(self.logfile, 'w', encoding='utf-8')
- suite_stop_time = time.time()
- suite_time_delta = suite_stop_time - self.suite_start_time
-
- numtests = (self.stats['passed'] + self.stats['failure'] +
- self.stats['skipped'] + self.stats['error'] -
- self.cnt_double_fail_tests)
- logfile.write('<?xml version="1.0" encoding="utf-8"?>')
-
- logfile.write(Junit.testsuite(
- self._get_global_properties_node(),
- [x.to_xml() for x in self.node_reporters_ordered],
- name=self.suite_name,
- errors=self.stats['error'],
- failures=self.stats['failure'],
- skips=self.stats['skipped'],
- tests=numtests,
- time="%.3f" % suite_time_delta, ).unicode(indent=0))
- logfile.close()
-
- def pytest_terminal_summary(self, terminalreporter):
- terminalreporter.write_sep("-",
- "generated xml file: %s" % (self.logfile))
-
- def add_global_property(self, name, value):
- self.global_properties.append((str(name), bin_xml_escape(value)))
-
- def _get_global_properties_node(self):
- """Return a Junit node containing custom properties, if any.
- """
- if self.global_properties:
- return Junit.properties(
- [
- Junit.property(name=name, value=value)
- for name, value in self.global_properties
- ]
- )
- return ''
diff --git a/lib/spack/external/pytest-fallback/_pytest/main.py b/lib/spack/external/pytest-fallback/_pytest/main.py
deleted file mode 100644
index 98aa28eb34..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/main.py
+++ /dev/null
@@ -1,838 +0,0 @@
-""" core implementation of testing process: init, session, runtest loop. """
-from __future__ import absolute_import, division, print_function
-
-import functools
-import os
-import sys
-
-import _pytest
-from _pytest import nodes
-import _pytest._code
-import py
-try:
- from collections.abc import MutableMapping as MappingMixin
-except ImportError:
- try:
- from collections import MutableMapping as MappingMixin
- except ImportError:
- from UserDict import DictMixin as MappingMixin
-
-from _pytest.config import directory_arg, UsageError, hookimpl
-from _pytest.outcomes import exit
-from _pytest.runner import collect_one_node
-
-tracebackcutdir = py.path.local(_pytest.__file__).dirpath()
-
-# exitcodes for the command line
-EXIT_OK = 0
-EXIT_TESTSFAILED = 1
-EXIT_INTERRUPTED = 2
-EXIT_INTERNALERROR = 3
-EXIT_USAGEERROR = 4
-EXIT_NOTESTSCOLLECTED = 5
-
-
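
These constants are what a caller of pytest.main() gets back; branching on them is the usual pattern (the test path here is hypothetical):

    import pytest

    code = pytest.main(["-x", "tests/"])
    if code == 0:          # EXIT_OK
        print("all tests passed")
    elif code == 5:        # EXIT_NOTESTSCOLLECTED
        print("no tests were collected")
    else:
        print("pytest exited with code %d" % code)
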
-def pytest_addoption(parser):
- parser.addini("norecursedirs", "directory patterns to avoid for recursion",
- type="args", default=['.*', 'build', 'dist', 'CVS', '_darcs', '{arch}', '*.egg', 'venv'])
- parser.addini("testpaths", "directories to search for tests when no files or directories are given in the "
- "command line.",
- type="args", default=[])
- # parser.addini("dirpatterns",
- # "patterns specifying possible locations of test files",
- # type="linelist", default=["**/test_*.txt",
- # "**/test_*.py", "**/*_test.py"]
- # )
- group = parser.getgroup("general", "running and selection options")
- group._addoption('-x', '--exitfirst', action="store_const",
- dest="maxfail", const=1,
-                     help="exit instantly on first error or failed test.")
- group._addoption('--maxfail', metavar="num",
- action="store", type=int, dest="maxfail", default=0,
- help="exit after first num failures or errors.")
- group._addoption('--strict', action="store_true",
- help="marks not registered in configuration file raise errors.")
- group._addoption("-c", metavar="file", type=str, dest="inifilename",
- help="load configuration from `file` instead of trying to locate one of the implicit "
- "configuration files.")
- group._addoption("--continue-on-collection-errors", action="store_true",
- default=False, dest="continue_on_collection_errors",
- help="Force test execution even if collection errors occur.")
-
- group = parser.getgroup("collect", "collection")
- group.addoption('--collectonly', '--collect-only', action="store_true",
-                    help="only collect tests, don't execute them.")
- group.addoption('--pyargs', action="store_true",
- help="try to interpret all arguments as python packages.")
- group.addoption("--ignore", action="append", metavar="path",
- help="ignore path during collection (multi-allowed).")
- # when changing this to --conf-cut-dir, config.py Conftest.setinitial
- # needs upgrading as well
- group.addoption('--confcutdir', dest="confcutdir", default=None,
- metavar="dir", type=functools.partial(directory_arg, optname="--confcutdir"),
- help="only load conftest.py's relative to specified dir.")
- group.addoption('--noconftest', action="store_true",
- dest="noconftest", default=False,
- help="Don't load any conftest.py files.")
- group.addoption('--keepduplicates', '--keep-duplicates', action="store_true",
- dest="keepduplicates", default=False,
- help="Keep duplicate tests.")
- group.addoption('--collect-in-virtualenv', action='store_true',
- dest='collect_in_virtualenv', default=False,
- help="Don't ignore tests in a local virtualenv directory")
-
- group = parser.getgroup("debugconfig",
- "test session debugging and configuration")
- group.addoption('--basetemp', dest="basetemp", default=None, metavar="dir",
- help="base temporary directory for this test run.")
-
-
-def pytest_namespace():
- """keeping this one works around a deeper startup issue in pytest
-
-    I tried to find it for a while, but the amount of time turned unsustainable,
-    so I put a hack in to revisit later.
- """
- return {}
-
-
-def pytest_configure(config):
-    __import__('pytest').config = config  # compatibility
-
-
-def wrap_session(config, doit):
- """Skeleton command line program"""
- session = Session(config)
- session.exitstatus = EXIT_OK
- initstate = 0
- try:
- try:
- config._do_configure()
- initstate = 1
- config.hook.pytest_sessionstart(session=session)
- initstate = 2
- session.exitstatus = doit(config, session) or 0
- except UsageError:
- raise
- except KeyboardInterrupt:
- excinfo = _pytest._code.ExceptionInfo()
- if initstate < 2 and isinstance(excinfo.value, exit.Exception):
- sys.stderr.write('{0}: {1}\n'.format(
- excinfo.typename, excinfo.value.msg))
- config.hook.pytest_keyboard_interrupt(excinfo=excinfo)
- session.exitstatus = EXIT_INTERRUPTED
- except: # noqa
- excinfo = _pytest._code.ExceptionInfo()
- config.notify_exception(excinfo, config.option)
- session.exitstatus = EXIT_INTERNALERROR
- if excinfo.errisinstance(SystemExit):
- sys.stderr.write("mainloop: caught Spurious SystemExit!\n")
-
- finally:
- excinfo = None # Explicitly break reference cycle.
- session.startdir.chdir()
- if initstate >= 2:
- config.hook.pytest_sessionfinish(
- session=session,
- exitstatus=session.exitstatus)
- config._ensure_unconfigure()
- return session.exitstatus
-
-
-def pytest_cmdline_main(config):
- return wrap_session(config, _main)
-
-
-def _main(config, session):
- """ default command line protocol for initialization, session,
- running tests and reporting. """
- config.hook.pytest_collection(session=session)
- config.hook.pytest_runtestloop(session=session)
-
- if session.testsfailed:
- return EXIT_TESTSFAILED
- elif session.testscollected == 0:
- return EXIT_NOTESTSCOLLECTED
-
-
-def pytest_collection(session):
- return session.perform_collect()
-
-
-def pytest_runtestloop(session):
- if (session.testsfailed and
- not session.config.option.continue_on_collection_errors):
- raise session.Interrupted(
- "%d errors during collection" % session.testsfailed)
-
- if session.config.option.collectonly:
- return True
-
- for i, item in enumerate(session.items):
- nextitem = session.items[i + 1] if i + 1 < len(session.items) else None
- item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
- if session.shouldstop:
- raise session.Interrupted(session.shouldstop)
- return True
-
-
-def _in_venv(path):
- """Attempts to detect if ``path`` is the root of a Virtual Environment by
- checking for the existence of the appropriate activate script"""
- bindir = path.join('Scripts' if sys.platform.startswith('win') else 'bin')
- if not bindir.exists():
- return False
- activates = ('activate', 'activate.csh', 'activate.fish',
- 'Activate', 'Activate.bat', 'Activate.ps1')
-    return any(fname.basename in activates for fname in bindir.listdir())
-
-
-def pytest_ignore_collect(path, config):
- ignore_paths = config._getconftest_pathlist("collect_ignore", path=path.dirpath())
- ignore_paths = ignore_paths or []
- excludeopt = config.getoption("ignore")
- if excludeopt:
- ignore_paths.extend([py.path.local(x) for x in excludeopt])
-
- if py.path.local(path) in ignore_paths:
- return True
-
- allow_in_venv = config.getoption("collect_in_virtualenv")
- if _in_venv(path) and not allow_in_venv:
- return True
-
- # Skip duplicate paths.
- keepduplicates = config.getoption("keepduplicates")
- duplicate_paths = config.pluginmanager._duplicatepaths
- if not keepduplicates:
- if path in duplicate_paths:
- return True
- else:
- duplicate_paths.add(path)
-
- return False
-
-
-class FSHookProxy:
- def __init__(self, fspath, pm, remove_mods):
- self.fspath = fspath
- self.pm = pm
- self.remove_mods = remove_mods
-
- def __getattr__(self, name):
- x = self.pm.subset_hook_caller(name, remove_plugins=self.remove_mods)
- self.__dict__[name] = x
- return x
-
-
-class _CompatProperty(object):
- def __init__(self, name):
- self.name = name
-
- def __get__(self, obj, owner):
- if obj is None:
- return self
-
- # TODO: reenable in the features branch
- # warnings.warn(
- # "usage of {owner!r}.{name} is deprecated, please use pytest.{name} instead".format(
- # name=self.name, owner=type(owner).__name__),
- # PendingDeprecationWarning, stacklevel=2)
- return getattr(__import__('pytest'), self.name)
-
-
-class NodeKeywords(MappingMixin):
- def __init__(self, node):
- self.node = node
- self.parent = node.parent
- self._markers = {node.name: True}
-
- def __getitem__(self, key):
- try:
- return self._markers[key]
- except KeyError:
- if self.parent is None:
- raise
- return self.parent.keywords[key]
-
- def __setitem__(self, key, value):
- self._markers[key] = value
-
- def __delitem__(self, key):
- raise ValueError("cannot delete key in keywords dict")
-
- def __iter__(self):
- seen = set(self._markers)
- if self.parent is not None:
- seen.update(self.parent.keywords)
- return iter(seen)
-
- def __len__(self):
-        return len(list(self.__iter__()))
-
- def keys(self):
- return list(self)
-
- def __repr__(self):
- return "<NodeKeywords for node %s>" % (self.node, )
-
-
-class Node(object):
- """ base class for Collector and Item the test collection tree.
- Collector subclasses have children, Items are terminal nodes."""
-
- def __init__(self, name, parent=None, config=None, session=None):
- #: a unique name within the scope of the parent node
- self.name = name
-
- #: the parent collector node.
- self.parent = parent
-
- #: the pytest config object
- self.config = config or parent.config
-
- #: the session this node is part of
- self.session = session or parent.session
-
- #: filesystem path where this node was collected from (can be None)
- self.fspath = getattr(parent, 'fspath', None)
-
- #: keywords/markers collected from all scopes
- self.keywords = NodeKeywords(self)
-
- #: allow adding of extra keywords to use for matching
- self.extra_keyword_matches = set()
-
- # used for storing artificial fixturedefs for direct parametrization
- self._name2pseudofixturedef = {}
-
- @property
- def ihook(self):
- """ fspath sensitive hook proxy used to call pytest hooks"""
- return self.session.gethookproxy(self.fspath)
-
- Module = _CompatProperty("Module")
- Class = _CompatProperty("Class")
- Instance = _CompatProperty("Instance")
- Function = _CompatProperty("Function")
- File = _CompatProperty("File")
- Item = _CompatProperty("Item")
-
- def _getcustomclass(self, name):
- maybe_compatprop = getattr(type(self), name)
- if isinstance(maybe_compatprop, _CompatProperty):
- return getattr(__import__('pytest'), name)
- else:
- cls = getattr(self, name)
- # TODO: reenable in the features branch
- # warnings.warn("use of node.%s is deprecated, "
- # "use pytest_pycollect_makeitem(...) to create custom "
- # "collection nodes" % name, category=DeprecationWarning)
- return cls
-
- def __repr__(self):
- return "<%s %r>" % (self.__class__.__name__,
- getattr(self, 'name', None))
-
- def warn(self, code, message):
- """ generate a warning with the given code and message for this
- item. """
- assert isinstance(code, str)
- fslocation = getattr(self, "location", None)
- if fslocation is None:
- fslocation = getattr(self, "fspath", None)
- self.ihook.pytest_logwarning.call_historic(kwargs=dict(
- code=code, message=message,
- nodeid=self.nodeid, fslocation=fslocation))
-
- # methods for ordering nodes
- @property
- def nodeid(self):
- """ a ::-separated string denoting its collection tree address. """
- try:
- return self._nodeid
- except AttributeError:
- self._nodeid = x = self._makeid()
- return x
-
- def _makeid(self):
- return self.parent.nodeid + "::" + self.name
-
- def __hash__(self):
- return hash(self.nodeid)
-
- def setup(self):
- pass
-
- def teardown(self):
- pass
-
- def _memoizedcall(self, attrname, function):
- exattrname = "_ex_" + attrname
- failure = getattr(self, exattrname, None)
- if failure is not None:
- py.builtin._reraise(failure[0], failure[1], failure[2])
- if hasattr(self, attrname):
- return getattr(self, attrname)
- try:
- res = function()
- except py.builtin._sysex:
- raise
- except: # noqa
- failure = sys.exc_info()
- setattr(self, exattrname, failure)
- raise
- setattr(self, attrname, res)
- return res
-
- def listchain(self):
- """ return list of all parent collectors up to self,
- starting from root of collection tree. """
- chain = []
- item = self
- while item is not None:
- chain.append(item)
- item = item.parent
- chain.reverse()
- return chain
-
- def add_marker(self, marker):
- """ dynamically add a marker object to the node.
-
- ``marker`` can be a string or pytest.mark.* instance.
- """
- from _pytest.mark import MarkDecorator, MARK_GEN
- if isinstance(marker, py.builtin._basestring):
- marker = getattr(MARK_GEN, marker)
- elif not isinstance(marker, MarkDecorator):
- raise ValueError("is not a string or pytest.mark.* Marker")
- self.keywords[marker.name] = marker
-
- def get_marker(self, name):
- """ get a marker object from this node or None if
- the node doesn't have a marker with that name. """
- val = self.keywords.get(name, None)
- if val is not None:
- from _pytest.mark import MarkInfo, MarkDecorator
- if isinstance(val, (MarkDecorator, MarkInfo)):
- return val
-
- def listextrakeywords(self):
- """ Return a set of all extra keywords in self and any parents."""
- extra_keywords = set()
- item = self
- for item in self.listchain():
- extra_keywords.update(item.extra_keyword_matches)
- return extra_keywords
-
- def listnames(self):
- return [x.name for x in self.listchain()]
-
- def addfinalizer(self, fin):
- """ register a function to be called when this node is finalized.
-
- This method can only be called when this node is active
- in a setup chain, for example during self.setup().
- """
- self.session._setupstate.addfinalizer(fin, self)
-
- def getparent(self, cls):
- """ get the next parent node (including ourself)
- which is an instance of the given class"""
- current = self
- while current and not isinstance(current, cls):
- current = current.parent
- return current
-
- def _prunetraceback(self, excinfo):
- pass
-
- def _repr_failure_py(self, excinfo, style=None):
- fm = self.session._fixturemanager
- if excinfo.errisinstance(fm.FixtureLookupError):
- return excinfo.value.formatrepr()
- tbfilter = True
- if self.config.option.fulltrace:
- style = "long"
- else:
- tb = _pytest._code.Traceback([excinfo.traceback[-1]])
- self._prunetraceback(excinfo)
- if len(excinfo.traceback) == 0:
- excinfo.traceback = tb
- tbfilter = False # prunetraceback already does it
- if style == "auto":
- style = "long"
- # XXX should excinfo.getrepr record all data and toterminal() process it?
- if style is None:
- if self.config.option.tbstyle == "short":
- style = "short"
- else:
- style = "long"
-
- try:
- os.getcwd()
- abspath = False
- except OSError:
- abspath = True
-
- return excinfo.getrepr(funcargs=True, abspath=abspath,
- showlocals=self.config.option.showlocals,
- style=style, tbfilter=tbfilter)
-
- repr_failure = _repr_failure_py
-
-
-class Collector(Node):
- """ Collector instances create children through collect()
- and thus iteratively build a tree.
- """
-
- class CollectError(Exception):
- """ an error during collection, contains a custom message. """
-
- def collect(self):
- """ returns a list of children (items and collectors)
- for this collection node.
- """
- raise NotImplementedError("abstract")
-
- def repr_failure(self, excinfo):
- """ represent a collection failure. """
- if excinfo.errisinstance(self.CollectError):
- exc = excinfo.value
- return str(exc.args[0])
- return self._repr_failure_py(excinfo, style="short")
-
- def _prunetraceback(self, excinfo):
- if hasattr(self, 'fspath'):
- traceback = excinfo.traceback
- ntraceback = traceback.cut(path=self.fspath)
- if ntraceback == traceback:
- ntraceback = ntraceback.cut(excludepath=tracebackcutdir)
- excinfo.traceback = ntraceback.filter()
-
-
-class FSCollector(Collector):
- def __init__(self, fspath, parent=None, config=None, session=None):
- fspath = py.path.local(fspath) # xxx only for test_resultlog.py?
- name = fspath.basename
- if parent is not None:
- rel = fspath.relto(parent.fspath)
- if rel:
- name = rel
- name = name.replace(os.sep, nodes.SEP)
- super(FSCollector, self).__init__(name, parent, config, session)
- self.fspath = fspath
-
- def _check_initialpaths_for_relpath(self):
- for initialpath in self.session._initialpaths:
- if self.fspath.common(initialpath) == initialpath:
- return self.fspath.relto(initialpath.dirname)
-
- def _makeid(self):
- relpath = self.fspath.relto(self.config.rootdir)
-
- if not relpath:
- relpath = self._check_initialpaths_for_relpath()
- if os.sep != nodes.SEP:
- relpath = relpath.replace(os.sep, nodes.SEP)
- return relpath
-
-
-class File(FSCollector):
- """ base class for collecting tests from a file. """
-
-
-class Item(Node):
- """ a basic test invocation item. Note that for a single function
- there might be multiple test invocation items.
- """
- nextitem = None
-
- def __init__(self, name, parent=None, config=None, session=None):
- super(Item, self).__init__(name, parent, config, session)
- self._report_sections = []
-
- def add_report_section(self, when, key, content):
- """
- Adds a new report section, similar to what's done internally to add stdout and
- stderr captured output::
-
- item.add_report_section("call", "stdout", "report section contents")
-
- :param str when:
- One of the possible capture states, ``"setup"``, ``"call"``, ``"teardown"``.
- :param str key:
- Name of the section, can be customized at will. Pytest uses ``"stdout"`` and
- ``"stderr"`` internally.
-
- :param str content:
- The full contents as a string.
- """
- if content:
- self._report_sections.append((when, key, content))
-
- def reportinfo(self):
- return self.fspath, None, ""
-
- @property
- def location(self):
- try:
- return self._location
- except AttributeError:
- location = self.reportinfo()
- # bestrelpath is a quite slow function
- cache = self.config.__dict__.setdefault("_bestrelpathcache", {})
- try:
- fspath = cache[location[0]]
- except KeyError:
- fspath = self.session.fspath.bestrelpath(location[0])
- cache[location[0]] = fspath
- location = (fspath, location[1], str(location[2]))
- self._location = location
- return location
-
-
-class NoMatch(Exception):
- """ raised if matching cannot locate a matching names. """
-
-
-class Interrupted(KeyboardInterrupt):
- """ signals an interrupted test run. """
- __module__ = 'builtins' # for py3
-
-
-class Session(FSCollector):
- Interrupted = Interrupted
-
- def __init__(self, config):
- FSCollector.__init__(self, config.rootdir, parent=None,
- config=config, session=self)
- self.testsfailed = 0
- self.testscollected = 0
- self.shouldstop = False
- self.trace = config.trace.root.get("collection")
- self._norecursepatterns = config.getini("norecursedirs")
- self.startdir = py.path.local()
- self.config.pluginmanager.register(self, name="session")
-
- def _makeid(self):
- return ""
-
- @hookimpl(tryfirst=True)
- def pytest_collectstart(self):
- if self.shouldstop:
- raise self.Interrupted(self.shouldstop)
-
- @hookimpl(tryfirst=True)
- def pytest_runtest_logreport(self, report):
- if report.failed and not hasattr(report, 'wasxfail'):
- self.testsfailed += 1
- maxfail = self.config.getvalue("maxfail")
- if maxfail and self.testsfailed >= maxfail:
- self.shouldstop = "stopping after %d failures" % (
- self.testsfailed)
- pytest_collectreport = pytest_runtest_logreport
-
- def isinitpath(self, path):
- return path in self._initialpaths
-
- def gethookproxy(self, fspath):
- # check if we have the common case of running
-        # hooks with all conftest.py files
- pm = self.config.pluginmanager
- my_conftestmodules = pm._getconftestmodules(fspath)
- remove_mods = pm._conftest_plugins.difference(my_conftestmodules)
- if remove_mods:
- # one or more conftests are not in use at this fspath
- proxy = FSHookProxy(fspath, pm, remove_mods)
- else:
-            # all plugins are active for this fspath
- proxy = self.config.hook
- return proxy
-
- def perform_collect(self, args=None, genitems=True):
- hook = self.config.hook
- try:
- items = self._perform_collect(args, genitems)
- self.config.pluginmanager.check_pending()
- hook.pytest_collection_modifyitems(session=self,
- config=self.config, items=items)
- finally:
- hook.pytest_collection_finish(session=self)
- self.testscollected = len(items)
- return items
-
- def _perform_collect(self, args, genitems):
- if args is None:
- args = self.config.args
- self.trace("perform_collect", self, args)
- self.trace.root.indent += 1
- self._notfound = []
- self._initialpaths = set()
- self._initialparts = []
- self.items = items = []
- for arg in args:
- parts = self._parsearg(arg)
- self._initialparts.append(parts)
- self._initialpaths.add(parts[0])
- rep = collect_one_node(self)
- self.ihook.pytest_collectreport(report=rep)
- self.trace.root.indent -= 1
- if self._notfound:
- errors = []
- for arg, exc in self._notfound:
- line = "(no name %r in any of %r)" % (arg, exc.args[0])
- errors.append("not found: %s\n%s" % (arg, line))
- # XXX: test this
- raise UsageError(*errors)
- if not genitems:
- return rep.result
- else:
- if rep.passed:
- for node in rep.result:
- self.items.extend(self.genitems(node))
- return items
-
- def collect(self):
- for parts in self._initialparts:
- arg = "::".join(map(str, parts))
- self.trace("processing argument", arg)
- self.trace.root.indent += 1
- try:
- for x in self._collect(arg):
- yield x
- except NoMatch:
- # we are inside a make_report hook so
- # we cannot directly pass through the exception
- self._notfound.append((arg, sys.exc_info()[1]))
-
- self.trace.root.indent -= 1
-
- def _collect(self, arg):
- names = self._parsearg(arg)
- path = names.pop(0)
- if path.check(dir=1):
- assert not names, "invalid arg %r" % (arg,)
- for path in path.visit(fil=lambda x: x.check(file=1),
- rec=self._recurse, bf=True, sort=True):
- for x in self._collectfile(path):
- yield x
- else:
- assert path.check(file=1)
- for x in self.matchnodes(self._collectfile(path), names):
- yield x
-
- def _collectfile(self, path):
- ihook = self.gethookproxy(path)
- if not self.isinitpath(path):
- if ihook.pytest_ignore_collect(path=path, config=self.config):
- return ()
- return ihook.pytest_collect_file(path=path, parent=self)
-
- def _recurse(self, path):
- ihook = self.gethookproxy(path.dirpath())
- if ihook.pytest_ignore_collect(path=path, config=self.config):
- return
- for pat in self._norecursepatterns:
- if path.check(fnmatch=pat):
- return False
- ihook = self.gethookproxy(path)
- ihook.pytest_collect_directory(path=path, parent=self)
- return True
-
- def _tryconvertpyarg(self, x):
- """Convert a dotted module name to path.
-
- """
- import pkgutil
- try:
- loader = pkgutil.find_loader(x)
- except ImportError:
- return x
- if loader is None:
- return x
- # This method is sometimes invoked when AssertionRewritingHook, which
- # does not define a get_filename method, is already in place:
- try:
- path = loader.get_filename(x)
- except AttributeError:
- # Retrieve path from AssertionRewritingHook:
- path = loader.modules[x][0].co_filename
- if loader.is_package(x):
- path = os.path.dirname(path)
- return path
-
- def _parsearg(self, arg):
- """ return (fspath, names) tuple after checking the file exists. """
- parts = str(arg).split("::")
- if self.config.option.pyargs:
- parts[0] = self._tryconvertpyarg(parts[0])
- relpath = parts[0].replace("/", os.sep)
- path = self.config.invocation_dir.join(relpath, abs=True)
- if not path.check():
- if self.config.option.pyargs:
- raise UsageError(
- "file or package not found: " + arg +
- " (missing __init__.py?)")
- else:
- raise UsageError("file not found: " + arg)
- parts[0] = path
- return parts
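
For clarity, the `::`-separated argument syntax that _parsearg splits can be
illustrated in isolation (pure string behavior; the path and names below are
hypothetical):

    arg = "tests/test_math.py::TestAdd::test_zero"
    parts = arg.split("::")
    assert parts == ["tests/test_math.py", "TestAdd", "test_zero"]
    # parts[0] becomes the filesystem path checked above; the remaining
    # names are consumed later by matchnodes() during collection
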
-
- def matchnodes(self, matching, names):
- self.trace("matchnodes", matching, names)
- self.trace.root.indent += 1
- nodes = self._matchnodes(matching, names)
- num = len(nodes)
- self.trace("matchnodes finished -> ", num, "nodes")
- self.trace.root.indent -= 1
- if num == 0:
- raise NoMatch(matching, names[:1])
- return nodes
-
- def _matchnodes(self, matching, names):
- if not matching or not names:
- return matching
- name = names[0]
- assert name
- nextnames = names[1:]
- resultnodes = []
- for node in matching:
- if isinstance(node, Item):
- if not names:
- resultnodes.append(node)
- continue
- assert isinstance(node, Collector)
- rep = collect_one_node(node)
- if rep.passed:
- has_matched = False
- for x in rep.result:
- # TODO: remove parametrized workaround once collection structure contains parametrization
- if x.name == name or x.name.split("[")[0] == name:
- resultnodes.extend(self.matchnodes([x], nextnames))
- has_matched = True
- # XXX accept IDs that don't have "()" for class instances
- if not has_matched and len(rep.result) == 1 and x.name == "()":
- nextnames.insert(0, name)
- resultnodes.extend(self.matchnodes([x], nextnames))
- else:
- # report collection failures here to avoid failing to run some test
- # specified in the command line because the module could not be
- # imported (#134)
- node.ihook.pytest_collectreport(report=rep)
- return resultnodes
-
- def genitems(self, node):
- self.trace("genitems", node)
- if isinstance(node, Item):
- node.ihook.pytest_itemcollected(item=node)
- yield node
- else:
- assert isinstance(node, Collector)
- rep = collect_one_node(node)
- if rep.passed:
- for subnode in rep.result:
- for x in self.genitems(subnode):
- yield x
- node.ihook.pytest_collectreport(report=rep)
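
For reference, the parent-chained keyword lookup that NodeKeywords (deleted
above) implements can be sketched standalone; ChainedKeywords below is a
hypothetical stand-in, not the vendored class:

    class ChainedKeywords:
        def __init__(self, own, parent=None):
            self._own = own          # markers set directly on this node
            self._parent = parent    # enclosing node's keywords, or None

        def __getitem__(self, key):
            try:
                return self._own[key]
            except KeyError:
                if self._parent is None:
                    raise
                return self._parent[key]   # fall back to the parent chain

    module_kw = ChainedKeywords({"test_module.py": True})
    func_kw = ChainedKeywords({"test_answer": True, "slow": True},
                              parent=module_kw)
    assert func_kw["slow"]              # found on the node itself
    assert func_kw["test_module.py"]    # resolved through the parent

This mirrors how a test function's keywords include its module's and class's
names, which is what lets "-k" expressions match on parent names.
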
diff --git a/lib/spack/external/pytest-fallback/_pytest/mark.py b/lib/spack/external/pytest-fallback/_pytest/mark.py
deleted file mode 100644
index 454722ca2c..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/mark.py
+++ /dev/null
@@ -1,465 +0,0 @@
-""" generic mechanism for marking and selecting python functions. """
-from __future__ import absolute_import, division, print_function
-
-import inspect
-import warnings
-from collections import namedtuple
-from operator import attrgetter
-from .compat import imap
-from .deprecated import MARK_PARAMETERSET_UNPACKING
-
-
-def alias(name, warning=None):
- getter = attrgetter(name)
-
- def warned(self):
- warnings.warn(warning, stacklevel=2)
- return getter(self)
-
- return property(getter if warning is None else warned, doc='alias for ' + name)
-
-
-class ParameterSet(namedtuple('ParameterSet', 'values, marks, id')):
- @classmethod
- def param(cls, *values, **kw):
- marks = kw.pop('marks', ())
- if isinstance(marks, MarkDecorator):
- marks = marks,
- else:
- assert isinstance(marks, (tuple, list, set))
-
- def param_extract_id(id=None):
- return id
-
- id = param_extract_id(**kw)
- return cls(values, marks, id)
-
- @classmethod
- def extract_from(cls, parameterset, legacy_force_tuple=False):
- """
- :param parameterset:
- a legacy style parameterset that may or may not be a tuple,
- and may or may not be wrapped into a mess of mark objects
-
- :param legacy_force_tuple:
- enforce tuple wrapping so single argument tuple values
- don't get decomposed and break tests
-
- """
-
- if isinstance(parameterset, cls):
- return parameterset
- if not isinstance(parameterset, MarkDecorator) and legacy_force_tuple:
- return cls.param(parameterset)
-
- newmarks = []
- argval = parameterset
- while isinstance(argval, MarkDecorator):
- newmarks.append(MarkDecorator(Mark(
- argval.markname, argval.args[:-1], argval.kwargs)))
- argval = argval.args[-1]
- assert not isinstance(argval, ParameterSet)
- if legacy_force_tuple:
- argval = argval,
-
- if newmarks:
- warnings.warn(MARK_PARAMETERSET_UNPACKING)
-
- return cls(argval, marks=newmarks, id=None)
-
- @property
- def deprecated_arg_dict(self):
- return dict((mark.name, mark) for mark in self.marks)
-
-
-class MarkerError(Exception):
-
- """Error in use of a pytest marker/attribute."""
-
-
-def param(*values, **kw):
- return ParameterSet.param(*values, **kw)
-
-
-def pytest_addoption(parser):
- group = parser.getgroup("general")
- group._addoption(
- '-k',
- action="store", dest="keyword", default='', metavar="EXPRESSION",
- help="only run tests which match the given substring expression. "
- "An expression is a python evaluatable expression "
- "where all names are substring-matched against test names "
- "and their parent classes. Example: -k 'test_method or test_"
- "other' matches all test functions and classes whose name "
- "contains 'test_method' or 'test_other', while -k 'not test_method' "
- "matches those that don't contain 'test_method' in their names. "
- "Additionally keywords are matched to classes and functions "
- "containing extra names in their 'extra_keyword_matches' set, "
- "as well as functions which have names assigned directly to them."
- )
-
- group._addoption(
- "-m",
- action="store", dest="markexpr", default="", metavar="MARKEXPR",
- help="only run tests matching given mark expression. "
- "example: -m 'mark1 and not mark2'."
- )
-
- group.addoption(
- "--markers", action="store_true",
- help="show markers (builtin, plugin and per-project ones)."
- )
-
- parser.addini("markers", "markers for test functions", 'linelist')
-
-
-def pytest_cmdline_main(config):
- import _pytest.config
- if config.option.markers:
- config._do_configure()
- tw = _pytest.config.create_terminal_writer(config)
- for line in config.getini("markers"):
- name, rest = line.split(":", 1)
- tw.write("@pytest.mark.%s:" % name, bold=True)
- tw.line(rest)
- tw.line()
- config._ensure_unconfigure()
- return 0
-
-
-pytest_cmdline_main.tryfirst = True
-
-
-def pytest_collection_modifyitems(items, config):
- keywordexpr = config.option.keyword.lstrip()
- matchexpr = config.option.markexpr
- if not keywordexpr and not matchexpr:
- return
- # pytest used to allow "-" for negating
- # but today we just allow "-" at the beginning, use "not" instead
-    # we will probably remove "-" altogether soon
- if keywordexpr.startswith("-"):
- keywordexpr = "not " + keywordexpr[1:]
- selectuntil = False
- if keywordexpr[-1:] == ":":
- selectuntil = True
- keywordexpr = keywordexpr[:-1]
-
- remaining = []
- deselected = []
- for colitem in items:
- if keywordexpr and not matchkeyword(colitem, keywordexpr):
- deselected.append(colitem)
- else:
- if selectuntil:
- keywordexpr = None
- if matchexpr:
- if not matchmark(colitem, matchexpr):
- deselected.append(colitem)
- continue
- remaining.append(colitem)
-
- if deselected:
- config.hook.pytest_deselected(items=deselected)
- items[:] = remaining
-
-
-class MarkMapping:
- """Provides a local mapping for markers where item access
- resolves to True if the marker is present. """
-
- def __init__(self, keywords):
- mymarks = set()
- for key, value in keywords.items():
- if isinstance(value, MarkInfo) or isinstance(value, MarkDecorator):
- mymarks.add(key)
- self._mymarks = mymarks
-
- def __getitem__(self, name):
- return name in self._mymarks
-
-
-class KeywordMapping:
- """Provides a local mapping for keywords.
- Given a list of names, map any substring of one of these names to True.
- """
-
- def __init__(self, names):
- self._names = names
-
- def __getitem__(self, subname):
- for name in self._names:
- if subname in name:
- return True
- return False
-
-
-def matchmark(colitem, markexpr):
- """Tries to match on any marker names, attached to the given colitem."""
- return eval(markexpr, {}, MarkMapping(colitem.keywords))
-
-
-def matchkeyword(colitem, keywordexpr):
- """Tries to match given keyword expression to given collector item.
-
- Will match on the name of colitem, including the names of its parents.
- Only matches names of items which are either a :class:`Class` or a
- :class:`Function`.
- Additionally, matches on names in the 'extra_keyword_matches' set of
- any item, as well as names directly assigned to test functions.
- """
- mapped_names = set()
-
- # Add the names of the current item and any parent items
- import pytest
- for item in colitem.listchain():
- if not isinstance(item, pytest.Instance):
- mapped_names.add(item.name)
-
- # Add the names added as extra keywords to current or parent items
- for name in colitem.listextrakeywords():
- mapped_names.add(name)
-
- # Add the names attached to the current function through direct assignment
- if hasattr(colitem, 'function'):
- for name in colitem.function.__dict__:
- mapped_names.add(name)
-
- mapping = KeywordMapping(mapped_names)
- if " " not in keywordexpr:
- # special case to allow for simple "-k pass" and "-k 1.3"
- return mapping[keywordexpr]
- elif keywordexpr.startswith("not ") and " " not in keywordexpr[4:]:
- return not mapping[keywordexpr[4:]]
- return eval(keywordexpr, {}, mapping)
-
-
-def pytest_configure(config):
- config._old_mark_config = MARK_GEN._config
- if config.option.strict:
- MARK_GEN._config = config
-
-
-def pytest_unconfigure(config):
- MARK_GEN._config = getattr(config, '_old_mark_config', None)
-
-
-class MarkGenerator:
- """ Factory for :class:`MarkDecorator` objects - exposed as
- a ``pytest.mark`` singleton instance. Example::
-
- import pytest
- @pytest.mark.slowtest
- def test_function():
- pass
-
- will set a 'slowtest' :class:`MarkInfo` object
- on the ``test_function`` object. """
- _config = None
-
- def __getattr__(self, name):
- if name[0] == "_":
- raise AttributeError("Marker name must NOT start with underscore")
- if self._config is not None:
- self._check(name)
- return MarkDecorator(Mark(name, (), {}))
-
- def _check(self, name):
- try:
- if name in self._markers:
- return
- except AttributeError:
- pass
- self._markers = values = set()
- for line in self._config.getini("markers"):
- marker, _ = line.split(":", 1)
- marker = marker.rstrip()
- x = marker.split("(", 1)[0]
- values.add(x)
- if name not in self._markers:
- raise AttributeError("%r not a registered marker" % (name,))
-
-
-def istestfunc(func):
- return hasattr(func, "__call__") and \
- getattr(func, "__name__", "<lambda>") != "<lambda>"
-
-
-class MarkDecorator:
- """ A decorator for test functions and test classes. When applied
- it will create :class:`MarkInfo` objects which may be
- :ref:`retrieved by hooks as item keywords <excontrolskip>`.
- MarkDecorator instances are often created like this::
-
- mark1 = pytest.mark.NAME # simple MarkDecorator
- mark2 = pytest.mark.NAME(name1=value) # parametrized MarkDecorator
-
- and can then be applied as decorators to test functions::
-
- @mark2
- def test_function():
- pass
-
- When a MarkDecorator instance is called it does the following:
- 1. If called with a single class as its only positional argument and no
- additional keyword arguments, it attaches itself to the class so it
- gets applied automatically to all test cases found in that class.
- 2. If called with a single function as its only positional argument and
- no additional keyword arguments, it attaches a MarkInfo object to the
- function, containing all the arguments already stored internally in
- the MarkDecorator.
- 3. When called in any other case, it performs a 'fake construction' call,
- i.e. it returns a new MarkDecorator instance with the original
- MarkDecorator's content updated with the arguments passed to this
- call.
-
- Note: The rules above prevent MarkDecorator objects from storing only a
- single function or class reference as their positional argument with no
- additional keyword or positional arguments.
-
- """
-
- def __init__(self, mark):
- assert isinstance(mark, Mark), repr(mark)
- self.mark = mark
-
- name = alias('mark.name')
- args = alias('mark.args')
- kwargs = alias('mark.kwargs')
-
- @property
- def markname(self):
- return self.name # for backward-compat (2.4.1 had this attr)
-
- def __eq__(self, other):
- return self.mark == other.mark if isinstance(other, MarkDecorator) else False
-
- def __repr__(self):
- return "<MarkDecorator %r>" % (self.mark,)
-
- def with_args(self, *args, **kwargs):
- """ return a MarkDecorator with extra arguments added
-
-        unlike calling the MarkDecorator, this can be used even if the sole argument is a callable/class
-
- :return: MarkDecorator
- """
-
- mark = Mark(self.name, args, kwargs)
- return self.__class__(self.mark.combined_with(mark))
-
- def __call__(self, *args, **kwargs):
- """ if passed a single callable argument: decorate it with mark info.
- otherwise add *args/**kwargs in-place to mark information. """
- if args and not kwargs:
- func = args[0]
- is_class = inspect.isclass(func)
- if len(args) == 1 and (istestfunc(func) or is_class):
- if is_class:
- store_mark(func, self.mark)
- else:
- store_legacy_markinfo(func, self.mark)
- store_mark(func, self.mark)
- return func
- return self.with_args(*args, **kwargs)
-
-
-def get_unpacked_marks(obj):
- """
-    obtain the unpacked marks that are stored on an object
- """
- mark_list = getattr(obj, 'pytestmark', [])
-
- if not isinstance(mark_list, list):
- mark_list = [mark_list]
- return [
- getattr(mark, 'mark', mark) # unpack MarkDecorator
- for mark in mark_list
- ]
-
-
-def store_mark(obj, mark):
- """store a Mark on a object
- this is used to implement the Mark declarations/decorators correctly
- """
- assert isinstance(mark, Mark), mark
- # always reassign name to avoid updating pytestmark
- # in a reference that was only borrowed
- obj.pytestmark = get_unpacked_marks(obj) + [mark]
-
-
-def store_legacy_markinfo(func, mark):
- """create the legacy MarkInfo objects and put them onto the function
- """
- if not isinstance(mark, Mark):
- raise TypeError("got {mark!r} instead of a Mark".format(mark=mark))
- holder = getattr(func, mark.name, None)
- if holder is None:
- holder = MarkInfo(mark)
- setattr(func, mark.name, holder)
- else:
- holder.add_mark(mark)
-
-
-class Mark(namedtuple('Mark', 'name, args, kwargs')):
-
- def combined_with(self, other):
- assert self.name == other.name
- return Mark(
- self.name, self.args + other.args,
- dict(self.kwargs, **other.kwargs))
-
-
-class MarkInfo(object):
- """ Marking object created by :class:`MarkDecorator` instances. """
-
- def __init__(self, mark):
- assert isinstance(mark, Mark), repr(mark)
- self.combined = mark
- self._marks = [mark]
-
- name = alias('combined.name')
- args = alias('combined.args')
- kwargs = alias('combined.kwargs')
-
- def __repr__(self):
- return "<MarkInfo {0!r}>".format(self.combined)
-
- def add_mark(self, mark):
- """ add a MarkInfo with the given args and kwargs. """
- self._marks.append(mark)
- self.combined = self.combined.combined_with(mark)
-
- def __iter__(self):
- """ yield MarkInfo objects each relating to a marking-call. """
- return imap(MarkInfo, self._marks)
-
-
-MARK_GEN = MarkGenerator()
-
-
-def _marked(func, mark):
- """ Returns True if :func: is already marked with :mark:, False otherwise.
-    This can happen if the marker is applied to a class and the test file
-    is invoked more than once.
- """
- try:
- func_mark = getattr(func, mark.name)
- except AttributeError:
- return False
- return mark.args == func_mark.args and mark.kwargs == func_mark.kwargs
-
-
-def transfer_markers(funcobj, cls, mod):
- """
- this function transfers class level markers and module level markers
- into function level markinfo objects
-
- this is the main reason why marks are so broken
- the resolution will involve phasing out function level MarkInfo objects
-
- """
- for obj in (cls, mod):
- for mark in get_unpacked_marks(obj):
- if not _marked(funcobj, mark):
- store_legacy_markinfo(funcobj, mark)
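
The machinery deleted above backs the ordinary pytest.mark API; a minimal
usage sketch (standard pytest behavior, nothing specific to this vendored
copy):

    import pytest

    @pytest.mark.slow
    @pytest.mark.db(backend="sqlite")
    def test_query():
        assert True

    # selection evaluates the option strings against the mappings above:
    #   pytest -m "slow and not db"    -> eval'd via MarkMapping
    #   pytest -k "query or answer"    -> eval'd via KeywordMapping
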
diff --git a/lib/spack/external/pytest-fallback/_pytest/monkeypatch.py b/lib/spack/external/pytest-fallback/_pytest/monkeypatch.py
deleted file mode 100644
index 39ac770135..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/monkeypatch.py
+++ /dev/null
@@ -1,259 +0,0 @@
-""" monkeypatching and mocking functionality. """
-from __future__ import absolute_import, division, print_function
-
-import os
-import sys
-import re
-
-from py.builtin import _basestring
-from _pytest.fixtures import fixture
-
-RE_IMPORT_ERROR_NAME = re.compile("^No module named (.*)$")
-
-
-@fixture
-def monkeypatch():
- """The returned ``monkeypatch`` fixture provides these
- helper methods to modify objects, dictionaries or os.environ::
-
- monkeypatch.setattr(obj, name, value, raising=True)
- monkeypatch.delattr(obj, name, raising=True)
- monkeypatch.setitem(mapping, name, value)
- monkeypatch.delitem(obj, name, raising=True)
- monkeypatch.setenv(name, value, prepend=False)
-        monkeypatch.delenv(name, raising=True)
- monkeypatch.syspath_prepend(path)
- monkeypatch.chdir(path)
-
- All modifications will be undone after the requesting
- test function or fixture has finished. The ``raising``
- parameter determines if a KeyError or AttributeError
- will be raised if the set/deletion operation has no target.
- """
- mpatch = MonkeyPatch()
- yield mpatch
- mpatch.undo()
-
-
-def resolve(name):
- # simplified from zope.dottedname
- parts = name.split('.')
-
- used = parts.pop(0)
- found = __import__(used)
- for part in parts:
- used += '.' + part
- try:
- found = getattr(found, part)
- except AttributeError:
- pass
- else:
- continue
- # we use explicit un-nesting of the handling block in order
- # to avoid nested exceptions on python 3
- try:
- __import__(used)
- except ImportError as ex:
- # str is used for py2 vs py3
- expected = str(ex).split()[-1]
- if expected == used:
- raise
- else:
- raise ImportError(
- 'import error in %s: %s' % (used, ex)
- )
- found = annotated_getattr(found, part, used)
- return found
-
-
-def annotated_getattr(obj, name, ann):
- try:
- obj = getattr(obj, name)
- except AttributeError:
- raise AttributeError(
- '%r object at %s has no attribute %r' % (
- type(obj).__name__, ann, name
- )
- )
- return obj
-
-
-def derive_importpath(import_path, raising):
- if not isinstance(import_path, _basestring) or "." not in import_path:
- raise TypeError("must be absolute import path string, not %r" %
- (import_path,))
- module, attr = import_path.rsplit('.', 1)
- target = resolve(module)
- if raising:
- annotated_getattr(target, attr, ann=module)
- return attr, target
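
A small sketch of what derive_importpath returns, assuming the helpers above
are in scope:

    attr, target = derive_importpath("os.path.join", raising=True)
    # attr == "join" and target is the imported os.path module, so a
    # subsequent setattr(target, attr, replacement) patches os.path.join
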
-
-
-class Notset:
- def __repr__(self):
- return "<notset>"
-
-
-notset = Notset()
-
-
-class MonkeyPatch:
- """ Object returned by the ``monkeypatch`` fixture keeping a record of setattr/item/env/syspath changes.
- """
-
- def __init__(self):
- self._setattr = []
- self._setitem = []
- self._cwd = None
- self._savesyspath = None
-
- def setattr(self, target, name, value=notset, raising=True):
- """ Set attribute value on target, memorizing the old value.
- By default raise AttributeError if the attribute did not exist.
-
- For convenience you can specify a string as ``target`` which
- will be interpreted as a dotted import path, with the last part
- being the attribute name. Example:
- ``monkeypatch.setattr("os.getcwd", lambda x: "/")``
- would set the ``getcwd`` function of the ``os`` module.
-
- The ``raising`` value determines if the setattr should fail
- if the attribute is not already present (defaults to True
- which means it will raise).
- """
- __tracebackhide__ = True
- import inspect
-
- if value is notset:
- if not isinstance(target, _basestring):
- raise TypeError("use setattr(target, name, value) or "
- "setattr(target, value) with target being a dotted "
- "import string")
- value = name
- name, target = derive_importpath(target, raising)
-
- oldval = getattr(target, name, notset)
- if raising and oldval is notset:
- raise AttributeError("%r has no attribute %r" % (target, name))
-
- # avoid class descriptors like staticmethod/classmethod
- if inspect.isclass(target):
- oldval = target.__dict__.get(name, notset)
- self._setattr.append((target, name, oldval))
- setattr(target, name, value)
-
- def delattr(self, target, name=notset, raising=True):
- """ Delete attribute ``name`` from ``target``, by default raise
-        AttributeError if the attribute did not previously exist.
-
- If no ``name`` is specified and ``target`` is a string
- it will be interpreted as a dotted import path with the
- last part being the attribute name.
-
- If ``raising`` is set to False, no exception will be raised if the
- attribute is missing.
- """
- __tracebackhide__ = True
- if name is notset:
- if not isinstance(target, _basestring):
- raise TypeError("use delattr(target, name) or "
- "delattr(target) with target being a dotted "
- "import string")
- name, target = derive_importpath(target, raising)
-
- if not hasattr(target, name):
- if raising:
- raise AttributeError(name)
- else:
- self._setattr.append((target, name, getattr(target, name, notset)))
- delattr(target, name)
-
- def setitem(self, dic, name, value):
- """ Set dictionary entry ``name`` to value. """
- self._setitem.append((dic, name, dic.get(name, notset)))
- dic[name] = value
-
- def delitem(self, dic, name, raising=True):
- """ Delete ``name`` from dict. Raise KeyError if it doesn't exist.
-
- If ``raising`` is set to False, no exception will be raised if the
- key is missing.
- """
- if name not in dic:
- if raising:
- raise KeyError(name)
- else:
- self._setitem.append((dic, name, dic.get(name, notset)))
- del dic[name]
-
- def setenv(self, name, value, prepend=None):
- """ Set environment variable ``name`` to ``value``. If ``prepend``
- is a character, read the current environment variable value
- and prepend the ``value`` adjoined with the ``prepend`` character."""
- value = str(value)
- if prepend and name in os.environ:
- value = value + prepend + os.environ[name]
- self.setitem(os.environ, name, value)
-
- def delenv(self, name, raising=True):
- """ Delete ``name`` from the environment. Raise KeyError it does not
- exist.
-
- If ``raising`` is set to False, no exception will be raised if the
- environment variable is missing.
- """
- self.delitem(os.environ, name, raising=raising)
-
- def syspath_prepend(self, path):
- """ Prepend ``path`` to ``sys.path`` list of import locations. """
- if self._savesyspath is None:
- self._savesyspath = sys.path[:]
- sys.path.insert(0, str(path))
-
- def chdir(self, path):
- """ Change the current working directory to the specified path.
- Path can be a string or a py.path.local object.
- """
- if self._cwd is None:
- self._cwd = os.getcwd()
- if hasattr(path, "chdir"):
- path.chdir()
- else:
- os.chdir(path)
-
- def undo(self):
- """ Undo previous changes. This call consumes the
- undo stack. Calling it a second time has no effect unless
- you do more monkeypatching after the undo call.
-
- There is generally no need to call `undo()`, since it is
- called automatically during tear-down.
-
- Note that the same `monkeypatch` fixture is used across a
- single test function invocation. If `monkeypatch` is used both by
- the test function itself and one of the test fixtures,
- calling `undo()` will undo all of the changes made in
- both functions.
- """
- for obj, name, value in reversed(self._setattr):
- if value is not notset:
- setattr(obj, name, value)
- else:
- delattr(obj, name)
- self._setattr[:] = []
- for dictionary, name, value in reversed(self._setitem):
- if value is notset:
- try:
- del dictionary[name]
- except KeyError:
- pass # was already deleted, so we have the desired state
- else:
- dictionary[name] = value
- self._setitem[:] = []
- if self._savesyspath is not None:
- sys.path[:] = self._savesyspath
- self._savesyspath = None
-
- if self._cwd is not None:
- os.chdir(self._cwd)
- self._cwd = None
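
A hedged usage sketch of the fixture documented above (standard pytest API;
tmpdir is the stock temporary-directory fixture):

    import os

    def test_fake_home(monkeypatch, tmpdir):
        monkeypatch.setenv("HOME", str(tmpdir))
        monkeypatch.setattr(os.path, "expanduser", lambda p: str(tmpdir))
        assert os.environ["HOME"] == str(tmpdir)
        # undo() runs automatically at teardown and rolls both changes back
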
diff --git a/lib/spack/external/pytest-fallback/_pytest/nodes.py b/lib/spack/external/pytest-fallback/_pytest/nodes.py
deleted file mode 100644
index ad3af2ce67..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/nodes.py
+++ /dev/null
@@ -1,37 +0,0 @@
-SEP = "/"
-
-
-def _splitnode(nodeid):
- """Split a nodeid into constituent 'parts'.
-
- Node IDs are strings, and can be things like:
- ''
- 'testing/code'
- 'testing/code/test_excinfo.py'
- 'testing/code/test_excinfo.py::TestFormattedExcinfo::()'
-
- Return values are lists e.g.
- []
- ['testing', 'code']
- ['testing', 'code', 'test_excinfo.py']
- ['testing', 'code', 'test_excinfo.py', 'TestFormattedExcinfo', '()']
- """
- if nodeid == '':
- # If there is no root node at all, return an empty list so the caller's logic can remain sane
- return []
- parts = nodeid.split(SEP)
- # Replace single last element 'test_foo.py::Bar::()' with multiple elements 'test_foo.py', 'Bar', '()'
- parts[-1:] = parts[-1].split("::")
- return parts
-
-
-def ischildnode(baseid, nodeid):
- """Return True if the nodeid is a child node of the baseid.
-
- E.g. 'foo/bar::Baz::()' is a child of 'foo', 'foo/bar' and 'foo/bar::Baz', but not of 'foo/blorp'
- """
- base_parts = _splitnode(baseid)
- node_parts = _splitnode(nodeid)
- if len(node_parts) < len(base_parts):
- return False
- return node_parts[:len(base_parts)] == base_parts
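
The documented behavior of the two helpers above can be checked directly
(assumes _splitnode and ischildnode are in scope):

    parts = _splitnode("testing/code/test_excinfo.py::TestFormattedExcinfo::()")
    assert parts == ["testing", "code", "test_excinfo.py",
                     "TestFormattedExcinfo", "()"]
    assert ischildnode("foo/bar", "foo/bar::Baz::()")
    assert not ischildnode("foo/blorp", "foo/bar::Baz::()")
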
diff --git a/lib/spack/external/pytest-fallback/_pytest/nose.py b/lib/spack/external/pytest-fallback/_pytest/nose.py
deleted file mode 100644
index d246c5603d..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/nose.py
+++ /dev/null
@@ -1,73 +0,0 @@
-""" run test suites written for nose. """
-from __future__ import absolute_import, division, print_function
-
-import sys
-
-import py
-from _pytest import unittest, runner, python
-from _pytest.config import hookimpl
-
-
-def get_skip_exceptions():
- skip_classes = set()
- for module_name in ('unittest', 'unittest2', 'nose'):
- mod = sys.modules.get(module_name)
- if hasattr(mod, 'SkipTest'):
- skip_classes.add(mod.SkipTest)
- return tuple(skip_classes)
-
-
-def pytest_runtest_makereport(item, call):
- if call.excinfo and call.excinfo.errisinstance(get_skip_exceptions()):
- # let's substitute the excinfo with a pytest.skip one
- call2 = call.__class__(
- lambda: runner.skip(str(call.excinfo.value)), call.when)
- call.excinfo = call2.excinfo
-
-
-@hookimpl(trylast=True)
-def pytest_runtest_setup(item):
- if is_potential_nosetest(item):
- if isinstance(item.parent, python.Generator):
- gen = item.parent
- if not hasattr(gen, '_nosegensetup'):
- call_optional(gen.obj, 'setup')
- if isinstance(gen.parent, python.Instance):
- call_optional(gen.parent.obj, 'setup')
- gen._nosegensetup = True
- if not call_optional(item.obj, 'setup'):
- # call module level setup if there is no object level one
- call_optional(item.parent.obj, 'setup')
- # XXX this implies we only call teardown when setup worked
- item.session._setupstate.addfinalizer((lambda: teardown_nose(item)), item)
-
-
-def teardown_nose(item):
- if is_potential_nosetest(item):
- if not call_optional(item.obj, 'teardown'):
- call_optional(item.parent.obj, 'teardown')
- # if hasattr(item.parent, '_nosegensetup'):
- # #call_optional(item._nosegensetup, 'teardown')
- # del item.parent._nosegensetup
-
-
-def pytest_make_collect_report(collector):
- if isinstance(collector, python.Generator):
- call_optional(collector.obj, 'setup')
-
-
-def is_potential_nosetest(item):
- # extra check needed since we do not do nose style setup/teardown
- # on direct unittest style classes
- return isinstance(item, python.Function) and \
- not isinstance(item, unittest.TestCaseFunction)
-
-
-def call_optional(obj, name):
- method = getattr(obj, name, None)
- isfixture = hasattr(method, "_pytestfixturefunction")
- if method is not None and not isfixture and py.builtin.callable(method):
- # If there's any problems allow the exception to raise rather than
- # silently ignoring them
- method()
- return True
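
For context, the plugin above calls plain setup/teardown functions by name; a
minimal sketch of the nose-style test module it supports:

    def setup():
        print("nose-style setup")       # found and run by call_optional above

    def test_something():
        assert True

    def teardown():
        print("nose-style teardown")
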
diff --git a/lib/spack/external/pytest-fallback/_pytest/outcomes.py b/lib/spack/external/pytest-fallback/_pytest/outcomes.py
deleted file mode 100644
index ff5ef756d9..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/outcomes.py
+++ /dev/null
@@ -1,140 +0,0 @@
-"""
-exception classes and constants handling test outcomes
-as well as functions creating them
-"""
-from __future__ import absolute_import, division, print_function
-import py
-import sys
-
-
-class OutcomeException(BaseException):
- """ OutcomeException and its subclass instances indicate and
- contain info about test and collection outcomes.
- """
- def __init__(self, msg=None, pytrace=True):
- BaseException.__init__(self, msg)
- self.msg = msg
- self.pytrace = pytrace
-
- def __repr__(self):
- if self.msg:
- val = self.msg
- if isinstance(val, bytes):
- val = py._builtin._totext(val, errors='replace')
- return val
- return "<%s instance>" % (self.__class__.__name__,)
- __str__ = __repr__
-
-
-TEST_OUTCOME = (OutcomeException, Exception)
-
-
-class Skipped(OutcomeException):
- # XXX hackish: on 3k we fake to live in the builtins
- # in order to have Skipped exception printing shorter/nicer
- __module__ = 'builtins'
-
- def __init__(self, msg=None, pytrace=True, allow_module_level=False):
- OutcomeException.__init__(self, msg=msg, pytrace=pytrace)
- self.allow_module_level = allow_module_level
-
-
-class Failed(OutcomeException):
- """ raised from an explicit call to pytest.fail() """
- __module__ = 'builtins'
-
-
-class Exit(KeyboardInterrupt):
- """ raised for immediate program exits (no tracebacks/summaries)"""
- def __init__(self, msg="unknown reason"):
- self.msg = msg
- KeyboardInterrupt.__init__(self, msg)
-
-# exposed helper methods
-
-
-def exit(msg):
- """ exit testing process as if KeyboardInterrupt was triggered. """
- __tracebackhide__ = True
- raise Exit(msg)
-
-
-exit.Exception = Exit
-
-
-def skip(msg=""):
- """ skip an executing test with the given message. Note: it's usually
- better to use the pytest.mark.skipif marker to declare a test to be
- skipped under certain conditions like mismatching platforms or
- dependencies. See the pytest_skipping plugin for details.
- """
- __tracebackhide__ = True
- raise Skipped(msg=msg)
-
-
-skip.Exception = Skipped
-
-
-def fail(msg="", pytrace=True):
- """ explicitly fail an currently-executing test with the given Message.
-
- :arg pytrace: if false the msg represents the full failure information
- and no python traceback will be reported.
- """
- __tracebackhide__ = True
- raise Failed(msg=msg, pytrace=pytrace)
-
-
-fail.Exception = Failed
-
-
-class XFailed(fail.Exception):
- """ raised from an explicit call to pytest.xfail() """
-
-
-def xfail(reason=""):
- """ xfail an executing test or setup functions with the given reason."""
- __tracebackhide__ = True
- raise XFailed(reason)
-
-
-xfail.Exception = XFailed
-
-
-def importorskip(modname, minversion=None):
- """ return imported module if it has at least "minversion" as its
-    __version__ attribute. If no minversion is specified, a skip
-    is only triggered if the module cannot be imported.
- """
- import warnings
- __tracebackhide__ = True
-    compile(modname, '', 'eval')  # to catch syntax errors
- should_skip = False
-
- with warnings.catch_warnings():
- # make sure to ignore ImportWarnings that might happen because
- # of existing directories with the same name we're trying to
- # import but without a __init__.py file
- warnings.simplefilter('ignore')
- try:
- __import__(modname)
- except ImportError:
-            # Do not raise a chained exception here (#1485)
- should_skip = True
- if should_skip:
- raise Skipped("could not import %r" % (modname,), allow_module_level=True)
- mod = sys.modules[modname]
- if minversion is None:
- return mod
- verattr = getattr(mod, '__version__', None)
- if minversion is not None:
- try:
- from pkg_resources import parse_version as pv
- except ImportError:
- raise Skipped("we have a required version for %r but can not import "
- "pkg_resources to parse version strings." % (modname,),
- allow_module_level=True)
- if verattr is None or pv(verattr) < pv(minversion):
- raise Skipped("module %r has __version__ %r, required is: %r" % (
- modname, verattr, minversion), allow_module_level=True)
- return mod
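
A hedged usage sketch of the outcome helpers above (the same names exist in
pytest's public API; numpy is only an example dependency):

    import sys

    import pytest

    numpy = pytest.importorskip("numpy", minversion="1.0")  # Skipped if absent

    def test_mean():
        assert numpy.mean([1, 2, 3]) == 2.0

    def test_windows_only():
        if not sys.platform.startswith("win"):
            pytest.skip("runs on Windows only")
        assert True
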
diff --git a/lib/spack/external/pytest-fallback/_pytest/pastebin.py b/lib/spack/external/pytest-fallback/_pytest/pastebin.py
deleted file mode 100644
index 9d689819f0..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/pastebin.py
+++ /dev/null
@@ -1,100 +0,0 @@
-""" submit failure or test session information to a pastebin service. """
-from __future__ import absolute_import, division, print_function
-
-import pytest
-import sys
-import tempfile
-
-
-def pytest_addoption(parser):
- group = parser.getgroup("terminal reporting")
- group._addoption('--pastebin', metavar="mode",
- action='store', dest="pastebin", default=None,
- choices=['failed', 'all'],
- help="send failed|all info to bpaste.net pastebin service.")
-
-
-@pytest.hookimpl(trylast=True)
-def pytest_configure(config):
- import py
- if config.option.pastebin == "all":
- tr = config.pluginmanager.getplugin('terminalreporter')
- # if no terminal reporter plugin is present, nothing we can do here;
- # this can happen when this function executes in a slave node
- # when using pytest-xdist, for example
- if tr is not None:
- # pastebin file will be utf-8 encoded binary file
- config._pastebinfile = tempfile.TemporaryFile('w+b')
- oldwrite = tr._tw.write
-
- def tee_write(s, **kwargs):
- oldwrite(s, **kwargs)
- if py.builtin._istext(s):
- s = s.encode('utf-8')
- config._pastebinfile.write(s)
-
- tr._tw.write = tee_write
-
-
-def pytest_unconfigure(config):
- if hasattr(config, '_pastebinfile'):
- # get terminal contents and delete file
- config._pastebinfile.seek(0)
- sessionlog = config._pastebinfile.read()
- config._pastebinfile.close()
- del config._pastebinfile
- # undo our patching in the terminal reporter
- tr = config.pluginmanager.getplugin('terminalreporter')
- del tr._tw.__dict__['write']
- # write summary
- tr.write_sep("=", "Sending information to Paste Service")
- pastebinurl = create_new_paste(sessionlog)
- tr.write_line("pastebin session-log: %s\n" % pastebinurl)
-
-
-def create_new_paste(contents):
- """
- Creates a new paste using bpaste.net service.
-
- :contents: paste contents as utf-8 encoded bytes
- :returns: url to the pasted contents
- """
- import re
- if sys.version_info < (3, 0):
- from urllib import urlopen, urlencode
- else:
- from urllib.request import urlopen
- from urllib.parse import urlencode
-
- params = {
- 'code': contents,
- 'lexer': 'python3' if sys.version_info[0] == 3 else 'python',
- 'expiry': '1week',
- }
- url = 'https://bpaste.net'
- response = urlopen(url, data=urlencode(params).encode('ascii')).read()
- m = re.search(r'href="/raw/(\w+)"', response.decode('utf-8'))
- if m:
- return '%s/show/%s' % (url, m.group(1))
- else:
- return 'bad response: ' + response
-
-
-def pytest_terminal_summary(terminalreporter):
- import _pytest.config
- if terminalreporter.config.option.pastebin != "failed":
- return
- tr = terminalreporter
- if 'failed' in tr.stats:
- terminalreporter.write_sep("=", "Sending information to Paste Service")
- for rep in terminalreporter.stats.get('failed'):
- try:
- msg = rep.longrepr.reprtraceback.reprentries[-1].reprfileloc
- except AttributeError:
- msg = tr._getfailureheadline(rep)
- tw = _pytest.config.create_terminal_writer(terminalreporter.config, stringio=True)
- rep.toterminal(tw)
- s = tw.stringio.getvalue()
- assert len(s)
- pastebinurl = create_new_paste(s)
- tr.write_line("%s --> %s" % (msg, pastebinurl))
diff --git a/lib/spack/external/pytest-fallback/_pytest/pytester.py b/lib/spack/external/pytest-fallback/_pytest/pytester.py
deleted file mode 100644
index 82aa00e0d2..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/pytester.py
+++ /dev/null
@@ -1,1167 +0,0 @@
-""" (disabled by default) support for testing pytest and pytest plugins. """
-from __future__ import absolute_import, division, print_function
-
-import codecs
-import gc
-import os
-import platform
-import re
-import subprocess
-import sys
-import time
-import traceback
-from fnmatch import fnmatch
-
-from weakref import WeakKeyDictionary
-
-from _pytest.capture import MultiCapture, SysCapture
-from _pytest._code import Source
-import py
-import pytest
-from _pytest.main import Session, EXIT_OK
-from _pytest.assertion.rewrite import AssertionRewritingHook
-
-
-def pytest_addoption(parser):
- # group = parser.getgroup("pytester", "pytester (self-tests) options")
- parser.addoption('--lsof',
- action="store_true", dest="lsof", default=False,
- help=("run FD checks if lsof is available"))
-
- parser.addoption('--runpytest', default="inprocess", dest="runpytest",
- choices=("inprocess", "subprocess", ),
- help=("run pytest sub runs in tests using an 'inprocess' "
- "or 'subprocess' (python -m main) method"))
-
-
-def pytest_configure(config):
- # This might be called multiple times. Only take the first.
- global _pytest_fullpath
- try:
- _pytest_fullpath
- except NameError:
- _pytest_fullpath = os.path.abspath(pytest.__file__.rstrip("oc"))
- _pytest_fullpath = _pytest_fullpath.replace("$py.class", ".py")
-
- if config.getvalue("lsof"):
- checker = LsofFdLeakChecker()
- if checker.matching_platform():
- config.pluginmanager.register(checker)
-
-
-class LsofFdLeakChecker(object):
- def get_open_files(self):
- out = self._exec_lsof()
- open_files = self._parse_lsof_output(out)
- return open_files
-
- def _exec_lsof(self):
- pid = os.getpid()
- return py.process.cmdexec("lsof -Ffn0 -p %d" % pid)
-
- def _parse_lsof_output(self, out):
- def isopen(line):
- return line.startswith('f') and ("deleted" not in line and
- 'mem' not in line and "txt" not in line and 'cwd' not in line)
-
- open_files = []
-
- for line in out.split("\n"):
- if isopen(line):
- fields = line.split('\0')
- fd = fields[0][1:]
- filename = fields[1][1:]
- if filename.startswith('/'):
- open_files.append((fd, filename))
-
- return open_files
-
- def matching_platform(self):
- try:
- py.process.cmdexec("lsof -v")
- except (py.process.cmdexec.Error, UnicodeDecodeError):
- # cmdexec may raise UnicodeDecodeError on Windows systems
- # with locale other than english:
- # https://bitbucket.org/pytest-dev/py/issues/66
- return False
- else:
- return True
-
- @pytest.hookimpl(hookwrapper=True, tryfirst=True)
- def pytest_runtest_protocol(self, item):
- lines1 = self.get_open_files()
- yield
- if hasattr(sys, "pypy_version_info"):
- gc.collect()
- lines2 = self.get_open_files()
-
- new_fds = set([t[0] for t in lines2]) - set([t[0] for t in lines1])
- leaked_files = [t for t in lines2 if t[0] in new_fds]
- if leaked_files:
- error = []
- error.append("***** %s FD leakage detected" % len(leaked_files))
- error.extend([str(f) for f in leaked_files])
- error.append("*** Before:")
- error.extend([str(f) for f in lines1])
- error.append("*** After:")
- error.extend([str(f) for f in lines2])
- error.append(error[0])
- error.append("*** function %s:%s: %s " % item.location)
- error.append("See issue #2366")
- item.warn('', "\n".join(error))
-
-
-# XXX copied from execnet's conftest.py - needs to be merged
-winpymap = {
- 'python2.7': r'C:\Python27\python.exe',
- 'python2.6': r'C:\Python26\python.exe',
- 'python3.1': r'C:\Python31\python.exe',
- 'python3.2': r'C:\Python32\python.exe',
- 'python3.3': r'C:\Python33\python.exe',
- 'python3.4': r'C:\Python34\python.exe',
- 'python3.5': r'C:\Python35\python.exe',
-}
-
-
-def getexecutable(name, cache={}):
- try:
- return cache[name]
- except KeyError:
- executable = py.path.local.sysfind(name)
- if executable:
- import subprocess
- popen = subprocess.Popen([str(executable), "--version"],
- universal_newlines=True, stderr=subprocess.PIPE)
- out, err = popen.communicate()
- if name == "jython":
- if not err or "2.5" not in err:
- executable = None
- if "2.5.2" in err:
- executable = None # http://bugs.jython.org/issue1790
- elif popen.returncode != 0:
- # Handle pyenv's 127.
- executable = None
- cache[name] = executable
- return executable
-
-
-@pytest.fixture(params=['python2.6', 'python2.7', 'python3.3', "python3.4",
- 'pypy', 'pypy3'])
-def anypython(request):
- name = request.param
- executable = getexecutable(name)
- if executable is None:
- if sys.platform == "win32":
- executable = winpymap.get(name, None)
- if executable:
- executable = py.path.local(executable)
- if executable.check():
- return executable
- pytest.skip("no suitable %s found" % (name,))
- return executable
-
-# used at least by pytest-xdist plugin
-
-
-@pytest.fixture
-def _pytest(request):
- """ Return a helper which offers a gethookrecorder(hook)
- method which returns a HookRecorder instance which helps
- to make assertions about called hooks.
- """
- return PytestArg(request)
-
-
-class PytestArg:
- def __init__(self, request):
- self.request = request
-
- def gethookrecorder(self, hook):
- hookrecorder = HookRecorder(hook._pm)
- self.request.addfinalizer(hookrecorder.finish_recording)
- return hookrecorder
-
-
-def get_public_names(values):
- """Only return names from iterator values without a leading underscore."""
- return [x for x in values if x[0] != "_"]
-
-
-class ParsedCall:
- def __init__(self, name, kwargs):
- self.__dict__.update(kwargs)
- self._name = name
-
- def __repr__(self):
- d = self.__dict__.copy()
- del d['_name']
- return "<ParsedCall %r(**%r)>" % (self._name, d)
-
-
-class HookRecorder:
- """Record all hooks called in a plugin manager.
-
- This wraps all the hook calls in the plugin manager, recording
- each call before propagating the normal calls.
-
- """
-
- def __init__(self, pluginmanager):
- self._pluginmanager = pluginmanager
- self.calls = []
-
- def before(hook_name, hook_impls, kwargs):
- self.calls.append(ParsedCall(hook_name, kwargs))
-
- def after(outcome, hook_name, hook_impls, kwargs):
- pass
-
- self._undo_wrapping = pluginmanager.add_hookcall_monitoring(before, after)
-
- def finish_recording(self):
- self._undo_wrapping()
-
- def getcalls(self, names):
- if isinstance(names, str):
- names = names.split()
- return [call for call in self.calls if call._name in names]
-
- def assert_contains(self, entries):
- __tracebackhide__ = True
- i = 0
- entries = list(entries)
- backlocals = sys._getframe(1).f_locals
- while entries:
- name, check = entries.pop(0)
- for ind, call in enumerate(self.calls[i:]):
- if call._name == name:
- print("NAMEMATCH", name, call)
- if eval(check, backlocals, call.__dict__):
- print("CHECKERMATCH", repr(check), "->", call)
- else:
- print("NOCHECKERMATCH", repr(check), "-", call)
- continue
- i += ind + 1
- break
- print("NONAMEMATCH", name, "with", call)
- else:
- pytest.fail("could not find %r check %r" % (name, check))
-
- def popcall(self, name):
- __tracebackhide__ = True
- for i, call in enumerate(self.calls):
- if call._name == name:
- del self.calls[i]
- return call
- lines = ["could not find call %r, in:" % (name,)]
- lines.extend([" %s" % str(x) for x in self.calls])
- pytest.fail("\n".join(lines))
-
- def getcall(self, name):
- values = self.getcalls(name)
- assert len(values) == 1, (name, values)
- return values[0]
-
- # functionality for test reports
-
- def getreports(self,
- names="pytest_runtest_logreport pytest_collectreport"):
- return [x.report for x in self.getcalls(names)]
-
- def matchreport(self, inamepart="",
- names="pytest_runtest_logreport pytest_collectreport", when=None):
- """ return a testreport whose dotted import path matches """
- values = []
- for rep in self.getreports(names=names):
- try:
- if not when and rep.when != "call" and rep.passed:
- # setup/teardown passing reports - let's ignore those
- continue
- except AttributeError:
- pass
- if when and getattr(rep, 'when', None) != when:
- continue
- if not inamepart or inamepart in rep.nodeid.split("::"):
- values.append(rep)
- if not values:
- raise ValueError("could not find test report matching %r: "
- "no test reports at all!" % (inamepart,))
- if len(values) > 1:
- raise ValueError(
- "found 2 or more testreports matching %r: %s" % (inamepart, values))
- return values[0]
-
- def getfailures(self,
- names='pytest_runtest_logreport pytest_collectreport'):
- return [rep for rep in self.getreports(names) if rep.failed]
-
- def getfailedcollections(self):
- return self.getfailures('pytest_collectreport')
-
- def listoutcomes(self):
- passed = []
- skipped = []
- failed = []
- for rep in self.getreports(
- "pytest_collectreport pytest_runtest_logreport"):
- if rep.passed:
- if getattr(rep, "when", None) == "call":
- passed.append(rep)
- elif rep.skipped:
- skipped.append(rep)
- elif rep.failed:
- failed.append(rep)
- return passed, skipped, failed
-
- def countoutcomes(self):
- return [len(x) for x in self.listoutcomes()]
-
- def assertoutcome(self, passed=0, skipped=0, failed=0):
- realpassed, realskipped, realfailed = self.listoutcomes()
- assert passed == len(realpassed)
- assert skipped == len(realskipped)
- assert failed == len(realfailed)
-
- def clear(self):
- self.calls[:] = []
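-
-
-# Hedged usage sketch for HookRecorder (hypothetical; ``pluginmanager`` is an
-# already-configured pytest plugin manager such as ``config.pluginmanager``):
-#
-#     rec = HookRecorder(pluginmanager)
-#     ...run some collection or tests...
-#     passed, skipped, failed = rec.listoutcomes()
-#     rec.finish_recording()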
-
-
-@pytest.fixture
-def linecomp(request):
- return LineComp()
-
-
-@pytest.fixture(name='LineMatcher')
-def LineMatcher_fixture(request):
- return LineMatcher
-
-
-@pytest.fixture
-def testdir(request, tmpdir_factory):
- return Testdir(request, tmpdir_factory)
-
-
-rex_outcome = re.compile(r"(\d+) ([\w-]+)")
-
-
-class RunResult:
- """The result of running a command.
-
- Attributes:
-
- :ret: The return value.
- :outlines: List of lines captured from stdout.
-       :errlines: List of lines captured from stderr.
-       :stdout: :py:class:`LineMatcher` of stdout; use ``stdout.str()`` to
-          reconstruct stdout, or the commonly used
-          ``stdout.fnmatch_lines()`` method.
-       :stderr: :py:class:`LineMatcher` of stderr.
- :duration: Duration in seconds.
-
- """
-
- def __init__(self, ret, outlines, errlines, duration):
- self.ret = ret
- self.outlines = outlines
- self.errlines = errlines
- self.stdout = LineMatcher(outlines)
- self.stderr = LineMatcher(errlines)
- self.duration = duration
-
- def parseoutcomes(self):
- """ Return a dictionary of outcomestring->num from parsing
- the terminal output that the test process produced."""
- for line in reversed(self.outlines):
- if 'seconds' in line:
- outcomes = rex_outcome.findall(line)
- if outcomes:
- d = {}
- for num, cat in outcomes:
- d[cat] = int(num)
- return d
- raise ValueError("Pytest terminal report not found")
-
- def assert_outcomes(self, passed=0, skipped=0, failed=0, error=0):
- """ assert that the specified outcomes appear with the respective
- numbers (0 means it didn't occur) in the text output from a test run."""
- d = self.parseoutcomes()
- obtained = {
- 'passed': d.get('passed', 0),
- 'skipped': d.get('skipped', 0),
- 'failed': d.get('failed', 0),
- 'error': d.get('error', 0),
- }
- assert obtained == dict(passed=passed, skipped=skipped, failed=failed, error=error)
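-
-
-# Hedged illustration of what ``parseoutcomes`` above extracts (hypothetical
-# terminal line).  Note the quirk: the "0.12 seconds" tail also matches
-# ``rex_outcome``, so a ("12", "seconds") pair shows up as well.
-def _example_parseoutcomes():
-    line = "==== 3 passed, 1 skipped in 0.12 seconds ===="
-    assert rex_outcome.findall(line) == [
-        ("3", "passed"), ("1", "skipped"), ("12", "seconds")]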
-
-
-class Testdir:
- """Temporary test directory with tools to test/run pytest itself.
-
- This is based on the ``tmpdir`` fixture but provides a number of
-    methods that aid in testing pytest itself.  Unless
-    :py:meth:`chdir` is used, all methods will use :py:attr:`tmpdir` as
-    the current working directory.
-
- Attributes:
-
- :tmpdir: The :py:class:`py.path.local` instance of the temporary
- directory.
-
- :plugins: A list of plugins to use with :py:meth:`parseconfig` and
- :py:meth:`runpytest`. Initially this is an empty list but
- plugins can be added to the list. The type of items to add to
-       the list depends on the method that uses them, so refer to them
- for details.
-
- """
-
- def __init__(self, request, tmpdir_factory):
- self.request = request
- self._mod_collections = WeakKeyDictionary()
- # XXX remove duplication with tmpdir plugin
- basetmp = tmpdir_factory.ensuretemp("testdir")
- name = request.function.__name__
- for i in range(100):
- try:
- tmpdir = basetmp.mkdir(name + str(i))
- except py.error.EEXIST:
- continue
- break
- self.tmpdir = tmpdir
- self.plugins = []
- self._savesyspath = (list(sys.path), list(sys.meta_path))
- self._savemodulekeys = set(sys.modules)
- self.chdir() # always chdir
- self.request.addfinalizer(self.finalize)
- method = self.request.config.getoption("--runpytest")
- if method == "inprocess":
- self._runpytest_method = self.runpytest_inprocess
- elif method == "subprocess":
- self._runpytest_method = self.runpytest_subprocess
-
- def __repr__(self):
- return "<Testdir %r>" % (self.tmpdir,)
-
- def finalize(self):
- """Clean up global state artifacts.
-
-        Some methods modify the global interpreter state, and this
-        tries to clean that up.  It does not remove the temporary
-        directory, however, so it can be inspected after the test run
- has finished.
-
- """
- sys.path[:], sys.meta_path[:] = self._savesyspath
- if hasattr(self, '_olddir'):
- self._olddir.chdir()
- self.delete_loaded_modules()
-
- def delete_loaded_modules(self):
- """Delete modules that have been loaded during a test.
-
- This allows the interpreter to catch module changes in case
- the module is re-imported.
- """
- for name in set(sys.modules).difference(self._savemodulekeys):
-            # some zope modules used by twisted-related tests keep internal
- # state and can't be deleted; we had some trouble in the past
- # with zope.interface for example
- if not name.startswith("zope"):
- del sys.modules[name]
-
- def make_hook_recorder(self, pluginmanager):
- """Create a new :py:class:`HookRecorder` for a PluginManager."""
- assert not hasattr(pluginmanager, "reprec")
- pluginmanager.reprec = reprec = HookRecorder(pluginmanager)
- self.request.addfinalizer(reprec.finish_recording)
- return reprec
-
- def chdir(self):
- """Cd into the temporary directory.
-
- This is done automatically upon instantiation.
-
- """
- old = self.tmpdir.chdir()
- if not hasattr(self, '_olddir'):
- self._olddir = old
-
- def _makefile(self, ext, args, kwargs, encoding="utf-8"):
- items = list(kwargs.items())
- if args:
- source = py.builtin._totext("\n").join(
- map(py.builtin._totext, args)) + py.builtin._totext("\n")
- basename = self.request.function.__name__
- items.insert(0, (basename, source))
- ret = None
- for name, value in items:
- p = self.tmpdir.join(name).new(ext=ext)
- p.dirpath().ensure_dir()
- source = Source(value)
-
- def my_totext(s, encoding="utf-8"):
- if py.builtin._isbytes(s):
- s = py.builtin._totext(s, encoding=encoding)
- return s
-
- source_unicode = "\n".join([my_totext(line) for line in source.lines])
- source = py.builtin._totext(source_unicode)
- content = source.strip().encode(encoding) # + "\n"
- # content = content.rstrip() + "\n"
- p.write(content, "wb")
- if ret is None:
- ret = p
- return ret
-
- def makefile(self, ext, *args, **kwargs):
- """Create a new file in the testdir.
-
- ext: The extension the file should use, including the dot.
- E.g. ".py".
-
- args: All args will be treated as strings and joined using
- newlines. The result will be written as contents to the
- file. The name of the file will be based on the test
- function requesting this fixture.
- E.g. "testdir.makefile('.txt', 'line1', 'line2')"
-
- kwargs: Each keyword is the name of a file, while the value of
- it will be written as contents of the file.
- E.g. "testdir.makefile('.ini', pytest='[pytest]\naddopts=-rs\n')"
-
- """
- return self._makefile(ext, args, kwargs)
-
- def makeconftest(self, source):
- """Write a contest.py file with 'source' as contents."""
- return self.makepyfile(conftest=source)
-
- def makeini(self, source):
- """Write a tox.ini file with 'source' as contents."""
- return self.makefile('.ini', tox=source)
-
- def getinicfg(self, source):
- """Return the pytest section from the tox.ini config file."""
- p = self.makeini(source)
- return py.iniconfig.IniConfig(p)['pytest']
-
- def makepyfile(self, *args, **kwargs):
- """Shortcut for .makefile() with a .py extension."""
- return self._makefile('.py', args, kwargs)
-
- def maketxtfile(self, *args, **kwargs):
- """Shortcut for .makefile() with a .txt extension."""
- return self._makefile('.txt', args, kwargs)
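-
-    # Hedged usage sketch for the file helpers above (hypothetical test body;
-    # ``testdir`` is the fixture defined in this module):
-    #
-    #     p = testdir.makepyfile("def test_ok(): assert True")
-    #     testdir.makeini("[pytest]\naddopts = -rs")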
-
- def syspathinsert(self, path=None):
- """Prepend a directory to sys.path, defaults to :py:attr:`tmpdir`.
-
- This is undone automatically after the test.
- """
- if path is None:
- path = self.tmpdir
- sys.path.insert(0, str(path))
- # a call to syspathinsert() usually means that the caller
- # wants to import some dynamically created files.
- # with python3 we thus invalidate import caches.
- self._possibly_invalidate_import_caches()
-
- def _possibly_invalidate_import_caches(self):
- # invalidate caches if we can (py33 and above)
- try:
- import importlib
- except ImportError:
- pass
- else:
- if hasattr(importlib, "invalidate_caches"):
- importlib.invalidate_caches()
-
- def mkdir(self, name):
- """Create a new (sub)directory."""
- return self.tmpdir.mkdir(name)
-
- def mkpydir(self, name):
- """Create a new python package.
-
- This creates a (sub)directory with an empty ``__init__.py``
-        file so that it is recognised as a Python package.
-
- """
- p = self.mkdir(name)
- p.ensure("__init__.py")
- return p
-
- Session = Session
-
- def getnode(self, config, arg):
- """Return the collection node of a file.
-
- :param config: :py:class:`_pytest.config.Config` instance, see
- :py:meth:`parseconfig` and :py:meth:`parseconfigure` to
- create the configuration.
-
- :param arg: A :py:class:`py.path.local` instance of the file.
-
- """
- session = Session(config)
- assert '::' not in str(arg)
- p = py.path.local(arg)
- config.hook.pytest_sessionstart(session=session)
- res = session.perform_collect([str(p)], genitems=False)[0]
- config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK)
- return res
-
- def getpathnode(self, path):
- """Return the collection node of a file.
-
- This is like :py:meth:`getnode` but uses
- :py:meth:`parseconfigure` to create the (configured) pytest
- Config instance.
-
- :param path: A :py:class:`py.path.local` instance of the file.
-
- """
- config = self.parseconfigure(path)
- session = Session(config)
- x = session.fspath.bestrelpath(path)
- config.hook.pytest_sessionstart(session=session)
- res = session.perform_collect([x], genitems=False)[0]
- config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK)
- return res
-
- def genitems(self, colitems):
- """Generate all test items from a collection node.
-
- This recurses into the collection node and returns a list of
- all the test items contained within.
-
- """
- session = colitems[0].session
- result = []
- for colitem in colitems:
- result.extend(session.genitems(colitem))
- return result
-
- def runitem(self, source):
- """Run the "test_func" Item.
-
- The calling test instance (the class which contains the test
- method) must provide a ``.getrunner()`` method which should
- return a runner which can run the test protocol for a single
- item, like e.g. :py:func:`_pytest.runner.runtestprotocol`.
-
- """
- # used from runner functional tests
- item = self.getitem(source)
- # the test class where we are called from wants to provide the runner
- testclassinstance = self.request.instance
- runner = testclassinstance.getrunner()
- return runner(item)
-
- def inline_runsource(self, source, *cmdlineargs):
- """Run a test module in process using ``pytest.main()``.
-
- This run writes "source" into a temporary file and runs
- ``pytest.main()`` on it, returning a :py:class:`HookRecorder`
- instance for the result.
-
- :param source: The source code of the test module.
-
- :param cmdlineargs: Any extra command line arguments to use.
-
- :return: :py:class:`HookRecorder` instance of the result.
-
- """
- p = self.makepyfile(source)
- values = list(cmdlineargs) + [p]
- return self.inline_run(*values)
-
- def inline_genitems(self, *args):
- """Run ``pytest.main(['--collectonly'])`` in-process.
-
- Returns a tuple of the collected items and a
- :py:class:`HookRecorder` instance.
-
- This runs the :py:func:`pytest.main` function to run all of
- pytest inside the test process itself like
- :py:meth:`inline_run`. However the return value is a tuple of
- the collection items and a :py:class:`HookRecorder` instance.
-
- """
- rec = self.inline_run("--collect-only", *args)
- items = [x.item for x in rec.getcalls("pytest_itemcollected")]
- return items, rec
-
- def inline_run(self, *args, **kwargs):
- """Run ``pytest.main()`` in-process, returning a HookRecorder.
-
- This runs the :py:func:`pytest.main` function to run all of
- pytest inside the test process itself. This means it can
- return a :py:class:`HookRecorder` instance which gives more
-        detailed results from the run than can be obtained by matching
- stdout/stderr from :py:meth:`runpytest`.
-
- :param args: Any command line arguments to pass to
- :py:func:`pytest.main`.
-
- :param plugin: (keyword-only) Extra plugin instances the
- ``pytest.main()`` instance should use.
-
- :return: A :py:class:`HookRecorder` instance.
- """
- # When running py.test inline any plugins active in the main
-        # test process are already imported, so this disables the
-        # warning that says they can no longer be re-written, which is
-        # fine because they already have been.
- orig_warn = AssertionRewritingHook._warn_already_imported
-
- def revert():
- AssertionRewritingHook._warn_already_imported = orig_warn
-
- self.request.addfinalizer(revert)
- AssertionRewritingHook._warn_already_imported = lambda *a: None
-
- rec = []
-
- class Collect:
- def pytest_configure(x, config):
- rec.append(self.make_hook_recorder(config.pluginmanager))
-
- plugins = kwargs.get("plugins") or []
- plugins.append(Collect())
- ret = pytest.main(list(args), plugins=plugins)
- self.delete_loaded_modules()
- if len(rec) == 1:
- reprec = rec.pop()
- else:
- class reprec:
- pass
- reprec.ret = ret
-
- # typically we reraise keyboard interrupts from the child run
- # because it's our user requesting interruption of the testing
- if ret == 2 and not kwargs.get("no_reraise_ctrlc"):
- calls = reprec.getcalls("pytest_keyboard_interrupt")
- if calls and calls[-1].excinfo.type == KeyboardInterrupt:
- raise KeyboardInterrupt()
- return reprec
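-
-    # Hedged sketch of inline_run from a test (hypothetical body; ``testdir``
-    # is the fixture defined in this module):
-    #
-    #     p = testdir.makepyfile("def test_a(): pass")
-    #     reprec = testdir.inline_run("-q", p)
-    #     reprec.assertoutcome(passed=1)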
-
- def runpytest_inprocess(self, *args, **kwargs):
- """ Return result of running pytest in-process, providing a similar
- interface to what self.runpytest() provides. """
- if kwargs.get("syspathinsert"):
- self.syspathinsert()
- now = time.time()
- capture = MultiCapture(Capture=SysCapture)
- capture.start_capturing()
- try:
- try:
- reprec = self.inline_run(*args, **kwargs)
- except SystemExit as e:
-
- class reprec:
- ret = e.args[0]
-
- except Exception:
- traceback.print_exc()
-
- class reprec:
- ret = 3
- finally:
- out, err = capture.readouterr()
- capture.stop_capturing()
- sys.stdout.write(out)
- sys.stderr.write(err)
-
- res = RunResult(reprec.ret,
- out.split("\n"), err.split("\n"),
- time.time() - now)
- res.reprec = reprec
- return res
-
- def runpytest(self, *args, **kwargs):
- """ Run pytest inline or in a subprocess, depending on the command line
- option "--runpytest" and return a :py:class:`RunResult`.
-
- """
- args = self._ensure_basetemp(args)
- return self._runpytest_method(*args, **kwargs)
-
- def _ensure_basetemp(self, args):
- args = [str(x) for x in args]
- for x in args:
- if str(x).startswith('--basetemp'):
- # print("basedtemp exists: %s" %(args,))
- break
- else:
- args.append("--basetemp=%s" % self.tmpdir.dirpath('basetemp'))
- # print("added basetemp: %s" %(args,))
- return args
-
- def parseconfig(self, *args):
- """Return a new pytest Config instance from given commandline args.
-
- This invokes the pytest bootstrapping code in _pytest.config
- to create a new :py:class:`_pytest.core.PluginManager` and
- call the pytest_cmdline_parse hook to create new
- :py:class:`_pytest.config.Config` instance.
-
- If :py:attr:`plugins` has been populated they should be plugin
- modules which will be registered with the PluginManager.
-
- """
- args = self._ensure_basetemp(args)
-
- import _pytest.config
- config = _pytest.config._prepareconfig(args, self.plugins)
- # we don't know what the test will do with this half-setup config
- # object and thus we make sure it gets unconfigured properly in any
- # case (otherwise capturing could still be active, for example)
- self.request.addfinalizer(config._ensure_unconfigure)
- return config
-
- def parseconfigure(self, *args):
- """Return a new pytest configured Config instance.
-
- This returns a new :py:class:`_pytest.config.Config` instance
- like :py:meth:`parseconfig`, but also calls the
- pytest_configure hook.
-
- """
- config = self.parseconfig(*args)
- config._do_configure()
- self.request.addfinalizer(config._ensure_unconfigure)
- return config
-
- def getitem(self, source, funcname="test_func"):
- """Return the test item for a test function.
-
- This writes the source to a python file and runs pytest's
- collection on the resulting module, returning the test item
- for the requested function name.
-
- :param source: The module source.
-
- :param funcname: The name of the test function for which the
- Item must be returned.
-
- """
- items = self.getitems(source)
- for item in items:
- if item.name == funcname:
- return item
- assert 0, "%r item not found in module:\n%s\nitems: %s" % (
- funcname, source, items)
-
- def getitems(self, source):
- """Return all test items collected from the module.
-
- This writes the source to a python file and runs pytest's
- collection on the resulting module, returning all test items
- contained within.
-
- """
- modcol = self.getmodulecol(source)
- return self.genitems([modcol])
-
- def getmodulecol(self, source, configargs=(), withinit=False):
- """Return the module collection node for ``source``.
-
- This writes ``source`` to a file using :py:meth:`makepyfile`
- and then runs the pytest collection on it, returning the
- collection node for the test module.
-
- :param source: The source code of the module to collect.
-
- :param configargs: Any extra arguments to pass to
- :py:meth:`parseconfigure`.
-
- :param withinit: Whether to also write a ``__init__.py`` file
- to the temporary directory to ensure it is a package.
-
- """
- kw = {self.request.function.__name__: Source(source).strip()}
- path = self.makepyfile(**kw)
- if withinit:
- self.makepyfile(__init__="#")
- self.config = config = self.parseconfigure(path, *configargs)
- node = self.getnode(config, path)
-
- return node
-
- def collect_by_name(self, modcol, name):
- """Return the collection node for name from the module collection.
-
- This will search a module collection node for a collection
- node matching the given name.
-
- :param modcol: A module collection node, see
- :py:meth:`getmodulecol`.
-
- :param name: The name of the node to return.
-
- """
- if modcol not in self._mod_collections:
- self._mod_collections[modcol] = list(modcol.collect())
- for colitem in self._mod_collections[modcol]:
- if colitem.name == name:
- return colitem
-
- def popen(self, cmdargs, stdout, stderr, **kw):
- """Invoke subprocess.Popen.
-
-        This calls subprocess.Popen, making sure the current working
-        directory is included in PYTHONPATH.
-
- You probably want to use :py:meth:`run` instead.
-
- """
- env = os.environ.copy()
- env['PYTHONPATH'] = os.pathsep.join(filter(None, [
- str(os.getcwd()), env.get('PYTHONPATH', '')]))
- kw['env'] = env
-
- popen = subprocess.Popen(cmdargs, stdin=subprocess.PIPE, stdout=stdout, stderr=stderr, **kw)
- popen.stdin.close()
-
- return popen
-
- def run(self, *cmdargs):
- """Run a command with arguments.
-
- Run a process using subprocess.Popen saving the stdout and
- stderr.
-
- Returns a :py:class:`RunResult`.
-
- """
- return self._run(*cmdargs)
-
- def _run(self, *cmdargs):
- cmdargs = [str(x) for x in cmdargs]
- p1 = self.tmpdir.join("stdout")
- p2 = self.tmpdir.join("stderr")
- print("running:", ' '.join(cmdargs))
- print(" in:", str(py.path.local()))
- f1 = codecs.open(str(p1), "w", encoding="utf8")
- f2 = codecs.open(str(p2), "w", encoding="utf8")
- try:
- now = time.time()
- popen = self.popen(cmdargs, stdout=f1, stderr=f2,
- close_fds=(sys.platform != "win32"))
- ret = popen.wait()
- finally:
- f1.close()
- f2.close()
- f1 = codecs.open(str(p1), "r", encoding="utf8")
- f2 = codecs.open(str(p2), "r", encoding="utf8")
- try:
- out = f1.read().splitlines()
- err = f2.read().splitlines()
- finally:
- f1.close()
- f2.close()
- self._dump_lines(out, sys.stdout)
- self._dump_lines(err, sys.stderr)
- return RunResult(ret, out, err, time.time() - now)
-
- def _dump_lines(self, lines, fp):
- try:
- for line in lines:
- print(line, file=fp)
- except UnicodeEncodeError:
- print("couldn't print to %s because of encoding" % (fp,))
-
- def _getpytestargs(self):
- # we cannot use "(sys.executable,script)"
- # because on windows the script is e.g. a pytest.exe
- return (sys.executable, _pytest_fullpath,) # noqa
-
- def runpython(self, script):
- """Run a python script using sys.executable as interpreter.
-
- Returns a :py:class:`RunResult`.
- """
- return self.run(sys.executable, script)
-
- def runpython_c(self, command):
- """Run python -c "command", return a :py:class:`RunResult`."""
- return self.run(sys.executable, "-c", command)
-
- def runpytest_subprocess(self, *args, **kwargs):
- """Run pytest as a subprocess with given arguments.
-
-        Any plugins added to the :py:attr:`plugins` list will be added
-        using the ``-p`` command line option.  Additionally
-        ``--basetemp`` is used to put any temporary files and directories
-        in a numbered directory prefixed with "runpytest-" so they do
-        not conflict with the normal numbered pytest location for
-        temporary files and directories.
-
- Returns a :py:class:`RunResult`.
-
- """
- p = py.path.local.make_numbered_dir(prefix="runpytest-",
- keep=None, rootdir=self.tmpdir)
- args = ('--basetemp=%s' % p, ) + args
- # for x in args:
- # if '--confcutdir' in str(x):
- # break
- # else:
- # pass
- # args = ('--confcutdir=.',) + args
- plugins = [x for x in self.plugins if isinstance(x, str)]
- if plugins:
- args = ('-p', plugins[0]) + args
- args = self._getpytestargs() + args
- return self.run(*args)
-
- def spawn_pytest(self, string, expect_timeout=10.0):
- """Run pytest using pexpect.
-
- This makes sure to use the right pytest and sets up the
- temporary directory locations.
-
- The pexpect child is returned.
-
- """
- basetemp = self.tmpdir.mkdir("temp-pexpect")
- invoke = " ".join(map(str, self._getpytestargs()))
- cmd = "%s --basetemp=%s %s" % (invoke, basetemp, string)
- return self.spawn(cmd, expect_timeout=expect_timeout)
-
- def spawn(self, cmd, expect_timeout=10.0):
- """Run a command using pexpect.
-
- The pexpect child is returned.
- """
- pexpect = pytest.importorskip("pexpect", "3.0")
- if hasattr(sys, 'pypy_version_info') and '64' in platform.machine():
- pytest.skip("pypy-64 bit not supported")
- if sys.platform.startswith("freebsd"):
- pytest.xfail("pexpect does not work reliably on freebsd")
- logfile = self.tmpdir.join("spawn.out").open("wb")
- child = pexpect.spawn(cmd, logfile=logfile)
- self.request.addfinalizer(logfile.close)
- child.timeout = expect_timeout
- return child
-
-
-def getdecoded(out):
- try:
- return out.decode("utf-8")
- except UnicodeDecodeError:
- return "INTERNAL not-utf8-decodeable, truncated string:\n%s" % (
- py.io.saferepr(out),)
-
-
-class LineComp:
- def __init__(self):
- self.stringio = py.io.TextIO()
-
- def assert_contains_lines(self, lines2):
- """ assert that lines2 are contained (linearly) in lines1.
- return a list of extralines found.
- """
- __tracebackhide__ = True
- val = self.stringio.getvalue()
- self.stringio.truncate(0)
- self.stringio.seek(0)
- lines1 = val.split("\n")
- return LineMatcher(lines1).fnmatch_lines(lines2)
-
-
-class LineMatcher:
- """Flexible matching of text.
-
- This is a convenience class to test large texts like the output of
- commands.
-
- The constructor takes a list of lines without their trailing
- newlines, i.e. ``text.splitlines()``.
-
- """
-
- def __init__(self, lines):
- self.lines = lines
- self._log_output = []
-
- def str(self):
- """Return the entire original text."""
- return "\n".join(self.lines)
-
- def _getlines(self, lines2):
- if isinstance(lines2, str):
- lines2 = Source(lines2)
- if isinstance(lines2, Source):
- lines2 = lines2.strip().lines
- return lines2
-
- def fnmatch_lines_random(self, lines2):
- """Check lines exist in the output.
-
- The argument is a list of lines which have to occur in the
-        output, in any order. Each line can contain glob wildcards.
-
- """
- lines2 = self._getlines(lines2)
- for line in lines2:
- for x in self.lines:
- if line == x or fnmatch(x, line):
- self._log("matched: ", repr(line))
- break
- else:
- self._log("line %r not found in output" % line)
- raise ValueError(self._log_text)
-
- def get_lines_after(self, fnline):
- """Return all lines following the given line in the text.
-
- The given line can contain glob wildcards.
- """
- for i, line in enumerate(self.lines):
- if fnline == line or fnmatch(line, fnline):
- return self.lines[i + 1:]
- raise ValueError("line %r not found in output" % fnline)
-
- def _log(self, *args):
- self._log_output.append(' '.join((str(x) for x in args)))
-
- @property
- def _log_text(self):
- return '\n'.join(self._log_output)
-
- def fnmatch_lines(self, lines2):
- """Search the text for matching lines.
-
- The argument is a list of lines which have to match and can
-        use glob wildcards.  If they do not match, pytest.fail() is
- called. The matches and non-matches are also printed on
- stdout.
-
- """
- lines2 = self._getlines(lines2)
- lines1 = self.lines[:]
- nextline = None
- extralines = []
- __tracebackhide__ = True
- for line in lines2:
- nomatchprinted = False
- while lines1:
- nextline = lines1.pop(0)
- if line == nextline:
- self._log("exact match:", repr(line))
- break
- elif fnmatch(nextline, line):
- self._log("fnmatch:", repr(line))
- self._log(" with:", repr(nextline))
- break
- else:
- if not nomatchprinted:
- self._log("nomatch:", repr(line))
- nomatchprinted = True
- self._log(" and:", repr(nextline))
- extralines.append(nextline)
- else:
- self._log("remains unmatched: %r" % (line,))
- pytest.fail(self._log_text)
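-
-
-# A minimal, hypothetical sketch of LineMatcher glob matching, not part of
-# the original module:
-def _example_linematcher():
-    lm = LineMatcher(["collected 1 item", "test_foo.py .", "==== 1 passed ===="])
-    lm.fnmatch_lines(["collected * item*", "*1 passed*"])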
diff --git a/lib/spack/external/pytest-fallback/_pytest/python.py b/lib/spack/external/pytest-fallback/_pytest/python.py
deleted file mode 100644
index 41fd2bdb7f..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/python.py
+++ /dev/null
@@ -1,1173 +0,0 @@
-""" Python test discovery, setup and run of test functions. """
-from __future__ import absolute_import, division, print_function
-
-import fnmatch
-import inspect
-import sys
-import os
-import collections
-from textwrap import dedent
-from itertools import count
-
-import py
-from _pytest.mark import MarkerError
-from _pytest.config import hookimpl
-
-import _pytest
-import _pytest._pluggy as pluggy
-from _pytest import fixtures
-from _pytest import main
-from _pytest.compat import (
- isclass, isfunction, is_generator, _ascii_escaped,
- REGEX_TYPE, STRING_TYPES, NoneType, NOTSET,
- get_real_func, getfslineno, safe_getattr,
- safe_str, getlocation, enum,
-)
-from _pytest.outcomes import fail
-from _pytest.mark import transfer_markers
-
-cutdir1 = py.path.local(pluggy.__file__.rstrip("oc"))
-cutdir2 = py.path.local(_pytest.__file__).dirpath()
-cutdir3 = py.path.local(py.__file__).dirpath()
-
-
-def filter_traceback(entry):
- """Return True if a TracebackEntry instance should be removed from tracebacks:
- * dynamically generated code (no code to show up for it);
- * internal traceback from pytest or its internal libraries, py and pluggy.
- """
- # entry.path might sometimes return a str object when the entry
- # points to dynamically generated code
- # see https://bitbucket.org/pytest-dev/py/issues/71
- raw_filename = entry.frame.code.raw.co_filename
- is_generated = '<' in raw_filename and '>' in raw_filename
- if is_generated:
- return False
-    # entry.path might point to a non-existing file, in which case it will
-    # also return a str object. see #1133
- p = py.path.local(entry.path)
- return p != cutdir1 and not p.relto(cutdir2) and not p.relto(cutdir3)
-
-
-def pyobj_property(name):
- def get(self):
- node = self.getparent(getattr(__import__('pytest'), name))
- if node is not None:
- return node.obj
- doc = "python %s object this node was collected from (can be None)." % (
- name.lower(),)
- return property(get, None, None, doc)
-
-
-def pytest_addoption(parser):
- group = parser.getgroup("general")
- group.addoption('--fixtures', '--funcargs',
- action="store_true", dest="showfixtures", default=False,
- help="show available fixtures, sorted by plugin appearance")
- group.addoption(
- '--fixtures-per-test',
- action="store_true",
- dest="show_fixtures_per_test",
- default=False,
- help="show fixtures per test",
- )
- parser.addini("usefixtures", type="args", default=[],
- help="list of default fixtures to be used with this project")
- parser.addini("python_files", type="args",
- default=['test_*.py', '*_test.py'],
- help="glob-style file patterns for Python test module discovery")
- parser.addini("python_classes", type="args", default=["Test", ],
- help="prefixes or glob names for Python test class discovery")
- parser.addini("python_functions", type="args", default=["test", ],
- help="prefixes or glob names for Python test function and "
- "method discovery")
-
- group.addoption("--import-mode", default="prepend",
- choices=["prepend", "append"], dest="importmode",
- help="prepend/append to sys.path when importing test modules, "
- "default is to prepend.")
-
-
-def pytest_cmdline_main(config):
- if config.option.showfixtures:
- showfixtures(config)
- return 0
- if config.option.show_fixtures_per_test:
- show_fixtures_per_test(config)
- return 0
-
-
-def pytest_generate_tests(metafunc):
- # those alternative spellings are common - raise a specific error to alert
- # the user
- alt_spellings = ['parameterize', 'parametrise', 'parameterise']
- for attr in alt_spellings:
- if hasattr(metafunc.function, attr):
- msg = "{0} has '{1}', spelling should be 'parametrize'"
- raise MarkerError(msg.format(metafunc.function.__name__, attr))
- try:
- markers = metafunc.function.parametrize
- except AttributeError:
- return
- for marker in markers:
- metafunc.parametrize(*marker.args, **marker.kwargs)
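-
-
-# Hedged illustration (hypothetical test module) of what the hook above
-# expands: a ``parametrize`` marker becomes one call per argvalue:
-#
-#     @pytest.mark.parametrize("n", [1, 2])
-#     def test_n(n):
-#         assert n in (1, 2)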
-
-
-def pytest_configure(config):
- config.addinivalue_line("markers",
- "parametrize(argnames, argvalues): call a test function multiple "
- "times passing in different arguments in turn. argvalues generally "
- "needs to be a list of values if argnames specifies only one name "
- "or a list of tuples of values if argnames specifies multiple names. "
- "Example: @parametrize('arg1', [1,2]) would lead to two calls of the "
- "decorated test function, one with arg1=1 and another with arg1=2."
- "see http://pytest.org/latest/parametrize.html for more info and "
- "examples."
- )
- config.addinivalue_line("markers",
- "usefixtures(fixturename1, fixturename2, ...): mark tests as needing "
- "all of the specified fixtures. see http://pytest.org/latest/fixture.html#usefixtures "
- )
-
-
-@hookimpl(trylast=True)
-def pytest_pyfunc_call(pyfuncitem):
- testfunction = pyfuncitem.obj
- if pyfuncitem._isyieldedfunction():
- testfunction(*pyfuncitem._args)
- else:
- funcargs = pyfuncitem.funcargs
- testargs = {}
- for arg in pyfuncitem._fixtureinfo.argnames:
- testargs[arg] = funcargs[arg]
- testfunction(**testargs)
- return True
-
-
-def pytest_collect_file(path, parent):
- ext = path.ext
- if ext == ".py":
- if not parent.session.isinitpath(path):
- for pat in parent.config.getini('python_files'):
- if path.fnmatch(pat):
- break
- else:
- return
- ihook = parent.session.gethookproxy(path)
- return ihook.pytest_pycollect_makemodule(path=path, parent=parent)
-
-
-def pytest_pycollect_makemodule(path, parent):
- return Module(path, parent)
-
-
-@hookimpl(hookwrapper=True)
-def pytest_pycollect_makeitem(collector, name, obj):
- outcome = yield
- res = outcome.get_result()
- if res is not None:
- return
- # nothing was collected elsewhere, let's do it here
- if isclass(obj):
- if collector.istestclass(obj, name):
- Class = collector._getcustomclass("Class")
- outcome.force_result(Class(name, parent=collector))
- elif collector.istestfunction(obj, name):
- # mock seems to store unbound methods (issue473), normalize it
- obj = getattr(obj, "__func__", obj)
- # We need to try and unwrap the function if it's a functools.partial
-        # or a functools.wrapped.
-        # We mustn't if it's been wrapped with mock.patch (python 2 only)
- if not (isfunction(obj) or isfunction(get_real_func(obj))):
- collector.warn(code="C2", message="cannot collect %r because it is not a function."
- % name, )
- elif getattr(obj, "__test__", True):
- if is_generator(obj):
- res = Generator(name, parent=collector)
- else:
- res = list(collector._genfunctions(name, obj))
- outcome.force_result(res)
-
-
-def pytest_make_parametrize_id(config, val, argname=None):
- return None
-
-
-class PyobjContext(object):
- module = pyobj_property("Module")
- cls = pyobj_property("Class")
- instance = pyobj_property("Instance")
-
-
-class PyobjMixin(PyobjContext):
- def obj():
- def fget(self):
- obj = getattr(self, '_obj', None)
- if obj is None:
- self._obj = obj = self._getobj()
- return obj
-
- def fset(self, value):
- self._obj = value
-
- return property(fget, fset, None, "underlying python object")
-
- obj = obj()
-
- def _getobj(self):
- return getattr(self.parent.obj, self.name)
-
- def getmodpath(self, stopatmodule=True, includemodule=False):
- """ return python path relative to the containing module. """
- chain = self.listchain()
- chain.reverse()
- parts = []
- for node in chain:
- if isinstance(node, Instance):
- continue
- name = node.name
- if isinstance(node, Module):
- name = os.path.splitext(name)[0]
- if stopatmodule:
- if includemodule:
- parts.append(name)
- break
- parts.append(name)
- parts.reverse()
- s = ".".join(parts)
- return s.replace(".[", "[")
-
- def _getfslineno(self):
- return getfslineno(self.obj)
-
- def reportinfo(self):
- # XXX caching?
- obj = self.obj
- compat_co_firstlineno = getattr(obj, 'compat_co_firstlineno', None)
- if isinstance(compat_co_firstlineno, int):
- # nose compatibility
- fspath = sys.modules[obj.__module__].__file__
- if fspath.endswith(".pyc"):
- fspath = fspath[:-1]
- lineno = compat_co_firstlineno
- else:
- fspath, lineno = getfslineno(obj)
- modpath = self.getmodpath()
- assert isinstance(lineno, int)
- return fspath, lineno, modpath
-
-
-class PyCollector(PyobjMixin, main.Collector):
-
- def funcnamefilter(self, name):
- return self._matches_prefix_or_glob_option('python_functions', name)
-
- def isnosetest(self, obj):
- """ Look for the __test__ attribute, which is applied by the
- @nose.tools.istest decorator
- """
- # We explicitly check for "is True" here to not mistakenly treat
- # classes with a custom __getattr__ returning something truthy (like a
- # function) as test classes.
- return safe_getattr(obj, '__test__', False) is True
-
- def classnamefilter(self, name):
- return self._matches_prefix_or_glob_option('python_classes', name)
-
- def istestfunction(self, obj, name):
- if self.funcnamefilter(name) or self.isnosetest(obj):
- if isinstance(obj, staticmethod):
- # static methods need to be unwrapped
- obj = safe_getattr(obj, '__func__', False)
- if obj is False:
- # Python 2.6 wraps in a different way that we won't try to handle
- msg = "cannot collect static method %r because " \
- "it is not a function (always the case in Python 2.6)"
- self.warn(
- code="C2", message=msg % name)
- return False
- return (
- safe_getattr(obj, "__call__", False) and fixtures.getfixturemarker(obj) is None
- )
- else:
- return False
-
- def istestclass(self, obj, name):
- return self.classnamefilter(name) or self.isnosetest(obj)
-
- def _matches_prefix_or_glob_option(self, option_name, name):
- """
-        Check whether the given name matches a prefix or glob pattern defined
-        in the ini configuration.
- """
- for option in self.config.getini(option_name):
- if name.startswith(option):
- return True
- # check that name looks like a glob-string before calling fnmatch
- # because this is called for every name in each collected module,
- # and fnmatch is somewhat expensive to call
- elif ('*' in option or '?' in option or '[' in option) and \
- fnmatch.fnmatch(name, option):
- return True
- return False
-
- def collect(self):
- if not getattr(self.obj, "__test__", True):
- return []
-
- # NB. we avoid random getattrs and peek in the __dict__ instead
- # (XXX originally introduced from a PyPy need, still true?)
- dicts = [getattr(self.obj, '__dict__', {})]
- for basecls in inspect.getmro(self.obj.__class__):
- dicts.append(basecls.__dict__)
- seen = {}
- values = []
- for dic in dicts:
- for name, obj in list(dic.items()):
- if name in seen:
- continue
- seen[name] = True
- res = self.makeitem(name, obj)
- if res is None:
- continue
- if not isinstance(res, list):
- res = [res]
- values.extend(res)
- values.sort(key=lambda item: item.reportinfo()[:2])
- return values
-
- def makeitem(self, name, obj):
- # assert self.ihook.fspath == self.fspath, self
- return self.ihook.pytest_pycollect_makeitem(
- collector=self, name=name, obj=obj)
-
- def _genfunctions(self, name, funcobj):
- module = self.getparent(Module).obj
- clscol = self.getparent(Class)
- cls = clscol and clscol.obj or None
- transfer_markers(funcobj, cls, module)
- fm = self.session._fixturemanager
- fixtureinfo = fm.getfixtureinfo(self, funcobj, cls)
- metafunc = Metafunc(funcobj, fixtureinfo, self.config,
- cls=cls, module=module)
- methods = []
- if hasattr(module, "pytest_generate_tests"):
- methods.append(module.pytest_generate_tests)
- if hasattr(cls, "pytest_generate_tests"):
- methods.append(cls().pytest_generate_tests)
- if methods:
- self.ihook.pytest_generate_tests.call_extra(methods,
- dict(metafunc=metafunc))
- else:
- self.ihook.pytest_generate_tests(metafunc=metafunc)
-
- Function = self._getcustomclass("Function")
- if not metafunc._calls:
- yield Function(name, parent=self, fixtureinfo=fixtureinfo)
- else:
- # add funcargs() as fixturedefs to fixtureinfo.arg2fixturedefs
- fixtures.add_funcarg_pseudo_fixture_def(self, metafunc, fm)
-
- for callspec in metafunc._calls:
- subname = "%s[%s]" % (name, callspec.id)
- yield Function(name=subname, parent=self,
- callspec=callspec, callobj=funcobj,
- fixtureinfo=fixtureinfo,
- keywords={callspec.id: True},
- originalname=name,
- )
-
-
-class Module(main.File, PyCollector):
- """ Collector for test classes and functions. """
-
- def _getobj(self):
- return self._importtestmodule()
-
- def collect(self):
- self.session._fixturemanager.parsefactories(self)
- return super(Module, self).collect()
-
- def _importtestmodule(self):
- # we assume we are only called once per module
- importmode = self.config.getoption("--import-mode")
- try:
- mod = self.fspath.pyimport(ensuresyspath=importmode)
- except SyntaxError:
- raise self.CollectError(
- _pytest._code.ExceptionInfo().getrepr(style="short"))
- except self.fspath.ImportMismatchError:
- e = sys.exc_info()[1]
- raise self.CollectError(
- "import file mismatch:\n"
- "imported module %r has this __file__ attribute:\n"
- " %s\n"
- "which is not the same as the test file we want to collect:\n"
- " %s\n"
- "HINT: remove __pycache__ / .pyc files and/or use a "
- "unique basename for your test file modules"
- % e.args
- )
- except ImportError:
- from _pytest._code.code import ExceptionInfo
- exc_info = ExceptionInfo()
- if self.config.getoption('verbose') < 2:
- exc_info.traceback = exc_info.traceback.filter(filter_traceback)
- exc_repr = exc_info.getrepr(style='short') if exc_info.traceback else exc_info.exconly()
- formatted_tb = safe_str(exc_repr)
- raise self.CollectError(
- "ImportError while importing test module '{fspath}'.\n"
- "Hint: make sure your test modules/packages have valid Python names.\n"
- "Traceback:\n"
- "{traceback}".format(fspath=self.fspath, traceback=formatted_tb)
- )
- except _pytest.runner.Skipped as e:
- if e.allow_module_level:
- raise
- raise self.CollectError(
- "Using pytest.skip outside of a test is not allowed. "
- "To decorate a test function, use the @pytest.mark.skip "
- "or @pytest.mark.skipif decorators instead, and to skip a "
- "module use `pytestmark = pytest.mark.{skip,skipif}."
- )
- self.config.pluginmanager.consider_module(mod)
- return mod
-
- def setup(self):
- setup_module = _get_xunit_setup_teardown(self.obj, "setUpModule")
- if setup_module is None:
- setup_module = _get_xunit_setup_teardown(self.obj, "setup_module")
- if setup_module is not None:
- setup_module()
-
- teardown_module = _get_xunit_setup_teardown(self.obj, 'tearDownModule')
- if teardown_module is None:
- teardown_module = _get_xunit_setup_teardown(self.obj, 'teardown_module')
- if teardown_module is not None:
- self.addfinalizer(teardown_module)
-
-
-def _get_xunit_setup_teardown(holder, attr_name, param_obj=None):
- """
- Return a callable to perform xunit-style setup or teardown if
- the function exists in the ``holder`` object.
- The ``param_obj`` parameter is the parameter which will be passed to the function
- when the callable is called without arguments, defaults to the ``holder`` object.
- Return ``None`` if a suitable callable is not found.
- """
- param_obj = param_obj if param_obj is not None else holder
- result = _get_xunit_func(holder, attr_name)
- if result is not None:
- arg_count = result.__code__.co_argcount
- if inspect.ismethod(result):
- arg_count -= 1
- if arg_count:
- return lambda: result(param_obj)
- else:
- return result
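-
-
-# Hedged sketch of the xunit-style hooks the helper above discovers in a
-# module or class (hypothetical user code; each either takes the holder as
-# its single argument or takes no argument at all):
-#
-#     def setup_module(module): ...
-#     def teardown_module(module): ...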
-
-
-def _get_xunit_func(obj, name):
- """Return the attribute from the given object to be used as a setup/teardown
- xunit-style function, but only if not marked as a fixture to
- avoid calling it twice.
- """
- meth = getattr(obj, name, None)
- if fixtures.getfixturemarker(meth) is None:
- return meth
-
-
-class Class(PyCollector):
- """ Collector for test methods. """
-
- def collect(self):
- if not safe_getattr(self.obj, "__test__", True):
- return []
- if hasinit(self.obj):
- self.warn("C1", "cannot collect test class %r because it has a "
- "__init__ constructor" % self.obj.__name__)
- return []
- elif hasnew(self.obj):
- self.warn("C1", "cannot collect test class %r because it has a "
- "__new__ constructor" % self.obj.__name__)
- return []
- return [self._getcustomclass("Instance")(name="()", parent=self)]
-
- def setup(self):
- setup_class = _get_xunit_func(self.obj, 'setup_class')
- if setup_class is not None:
- setup_class = getattr(setup_class, 'im_func', setup_class)
- setup_class = getattr(setup_class, '__func__', setup_class)
- setup_class(self.obj)
-
- fin_class = getattr(self.obj, 'teardown_class', None)
- if fin_class is not None:
- fin_class = getattr(fin_class, 'im_func', fin_class)
- fin_class = getattr(fin_class, '__func__', fin_class)
- self.addfinalizer(lambda: fin_class(self.obj))
-
-
-class Instance(PyCollector):
- def _getobj(self):
- return self.parent.obj()
-
- def collect(self):
- self.session._fixturemanager.parsefactories(self)
- return super(Instance, self).collect()
-
- def newinstance(self):
- self.obj = self._getobj()
- return self.obj
-
-
-class FunctionMixin(PyobjMixin):
- """ mixin for the code common to Function and Generator.
- """
-
- def setup(self):
- """ perform setup for this test function. """
- if hasattr(self, '_preservedparent'):
- obj = self._preservedparent
- elif isinstance(self.parent, Instance):
- obj = self.parent.newinstance()
- self.obj = self._getobj()
- else:
- obj = self.parent.obj
- if inspect.ismethod(self.obj):
- setup_name = 'setup_method'
- teardown_name = 'teardown_method'
- else:
- setup_name = 'setup_function'
- teardown_name = 'teardown_function'
- setup_func_or_method = _get_xunit_setup_teardown(obj, setup_name, param_obj=self.obj)
- if setup_func_or_method is not None:
- setup_func_or_method()
- teardown_func_or_method = _get_xunit_setup_teardown(obj, teardown_name, param_obj=self.obj)
- if teardown_func_or_method is not None:
- self.addfinalizer(teardown_func_or_method)
-
- def _prunetraceback(self, excinfo):
- if hasattr(self, '_obj') and not self.config.option.fulltrace:
- code = _pytest._code.Code(get_real_func(self.obj))
- path, firstlineno = code.path, code.firstlineno
- traceback = excinfo.traceback
- ntraceback = traceback.cut(path=path, firstlineno=firstlineno)
- if ntraceback == traceback:
- ntraceback = ntraceback.cut(path=path)
- if ntraceback == traceback:
- # ntraceback = ntraceback.cut(excludepath=cutdir2)
- ntraceback = ntraceback.filter(filter_traceback)
- if not ntraceback:
- ntraceback = traceback
-
- excinfo.traceback = ntraceback.filter()
- # issue364: mark all but first and last frames to
- # only show a single-line message for each frame
- if self.config.option.tbstyle == "auto":
- if len(excinfo.traceback) > 2:
- for entry in excinfo.traceback[1:-1]:
- entry.set_repr_style('short')
-
- def _repr_failure_py(self, excinfo, style="long"):
- if excinfo.errisinstance(fail.Exception):
- if not excinfo.value.pytrace:
- return py._builtin._totext(excinfo.value)
- return super(FunctionMixin, self)._repr_failure_py(excinfo,
- style=style)
-
- def repr_failure(self, excinfo, outerr=None):
- assert outerr is None, "XXX outerr usage is deprecated"
- style = self.config.option.tbstyle
- if style == "auto":
- style = "long"
- return self._repr_failure_py(excinfo, style=style)
-
-
-class Generator(FunctionMixin, PyCollector):
- def collect(self):
- # test generators are seen as collectors but they also
- # invoke setup/teardown on popular request
- # (induced by the common "test_*" naming shared with normal tests)
- from _pytest import deprecated
- self.session._setupstate.prepare(self)
- # see FunctionMixin.setup and test_setupstate_is_preserved_134
- self._preservedparent = self.parent.obj
- values = []
- seen = {}
- for i, x in enumerate(self.obj()):
- name, call, args = self.getcallargs(x)
- if not callable(call):
- raise TypeError("%r yielded non callable test %r" % (self.obj, call,))
- if name is None:
- name = "[%d]" % i
- else:
- name = "['%s']" % name
- if name in seen:
- raise ValueError("%r generated tests with non-unique name %r" % (self, name))
- seen[name] = True
- values.append(self.Function(name, self, args=args, callobj=call))
- self.warn('C1', deprecated.YIELD_TESTS)
- return values
-
- def getcallargs(self, obj):
- if not isinstance(obj, (tuple, list)):
- obj = (obj,)
- # explicit naming
- if isinstance(obj[0], py.builtin._basestring):
- name = obj[0]
- obj = obj[1:]
- else:
- name = None
- call, args = obj[0], obj[1:]
- return name, call, args
-
-
-def hasinit(obj):
- init = getattr(obj, '__init__', None)
- if init:
- return init != object.__init__
-
-
-def hasnew(obj):
- new = getattr(obj, '__new__', None)
- if new:
- return new != object.__new__
-
-
-class CallSpec2(object):
- def __init__(self, metafunc):
- self.metafunc = metafunc
- self.funcargs = {}
- self._idlist = []
- self.params = {}
- self._globalid = NOTSET
- self._globalid_args = set()
- self._globalparam = NOTSET
- self._arg2scopenum = {} # used for sorting parametrized resources
- self.keywords = {}
- self.indices = {}
-
- def copy(self, metafunc):
- cs = CallSpec2(self.metafunc)
- cs.funcargs.update(self.funcargs)
- cs.params.update(self.params)
- cs.keywords.update(self.keywords)
- cs.indices.update(self.indices)
- cs._arg2scopenum.update(self._arg2scopenum)
- cs._idlist = list(self._idlist)
- cs._globalid = self._globalid
- cs._globalid_args = self._globalid_args
- cs._globalparam = self._globalparam
- return cs
-
- def _checkargnotcontained(self, arg):
- if arg in self.params or arg in self.funcargs:
- raise ValueError("duplicate %r" % (arg,))
-
- def getparam(self, name):
- try:
- return self.params[name]
- except KeyError:
- if self._globalparam is NOTSET:
- raise ValueError(name)
- return self._globalparam
-
- @property
- def id(self):
- return "-".join(map(str, filter(None, self._idlist)))
-
- def setmulti(self, valtypes, argnames, valset, id, keywords, scopenum,
- param_index):
- for arg, val in zip(argnames, valset):
- self._checkargnotcontained(arg)
- valtype_for_arg = valtypes[arg]
- getattr(self, valtype_for_arg)[arg] = val
- self.indices[arg] = param_index
- self._arg2scopenum[arg] = scopenum
- self._idlist.append(id)
- self.keywords.update(keywords)
-
- def setall(self, funcargs, id, param):
- for x in funcargs:
- self._checkargnotcontained(x)
- self.funcargs.update(funcargs)
- if id is not NOTSET:
- self._idlist.append(id)
- if param is not NOTSET:
- assert self._globalparam is NOTSET
- self._globalparam = param
- for arg in funcargs:
- self._arg2scopenum[arg] = fixtures.scopenum_function
-
-
-class Metafunc(fixtures.FuncargnamesCompatAttr):
- """
- Metafunc objects are passed to the ``pytest_generate_tests`` hook.
- They help to inspect a test function and to generate tests according to
- test configuration or values specified in the class or module where a
- test function is defined.
- """
-
- def __init__(self, function, fixtureinfo, config, cls=None, module=None):
- #: access to the :class:`_pytest.config.Config` object for the test session
- self.config = config
-
- #: the module object where the test function is defined in.
- self.module = module
-
- #: underlying python test function
- self.function = function
-
- #: set of fixture names required by the test function
- self.fixturenames = fixtureinfo.names_closure
-
- #: class object where the test function is defined in or ``None``.
- self.cls = cls
-
- self._calls = []
- self._ids = py.builtin.set()
- self._arg2fixturedefs = fixtureinfo.name2fixturedefs
-
- def parametrize(self, argnames, argvalues, indirect=False, ids=None,
- scope=None):
- """ Add new invocations to the underlying test function using the list
- of argvalues for the given argnames. Parametrization is performed
-        during the collection phase. If you need to set up expensive resources,
-        consider setting ``indirect`` so that it happens at test setup time instead.
-
- :arg argnames: a comma-separated string denoting one or more argument
- names, or a list/tuple of argument strings.
-
- :arg argvalues: The list of argvalues determines how often a
- test is invoked with different argument values. If only one
- argname was specified argvalues is a list of values. If N
- argnames were specified, argvalues must be a list of N-tuples,
- where each tuple-element specifies a value for its respective
- argname.
-
-        :arg indirect: A list of argument names (a subset of argnames), or a
-            boolean. If True, it applies to all names in argnames. Each
-            argvalue corresponding to an argname in this list will
- be passed as request.param to its respective argname fixture
- function so that it can perform more expensive setups during the
- setup phase of a test rather than at collection time.
-
- :arg ids: list of string ids, or a callable.
-            If strings, each corresponds to an entry in argvalues and becomes
-            part of the test id. If None is given as the id of a specific test,
-            the automatically generated id for that argument will be used.
- If callable, it should take one argument (a single argvalue) and return
- a string or return None. If None, the automatically generated id for that
- argument will be used.
- If no ids are provided they will be generated automatically from
- the argvalues.
-
- :arg scope: if specified it denotes the scope of the parameters.
- The scope is used for grouping tests by parameter instances.
- It will also override any fixture-function defined scope, allowing
-            a dynamic scope to be set using test context or configuration.
- """
- from _pytest.fixtures import scope2index
- from _pytest.mark import MARK_GEN, ParameterSet
- from py.io import saferepr
-
- if not isinstance(argnames, (tuple, list)):
- argnames = [x.strip() for x in argnames.split(",") if x.strip()]
- force_tuple = len(argnames) == 1
- else:
- force_tuple = False
- parameters = [
- ParameterSet.extract_from(x, legacy_force_tuple=force_tuple)
- for x in argvalues]
- del argvalues
-
- if not parameters:
- fs, lineno = getfslineno(self.function)
- reason = "got empty parameter set %r, function %s at %s:%d" % (
- argnames, self.function.__name__, fs, lineno)
- mark = MARK_GEN.skip(reason=reason)
- parameters.append(ParameterSet(
- values=(NOTSET,) * len(argnames),
- marks=[mark],
- id=None,
- ))
-
- if scope is None:
- scope = _find_parametrized_scope(argnames, self._arg2fixturedefs, indirect)
-
- scopenum = scope2index(scope, descr='call to {0}'.format(self.parametrize))
- valtypes = {}
- for arg in argnames:
- if arg not in self.fixturenames:
- if isinstance(indirect, (tuple, list)):
- name = 'fixture' if arg in indirect else 'argument'
- else:
- name = 'fixture' if indirect else 'argument'
- raise ValueError(
- "%r uses no %s %r" % (
- self.function, name, arg))
-
- if indirect is True:
- valtypes = dict.fromkeys(argnames, "params")
- elif indirect is False:
- valtypes = dict.fromkeys(argnames, "funcargs")
- elif isinstance(indirect, (tuple, list)):
- valtypes = dict.fromkeys(argnames, "funcargs")
- for arg in indirect:
- if arg not in argnames:
- raise ValueError("indirect given to %r: fixture %r doesn't exist" % (
- self.function, arg))
- valtypes[arg] = "params"
- idfn = None
- if callable(ids):
- idfn = ids
- ids = None
- if ids:
- if len(ids) != len(parameters):
- raise ValueError('%d tests specified with %d ids' % (
- len(parameters), len(ids)))
- for id_value in ids:
- if id_value is not None and not isinstance(id_value, py.builtin._basestring):
- msg = 'ids must be list of strings, found: %s (type: %s)'
- raise ValueError(msg % (saferepr(id_value), type(id_value).__name__))
- ids = idmaker(argnames, parameters, idfn, ids, self.config)
- newcalls = []
- for callspec in self._calls or [CallSpec2(self)]:
- elements = zip(ids, parameters, count())
- for a_id, param, param_index in elements:
- if len(param.values) != len(argnames):
- raise ValueError(
- 'In "parametrize" the number of values ({0}) must be '
- 'equal to the number of names ({1})'.format(
- param.values, argnames))
- newcallspec = callspec.copy(self)
- newcallspec.setmulti(valtypes, argnames, param.values, a_id,
- param.deprecated_arg_dict, scopenum, param_index)
- newcalls.append(newcallspec)
- self._calls = newcalls
-
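The ``parametrize`` API above is normally driven from the ``pytest_generate_tests`` hook mentioned in the ``Metafunc`` docstring; a minimal usage sketch, with hypothetical argument names and values::

    # conftest.py -- hypothetical example of driving Metafunc.parametrize
    def pytest_generate_tests(metafunc):
        # only parametrize tests that actually accept an "n" argument
        if "n" in metafunc.fixturenames:
            metafunc.parametrize("n", [1, 2, 3], ids=["one", "two", "three"])
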
- def addcall(self, funcargs=None, id=NOTSET, param=NOTSET):
- """ (deprecated, use parametrize) Add a new call to the underlying
- test function during the collection phase of a test run. Note that
- request.addcall() is called during the test collection phase, prior
- to and independently of actual test execution. You should only use addcall()
- if you need to specify multiple arguments of a test function.
-
- :arg funcargs: argument keyword dictionary used when invoking
- the test function.
-
- :arg id: used for reporting and identification purposes. If you
- don't supply an `id` an automatic unique id will be generated.
-
- :arg param: a parameter which will be exposed to a later fixture function
- invocation through the ``request.param`` attribute.
- """
- assert funcargs is None or isinstance(funcargs, dict)
- if funcargs is not None:
- for name in funcargs:
- if name not in self.fixturenames:
- fail("funcarg %r not used in this function." % name)
- else:
- funcargs = {}
- if id is None:
- raise ValueError("id=None not allowed")
- if id is NOTSET:
- id = len(self._calls)
- id = str(id)
- if id in self._ids:
- raise ValueError("duplicate id %r" % id)
- self._ids.add(id)
-
- cs = CallSpec2(self)
- cs.setall(funcargs, id, param)
- self._calls.append(cs)
-
-
-def _find_parametrized_scope(argnames, arg2fixturedefs, indirect):
- """Find the most appropriate scope for a parametrized call based on its arguments.
-
- When there's at least one direct argument, always use "function" scope.
-
- When a test function is parametrized and all its arguments are indirect
- (e.g. fixtures), return the most narrow scope based on the fixtures used.
-
- Related to issue #1832, based on code posted by @Kingdread.
- """
- from _pytest.fixtures import scopes
- indirect_as_list = isinstance(indirect, (list, tuple))
- all_arguments_are_fixtures = indirect is True or \
- indirect_as_list and len(indirect) == len(argnames)
- if all_arguments_are_fixtures:
- fixturedefs = arg2fixturedefs or {}
- used_scopes = [fixturedef[0].scope for name, fixturedef in fixturedefs.items()]
- if used_scopes:
- # Takes the most narrow scope from used fixtures
- for scope in reversed(scopes):
- if scope in used_scopes:
- return scope
-
- return 'function'
-
-
-def _idval(val, argname, idx, idfn, config=None):
- if idfn:
- s = None
- try:
- s = idfn(val)
- except Exception:
- # See issue https://github.com/pytest-dev/pytest/issues/2169
- import warnings
- msg = "Raised while trying to determine id of parameter %s at position %d." % (argname, idx)
- msg += '\nUpdate your code as this will raise an error in pytest-4.0.'
- warnings.warn(msg, DeprecationWarning)
- if s:
- return _ascii_escaped(s)
-
- if config:
- hook_id = config.hook.pytest_make_parametrize_id(
- config=config, val=val, argname=argname)
- if hook_id:
- return hook_id
-
- if isinstance(val, STRING_TYPES):
- return _ascii_escaped(val)
- elif isinstance(val, (float, int, bool, NoneType)):
- return str(val)
- elif isinstance(val, REGEX_TYPE):
- return _ascii_escaped(val.pattern)
- elif enum is not None and isinstance(val, enum.Enum):
- return str(val)
- elif isclass(val) and hasattr(val, '__name__'):
- return val.__name__
- return str(argname) + str(idx)
-
-
-def _idvalset(idx, parameterset, argnames, idfn, ids, config=None):
- if parameterset.id is not None:
- return parameterset.id
- if ids is None or (idx >= len(ids) or ids[idx] is None):
- this_id = [_idval(val, argname, idx, idfn, config)
- for val, argname in zip(parameterset.values, argnames)]
- return "-".join(this_id)
- else:
- return _ascii_escaped(ids[idx])
-
-
-def idmaker(argnames, parametersets, idfn=None, ids=None, config=None):
- ids = [_idvalset(valindex, parameterset, argnames, idfn, ids, config)
- for valindex, parameterset in enumerate(parametersets)]
- if len(set(ids)) != len(ids):
- # The ids are not unique
- duplicates = [testid for testid in ids if ids.count(testid) > 1]
- counters = collections.defaultdict(lambda: 0)
- for index, testid in enumerate(ids):
- if testid in duplicates:
- ids[index] = testid + str(counters[testid])
- counters[testid] += 1
- return ids
-
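When ``idmaker`` finds duplicate ids it disambiguates them with per-duplicate numeric suffixes. A standalone sketch of that counter scheme, assuming plain string ids::

    import collections

    def dedupe_ids(ids):
        # mirror of the suffixing loop in idmaker above (sketch only)
        duplicates = [testid for testid in ids if ids.count(testid) > 1]
        counters = collections.defaultdict(int)
        result = []
        for testid in ids:
            if testid in duplicates:
                result.append(testid + str(counters[testid]))
                counters[testid] += 1
            else:
                result.append(testid)
        return result

    assert dedupe_ids(["a", "a", "b"]) == ["a0", "a1", "b"]
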
-
-def show_fixtures_per_test(config):
- from _pytest.main import wrap_session
- return wrap_session(config, _show_fixtures_per_test)
-
-
-def _show_fixtures_per_test(config, session):
- import _pytest.config
- session.perform_collect()
- curdir = py.path.local()
- tw = _pytest.config.create_terminal_writer(config)
- verbose = config.getvalue("verbose")
-
- def get_best_relpath(func):
- loc = getlocation(func, curdir)
- return curdir.bestrelpath(loc)
-
- def write_fixture(fixture_def):
- argname = fixture_def.argname
- if verbose <= 0 and argname.startswith("_"):
- return
- if verbose > 0:
- bestrel = get_best_relpath(fixture_def.func)
- funcargspec = "{0} -- {1}".format(argname, bestrel)
- else:
- funcargspec = argname
- tw.line(funcargspec, green=True)
- fixture_doc = fixture_def.func.__doc__
- if fixture_doc:
- write_docstring(tw, fixture_doc)
- else:
- tw.line(' no docstring available', red=True)
-
- def write_item(item):
- try:
- info = item._fixtureinfo
- except AttributeError:
- # doctests items have no _fixtureinfo attribute
- return
- if not info.name2fixturedefs:
- # this test item does not use any fixtures
- return
- tw.line()
- tw.sep('-', 'fixtures used by {0}'.format(item.name))
- tw.sep('-', '({0})'.format(get_best_relpath(item.function)))
- # dict key not used in loop but needed for sorting
- for _, fixturedefs in sorted(info.name2fixturedefs.items()):
- assert fixturedefs is not None
- if not fixturedefs:
- continue
- # last item is expected to be the one used by the test item
- write_fixture(fixturedefs[-1])
-
- for session_item in session.items:
- write_item(session_item)
-
-
-def showfixtures(config):
- from _pytest.main import wrap_session
- return wrap_session(config, _showfixtures_main)
-
-
-def _showfixtures_main(config, session):
- import _pytest.config
- session.perform_collect()
- curdir = py.path.local()
- tw = _pytest.config.create_terminal_writer(config)
- verbose = config.getvalue("verbose")
-
- fm = session._fixturemanager
-
- available = []
- seen = set()
-
- for argname, fixturedefs in fm._arg2fixturedefs.items():
- assert fixturedefs is not None
- if not fixturedefs:
- continue
- for fixturedef in fixturedefs:
- loc = getlocation(fixturedef.func, curdir)
- if (fixturedef.argname, loc) in seen:
- continue
- seen.add((fixturedef.argname, loc))
- available.append((len(fixturedef.baseid),
- fixturedef.func.__module__,
- curdir.bestrelpath(loc),
- fixturedef.argname, fixturedef))
-
- available.sort()
- currentmodule = None
- for baseid, module, bestrel, argname, fixturedef in available:
- if currentmodule != module:
- if not module.startswith("_pytest."):
- tw.line()
- tw.sep("-", "fixtures defined from %s" % (module,))
- currentmodule = module
- if verbose <= 0 and argname[0] == "_":
- continue
- if verbose > 0:
- funcargspec = "%s -- %s" % (argname, bestrel,)
- else:
- funcargspec = argname
- tw.line(funcargspec, green=True)
- loc = getlocation(fixturedef.func, curdir)
- doc = fixturedef.func.__doc__ or ""
- if doc:
- write_docstring(tw, doc)
- else:
- tw.line(" %s: no docstring available" % (loc,),
- red=True)
-
-
-def write_docstring(tw, doc):
- INDENT = " "
- doc = doc.rstrip()
- if "\n" in doc:
- firstline, rest = doc.split("\n", 1)
- else:
- firstline, rest = doc, ""
-
- if firstline.strip():
- tw.line(INDENT + firstline.strip())
-
- if rest:
- for line in dedent(rest).split("\n"):
- tw.write(INDENT + line + "\n")
-
-
-class Function(FunctionMixin, main.Item, fixtures.FuncargnamesCompatAttr):
- """ a Function Item is responsible for setting up and executing a
- Python test function.
- """
- _genid = None
-
- def __init__(self, name, parent, args=None, config=None,
- callspec=None, callobj=NOTSET, keywords=None, session=None,
- fixtureinfo=None, originalname=None):
- super(Function, self).__init__(name, parent, config=config,
- session=session)
- self._args = args
- if callobj is not NOTSET:
- self.obj = callobj
-
- self.keywords.update(self.obj.__dict__)
- if callspec:
- self.callspec = callspec
- self.keywords.update(callspec.keywords)
- if keywords:
- self.keywords.update(keywords)
-
- if fixtureinfo is None:
- fixtureinfo = self.session._fixturemanager.getfixtureinfo(
- self.parent, self.obj, self.cls,
- funcargs=not self._isyieldedfunction())
- self._fixtureinfo = fixtureinfo
- self.fixturenames = fixtureinfo.names_closure
- self._initrequest()
-
- #: original function name, without any decorations (for example
- #: parametrization adds a ``"[...]"`` suffix to function names).
- #:
- #: .. versionadded:: 3.0
- self.originalname = originalname
-
- def _initrequest(self):
- self.funcargs = {}
- if self._isyieldedfunction():
- assert not hasattr(self, "callspec"), (
- "yielded functions (deprecated) cannot have funcargs")
- else:
- if hasattr(self, "callspec"):
- callspec = self.callspec
- assert not callspec.funcargs
- self._genid = callspec.id
- if hasattr(callspec, "param"):
- self.param = callspec.param
- self._request = fixtures.FixtureRequest(self)
-
- @property
- def function(self):
- "underlying python 'function' object"
- return getattr(self.obj, 'im_func', self.obj)
-
- def _getobj(self):
- name = self.name
- i = name.find("[") # parametrization
- if i != -1:
- name = name[:i]
- return getattr(self.parent.obj, name)
-
- @property
- def _pyfuncitem(self):
- "(compatonly) for code expecting pytest-2.2 style request objects"
- return self
-
- def _isyieldedfunction(self):
- return getattr(self, "_args", None) is not None
-
- def runtest(self):
- """ execute the underlying test function. """
- self.ihook.pytest_pyfunc_call(pyfuncitem=self)
-
- def setup(self):
- super(Function, self).setup()
- fixtures.fillfixtures(self)
diff --git a/lib/spack/external/pytest-fallback/_pytest/python_api.py b/lib/spack/external/pytest-fallback/_pytest/python_api.py
deleted file mode 100644
index a931b4d2c7..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/python_api.py
+++ /dev/null
@@ -1,629 +0,0 @@
-import math
-import sys
-
-import py
-
-from _pytest.compat import isclass, izip
-from _pytest.outcomes import fail
-import _pytest._code
-
-
-def _cmp_raises_type_error(self, other):
- """__cmp__ implementation which raises TypeError. Used
- by Approx base classes to implement only == and != and raise a
- TypeError for other comparisons.
-
- Needed in Python 2 only; in Python 3 it suffices to simply not
- implement the other operators at all.
- """
- __tracebackhide__ = True
- raise TypeError('Comparison operators other than == and != not supported by approx objects')
-
-
-# builtin pytest.approx helper
-
-
-class ApproxBase(object):
- """
- Provide shared utilities for making approximate comparisons between numbers
- or sequences of numbers.
- """
-
- def __init__(self, expected, rel=None, abs=None, nan_ok=False):
- self.expected = expected
- self.abs = abs
- self.rel = rel
- self.nan_ok = nan_ok
-
- def __repr__(self):
- raise NotImplementedError
-
- def __eq__(self, actual):
- return all(
- a == self._approx_scalar(x)
- for a, x in self._yield_comparisons(actual))
-
- __hash__ = None
-
- def __ne__(self, actual):
- return not (actual == self)
-
- if sys.version_info[0] == 2:
- __cmp__ = _cmp_raises_type_error
-
- def _approx_scalar(self, x):
- return ApproxScalar(x, rel=self.rel, abs=self.abs, nan_ok=self.nan_ok)
-
- def _yield_comparisons(self, actual):
- """
- Yield all the pairs of numbers to be compared. This is used to
- implement the `__eq__` method.
- """
- raise NotImplementedError
-
-
-class ApproxNumpy(ApproxBase):
- """
- Perform approximate comparisons for numpy arrays.
- """
-
- # Tell numpy to use our `__eq__` operator instead of its own.
- __array_priority__ = 100
-
- def __repr__(self):
- # It might be nice to rewrite this function to account for the
- # shape of the array...
- return "approx({0!r})".format(list(
- self._approx_scalar(x) for x in self.expected))
-
- if sys.version_info[0] == 2:
- __cmp__ = _cmp_raises_type_error
-
- def __eq__(self, actual):
- import numpy as np
-
- try:
- actual = np.asarray(actual)
- except: # noqa
- raise TypeError("cannot compare '{0}' to numpy.ndarray".format(actual))
-
- if actual.shape != self.expected.shape:
- return False
-
- return ApproxBase.__eq__(self, actual)
-
- def _yield_comparisons(self, actual):
- import numpy as np
-
- # We can be sure that `actual` is a numpy array, because it's
- # cast in `__eq__` before being passed to `ApproxBase.__eq__`,
- # which is the only method that calls this one.
- for i in np.ndindex(self.expected.shape):
- yield actual[i], self.expected[i]
-
-
-class ApproxMapping(ApproxBase):
- """
- Perform approximate comparisons for mappings where the values are numbers
- (the keys can be anything).
- """
-
- def __repr__(self):
- return "approx({0!r})".format(dict(
- (k, self._approx_scalar(v))
- for k, v in self.expected.items()))
-
- def __eq__(self, actual):
- if set(actual.keys()) != set(self.expected.keys()):
- return False
-
- return ApproxBase.__eq__(self, actual)
-
- def _yield_comparisons(self, actual):
- for k in self.expected.keys():
- yield actual[k], self.expected[k]
-
-
-class ApproxSequence(ApproxBase):
- """
- Perform approximate comparisons for sequences of numbers.
- """
-
- # Tell numpy to use our `__eq__` operator instead of its own.
- __array_priority__ = 100
-
- def __repr__(self):
- seq_type = type(self.expected)
- if seq_type not in (tuple, list, set):
- seq_type = list
- return "approx({0!r})".format(seq_type(
- self._approx_scalar(x) for x in self.expected))
-
- def __eq__(self, actual):
- if len(actual) != len(self.expected):
- return False
- return ApproxBase.__eq__(self, actual)
-
- def _yield_comparisons(self, actual):
- return izip(actual, self.expected)
-
-
-class ApproxScalar(ApproxBase):
- """
- Perform approximate comparisons for single numbers only.
- """
-
- def __repr__(self):
- """
- Return a string communicating both the expected value and the tolerance
- for the comparison being made, e.g. '1.0 +- 1e-6'. Use the unicode
- plus/minus symbol if this is python3 (it's too hard to get right for
- python2).
- """
- if isinstance(self.expected, complex):
- return str(self.expected)
-
- # Infinities aren't compared using tolerances, so don't show a
- # tolerance.
- if math.isinf(self.expected):
- return str(self.expected)
-
- # If a sensible tolerance can't be calculated, self.tolerance will
- # raise a ValueError. In this case, display '???'.
- try:
- vetted_tolerance = '{:.1e}'.format(self.tolerance)
- except ValueError:
- vetted_tolerance = '???'
-
- if sys.version_info[0] == 2:
- return '{0} +- {1}'.format(self.expected, vetted_tolerance)
- else:
- return u'{0} \u00b1 {1}'.format(self.expected, vetted_tolerance)
-
- def __eq__(self, actual):
- """
- Return true if the given value is equal to the expected value within
- the pre-specified tolerance.
- """
-
- # Short-circuit exact equality.
- if actual == self.expected:
- return True
-
- # Allow the user to control whether NaNs are considered equal to each
- # other or not. The abs() calls are for compatibility with complex
- # numbers.
- if math.isnan(abs(self.expected)):
- return self.nan_ok and math.isnan(abs(actual))
-
- # Infinity shouldn't be approximately equal to anything but itself, but
- # if there's a relative tolerance, it will be infinite and infinity
- # will seem approximately equal to everything. The equal-to-itself
- # case would have been short circuited above, so here we can just
- # return false if the expected value is infinite. The abs() call is
- # for compatibility with complex numbers.
- if math.isinf(abs(self.expected)):
- return False
-
- # Return true if the two numbers are within the tolerance.
- return abs(self.expected - actual) <= self.tolerance
-
- __hash__ = None
-
- @property
- def tolerance(self):
- """
- Return the tolerance for the comparison. This could be either an
- absolute tolerance or a relative tolerance, depending on what the user
- specified or which would be larger.
- """
- def set_default(x, default):
- return x if x is not None else default
-
- # Figure out what the absolute tolerance should be. ``self.abs`` is
- # either None or a value specified by the user.
- absolute_tolerance = set_default(self.abs, 1e-12)
-
- if absolute_tolerance < 0:
- raise ValueError("absolute tolerance can't be negative: {0}".format(absolute_tolerance))
- if math.isnan(absolute_tolerance):
- raise ValueError("absolute tolerance can't be NaN.")
-
- # If the user specified an absolute tolerance but not a relative one,
- # just return the absolute tolerance.
- if self.rel is None:
- if self.abs is not None:
- return absolute_tolerance
-
- # Figure out what the relative tolerance should be. ``self.rel`` is
- # either None or a value specified by the user. This is done after
- # we've made sure the user didn't ask for an absolute tolerance only,
- # because we don't want to raise errors about the relative tolerance if
- # we aren't even going to use it.
- relative_tolerance = set_default(self.rel, 1e-6) * abs(self.expected)
-
- if relative_tolerance < 0:
- raise ValueError("relative tolerance can't be negative: {0}".format(absolute_tolerance))
- if math.isnan(relative_tolerance):
- raise ValueError("relative tolerance can't be NaN.")
-
- # Return the larger of the relative and absolute tolerances.
- return max(relative_tolerance, absolute_tolerance)
-
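The tolerance selection above reduces to taking the larger of the relative and absolute bounds. A small worked sketch using the documented defaults::

    # sketch of the tolerance rule implemented above, with default bounds
    expected = 1.0
    rel, abs_tol = 1e-6, 1e-12
    tolerance = max(rel * abs(expected), abs_tol)
    assert tolerance == 1e-6  # the relative bound dominates here
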
-
-def approx(expected, rel=None, abs=None, nan_ok=False):
- """
- Assert that two numbers (or two sets of numbers) are equal to each other
- within some tolerance.
-
- Due to the `intricacies of floating-point arithmetic`__, numbers that we
- would intuitively expect to be equal are not always so::
-
- >>> 0.1 + 0.2 == 0.3
- False
-
- __ https://docs.python.org/3/tutorial/floatingpoint.html
-
- This problem is commonly encountered when writing tests, e.g. when making
- sure that floating-point values are what you expect them to be. One way to
- deal with this problem is to assert that two floating-point numbers are
- equal to within some appropriate tolerance::
-
- >>> abs((0.1 + 0.2) - 0.3) < 1e-6
- True
-
- However, comparisons like this are tedious to write and difficult to
- understand. Furthermore, absolute comparisons like the one above are
- usually discouraged because there's no tolerance that works well for all
- situations. ``1e-6`` is good for numbers around ``1``, but too small for
- very big numbers and too big for very small ones. It's better to express
- the tolerance as a fraction of the expected value, but relative comparisons
- like that are even more difficult to write correctly and concisely.
-
- The ``approx`` class performs floating-point comparisons using a syntax
- that's as intuitive as possible::
-
- >>> from pytest import approx
- >>> 0.1 + 0.2 == approx(0.3)
- True
-
- The same syntax also works for sequences of numbers::
-
- >>> (0.1 + 0.2, 0.2 + 0.4) == approx((0.3, 0.6))
- True
-
- Dictionary *values*::
-
- >>> {'a': 0.1 + 0.2, 'b': 0.2 + 0.4} == approx({'a': 0.3, 'b': 0.6})
- True
-
- And ``numpy`` arrays::
-
- >>> import numpy as np # doctest: +SKIP
- >>> np.array([0.1, 0.2]) + np.array([0.2, 0.4]) == approx(np.array([0.3, 0.6])) # doctest: +SKIP
- True
-
- By default, ``approx`` considers numbers within a relative tolerance of
- ``1e-6`` (i.e. one part in a million) of its expected value to be equal.
- This treatment would lead to surprising results if the expected value was
- ``0.0``, because nothing but ``0.0`` itself is relatively close to ``0.0``.
- To handle this case less surprisingly, ``approx`` also considers numbers
- within an absolute tolerance of ``1e-12`` of its expected value to be
- equal. Infinity and NaN are special cases. Infinity is only considered
- equal to itself, regardless of the relative tolerance. NaN is not
- considered equal to anything by default, but you can make it be equal to
- itself by setting the ``nan_ok`` argument to True. (This is meant to
- facilitate comparing arrays that use NaN to mean "no data".)
-
- Both the relative and absolute tolerances can be changed by passing
- arguments to the ``approx`` constructor::
-
- >>> 1.0001 == approx(1)
- False
- >>> 1.0001 == approx(1, rel=1e-3)
- True
- >>> 1.0001 == approx(1, abs=1e-3)
- True
-
- If you specify ``abs`` but not ``rel``, the comparison will not consider
- the relative tolerance at all. In other words, two numbers that are within
- the default relative tolerance of ``1e-6`` will still be considered unequal
- if they exceed the specified absolute tolerance. If you specify both
- ``abs`` and ``rel``, the numbers will be considered equal if either
- tolerance is met::
-
- >>> 1 + 1e-8 == approx(1)
- True
- >>> 1 + 1e-8 == approx(1, abs=1e-12)
- False
- >>> 1 + 1e-8 == approx(1, rel=1e-6, abs=1e-12)
- True
-
- If you're thinking about using ``approx``, then you might want to know how
- it compares to other good ways of comparing floating-point numbers. All of
- these algorithms are based on relative and absolute tolerances and should
- agree for the most part, but they do have meaningful differences:
-
- - ``math.isclose(a, b, rel_tol=1e-9, abs_tol=0.0)``: True if the relative
- tolerance is met w.r.t. either ``a`` or ``b`` or if the absolute
- tolerance is met. Because the relative tolerance is calculated w.r.t.
- both ``a`` and ``b``, this test is symmetric (i.e. neither ``a`` nor
- ``b`` is a "reference value"). You have to specify an absolute tolerance
- if you want to compare to ``0.0`` because there is no tolerance by
- default. Only available in python>=3.5. `More information...`__
-
- __ https://docs.python.org/3/library/math.html#math.isclose
-
- - ``numpy.isclose(a, b, rtol=1e-5, atol=1e-8)``: True if the difference
- between ``a`` and ``b`` is less than the sum of the relative tolerance
- w.r.t. ``b`` and the absolute tolerance. Because the relative tolerance
- is only calculated w.r.t. ``b``, this test is asymmetric and you can
- think of ``b`` as the reference value. Support for comparing sequences
- is provided by ``numpy.allclose``. `More information...`__
-
- __ http://docs.scipy.org/doc/numpy-1.10.0/reference/generated/numpy.isclose.html
-
- - ``unittest.TestCase.assertAlmostEqual(a, b)``: True if ``a`` and ``b``
- are within an absolute tolerance of ``1e-7``. No relative tolerance is
- considered and the absolute tolerance cannot be changed, so this function
- is not appropriate for very large or very small numbers. Also, it's only
- available in subclasses of ``unittest.TestCase`` and it's ugly because it
- doesn't follow PEP8. `More information...`__
-
- __ https://docs.python.org/3/library/unittest.html#unittest.TestCase.assertAlmostEqual
-
- - ``a == pytest.approx(b, rel=1e-6, abs=1e-12)``: True if the relative
- tolerance is met w.r.t. ``b`` or if the absolute tolerance is met.
- Because the relative tolerance is only calculated w.r.t. ``b``, this test
- is asymmetric and you can think of ``b`` as the reference value. In the
- special case that you explicitly specify an absolute tolerance but not a
- relative tolerance, only the absolute tolerance is considered.
-
- .. warning::
-
- .. versionchanged:: 3.2
-
- In order to avoid inconsistent behavior, ``TypeError`` is
- raised for ``>``, ``>=``, ``<`` and ``<=`` comparisons.
- The example below illustrates the problem::
-
- assert approx(0.1) > 0.1 + 1e-10 # calls approx(0.1).__gt__(0.1 + 1e-10)
- assert 0.1 + 1e-10 > approx(0.1) # calls approx(0.1).__lt__(0.1 + 1e-10)
-
- In the second example one expects ``approx(0.1).__le__(0.1 + 1e-10)``
- to be called. But instead, ``approx(0.1).__lt__(0.1 + 1e-10)`` is used
- for the comparison. This is because the call hierarchy of rich comparisons
- follows a fixed behavior. `More information...`__
-
- __ https://docs.python.org/3/reference/datamodel.html#object.__ge__
- """
-
- if sys.version_info >= (3, 3):
- from collections.abc import Mapping, Sequence
- else:
- from collections import Mapping, Sequence
- from _pytest.compat import STRING_TYPES as String
-
- # Delegate the comparison to a class that knows how to deal with the type
- # of the expected value (e.g. int, float, list, dict, numpy.array, etc).
- #
- # This architecture is really driven by the need to support numpy arrays.
- # The only way to override `==` for arrays without requiring that approx be
- # the left operand is to inherit the approx object from `numpy.ndarray`.
- # But that can't be a general solution, because it requires (1) numpy to be
- # installed and (2) the expected value to be a numpy array. So the general
- # solution is to delegate each type of expected value to a different class.
- #
- # This has the advantage that it made it easy to support mapping types
- # (i.e. dict). The old code accepted mapping types, but would only compare
- # their keys, which is probably not what most people would expect.
-
- if _is_numpy_array(expected):
- cls = ApproxNumpy
- elif isinstance(expected, Mapping):
- cls = ApproxMapping
- elif isinstance(expected, Sequence) and not isinstance(expected, String):
- cls = ApproxSequence
- else:
- cls = ApproxScalar
-
- return cls(expected, rel, abs, nan_ok)
-
-
-def _is_numpy_array(obj):
- """
- Return true if the given object is a numpy array. Make a special effort to
- avoid importing numpy unless it's really necessary.
- """
- import inspect
-
- for cls in inspect.getmro(type(obj)):
- if cls.__module__ == 'numpy':
- try:
- import numpy as np
- return isinstance(obj, np.ndarray)
- except ImportError:
- pass
-
- return False
-
-
-# builtin pytest.raises helper
-
-def raises(expected_exception, *args, **kwargs):
- """
- Assert that a code block/function call raises ``expected_exception``
- and raise a failure exception otherwise.
-
- This helper produces an ``ExceptionInfo()`` object (see below).
-
- If using Python 2.5 or above, you may use this function as a
- context manager::
-
- >>> with raises(ZeroDivisionError):
- ... 1/0
-
- .. versionchanged:: 2.10
-
- In the context manager form you may use the keyword argument
- ``message`` to specify a custom failure message::
-
- >>> with raises(ZeroDivisionError, message="Expecting ZeroDivisionError"):
- ... pass
- Traceback (most recent call last):
- ...
- Failed: Expecting ZeroDivisionError
-
- .. note::
-
- When using ``pytest.raises`` as a context manager, it's worthwhile to
- note that normal context manager rules apply and that the exception
- raised *must* be the final line in the scope of the context manager.
- Lines of code after that, within the scope of the context manager,
- will not be executed. For example::
-
- >>> value = 15
- >>> with raises(ValueError) as exc_info:
- ... if value > 10:
- ... raise ValueError("value must be <= 10")
- ... assert exc_info.type == ValueError # this will not execute
-
- Instead, the following approach must be taken (note the difference in
- scope)::
-
- >>> with raises(ValueError) as exc_info:
- ... if value > 10:
- ... raise ValueError("value must be <= 10")
- ...
- >>> assert exc_info.type == ValueError
-
-
- Since version ``3.1`` you can use the keyword argument ``match`` to assert that the
- exception matches a text or regex::
-
- >>> with raises(ValueError, match='must be 0 or None'):
- ... raise ValueError("value must be 0 or None")
-
- >>> with raises(ValueError, match=r'must be \d+$'):
- ... raise ValueError("value must be 42")
-
- **Legacy forms**
-
- The forms below are fully supported but are discouraged for new code because the
- context manager form is regarded as more readable and less error-prone.
-
- It is possible to specify a callable by passing a to-be-called lambda::
-
- >>> raises(ZeroDivisionError, lambda: 1/0)
- <ExceptionInfo ...>
-
- or you can specify an arbitrary callable with arguments::
-
- >>> def f(x): return 1/x
- ...
- >>> raises(ZeroDivisionError, f, 0)
- <ExceptionInfo ...>
- >>> raises(ZeroDivisionError, f, x=0)
- <ExceptionInfo ...>
-
- It is also possible to pass a string to be evaluated at runtime::
-
- >>> raises(ZeroDivisionError, "f(0)")
- <ExceptionInfo ...>
-
- The string will be evaluated using the same ``locals()`` and ``globals()``
- at the moment of the ``raises`` call.
-
- .. autoclass:: _pytest._code.ExceptionInfo
- :members:
-
- .. note::
- Similar to caught exception objects in Python, explicitly clearing
- local references to returned ``ExceptionInfo`` objects can
- help the Python interpreter speed up its garbage collection.
-
- Clearing those references breaks a reference cycle
- (``ExceptionInfo`` --> caught exception --> frame stack raising
- the exception --> current frame stack --> local variables -->
- ``ExceptionInfo``) which makes Python keep all objects referenced
- from that cycle (including all local variables in the current
- frame) alive until the next cyclic garbage collection run. See the
- official Python ``try`` statement documentation for more detailed
- information.
-
- """
- __tracebackhide__ = True
- msg = ("exceptions must be old-style classes or"
- " derived from BaseException, not %s")
- if isinstance(expected_exception, tuple):
- for exc in expected_exception:
- if not isclass(exc):
- raise TypeError(msg % type(exc))
- elif not isclass(expected_exception):
- raise TypeError(msg % type(expected_exception))
-
- message = "DID NOT RAISE {0}".format(expected_exception)
- match_expr = None
-
- if not args:
- if "message" in kwargs:
- message = kwargs.pop("message")
- if "match" in kwargs:
- match_expr = kwargs.pop("match")
- message += " matching '{0}'".format(match_expr)
- return RaisesContext(expected_exception, message, match_expr)
- elif isinstance(args[0], str):
- code, = args
- assert isinstance(code, str)
- frame = sys._getframe(1)
- loc = frame.f_locals.copy()
- loc.update(kwargs)
- # print "raises frame scope: %r" % frame.f_locals
- try:
- code = _pytest._code.Source(code).compile()
- py.builtin.exec_(code, frame.f_globals, loc)
- # XXX didn't f_globals == f_locals mean something special?
- # this is destroyed here ...
- except expected_exception:
- return _pytest._code.ExceptionInfo()
- else:
- func = args[0]
- try:
- func(*args[1:], **kwargs)
- except expected_exception:
- return _pytest._code.ExceptionInfo()
- fail(message)
-
-
-raises.Exception = fail.Exception
-
-
-class RaisesContext(object):
- def __init__(self, expected_exception, message, match_expr):
- self.expected_exception = expected_exception
- self.message = message
- self.match_expr = match_expr
- self.excinfo = None
-
- def __enter__(self):
- self.excinfo = object.__new__(_pytest._code.ExceptionInfo)
- return self.excinfo
-
- def __exit__(self, *tp):
- __tracebackhide__ = True
- if tp[0] is None:
- fail(self.message)
- if sys.version_info < (2, 7):
- # py26: on __exit__() exc_value often does not contain the
- # exception value.
- # http://bugs.python.org/issue7853
- if not isinstance(tp[1], BaseException):
- exc_type, value, traceback = tp
- tp = exc_type, exc_type(value), traceback
- self.excinfo.__init__(tp)
- suppress_exception = issubclass(self.excinfo.type, self.expected_exception)
- if sys.version_info[0] == 2 and suppress_exception:
- sys.exc_clear()
- if self.match_expr:
- self.excinfo.match(self.match_expr)
- return suppress_exception
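Putting the pieces together, the context-manager path of ``raises`` is typically exercised as below (a usage sketch consistent with the docstring above; the test name is illustrative)::

    import pytest

    def test_division_by_zero():
        with pytest.raises(ZeroDivisionError) as exc_info:
            1 / 0
        # the ExceptionInfo is populated by RaisesContext.__exit__
        assert exc_info.type is ZeroDivisionError
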
diff --git a/lib/spack/external/pytest-fallback/_pytest/recwarn.py b/lib/spack/external/pytest-fallback/_pytest/recwarn.py
deleted file mode 100644
index c9fa872c07..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/recwarn.py
+++ /dev/null
@@ -1,205 +0,0 @@
-""" recording warnings during test function execution. """
-from __future__ import absolute_import, division, print_function
-
-import inspect
-
-import _pytest._code
-import py
-import sys
-import warnings
-
-from _pytest.fixtures import yield_fixture
-from _pytest.outcomes import fail
-
-
-@yield_fixture
-def recwarn():
- """Return a WarningsRecorder instance that provides these methods:
-
- * ``pop(category=Warning)``: return the first recorded warning matching the category.
- * ``clear()``: clear the list of warnings
-
- See http://docs.python.org/library/warnings.html for information
- on warning categories.
- """
- wrec = WarningsRecorder()
- with wrec:
- warnings.simplefilter('default')
- yield wrec
-
-
-def deprecated_call(func=None, *args, **kwargs):
- """context manager that can be used to ensure a block of code triggers a
- ``DeprecationWarning`` or ``PendingDeprecationWarning``::
-
- >>> import warnings
- >>> def api_call_v2():
- ... warnings.warn('use v3 of this api', DeprecationWarning)
- ... return 200
-
- >>> with deprecated_call():
- ... assert api_call_v2() == 200
-
- ``deprecated_call`` can also be used by passing a function and ``*args`` and ``*kwargs``,
- in which case it will ensure calling ``func(*args, **kwargs)`` produces one of the warnings
- types above.
- """
- if not func:
- return _DeprecatedCallContext()
- else:
- __tracebackhide__ = True
- with _DeprecatedCallContext():
- return func(*args, **kwargs)
-
-
-class _DeprecatedCallContext(object):
- """Implements the logic to capture deprecation warnings as a context manager."""
-
- def __enter__(self):
- self._captured_categories = []
- self._old_warn = warnings.warn
- self._old_warn_explicit = warnings.warn_explicit
- warnings.warn_explicit = self._warn_explicit
- warnings.warn = self._warn
-
- def _warn_explicit(self, message, category, *args, **kwargs):
- self._captured_categories.append(category)
-
- def _warn(self, message, category=None, *args, **kwargs):
- if isinstance(message, Warning):
- self._captured_categories.append(message.__class__)
- else:
- self._captured_categories.append(category)
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- warnings.warn_explicit = self._old_warn_explicit
- warnings.warn = self._old_warn
-
- if exc_type is None:
- deprecation_categories = (DeprecationWarning, PendingDeprecationWarning)
- if not any(issubclass(c, deprecation_categories) for c in self._captured_categories):
- __tracebackhide__ = True
- msg = "Did not produce DeprecationWarning or PendingDeprecationWarning"
- raise AssertionError(msg)
-
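Both forms of ``deprecated_call`` described above can be exercised against a hypothetical deprecated helper::

    import warnings
    import pytest

    def api_call_v2():  # hypothetical deprecated function
        warnings.warn("use v3 of this api", DeprecationWarning)
        return 200

    def test_context_manager_form():
        with pytest.deprecated_call():
            assert api_call_v2() == 200

    def test_direct_call_form():
        # deprecated_call(func, *args, **kwargs) returns func's result
        assert pytest.deprecated_call(api_call_v2) == 200
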
-
-def warns(expected_warning, *args, **kwargs):
- """Assert that code raises a particular class of warning.
-
- Specifically, the input ``expected_warning`` can be a warning class or
- tuple of warning classes, and the code must emit that warning
- (if a single class) or one of those warnings (if a tuple).
-
- This helper produces a list of ``warnings.WarningMessage`` objects,
- one for each warning raised.
-
- This function can be used as a context manager, or any of the other ways
- ``pytest.raises`` can be used::
-
- >>> with warns(RuntimeWarning):
- ... warnings.warn("my warning", RuntimeWarning)
- """
- wcheck = WarningsChecker(expected_warning)
- if not args:
- return wcheck
- elif isinstance(args[0], str):
- code, = args
- assert isinstance(code, str)
- frame = sys._getframe(1)
- loc = frame.f_locals.copy()
- loc.update(kwargs)
-
- with wcheck:
- code = _pytest._code.Source(code).compile()
- py.builtin.exec_(code, frame.f_globals, loc)
- else:
- func = args[0]
- with wcheck:
- return func(*args[1:], **kwargs)
-
-
-class WarningsRecorder(warnings.catch_warnings):
- """A context manager to record raised warnings.
-
- Adapted from `warnings.catch_warnings`.
- """
-
- def __init__(self):
- super(WarningsRecorder, self).__init__(record=True)
- self._entered = False
- self._list = []
-
- @property
- def list(self):
- """The list of recorded warnings."""
- return self._list
-
- def __getitem__(self, i):
- """Get a recorded warning by index."""
- return self._list[i]
-
- def __iter__(self):
- """Iterate through the recorded warnings."""
- return iter(self._list)
-
- def __len__(self):
- """The number of recorded warnings."""
- return len(self._list)
-
- def pop(self, cls=Warning):
- """Pop the first recorded warning, raise exception if not exists."""
- for i, w in enumerate(self._list):
- if issubclass(w.category, cls):
- return self._list.pop(i)
- __tracebackhide__ = True
- raise AssertionError("%r not found in warning list" % cls)
-
- def clear(self):
- """Clear the list of recorded warnings."""
- self._list[:] = []
-
- def __enter__(self):
- if self._entered:
- __tracebackhide__ = True
- raise RuntimeError("Cannot enter %r twice" % self)
- self._list = super(WarningsRecorder, self).__enter__()
- warnings.simplefilter('always')
- return self
-
- def __exit__(self, *exc_info):
- if not self._entered:
- __tracebackhide__ = True
- raise RuntimeError("Cannot exit %r without entering first" % self)
- super(WarningsRecorder, self).__exit__(*exc_info)
-
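A short usage sketch of the ``recwarn`` fixture backed by this recorder (test name and warning text are illustrative)::

    import warnings

    def test_records_user_warning(recwarn):
        warnings.warn("this api is old", UserWarning)
        assert len(recwarn) == 1
        w = recwarn.pop(UserWarning)  # first recorded UserWarning
        assert "old" in str(w.message)
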
-
-class WarningsChecker(WarningsRecorder):
- def __init__(self, expected_warning=None):
- super(WarningsChecker, self).__init__()
-
- msg = ("exceptions must be old-style classes or "
- "derived from Warning, not %s")
- if isinstance(expected_warning, tuple):
- for exc in expected_warning:
- if not inspect.isclass(exc):
- raise TypeError(msg % type(exc))
- elif inspect.isclass(expected_warning):
- expected_warning = (expected_warning,)
- elif expected_warning is not None:
- raise TypeError(msg % type(expected_warning))
-
- self.expected_warning = expected_warning
-
- def __exit__(self, *exc_info):
- super(WarningsChecker, self).__exit__(*exc_info)
-
- # only check if we're not currently handling an exception
- if all(a is None for a in exc_info):
- if self.expected_warning is not None:
- if not any(issubclass(r.category, self.expected_warning)
- for r in self):
- __tracebackhide__ = True
- fail("DID NOT WARN. No warnings of type {0} was emitted. "
- "The list of emitted warnings is: {1}.".format(
- self.expected_warning,
- [each.message for each in self]))
diff --git a/lib/spack/external/pytest-fallback/_pytest/resultlog.py b/lib/spack/external/pytest-fallback/_pytest/resultlog.py
deleted file mode 100644
index 9f9c2d1f65..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/resultlog.py
+++ /dev/null
@@ -1,113 +0,0 @@
-""" log machine-parseable test session result information in a plain
-text file.
-"""
-from __future__ import absolute_import, division, print_function
-
-import py
-import os
-
-
-def pytest_addoption(parser):
- group = parser.getgroup("terminal reporting", "resultlog plugin options")
- group.addoption('--resultlog', '--result-log', action="store",
- metavar="path", default=None,
- help="DEPRECATED path for machine-readable result log.")
-
-
-def pytest_configure(config):
- resultlog = config.option.resultlog
- # prevent opening resultlog on slave nodes (xdist)
- if resultlog and not hasattr(config, 'slaveinput'):
- dirname = os.path.dirname(os.path.abspath(resultlog))
- if not os.path.isdir(dirname):
- os.makedirs(dirname)
- logfile = open(resultlog, 'w', 1) # line buffered
- config._resultlog = ResultLog(config, logfile)
- config.pluginmanager.register(config._resultlog)
-
- from _pytest.deprecated import RESULT_LOG
- config.warn('C1', RESULT_LOG)
-
-
-def pytest_unconfigure(config):
- resultlog = getattr(config, '_resultlog', None)
- if resultlog:
- resultlog.logfile.close()
- del config._resultlog
- config.pluginmanager.unregister(resultlog)
-
-
-def generic_path(item):
- chain = item.listchain()
- gpath = [chain[0].name]
- fspath = chain[0].fspath
- fspart = False
- for node in chain[1:]:
- newfspath = node.fspath
- if newfspath == fspath:
- if fspart:
- gpath.append(':')
- fspart = False
- else:
- gpath.append('.')
- else:
- gpath.append('/')
- fspart = True
- name = node.name
- if name[0] in '([':
- gpath.pop()
- gpath.append(name)
- fspath = newfspath
- return ''.join(gpath)
-
-
-class ResultLog(object):
- def __init__(self, config, logfile):
- self.config = config
- self.logfile = logfile # preferably line buffered
-
- def write_log_entry(self, testpath, lettercode, longrepr):
- print("%s %s" % (lettercode, testpath), file=self.logfile)
- for line in longrepr.splitlines():
- print(" %s" % line, file=self.logfile)
-
- def log_outcome(self, report, lettercode, longrepr):
- testpath = getattr(report, 'nodeid', None)
- if testpath is None:
- testpath = report.fspath
- self.write_log_entry(testpath, lettercode, longrepr)
-
- def pytest_runtest_logreport(self, report):
- if report.when != "call" and report.passed:
- return
- res = self.config.hook.pytest_report_teststatus(report=report)
- code = res[1]
- if code == 'x':
- longrepr = str(report.longrepr)
- elif code == 'X':
- longrepr = ''
- elif report.passed:
- longrepr = ""
- elif report.failed:
- longrepr = str(report.longrepr)
- elif report.skipped:
- longrepr = str(report.longrepr[2])
- self.log_outcome(report, code, longrepr)
-
- def pytest_collectreport(self, report):
- if not report.passed:
- if report.failed:
- code = "F"
- longrepr = str(report.longrepr)
- else:
- assert report.skipped
- code = "S"
- longrepr = "%s:%d: %s" % report.longrepr
- self.log_outcome(report, code, longrepr)
-
- def pytest_internalerror(self, excrepr):
- reprcrash = getattr(excrepr, 'reprcrash', None)
- path = getattr(reprcrash, "path", None)
- if path is None:
- path = "cwd:%s" % py.path.local()
- self.write_log_entry(path, '!', str(excrepr))
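Given ``write_log_entry`` above, the result log is a plain-text file with one letter-code line per test and any ``longrepr`` indented by a single space; a hypothetical excerpt::

    . test_ok.py::test_pass
    F test_fail.py::test_divide
     def test_divide():
     >   assert 1 / 0
     E   ZeroDivisionError: division by zero
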
diff --git a/lib/spack/external/pytest-fallback/_pytest/runner.py b/lib/spack/external/pytest-fallback/_pytest/runner.py
deleted file mode 100644
index b643fa3c91..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/runner.py
+++ /dev/null
@@ -1,508 +0,0 @@
-""" basic collect and runtest protocol implementations """
-from __future__ import absolute_import, division, print_function
-
-import bdb
-import os
-import sys
-from time import time
-
-import py
-from _pytest.compat import _PY2
-from _pytest._code.code import TerminalRepr, ExceptionInfo
-from _pytest.outcomes import skip, Skipped, TEST_OUTCOME
-
-#
-# pytest plugin hooks
-
-
-def pytest_addoption(parser):
- group = parser.getgroup("terminal reporting", "reporting", after="general")
- group.addoption('--durations',
- action="store", type=int, default=None, metavar="N",
- help="show N slowest setup/test durations (N=0 for all)."),
-
-
-def pytest_terminal_summary(terminalreporter):
- durations = terminalreporter.config.option.durations
- if durations is None:
- return
- tr = terminalreporter
- dlist = []
- for replist in tr.stats.values():
- for rep in replist:
- if hasattr(rep, 'duration'):
- dlist.append(rep)
- if not dlist:
- return
- dlist.sort(key=lambda x: x.duration)
- dlist.reverse()
- if not durations:
- tr.write_sep("=", "slowest test durations")
- else:
- tr.write_sep("=", "slowest %s test durations" % durations)
- dlist = dlist[:durations]
-
- for rep in dlist:
- nodeid = rep.nodeid.replace("::()::", "::")
- tr.write_line("%02.2fs %-8s %s" %
- (rep.duration, rep.when, nodeid))
-
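The ``--durations`` report above emits one ``"%02.2fs %-8s %s"`` line per slow phase, sorted longest first; a hypothetical excerpt::

    ======================= slowest 3 test durations =======================
    1.21s call     test_io.py::test_large_read
    0.35s setup    test_io.py::test_large_read
    0.02s teardown test_io.py::test_large_read
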
-
-def pytest_sessionstart(session):
- session._setupstate = SetupState()
-
-
-def pytest_sessionfinish(session):
- session._setupstate.teardown_all()
-
-
-def pytest_runtest_protocol(item, nextitem):
- item.ihook.pytest_runtest_logstart(
- nodeid=item.nodeid, location=item.location,
- )
- runtestprotocol(item, nextitem=nextitem)
- return True
-
-
-def runtestprotocol(item, log=True, nextitem=None):
- hasrequest = hasattr(item, "_request")
- if hasrequest and not item._request:
- item._initrequest()
- rep = call_and_report(item, "setup", log)
- reports = [rep]
- if rep.passed:
- if item.config.option.setupshow:
- show_test_item(item)
- if not item.config.option.setuponly:
- reports.append(call_and_report(item, "call", log))
- reports.append(call_and_report(item, "teardown", log,
- nextitem=nextitem))
- # after all teardown hooks have been called
- # want funcargs and request info to go away
- if hasrequest:
- item._request = False
- item.funcargs = None
- return reports
-
-
-def show_test_item(item):
- """Show test function, parameters and the fixtures of the test item."""
- tw = item.config.get_terminal_writer()
- tw.line()
- tw.write(' ' * 8)
- tw.write(item._nodeid)
- used_fixtures = sorted(item._fixtureinfo.name2fixturedefs.keys())
- if used_fixtures:
- tw.write(' (fixtures used: {0})'.format(', '.join(used_fixtures)))
-
-
-def pytest_runtest_setup(item):
- _update_current_test_var(item, 'setup')
- item.session._setupstate.prepare(item)
-
-
-def pytest_runtest_call(item):
- _update_current_test_var(item, 'call')
- try:
- item.runtest()
- except Exception:
- # Store trace info to allow postmortem debugging
- type, value, tb = sys.exc_info()
- tb = tb.tb_next # Skip *this* frame
- sys.last_type = type
- sys.last_value = value
- sys.last_traceback = tb
- del tb # Get rid of it in this namespace
- raise
-
-
-def pytest_runtest_teardown(item, nextitem):
- _update_current_test_var(item, 'teardown')
- item.session._setupstate.teardown_exact(item, nextitem)
- _update_current_test_var(item, None)
-
-
-def _update_current_test_var(item, when):
- """
- Update PYTEST_CURRENT_TEST to reflect the current item and stage.
-
- If ``when`` is None, delete PYTEST_CURRENT_TEST from the environment.
- """
- var_name = 'PYTEST_CURRENT_TEST'
- if when:
- value = '{0} ({1})'.format(item.nodeid, when)
- if _PY2:
- # python 2 doesn't like null bytes on environment variables (see #2644)
- value = value.replace('\x00', '(null)')
- os.environ[var_name] = value
- else:
- os.environ.pop(var_name)
-
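Code under test (or a wrapping tool) can observe the variable maintained above; a minimal sketch::

    import os

    def test_reports_current_phase():
        # during the "call" phase the value is "<nodeid> (call)"
        assert os.environ["PYTEST_CURRENT_TEST"].endswith("(call)")
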
-
-def pytest_report_teststatus(report):
- if report.when in ("setup", "teardown"):
- if report.failed:
- # category, shortletter, verbose-word
- return "error", "E", "ERROR"
- elif report.skipped:
- return "skipped", "s", "SKIPPED"
- else:
- return "", "", ""
-
-
-#
-# Implementation
-
-def call_and_report(item, when, log=True, **kwds):
- call = call_runtest_hook(item, when, **kwds)
- hook = item.ihook
- report = hook.pytest_runtest_makereport(item=item, call=call)
- if log:
- hook.pytest_runtest_logreport(report=report)
- if check_interactive_exception(call, report):
- hook.pytest_exception_interact(node=item, call=call, report=report)
- return report
-
-
-def check_interactive_exception(call, report):
- return call.excinfo and not (
- hasattr(report, "wasxfail") or
- call.excinfo.errisinstance(skip.Exception) or
- call.excinfo.errisinstance(bdb.BdbQuit))
-
-
-def call_runtest_hook(item, when, **kwds):
- hookname = "pytest_runtest_" + when
- ihook = getattr(item.ihook, hookname)
- return CallInfo(lambda: ihook(item=item, **kwds), when=when)
-
-
-class CallInfo:
- """ Result/Exception info a function invocation. """
- #: None or ExceptionInfo object.
- excinfo = None
-
- def __init__(self, func, when):
- #: context of invocation: one of "setup", "call",
- #: "teardown", "memocollect"
- self.when = when
- self.start = time()
- try:
- self.result = func()
- except KeyboardInterrupt:
- self.stop = time()
- raise
- except: # noqa
- self.excinfo = ExceptionInfo()
- self.stop = time()
-
- def __repr__(self):
- if self.excinfo:
- status = "exception: %s" % str(self.excinfo.value)
- else:
- status = "result: %r" % (self.result,)
- return "<CallInfo when=%r %s>" % (self.when, status)
-
-
-def getslaveinfoline(node):
- try:
- return node._slaveinfocache
- except AttributeError:
- d = node.slaveinfo
- ver = "%s.%s.%s" % d['version_info'][:3]
- node._slaveinfocache = s = "[%s] %s -- Python %s %s" % (
- d['id'], d['sysplatform'], ver, d['executable'])
- return s
-
-
-class BaseReport(object):
-
- def __init__(self, **kw):
- self.__dict__.update(kw)
-
- def toterminal(self, out):
- if hasattr(self, 'node'):
- out.line(getslaveinfoline(self.node))
-
- longrepr = self.longrepr
- if longrepr is None:
- return
-
- if hasattr(longrepr, 'toterminal'):
- longrepr.toterminal(out)
- else:
- try:
- out.line(longrepr)
- except UnicodeEncodeError:
- out.line("<unprintable longrepr>")
-
- def get_sections(self, prefix):
- for name, content in self.sections:
- if name.startswith(prefix):
- yield prefix, content
-
- @property
- def longreprtext(self):
- """
- Read-only property that returns the full string representation
- of ``longrepr``.
-
- .. versionadded:: 3.0
- """
- tw = py.io.TerminalWriter(stringio=True)
- tw.hasmarkup = False
- self.toterminal(tw)
- exc = tw.stringio.getvalue()
- return exc.strip()
-
- @property
- def capstdout(self):
- """Return captured text from stdout, if capturing is enabled
-
- .. versionadded:: 3.0
- """
- return ''.join(content for (prefix, content) in self.get_sections('Captured stdout'))
-
- @property
- def capstderr(self):
- """Return captured text from stderr, if capturing is enabled
-
- .. versionadded:: 3.0
- """
- return ''.join(content for (prefix, content) in self.get_sections('Captured stderr'))
-
- passed = property(lambda x: x.outcome == "passed")
- failed = property(lambda x: x.outcome == "failed")
- skipped = property(lambda x: x.outcome == "skipped")
-
- @property
- def fspath(self):
- return self.nodeid.split("::")[0]
-
-
-def pytest_runtest_makereport(item, call):
- when = call.when
- duration = call.stop - call.start
- keywords = dict([(x, 1) for x in item.keywords])
- excinfo = call.excinfo
- sections = []
- if not call.excinfo:
- outcome = "passed"
- longrepr = None
- else:
- if not isinstance(excinfo, ExceptionInfo):
- outcome = "failed"
- longrepr = excinfo
- elif excinfo.errisinstance(skip.Exception):
- outcome = "skipped"
- r = excinfo._getreprcrash()
- longrepr = (str(r.path), r.lineno, r.message)
- else:
- outcome = "failed"
- if call.when == "call":
- longrepr = item.repr_failure(excinfo)
- else: # exception in setup or teardown
- longrepr = item._repr_failure_py(excinfo,
- style=item.config.option.tbstyle)
- for rwhen, key, content in item._report_sections:
- sections.append(("Captured %s %s" % (key, rwhen), content))
- return TestReport(item.nodeid, item.location,
- keywords, outcome, longrepr, when,
- sections, duration)
-
-
-class TestReport(BaseReport):
- """ Basic test report object (also used for setup and teardown calls if
- they fail).
- """
-
- def __init__(self, nodeid, location, keywords, outcome,
- longrepr, when, sections=(), duration=0, **extra):
- #: normalized collection node id
- self.nodeid = nodeid
-
- #: a (filesystempath, lineno, domaininfo) tuple indicating the
- #: actual location of a test item - it might be different from the
- #: collected one e.g. if a method is inherited from a different module.
- self.location = location
-
- #: a name -> value dictionary containing all keywords and
- #: markers associated with a test invocation.
- self.keywords = keywords
-
- #: test outcome, always one of "passed", "failed", "skipped".
- self.outcome = outcome
-
- #: None or a failure representation.
- self.longrepr = longrepr
-
- #: one of 'setup', 'call', 'teardown' to indicate runtest phase.
- self.when = when
-
- #: list of pairs ``(str, str)`` of extra information which needs to
- #: be marshallable. Used by pytest to add captured text
- #: from ``stdout`` and ``stderr``, but may be used by other plugins
- #: to add arbitrary information to reports.
- self.sections = list(sections)
-
- #: time it took to run just the test
- self.duration = duration
-
- self.__dict__.update(extra)
-
- def __repr__(self):
- return "<TestReport %r when=%r outcome=%r>" % (
- self.nodeid, self.when, self.outcome)
-
-
-class TeardownErrorReport(BaseReport):
- outcome = "failed"
- when = "teardown"
-
- def __init__(self, longrepr, **extra):
- self.longrepr = longrepr
- self.sections = []
- self.__dict__.update(extra)
-
-
-def pytest_make_collect_report(collector):
- call = CallInfo(
- lambda: list(collector.collect()),
- 'collect')
- longrepr = None
- if not call.excinfo:
- outcome = "passed"
- else:
- from _pytest import nose
- skip_exceptions = (Skipped,) + nose.get_skip_exceptions()
- if call.excinfo.errisinstance(skip_exceptions):
- outcome = "skipped"
- r = collector._repr_failure_py(call.excinfo, "line").reprcrash
- longrepr = (str(r.path), r.lineno, r.message)
- else:
- outcome = "failed"
- errorinfo = collector.repr_failure(call.excinfo)
- if not hasattr(errorinfo, "toterminal"):
- errorinfo = CollectErrorRepr(errorinfo)
- longrepr = errorinfo
- rep = CollectReport(collector.nodeid, outcome, longrepr,
- getattr(call, 'result', None))
- rep.call = call # see collect_one_node
- return rep
-
-
-class CollectReport(BaseReport):
- def __init__(self, nodeid, outcome, longrepr, result,
- sections=(), **extra):
- self.nodeid = nodeid
- self.outcome = outcome
- self.longrepr = longrepr
- self.result = result or []
- self.sections = list(sections)
- self.__dict__.update(extra)
-
- @property
- def location(self):
- return (self.fspath, None, self.fspath)
-
- def __repr__(self):
- return "<CollectReport %r lenresult=%s outcome=%r>" % (
- self.nodeid, len(self.result), self.outcome)
-
-
-class CollectErrorRepr(TerminalRepr):
- def __init__(self, msg):
- self.longrepr = msg
-
- def toterminal(self, out):
- out.line(self.longrepr, red=True)
-
-
-class SetupState(object):
- """ shared state for setting up/tearing down test items or collectors. """
-
- def __init__(self):
- self.stack = []
- self._finalizers = {}
-
- def addfinalizer(self, finalizer, colitem):
- """ attach a finalizer to the given colitem.
- If colitem is None, this will add a finalizer that
- is called at the end of teardown_all().
- """
- assert colitem and not isinstance(colitem, tuple)
- assert py.builtin.callable(finalizer)
- # assert colitem in self.stack # some unit tests don't setup stack :/
- self._finalizers.setdefault(colitem, []).append(finalizer)
-
- def _pop_and_teardown(self):
- colitem = self.stack.pop()
- self._teardown_with_finalization(colitem)
-
- def _callfinalizers(self, colitem):
- finalizers = self._finalizers.pop(colitem, None)
- exc = None
- while finalizers:
- fin = finalizers.pop()
- try:
- fin()
- except TEST_OUTCOME:
- # XXX Only first exception will be seen by user,
- # ideally all should be reported.
- if exc is None:
- exc = sys.exc_info()
- if exc:
- py.builtin._reraise(*exc)
-
- def _teardown_with_finalization(self, colitem):
- self._callfinalizers(colitem)
- if hasattr(colitem, "teardown"):
- colitem.teardown()
- for colitem in self._finalizers:
- assert colitem is None or colitem in self.stack \
- or isinstance(colitem, tuple)
-
- def teardown_all(self):
- while self.stack:
- self._pop_and_teardown()
- for key in list(self._finalizers):
- self._teardown_with_finalization(key)
- assert not self._finalizers
-
- def teardown_exact(self, item, nextitem):
- needed_collectors = nextitem and nextitem.listchain() or []
- self._teardown_towards(needed_collectors)
-
- def _teardown_towards(self, needed_collectors):
- while self.stack:
- if self.stack == needed_collectors[:len(self.stack)]:
- break
- self._pop_and_teardown()
-
- def prepare(self, colitem):
- """ setup objects along the collector chain to the test-method
- and teardown previously setup objects."""
- needed_collectors = colitem.listchain()
- self._teardown_towards(needed_collectors)
-
- # check if the last collection node has raised an error
- for col in self.stack:
- if hasattr(col, '_prepare_exc'):
- py.builtin._reraise(*col._prepare_exc)
- for col in needed_collectors[len(self.stack):]:
- self.stack.append(col)
- try:
- col.setup()
- except TEST_OUTCOME:
- col._prepare_exc = sys.exc_info()
- raise
-
-
-def collect_one_node(collector):
- ihook = collector.ihook
- ihook.pytest_collectstart(collector=collector)
- rep = ihook.pytest_make_collect_report(collector=collector)
- call = rep.__dict__.pop("call", None)
- if call and check_interactive_exception(call, rep):
- ihook.pytest_exception_interact(node=collector, call=call, report=rep)
- return rep
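
The SetupState class above is the core of pytest's setup/teardown ordering: finalizers are popped and run in reverse registration order, and the first exception is re-raised only after every finalizer has been attempted. A minimal standalone sketch of that pattern follows; the `Scope` class is a hypothetical stand-in for a collector or item, not pytest API.

```
# Minimal sketch of the LIFO finalizer pattern used by SetupState above.
class Scope:
    def __init__(self):
        self._finalizers = []

    def addfinalizer(self, fin):
        self._finalizers.append(fin)

    def teardown(self):
        # Run finalizers in reverse registration order; re-raise the first
        # exception only after all finalizers have run, mirroring
        # SetupState._callfinalizers.
        exc = None
        while self._finalizers:
            fin = self._finalizers.pop()
            try:
                fin()
            except Exception as e:
                if exc is None:
                    exc = e
        if exc is not None:
            raise exc

scope = Scope()
scope.addfinalizer(lambda: print("registered first, runs last"))
scope.addfinalizer(lambda: print("registered second, runs first"))
scope.teardown()
```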
diff --git a/lib/spack/external/pytest-fallback/_pytest/setuponly.py b/lib/spack/external/pytest-fallback/_pytest/setuponly.py
deleted file mode 100644
index 15e195ad5a..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/setuponly.py
+++ /dev/null
@@ -1,74 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-import pytest
-import sys
-
-
-def pytest_addoption(parser):
- group = parser.getgroup("debugconfig")
- group.addoption('--setuponly', '--setup-only', action="store_true",
- help="only setup fixtures, do not execute tests.")
- group.addoption('--setupshow', '--setup-show', action="store_true",
- help="show setup of fixtures while executing tests.")
-
-
-@pytest.hookimpl(hookwrapper=True)
-def pytest_fixture_setup(fixturedef, request):
- yield
- config = request.config
- if config.option.setupshow:
- if hasattr(request, 'param'):
- # Save the fixture parameter so ._show_fixture_action() can
- # display it now and during the teardown (in .finish()).
- if fixturedef.ids:
- if callable(fixturedef.ids):
- fixturedef.cached_param = fixturedef.ids(request.param)
- else:
- fixturedef.cached_param = fixturedef.ids[
- request.param_index]
- else:
- fixturedef.cached_param = request.param
- _show_fixture_action(fixturedef, 'SETUP')
-
-
-def pytest_fixture_post_finalizer(fixturedef):
- if hasattr(fixturedef, "cached_result"):
- config = fixturedef._fixturemanager.config
- if config.option.setupshow:
- _show_fixture_action(fixturedef, 'TEARDOWN')
- if hasattr(fixturedef, "cached_param"):
- del fixturedef.cached_param
-
-
-def _show_fixture_action(fixturedef, msg):
- config = fixturedef._fixturemanager.config
- capman = config.pluginmanager.getplugin('capturemanager')
- if capman:
- out, err = capman.suspendcapture()
-
- tw = config.get_terminal_writer()
- tw.line()
- tw.write(' ' * 2 * fixturedef.scopenum)
- tw.write('{step} {scope} {fixture}'.format(
- step=msg.ljust(8), # align the output to TEARDOWN
- scope=fixturedef.scope[0].upper(),
- fixture=fixturedef.argname))
-
- if msg == 'SETUP':
- deps = sorted(arg for arg in fixturedef.argnames if arg != 'request')
- if deps:
- tw.write(' (fixtures used: {0})'.format(', '.join(deps)))
-
- if hasattr(fixturedef, 'cached_param'):
- tw.write('[{0}]'.format(fixturedef.cached_param))
-
- if capman:
- capman.resumecapture()
- sys.stdout.write(out)
- sys.stderr.write(err)
-
-
-@pytest.hookimpl(tryfirst=True)
-def pytest_cmdline_main(config):
- if config.option.setuponly:
- config.option.setupshow = True
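
For illustration, a small test module (file and fixture names invented) run with the option added above shows what `_show_fixture_action` prints: one indented SETUP/TEARDOWN line per fixture instantiation, with the cached parameter in brackets for parametrized fixtures.

```
# test_show.py -- run as: pytest --setup-show test_show.py
import pytest

@pytest.fixture(params=["a", "b"])
def letter(request):
    return request.param

def test_letter(letter):
    assert letter in ("a", "b")
```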
diff --git a/lib/spack/external/pytest-fallback/_pytest/setupplan.py b/lib/spack/external/pytest-fallback/_pytest/setupplan.py
deleted file mode 100644
index e11bd40698..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/setupplan.py
+++ /dev/null
@@ -1,25 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-import pytest
-
-
-def pytest_addoption(parser):
- group = parser.getgroup("debugconfig")
- group.addoption('--setupplan', '--setup-plan', action="store_true",
- help="show what fixtures and tests would be executed but "
- "don't execute anything.")
-
-
-@pytest.hookimpl(tryfirst=True)
-def pytest_fixture_setup(fixturedef, request):
- # Will return a dummy fixture if the setuponly option is provided.
- if request.config.option.setupplan:
- fixturedef.cached_result = (None, None, None)
- return fixturedef.cached_result
-
-
-@pytest.hookimpl(tryfirst=True)
-def pytest_cmdline_main(config):
- if config.option.setupplan:
- config.option.setuponly = True
- config.option.setupshow = True
diff --git a/lib/spack/external/pytest-fallback/_pytest/skipping.py b/lib/spack/external/pytest-fallback/_pytest/skipping.py
deleted file mode 100644
index b92800d10b..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/skipping.py
+++ /dev/null
@@ -1,372 +0,0 @@
-""" support for skip/xfail functions and markers. """
-from __future__ import absolute_import, division, print_function
-
-import os
-import sys
-import traceback
-
-import py
-from _pytest.config import hookimpl
-from _pytest.mark import MarkInfo, MarkDecorator
-from _pytest.outcomes import fail, skip, xfail, TEST_OUTCOME
-
-
-def pytest_addoption(parser):
- group = parser.getgroup("general")
- group.addoption('--runxfail',
- action="store_true", dest="runxfail", default=False,
- help="run tests even if they are marked xfail")
-
- parser.addini("xfail_strict", "default for the strict parameter of xfail "
- "markers when not given explicitly (default: "
- "False)",
- default=False,
- type="bool")
-
-
-def pytest_configure(config):
- if config.option.runxfail:
- # yay a hack
- import pytest
- old = pytest.xfail
- config._cleanup.append(lambda: setattr(pytest, "xfail", old))
-
- def nop(*args, **kwargs):
- pass
-
- nop.Exception = xfail.Exception
- setattr(pytest, "xfail", nop)
-
- config.addinivalue_line("markers",
- "skip(reason=None): skip the given test function with an optional reason. "
- "Example: skip(reason=\"no way of currently testing this\") skips the "
- "test."
- )
- config.addinivalue_line("markers",
- "skipif(condition): skip the given test function if eval(condition) "
- "results in a True value. Evaluation happens within the "
- "module global context. Example: skipif('sys.platform == \"win32\"') "
- "skips the test if we are on the win32 platform. see "
- "http://pytest.org/latest/skipping.html"
- )
- config.addinivalue_line("markers",
- "xfail(condition, reason=None, run=True, raises=None, strict=False): "
- "mark the test function as an expected failure if eval(condition) "
- "has a True value. Optionally specify a reason for better reporting "
- "and run=False if you don't even want to execute the test function. "
- "If only specific exception(s) are expected, you can list them in "
- "raises, and if the test fails in other ways, it will be reported as "
- "a true failure. See http://pytest.org/latest/skipping.html"
- )
-
-
-class MarkEvaluator:
- def __init__(self, item, name):
- self.item = item
- self.name = name
-
- @property
- def holder(self):
- return self.item.keywords.get(self.name)
-
- def __bool__(self):
- return bool(self.holder)
- __nonzero__ = __bool__
-
- def wasvalid(self):
- return not hasattr(self, 'exc')
-
- def invalidraise(self, exc):
- raises = self.get('raises')
- if not raises:
- return
- return not isinstance(exc, raises)
-
- def istrue(self):
- try:
- return self._istrue()
- except TEST_OUTCOME:
- self.exc = sys.exc_info()
- if isinstance(self.exc[1], SyntaxError):
- msg = [" " * (self.exc[1].offset + 4) + "^", ]
- msg.append("SyntaxError: invalid syntax")
- else:
- msg = traceback.format_exception_only(*self.exc[:2])
- fail("Error evaluating %r expression\n"
- " %s\n"
- "%s"
- % (self.name, self.expr, "\n".join(msg)),
- pytrace=False)
-
- def _getglobals(self):
- d = {'os': os, 'sys': sys, 'config': self.item.config}
- if hasattr(self.item, 'obj'):
- d.update(self.item.obj.__globals__)
- return d
-
- def _istrue(self):
- if hasattr(self, 'result'):
- return self.result
- if self.holder:
- if self.holder.args or 'condition' in self.holder.kwargs:
- self.result = False
- # "holder" might be a MarkInfo or a MarkDecorator; only
- # MarkInfo keeps track of all parameters it received in an
- # _arglist attribute
- marks = getattr(self.holder, '_marks', None) \
- or [self.holder.mark]
- for _, args, kwargs in marks:
- if 'condition' in kwargs:
- args = (kwargs['condition'],)
- for expr in args:
- self.expr = expr
- if isinstance(expr, py.builtin._basestring):
- d = self._getglobals()
- result = cached_eval(self.item.config, expr, d)
- else:
- if "reason" not in kwargs:
- # XXX would be better checked at collection time
- msg = "you need to specify reason=STRING " \
- "when using booleans as conditions."
- fail(msg)
- result = bool(expr)
- if result:
- self.result = True
- self.reason = kwargs.get('reason', None)
- self.expr = expr
- return self.result
- else:
- self.result = True
- return getattr(self, 'result', False)
-
- def get(self, attr, default=None):
- return self.holder.kwargs.get(attr, default)
-
- def getexplanation(self):
- expl = getattr(self, 'reason', None) or self.get('reason', None)
- if not expl:
- if not hasattr(self, 'expr'):
- return ""
- else:
- return "condition: " + str(self.expr)
- return expl
-
-
-@hookimpl(tryfirst=True)
-def pytest_runtest_setup(item):
- # Check if skip or skipif are specified as pytest marks
-
- skipif_info = item.keywords.get('skipif')
- if isinstance(skipif_info, (MarkInfo, MarkDecorator)):
- eval_skipif = MarkEvaluator(item, 'skipif')
- if eval_skipif.istrue():
- item._evalskip = eval_skipif
- skip(eval_skipif.getexplanation())
-
- skip_info = item.keywords.get('skip')
- if isinstance(skip_info, (MarkInfo, MarkDecorator)):
- item._evalskip = True
- if 'reason' in skip_info.kwargs:
- skip(skip_info.kwargs['reason'])
- elif skip_info.args:
- skip(skip_info.args[0])
- else:
- skip("unconditional skip")
-
- item._evalxfail = MarkEvaluator(item, 'xfail')
- check_xfail_no_run(item)
-
-
-@hookimpl(hookwrapper=True)
-def pytest_pyfunc_call(pyfuncitem):
- check_xfail_no_run(pyfuncitem)
- outcome = yield
- passed = outcome.excinfo is None
- if passed:
- check_strict_xfail(pyfuncitem)
-
-
-def check_xfail_no_run(item):
- """check xfail(run=False)"""
- if not item.config.option.runxfail:
- evalxfail = item._evalxfail
- if evalxfail.istrue():
- if not evalxfail.get('run', True):
- xfail("[NOTRUN] " + evalxfail.getexplanation())
-
-
-def check_strict_xfail(pyfuncitem):
- """check xfail(strict=True) for the given PASSING test"""
- evalxfail = pyfuncitem._evalxfail
- if evalxfail.istrue():
- strict_default = pyfuncitem.config.getini('xfail_strict')
- is_strict_xfail = evalxfail.get('strict', strict_default)
- if is_strict_xfail:
- del pyfuncitem._evalxfail
- explanation = evalxfail.getexplanation()
- fail('[XPASS(strict)] ' + explanation, pytrace=False)
-
-
-@hookimpl(hookwrapper=True)
-def pytest_runtest_makereport(item, call):
- outcome = yield
- rep = outcome.get_result()
- evalxfail = getattr(item, '_evalxfail', None)
- evalskip = getattr(item, '_evalskip', None)
- # unittest special case, see setting of _unexpectedsuccess
- if hasattr(item, '_unexpectedsuccess') and rep.when == "call":
- from _pytest.compat import _is_unittest_unexpected_success_a_failure
- if item._unexpectedsuccess:
- rep.longrepr = "Unexpected success: {0}".format(item._unexpectedsuccess)
- else:
- rep.longrepr = "Unexpected success"
- if _is_unittest_unexpected_success_a_failure():
- rep.outcome = "failed"
- else:
- rep.outcome = "passed"
- rep.wasxfail = rep.longrepr
- elif item.config.option.runxfail:
- pass # don't interfere
- elif call.excinfo and call.excinfo.errisinstance(xfail.Exception):
- rep.wasxfail = "reason: " + call.excinfo.value.msg
- rep.outcome = "skipped"
- elif evalxfail and not rep.skipped and evalxfail.wasvalid() and \
- evalxfail.istrue():
- if call.excinfo:
- if evalxfail.invalidraise(call.excinfo.value):
- rep.outcome = "failed"
- else:
- rep.outcome = "skipped"
- rep.wasxfail = evalxfail.getexplanation()
- elif call.when == "call":
- strict_default = item.config.getini('xfail_strict')
- is_strict_xfail = evalxfail.get('strict', strict_default)
- explanation = evalxfail.getexplanation()
- if is_strict_xfail:
- rep.outcome = "failed"
- rep.longrepr = "[XPASS(strict)] {0}".format(explanation)
- else:
- rep.outcome = "passed"
- rep.wasxfail = explanation
- elif evalskip is not None and rep.skipped and type(rep.longrepr) is tuple:
- # skipped by mark.skipif; change the location of the failure
- # to point to the item definition, otherwise it will display
- # the location of where the skip exception was raised within pytest
- filename, line, reason = rep.longrepr
- filename, line = item.location[:2]
- rep.longrepr = filename, line, reason
-
-# called by terminalreporter progress reporting
-
-
-def pytest_report_teststatus(report):
- if hasattr(report, "wasxfail"):
- if report.skipped:
- return "xfailed", "x", "xfail"
- elif report.passed:
- return "xpassed", "X", ("XPASS", {'yellow': True})
-
-# called by the terminalreporter instance/plugin
-
-
-def pytest_terminal_summary(terminalreporter):
- tr = terminalreporter
- if not tr.reportchars:
- # for name in "xfailed skipped failed xpassed":
- # if not tr.stats.get(name, 0):
- # tr.write_line("HINT: use '-r' option to see extra "
- # "summary info about tests")
- # break
- return
-
- lines = []
- for char in tr.reportchars:
- if char == "x":
- show_xfailed(terminalreporter, lines)
- elif char == "X":
- show_xpassed(terminalreporter, lines)
- elif char in "fF":
- show_simple(terminalreporter, lines, 'failed', "FAIL %s")
- elif char in "sS":
- show_skipped(terminalreporter, lines)
- elif char == "E":
- show_simple(terminalreporter, lines, 'error', "ERROR %s")
- elif char == 'p':
- show_simple(terminalreporter, lines, 'passed', "PASSED %s")
-
- if lines:
- tr._tw.sep("=", "short test summary info")
- for line in lines:
- tr._tw.line(line)
-
-
-def show_simple(terminalreporter, lines, stat, format):
- failed = terminalreporter.stats.get(stat)
- if failed:
- for rep in failed:
- pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid)
- lines.append(format % (pos,))
-
-
-def show_xfailed(terminalreporter, lines):
- xfailed = terminalreporter.stats.get("xfailed")
- if xfailed:
- for rep in xfailed:
- pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid)
- reason = rep.wasxfail
- lines.append("XFAIL %s" % (pos,))
- if reason:
- lines.append(" " + str(reason))
-
-
-def show_xpassed(terminalreporter, lines):
- xpassed = terminalreporter.stats.get("xpassed")
- if xpassed:
- for rep in xpassed:
- pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid)
- reason = rep.wasxfail
- lines.append("XPASS %s %s" % (pos, reason))
-
-
-def cached_eval(config, expr, d):
- if not hasattr(config, '_evalcache'):
- config._evalcache = {}
- try:
- return config._evalcache[expr]
- except KeyError:
- import _pytest._code
- exprcode = _pytest._code.compile(expr, mode="eval")
- config._evalcache[expr] = x = eval(exprcode, d)
- return x
-
-
-def folded_skips(skipped):
- d = {}
- for event in skipped:
- key = event.longrepr
- assert len(key) == 3, (event, key)
- d.setdefault(key, []).append(event)
- values = []
- for key, events in d.items():
- values.append((len(events),) + key)
- return values
-
-
-def show_skipped(terminalreporter, lines):
- tr = terminalreporter
- skipped = tr.stats.get('skipped', [])
- if skipped:
- # if not tr.hasopt('skipped'):
- # tr.write_line(
- # "%d skipped tests, specify -rs for more info" %
- # len(skipped))
- # return
- fskips = folded_skips(skipped)
- if fskips:
- # tr.write_sep("_", "skipped test summary")
- for num, fspath, lineno, reason in fskips:
- if reason.startswith("Skipped: "):
- reason = reason[9:]
- lines.append(
- "SKIP [%d] %s:%d: %s" %
- (num, fspath, lineno + 1, reason))
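
A short test module (names invented) that exercises the machinery above: the string `skipif` condition is compiled and evaluated by `cached_eval` against the globals from `_getglobals`, and the strict `xfail` case is handled in `pytest_runtest_makereport`.

```
# test_marks.py -- exercises the skip/xfail plugin above
import sys
import pytest

@pytest.mark.skipif('sys.platform == "win32"', reason="POSIX-only test")
def test_posix_only():
    assert True

@pytest.mark.xfail(reason="known bug", strict=True)
def test_known_bug():
    # A failure here is reported as xfail; a pass would fail strictly.
    assert 1 == 2
```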
diff --git a/lib/spack/external/pytest-fallback/_pytest/terminal.py b/lib/spack/external/pytest-fallback/_pytest/terminal.py
deleted file mode 100644
index 9da94d0c91..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/terminal.py
+++ /dev/null
@@ -1,650 +0,0 @@
-""" terminal reporting of the full testing process.
-
-This is a good source for looking at the various reporting hooks.
-"""
-from __future__ import absolute_import, division, print_function
-
-import itertools
-from _pytest.main import EXIT_OK, EXIT_TESTSFAILED, EXIT_INTERRUPTED, \
- EXIT_USAGEERROR, EXIT_NOTESTSCOLLECTED
-import pytest
-import py
-import sys
-import time
-import platform
-
-from _pytest import nodes
-import _pytest._pluggy as pluggy
-
-
-def pytest_addoption(parser):
- group = parser.getgroup("terminal reporting", "reporting", after="general")
- group._addoption('-v', '--verbose', action="count",
- dest="verbose", default=0, help="increase verbosity."),
- group._addoption('-q', '--quiet', action="count",
- dest="quiet", default=0, help="decrease verbosity."),
- group._addoption('-r',
- action="store", dest="reportchars", default='', metavar="chars",
- help="show extra test summary info as specified by chars (f)ailed, "
- "(E)error, (s)skipped, (x)failed, (X)passed, "
- "(p)passed, (P)passed with output, (a)all except pP. "
- "Warnings are displayed at all times except when "
- "--disable-warnings is set")
- group._addoption('--disable-warnings', '--disable-pytest-warnings', default=False,
- dest='disable_warnings', action='store_true',
- help='disable warnings summary')
- group._addoption('-l', '--showlocals',
- action="store_true", dest="showlocals", default=False,
- help="show locals in tracebacks (disabled by default).")
- group._addoption('--tb', metavar="style",
- action="store", dest="tbstyle", default='auto',
- choices=['auto', 'long', 'short', 'no', 'line', 'native'],
- help="traceback print mode (auto/long/short/line/native/no).")
- group._addoption('--fulltrace', '--full-trace',
- action="store_true", default=False,
- help="don't cut any tracebacks (default is to cut).")
- group._addoption('--color', metavar="color",
- action="store", dest="color", default='auto',
- choices=['yes', 'no', 'auto'],
- help="color terminal output (yes/no/auto).")
-
-
-def pytest_configure(config):
- config.option.verbose -= config.option.quiet
- reporter = TerminalReporter(config, sys.stdout)
- config.pluginmanager.register(reporter, 'terminalreporter')
- if config.option.debug or config.option.traceconfig:
- def mywriter(tags, args):
- msg = " ".join(map(str, args))
- reporter.write_line("[traceconfig] " + msg)
- config.trace.root.setprocessor("pytest:config", mywriter)
-
-
-def getreportopt(config):
- reportopts = ""
- reportchars = config.option.reportchars
- if not config.option.disable_warnings and 'w' not in reportchars:
- reportchars += 'w'
- elif config.option.disable_warnings and 'w' in reportchars:
- reportchars = reportchars.replace('w', '')
- if reportchars:
- for char in reportchars:
- if char not in reportopts and char != 'a':
- reportopts += char
- elif char == 'a':
- reportopts = 'fEsxXw'
- return reportopts
-
-
-def pytest_report_teststatus(report):
- if report.passed:
- letter = "."
- elif report.skipped:
- letter = "s"
- elif report.failed:
- letter = "F"
- if report.when != "call":
- letter = "f"
- return report.outcome, letter, report.outcome.upper()
-
-
-class WarningReport:
- """
- Simple structure to hold warnings information captured by ``pytest_logwarning``.
- """
-
- def __init__(self, code, message, nodeid=None, fslocation=None):
- """
- :param code: unused
- :param str message: user friendly message about the warning
- :param str|None nodeid: node id that generated the warning (see ``get_location``).
- :param tuple|py.path.local fslocation:
- file system location of the source of the warning (see ``get_location``).
- """
- self.code = code
- self.message = message
- self.nodeid = nodeid
- self.fslocation = fslocation
-
- def get_location(self, config):
- """
- Returns the more user-friendly information about the location
- of a warning, or None.
- """
- if self.nodeid:
- return self.nodeid
- if self.fslocation:
- if isinstance(self.fslocation, tuple) and len(self.fslocation) >= 2:
- filename, linenum = self.fslocation[:2]
- relpath = py.path.local(filename).relto(config.invocation_dir)
- return '%s:%s' % (relpath, linenum)
- else:
- return str(self.fslocation)
- return None
-
-
-class TerminalReporter:
- def __init__(self, config, file=None):
- import _pytest.config
- self.config = config
- self.verbosity = self.config.option.verbose
- self.showheader = self.verbosity >= 0
- self.showfspath = self.verbosity >= 0
- self.showlongtestinfo = self.verbosity > 0
- self._numcollected = 0
-
- self.stats = {}
- self.startdir = py.path.local()
- if file is None:
- file = sys.stdout
- self._tw = self.writer = _pytest.config.create_terminal_writer(config,
- file)
- self.currentfspath = None
- self.reportchars = getreportopt(config)
- self.hasmarkup = self._tw.hasmarkup
- self.isatty = file.isatty()
-
- def hasopt(self, char):
- char = {'xfailed': 'x', 'skipped': 's'}.get(char, char)
- return char in self.reportchars
-
- def write_fspath_result(self, nodeid, res):
- fspath = self.config.rootdir.join(nodeid.split("::")[0])
- if fspath != self.currentfspath:
- self.currentfspath = fspath
- fspath = self.startdir.bestrelpath(fspath)
- self._tw.line()
- self._tw.write(fspath + " ")
- self._tw.write(res)
-
- def write_ensure_prefix(self, prefix, extra="", **kwargs):
- if self.currentfspath != prefix:
- self._tw.line()
- self.currentfspath = prefix
- self._tw.write(prefix)
- if extra:
- self._tw.write(extra, **kwargs)
- self.currentfspath = -2
-
- def ensure_newline(self):
- if self.currentfspath:
- self._tw.line()
- self.currentfspath = None
-
- def write(self, content, **markup):
- self._tw.write(content, **markup)
-
- def write_line(self, line, **markup):
- if not py.builtin._istext(line):
- line = py.builtin.text(line, errors="replace")
- self.ensure_newline()
- self._tw.line(line, **markup)
-
- def rewrite(self, line, **markup):
- """
- Rewinds the terminal cursor to the beginning and writes the given line.
-
- :kwarg erase: if True, also pads the line with spaces to the full terminal
- width to ensure previous lines are properly erased.
-
- The rest of the keyword arguments are markup instructions.
- """
- erase = markup.pop('erase', False)
- if erase:
- fill_count = self._tw.fullwidth - len(line)
- fill = ' ' * fill_count
- else:
- fill = ''
- line = str(line)
- self._tw.write("\r" + line + fill, **markup)
-
- def write_sep(self, sep, title=None, **markup):
- self.ensure_newline()
- self._tw.sep(sep, title, **markup)
-
- def section(self, title, sep="=", **kw):
- self._tw.sep(sep, title, **kw)
-
- def line(self, msg, **kw):
- self._tw.line(msg, **kw)
-
- def pytest_internalerror(self, excrepr):
- for line in py.builtin.text(excrepr).split("\n"):
- self.write_line("INTERNALERROR> " + line)
- return 1
-
- def pytest_logwarning(self, code, fslocation, message, nodeid):
- warnings = self.stats.setdefault("warnings", [])
- warning = WarningReport(code=code, fslocation=fslocation,
- message=message, nodeid=nodeid)
- warnings.append(warning)
-
- def pytest_plugin_registered(self, plugin):
- if self.config.option.traceconfig:
- msg = "PLUGIN registered: %s" % (plugin,)
- # XXX this event may happen during setup/teardown time
- # which unfortunately captures our output here
- # which garbles our output if we use self.write_line
- self.write_line(msg)
-
- def pytest_deselected(self, items):
- self.stats.setdefault('deselected', []).extend(items)
-
- def pytest_runtest_logstart(self, nodeid, location):
- # ensure that the path is printed before the
- # 1st test of a module starts running
- if self.showlongtestinfo:
- line = self._locationline(nodeid, *location)
- self.write_ensure_prefix(line, "")
- elif self.showfspath:
- fsid = nodeid.split("::")[0]
- self.write_fspath_result(fsid, "")
-
- def pytest_runtest_logreport(self, report):
- rep = report
- res = self.config.hook.pytest_report_teststatus(report=rep)
- cat, letter, word = res
- self.stats.setdefault(cat, []).append(rep)
- self._tests_ran = True
- if not letter and not word:
- # probably passed setup/teardown
- return
- if self.verbosity <= 0:
- if not hasattr(rep, 'node') and self.showfspath:
- self.write_fspath_result(rep.nodeid, letter)
- else:
- self._tw.write(letter)
- else:
- if isinstance(word, tuple):
- word, markup = word
- else:
- if rep.passed:
- markup = {'green': True}
- elif rep.failed:
- markup = {'red': True}
- elif rep.skipped:
- markup = {'yellow': True}
- line = self._locationline(rep.nodeid, *rep.location)
- if not hasattr(rep, 'node'):
- self.write_ensure_prefix(line, word, **markup)
- # self._tw.write(word, **markup)
- else:
- self.ensure_newline()
- if hasattr(rep, 'node'):
- self._tw.write("[%s] " % rep.node.gateway.id)
- self._tw.write(word, **markup)
- self._tw.write(" " + line)
- self.currentfspath = -2
-
- def pytest_collection(self):
- if not self.isatty and self.config.option.verbose >= 1:
- self.write("collecting ... ", bold=True)
-
- def pytest_collectreport(self, report):
- if report.failed:
- self.stats.setdefault("error", []).append(report)
- elif report.skipped:
- self.stats.setdefault("skipped", []).append(report)
- items = [x for x in report.result if isinstance(x, pytest.Item)]
- self._numcollected += len(items)
- if self.isatty:
- # self.write_fspath_result(report.nodeid, 'E')
- self.report_collect()
-
- def report_collect(self, final=False):
- if self.config.option.verbose < 0:
- return
-
- errors = len(self.stats.get('error', []))
- skipped = len(self.stats.get('skipped', []))
- if final:
- line = "collected "
- else:
- line = "collecting "
- line += str(self._numcollected) + " item" + ('' if self._numcollected == 1 else 's')
- if errors:
- line += " / %d errors" % errors
- if skipped:
- line += " / %d skipped" % skipped
- if self.isatty:
- self.rewrite(line, bold=True, erase=True)
- if final:
- self.write('\n')
- else:
- self.write_line(line)
-
- def pytest_collection_modifyitems(self):
- self.report_collect(True)
-
- @pytest.hookimpl(trylast=True)
- def pytest_sessionstart(self, session):
- self._sessionstarttime = time.time()
- if not self.showheader:
- return
- self.write_sep("=", "test session starts", bold=True)
- verinfo = platform.python_version()
- msg = "platform %s -- Python %s" % (sys.platform, verinfo)
- if hasattr(sys, 'pypy_version_info'):
- verinfo = ".".join(map(str, sys.pypy_version_info[:3]))
- msg += "[pypy-%s-%s]" % (verinfo, sys.pypy_version_info[3])
- msg += ", pytest-%s, py-%s, pluggy-%s" % (
- pytest.__version__, py.__version__, pluggy.__version__)
- if self.verbosity > 0 or self.config.option.debug or \
- getattr(self.config.option, 'pastebin', None):
- msg += " -- " + str(sys.executable)
- self.write_line(msg)
- lines = self.config.hook.pytest_report_header(
- config=self.config, startdir=self.startdir)
- self._write_report_lines_from_hooks(lines)
-
- def _write_report_lines_from_hooks(self, lines):
- lines.reverse()
- for line in flatten(lines):
- self.write_line(line)
-
- def pytest_report_header(self, config):
- inifile = ""
- if config.inifile:
- inifile = " " + config.rootdir.bestrelpath(config.inifile)
- lines = ["rootdir: %s, inifile:%s" % (config.rootdir, inifile)]
-
- plugininfo = config.pluginmanager.list_plugin_distinfo()
- if plugininfo:
-
- lines.append(
- "plugins: %s" % ", ".join(_plugin_nameversions(plugininfo)))
- return lines
-
- def pytest_collection_finish(self, session):
- if self.config.option.collectonly:
- self._printcollecteditems(session.items)
- if self.stats.get('failed'):
- self._tw.sep("!", "collection failures")
- for rep in self.stats.get('failed'):
- rep.toterminal(self._tw)
- return 1
- return 0
- lines = self.config.hook.pytest_report_collectionfinish(
- config=self.config, startdir=self.startdir, items=session.items)
- self._write_report_lines_from_hooks(lines)
-
- def _printcollecteditems(self, items):
- # to print out items and their parent collectors
- # we take care to leave out Instances aka ()
- # because later versions are going to get rid of them anyway
- if self.config.option.verbose < 0:
- if self.config.option.verbose < -1:
- counts = {}
- for item in items:
- name = item.nodeid.split('::', 1)[0]
- counts[name] = counts.get(name, 0) + 1
- for name, count in sorted(counts.items()):
- self._tw.line("%s: %d" % (name, count))
- else:
- for item in items:
- nodeid = item.nodeid
- nodeid = nodeid.replace("::()::", "::")
- self._tw.line(nodeid)
- return
- stack = []
- indent = ""
- for item in items:
- needed_collectors = item.listchain()[1:] # strip root node
- while stack:
- if stack == needed_collectors[:len(stack)]:
- break
- stack.pop()
- for col in needed_collectors[len(stack):]:
- stack.append(col)
- # if col.name == "()":
- # continue
- indent = (len(stack) - 1) * " "
- self._tw.line("%s%s" % (indent, col))
-
- @pytest.hookimpl(hookwrapper=True)
- def pytest_sessionfinish(self, exitstatus):
- outcome = yield
- outcome.get_result()
- self._tw.line("")
- summary_exit_codes = (
- EXIT_OK, EXIT_TESTSFAILED, EXIT_INTERRUPTED, EXIT_USAGEERROR,
- EXIT_NOTESTSCOLLECTED)
- if exitstatus in summary_exit_codes:
- self.config.hook.pytest_terminal_summary(terminalreporter=self,
- exitstatus=exitstatus)
- self.summary_errors()
- self.summary_failures()
- self.summary_warnings()
- self.summary_passes()
- if exitstatus == EXIT_INTERRUPTED:
- self._report_keyboardinterrupt()
- del self._keyboardinterrupt_memo
- self.summary_deselected()
- self.summary_stats()
-
- def pytest_keyboard_interrupt(self, excinfo):
- self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True)
-
- def pytest_unconfigure(self):
- if hasattr(self, '_keyboardinterrupt_memo'):
- self._report_keyboardinterrupt()
-
- def _report_keyboardinterrupt(self):
- excrepr = self._keyboardinterrupt_memo
- msg = excrepr.reprcrash.message
- self.write_sep("!", msg)
- if "KeyboardInterrupt" in msg:
- if self.config.option.fulltrace:
- excrepr.toterminal(self._tw)
- else:
- self._tw.line("to show a full traceback on KeyboardInterrupt use --fulltrace", yellow=True)
- excrepr.reprcrash.toterminal(self._tw)
-
- def _locationline(self, nodeid, fspath, lineno, domain):
- def mkrel(nodeid):
- line = self.config.cwd_relative_nodeid(nodeid)
- if domain and line.endswith(domain):
- line = line[:-len(domain)]
- values = domain.split("[")
- values[0] = values[0].replace('.', '::') # don't replace '.' in params
- line += "[".join(values)
- return line
- # collect_fspath comes from testid which has a "/"-normalized path
-
- if fspath:
- res = mkrel(nodeid).replace("::()", "") # parens-normalization
- if nodeid.split("::")[0] != fspath.replace("\\", nodes.SEP):
- res += " <- " + self.startdir.bestrelpath(fspath)
- else:
- res = "[location]"
- return res + " "
-
- def _getfailureheadline(self, rep):
- if hasattr(rep, 'location'):
- fspath, lineno, domain = rep.location
- return domain
- else:
- return "test session" # XXX?
-
- def _getcrashline(self, rep):
- try:
- return str(rep.longrepr.reprcrash)
- except AttributeError:
- try:
- return str(rep.longrepr)[:50]
- except AttributeError:
- return ""
-
- #
- # summaries for sessionfinish
- #
- def getreports(self, name):
- values = []
- for x in self.stats.get(name, []):
- if not hasattr(x, '_pdbshown'):
- values.append(x)
- return values
-
- def summary_warnings(self):
- if self.hasopt("w"):
- all_warnings = self.stats.get("warnings")
- if not all_warnings:
- return
-
- grouped = itertools.groupby(all_warnings, key=lambda wr: wr.get_location(self.config))
-
- self.write_sep("=", "warnings summary", yellow=True, bold=False)
- for location, warnings in grouped:
- self._tw.line(str(location) or '<undetermined location>')
- for w in warnings:
- lines = w.message.splitlines()
- indented = '\n'.join(' ' + x for x in lines)
- self._tw.line(indented)
- self._tw.line()
- self._tw.line('-- Docs: http://doc.pytest.org/en/latest/warnings.html')
-
- def summary_passes(self):
- if self.config.option.tbstyle != "no":
- if self.hasopt("P"):
- reports = self.getreports('passed')
- if not reports:
- return
- self.write_sep("=", "PASSES")
- for rep in reports:
- msg = self._getfailureheadline(rep)
- self.write_sep("_", msg)
- self._outrep_summary(rep)
-
- def print_teardown_sections(self, rep):
- for secname, content in rep.sections:
- if 'teardown' in secname:
- self._tw.sep('-', secname)
- if content[-1:] == "\n":
- content = content[:-1]
- self._tw.line(content)
-
- def summary_failures(self):
- if self.config.option.tbstyle != "no":
- reports = self.getreports('failed')
- if not reports:
- return
- self.write_sep("=", "FAILURES")
- for rep in reports:
- if self.config.option.tbstyle == "line":
- line = self._getcrashline(rep)
- self.write_line(line)
- else:
- msg = self._getfailureheadline(rep)
- markup = {'red': True, 'bold': True}
- self.write_sep("_", msg, **markup)
- self._outrep_summary(rep)
- for report in self.getreports(''):
- if report.nodeid == rep.nodeid and report.when == 'teardown':
- self.print_teardown_sections(report)
-
- def summary_errors(self):
- if self.config.option.tbstyle != "no":
- reports = self.getreports('error')
- if not reports:
- return
- self.write_sep("=", "ERRORS")
- for rep in self.stats['error']:
- msg = self._getfailureheadline(rep)
- if not hasattr(rep, 'when'):
- # collect
- msg = "ERROR collecting " + msg
- elif rep.when == "setup":
- msg = "ERROR at setup of " + msg
- elif rep.when == "teardown":
- msg = "ERROR at teardown of " + msg
- self.write_sep("_", msg)
- self._outrep_summary(rep)
-
- def _outrep_summary(self, rep):
- rep.toterminal(self._tw)
- for secname, content in rep.sections:
- self._tw.sep("-", secname)
- if content[-1:] == "\n":
- content = content[:-1]
- self._tw.line(content)
-
- def summary_stats(self):
- session_duration = time.time() - self._sessionstarttime
- (line, color) = build_summary_stats_line(self.stats)
- msg = "%s in %.2f seconds" % (line, session_duration)
- markup = {color: True, 'bold': True}
-
- if self.verbosity >= 0:
- self.write_sep("=", msg, **markup)
- if self.verbosity == -1:
- self.write_line(msg, **markup)
-
- def summary_deselected(self):
- if 'deselected' in self.stats:
- self.write_sep("=", "%d tests deselected" % (
- len(self.stats['deselected'])), bold=True)
-
-
-def repr_pythonversion(v=None):
- if v is None:
- v = sys.version_info
- try:
- return "%s.%s.%s-%s-%s" % v
- except (TypeError, ValueError):
- return str(v)
-
-
-def flatten(values):
- for x in values:
- if isinstance(x, (list, tuple)):
- for y in flatten(x):
- yield y
- else:
- yield x
-
-
-def build_summary_stats_line(stats):
- keys = ("failed passed skipped deselected "
- "xfailed xpassed warnings error").split()
- unknown_key_seen = False
- for key in stats.keys():
- if key not in keys:
- if key: # setup/teardown reports have an empty key, ignore them
- keys.append(key)
- unknown_key_seen = True
- parts = []
- for key in keys:
- val = stats.get(key, None)
- if val:
- parts.append("%d %s" % (len(val), key))
-
- if parts:
- line = ", ".join(parts)
- else:
- line = "no tests ran"
-
- if 'failed' in stats or 'error' in stats:
- color = 'red'
- elif 'warnings' in stats or unknown_key_seen:
- color = 'yellow'
- elif 'passed' in stats:
- color = 'green'
- else:
- color = 'yellow'
-
- return (line, color)
-
-
-def _plugin_nameversions(plugininfo):
- values = []
- for plugin, dist in plugininfo:
- # gets us name and version!
- name = '{dist.project_name}-{dist.version}'.format(dist=dist)
- # questionable convenience, but it keeps things short
- if name.startswith("pytest-"):
- name = name[7:]
- # we decided to print python package names
- # they can have more than one plugin
- if name not in values:
- values.append(name)
- return values
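
`build_summary_stats_line` above is a pure function of the stats dictionary, so its behavior is easy to check in isolation. This condensed re-creation (it drops the original's unknown-key handling for brevity) illustrates the mapping from stats to the closing summary line and its color:

```
# Condensed re-creation of build_summary_stats_line, for illustration only.
def summary_line(stats):
    keys = ("failed passed skipped deselected "
            "xfailed xpassed warnings error").split()
    parts = []
    for key in keys:
        val = stats.get(key)
        if val:
            parts.append("%d %s" % (len(val), key))
    line = ", ".join(parts) or "no tests ran"
    if "failed" in stats or "error" in stats:
        color = "red"
    elif "warnings" in stats:
        color = "yellow"
    elif "passed" in stats:
        color = "green"
    else:
        color = "yellow"
    return line, color

print(summary_line({"passed": [1, 2, 3], "xfailed": [1]}))
# -> ('3 passed, 1 xfailed', 'green')
```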
diff --git a/lib/spack/external/pytest-fallback/_pytest/tmpdir.py b/lib/spack/external/pytest-fallback/_pytest/tmpdir.py
deleted file mode 100644
index da1b032237..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/tmpdir.py
+++ /dev/null
@@ -1,126 +0,0 @@
-""" support for providing temporary directories to test functions. """
-from __future__ import absolute_import, division, print_function
-
-import re
-
-import pytest
-import py
-from _pytest.monkeypatch import MonkeyPatch
-
-
-class TempdirFactory:
- """Factory for temporary directories under the common base temp directory.
-
- The base directory can be configured using the ``--basetemp`` option.
- """
-
- def __init__(self, config):
- self.config = config
- self.trace = config.trace.get("tmpdir")
-
- def ensuretemp(self, string, dir=1):
- """ (deprecated) return temporary directory path with
- the given string as the trailing part. It is usually
- better to use the 'tmpdir' function argument which
- provides a unique per-test-invocation directory
- that is guaranteed to be empty.
- """
- # py.log._apiwarn(">1.1", "use tmpdir function argument")
- return self.getbasetemp().ensure(string, dir=dir)
-
- def mktemp(self, basename, numbered=True):
- """Create a subdirectory of the base temporary directory and return it.
- If ``numbered``, ensure the directory is unique by adding a number
- prefix greater than any existing one.
- """
- basetemp = self.getbasetemp()
- if not numbered:
- p = basetemp.mkdir(basename)
- else:
- p = py.path.local.make_numbered_dir(prefix=basename,
- keep=0, rootdir=basetemp, lock_timeout=None)
- self.trace("mktemp", p)
- return p
-
- def getbasetemp(self):
- """ return base temporary directory. """
- try:
- return self._basetemp
- except AttributeError:
- basetemp = self.config.option.basetemp
- if basetemp:
- basetemp = py.path.local(basetemp)
- if basetemp.check():
- basetemp.remove()
- basetemp.mkdir()
- else:
- temproot = py.path.local.get_temproot()
- user = get_user()
- if user:
- # use a sub-directory in the temproot to speed up
- # the make_numbered_dir() call
- rootdir = temproot.join('pytest-of-%s' % user)
- else:
- rootdir = temproot
- rootdir.ensure(dir=1)
- basetemp = py.path.local.make_numbered_dir(prefix='pytest-',
- rootdir=rootdir)
- self._basetemp = t = basetemp.realpath()
- self.trace("new basetemp", t)
- return t
-
- def finish(self):
- self.trace("finish")
-
-
-def get_user():
- """Return the current user name, or None if getuser() does not work
- in the current environment (see #1010).
- """
- import getpass
- try:
- return getpass.getuser()
- except (ImportError, KeyError):
- return None
-
-
-# backward compatibility
-TempdirHandler = TempdirFactory
-
-
-def pytest_configure(config):
- """Create a TempdirFactory and attach it to the config object.
-
- This is to comply with existing plugins which expect the handler to be
- available at pytest_configure time, but ideally should be moved entirely
- to the tmpdir_factory session fixture.
- """
- mp = MonkeyPatch()
- t = TempdirFactory(config)
- config._cleanup.extend([mp.undo, t.finish])
- mp.setattr(config, '_tmpdirhandler', t, raising=False)
- mp.setattr(pytest, 'ensuretemp', t.ensuretemp, raising=False)
-
-
-@pytest.fixture(scope='session')
-def tmpdir_factory(request):
- """Return a TempdirFactory instance for the test session.
- """
- return request.config._tmpdirhandler
-
-
-@pytest.fixture
-def tmpdir(request, tmpdir_factory):
- """Return a temporary directory path object
- which is unique to each test function invocation,
- created as a sub directory of the base temporary
- directory. The returned object is a `py.path.local`_
- path object.
- """
- name = request.node.name
- name = re.sub(r"[\W]", "_", name)
- MAXVAL = 30
- if len(name) > MAXVAL:
- name = name[:MAXVAL]
- x = tmpdir_factory.mktemp(name, numbered=True)
- return x
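
Typical use of the `tmpdir` fixture defined above: each test invocation receives a fresh numbered subdirectory, and the returned `py.path.local` object supports `join`, `write`, `read`, and `listdir`.

```
# test_tmp.py -- each test gets a fresh directory from TempdirFactory
def test_create_file(tmpdir):
    p = tmpdir.join("hello.txt")
    p.write("content")
    assert p.read() == "content"
    assert len(tmpdir.listdir()) == 1
```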
diff --git a/lib/spack/external/pytest-fallback/_pytest/unittest.py b/lib/spack/external/pytest-fallback/_pytest/unittest.py
deleted file mode 100644
index 52c9813e8b..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/unittest.py
+++ /dev/null
@@ -1,239 +0,0 @@
-""" discovery and running of std-library "unittest" style tests. """
-from __future__ import absolute_import, division, print_function
-
-import sys
-import traceback
-
-# for transferring markers
-import _pytest._code
-from _pytest.config import hookimpl
-from _pytest.outcomes import fail, skip, xfail
-from _pytest.python import transfer_markers, Class, Module, Function
-from _pytest.skipping import MarkEvaluator
-
-
-def pytest_pycollect_makeitem(collector, name, obj):
- # has unittest been imported and is obj a subclass of its TestCase?
- try:
- if not issubclass(obj, sys.modules["unittest"].TestCase):
- return
- except Exception:
- return
- # yes, so let's collect it
- return UnitTestCase(name, parent=collector)
-
-
-class UnitTestCase(Class):
- # marker for fixturemanager.getfixtureinfo()
- # to declare that our children do not support funcargs
- nofuncargs = True
-
- def setup(self):
- cls = self.obj
- if getattr(cls, '__unittest_skip__', False):
- return # skipped
- setup = getattr(cls, 'setUpClass', None)
- if setup is not None:
- setup()
- teardown = getattr(cls, 'tearDownClass', None)
- if teardown is not None:
- self.addfinalizer(teardown)
- super(UnitTestCase, self).setup()
-
- def collect(self):
- from unittest import TestLoader
- cls = self.obj
- if not getattr(cls, "__test__", True):
- return
- self.session._fixturemanager.parsefactories(self, unittest=True)
- loader = TestLoader()
- module = self.getparent(Module).obj
- foundsomething = False
- for name in loader.getTestCaseNames(self.obj):
- x = getattr(self.obj, name)
- if not getattr(x, '__test__', True):
- continue
- funcobj = getattr(x, 'im_func', x)
- transfer_markers(funcobj, cls, module)
- yield TestCaseFunction(name, parent=self)
- foundsomething = True
-
- if not foundsomething:
- runtest = getattr(self.obj, 'runTest', None)
- if runtest is not None:
- ut = sys.modules.get("twisted.trial.unittest", None)
- if ut is None or runtest != ut.TestCase.runTest:
- yield TestCaseFunction('runTest', parent=self)
-
-
-class TestCaseFunction(Function):
- _excinfo = None
-
- def setup(self):
- self._testcase = self.parent.obj(self.name)
- self._fix_unittest_skip_decorator()
- self._obj = getattr(self._testcase, self.name)
- if hasattr(self._testcase, 'setup_method'):
- self._testcase.setup_method(self._obj)
- if hasattr(self, "_request"):
- self._request._fillfixtures()
-
- def _fix_unittest_skip_decorator(self):
- """
- The @unittest.skip decorator calls functools.wraps(self._testcase).
- The call to functools.wraps() fails unless self._testcase
- has a __name__ attribute. This is usually automatically supplied
- if the test is a function or method, but we need to add it manually
- here.
-
- See issue #1169
- """
- if sys.version_info[0] == 2:
- setattr(self._testcase, "__name__", self.name)
-
- def teardown(self):
- if hasattr(self._testcase, 'teardown_method'):
- self._testcase.teardown_method(self._obj)
- # Allow garbage collection on TestCase instance attributes.
- self._testcase = None
- self._obj = None
-
- def startTest(self, testcase):
- pass
-
- def _addexcinfo(self, rawexcinfo):
- # unwrap potential exception info (see twisted trial support below)
- rawexcinfo = getattr(rawexcinfo, '_rawexcinfo', rawexcinfo)
- try:
- excinfo = _pytest._code.ExceptionInfo(rawexcinfo)
- except TypeError:
- try:
- try:
- values = traceback.format_exception(*rawexcinfo)
- values.insert(0, "NOTE: Incompatible Exception Representation, "
- "displaying natively:\n\n")
- fail("".join(values), pytrace=False)
- except (fail.Exception, KeyboardInterrupt):
- raise
- except: # noqa
- fail("ERROR: Unknown Incompatible Exception "
- "representation:\n%r" % (rawexcinfo,), pytrace=False)
- except KeyboardInterrupt:
- raise
- except fail.Exception:
- excinfo = _pytest._code.ExceptionInfo()
- self.__dict__.setdefault('_excinfo', []).append(excinfo)
-
- def addError(self, testcase, rawexcinfo):
- self._addexcinfo(rawexcinfo)
-
- def addFailure(self, testcase, rawexcinfo):
- self._addexcinfo(rawexcinfo)
-
- def addSkip(self, testcase, reason):
- try:
- skip(reason)
- except skip.Exception:
- self._evalskip = MarkEvaluator(self, 'SkipTest')
- self._evalskip.result = True
- self._addexcinfo(sys.exc_info())
-
- def addExpectedFailure(self, testcase, rawexcinfo, reason=""):
- try:
- xfail(str(reason))
- except xfail.Exception:
- self._addexcinfo(sys.exc_info())
-
- def addUnexpectedSuccess(self, testcase, reason=""):
- self._unexpectedsuccess = reason
-
- def addSuccess(self, testcase):
- pass
-
- def stopTest(self, testcase):
- pass
-
- def _handle_skip(self):
- # implements the skipping machinery (see #2137)
- # analogous to Python's Lib/unittest/case.py:run
- testMethod = getattr(self._testcase, self._testcase._testMethodName)
- if (getattr(self._testcase.__class__, "__unittest_skip__", False) or
- getattr(testMethod, "__unittest_skip__", False)):
- # If the class or method was skipped.
- skip_why = (getattr(self._testcase.__class__, '__unittest_skip_why__', '') or
- getattr(testMethod, '__unittest_skip_why__', ''))
- try: # PY3, unittest2 on PY2
- self._testcase._addSkip(self, self._testcase, skip_why)
- except TypeError: # PY2
- if sys.version_info[0] != 2:
- raise
- self._testcase._addSkip(self, skip_why)
- return True
- return False
-
- def runtest(self):
- if self.config.pluginmanager.get_plugin("pdbinvoke") is None:
- self._testcase(result=self)
- else:
- # disables tearDown and cleanups for post mortem debugging (see #1890)
- if self._handle_skip():
- return
- self._testcase.debug()
-
- def _prunetraceback(self, excinfo):
- Function._prunetraceback(self, excinfo)
- traceback = excinfo.traceback.filter(
- lambda x: not x.frame.f_globals.get('__unittest'))
- if traceback:
- excinfo.traceback = traceback
-
-
-@hookimpl(tryfirst=True)
-def pytest_runtest_makereport(item, call):
- if isinstance(item, TestCaseFunction):
- if item._excinfo:
- call.excinfo = item._excinfo.pop(0)
- try:
- del call.result
- except AttributeError:
- pass
-
-# twisted trial support
-
-
-@hookimpl(hookwrapper=True)
-def pytest_runtest_protocol(item):
- if isinstance(item, TestCaseFunction) and \
- 'twisted.trial.unittest' in sys.modules:
- ut = sys.modules['twisted.python.failure']
- Failure__init__ = ut.Failure.__init__
- check_testcase_implements_trial_reporter()
-
- def excstore(self, exc_value=None, exc_type=None, exc_tb=None,
- captureVars=None):
- if exc_value is None:
- self._rawexcinfo = sys.exc_info()
- else:
- if exc_type is None:
- exc_type = type(exc_value)
- self._rawexcinfo = (exc_type, exc_value, exc_tb)
- try:
- Failure__init__(self, exc_value, exc_type, exc_tb,
- captureVars=captureVars)
- except TypeError:
- Failure__init__(self, exc_value, exc_type, exc_tb)
-
- ut.Failure.__init__ = excstore
- yield
- ut.Failure.__init__ = Failure__init__
- else:
- yield
-
-
-def check_testcase_implements_trial_reporter(done=[]):
- if done:
- return
- from zope.interface import classImplements
- from twisted.trial.itrial import IReporter
- classImplements(TestCaseFunction, IReporter)
- done.append(1)
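
For reference, this plugin is what lets a plain standard-library test case be collected and run by pytest; a method skipped with `unittest.skip` is routed through `addSkip` above.

```
# test_ut.py -- collected via UnitTestCase/TestCaseFunction above
import unittest

class TestUpper(unittest.TestCase):
    def setUp(self):
        self.value = "spam"

    def test_upper(self):
        self.assertEqual(self.value.upper(), "SPAM")

    @unittest.skip("routed through addSkip above")
    def test_skipped(self):
        self.fail("never runs")
```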
diff --git a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/README.md b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/README.md
deleted file mode 100644
index b5fe6febb0..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/README.md
+++ /dev/null
@@ -1,13 +0,0 @@
-This directory vendors the `pluggy` module.
-
-For a more detailed discussion of the reasons for vendoring this
-package, please see [this issue](https://github.com/pytest-dev/pytest/issues/944).
-
-To update the current version, execute:
-
-```
-$ pip install -U pluggy==<version> --no-compile --target=_pytest/vendored_packages
-```
-
-And commit the modified files. The `pluggy-<version>.dist-info` directory
-created by `pip` should be added as well.
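
For context on what the vendored module provides, the canonical pluggy flow, marker objects, a hook specification, an implementation, and a 1:N hook call, looks roughly like this (the `myproject` name and the classes are invented for illustration):

```
import pluggy

hookspec = pluggy.HookspecMarker("myproject")
hookimpl = pluggy.HookimplMarker("myproject")

class MySpec:
    @hookspec
    def myhook(self, arg):
        """Hook specification: the name and argument names are significant."""

class Plugin:
    @hookimpl
    def myhook(self, arg):
        return arg + 1

pm = pluggy.PluginManager("myproject")
pm.add_hookspecs(MySpec)
pm.register(Plugin())
print(pm.hook.myhook(arg=1))  # -> [2]
```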
diff --git a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/__init__.py b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/__init__.py
+++ /dev/null
diff --git a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/DESCRIPTION.rst b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/DESCRIPTION.rst
deleted file mode 100644
index da0e7a6ed7..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/DESCRIPTION.rst
+++ /dev/null
@@ -1,11 +0,0 @@
-
-Plugin registration and hook calling for Python
-===============================================
-
-This is the plugin manager as used by pytest but stripped
-of pytest specific details.
-
-During the 0.x series this plugin does not have much documentation
-except extensive docstrings in the pluggy.py module.
-
-
diff --git a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/INSTALLER b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/INSTALLER
deleted file mode 100644
index a1b589e38a..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/LICENSE.txt b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/LICENSE.txt
deleted file mode 100644
index 121017d086..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/LICENSE.txt
+++ /dev/null
@@ -1,22 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2015 holger krekel (rather uses bitbucket/hpk42)
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
diff --git a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/METADATA b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/METADATA
deleted file mode 100644
index bd88517c94..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/METADATA
+++ /dev/null
@@ -1,40 +0,0 @@
-Metadata-Version: 2.0
-Name: pluggy
-Version: 0.4.0
-Summary: plugin and hook calling mechanisms for python
-Home-page: https://github.com/pytest-dev/pluggy
-Author: Holger Krekel
-Author-email: holger at merlinux.eu
-License: MIT license
-Platform: unix
-Platform: linux
-Platform: osx
-Platform: win32
-Classifier: Development Status :: 4 - Beta
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Operating System :: POSIX
-Classifier: Operating System :: Microsoft :: Windows
-Classifier: Operating System :: MacOS :: MacOS X
-Classifier: Topic :: Software Development :: Testing
-Classifier: Topic :: Software Development :: Libraries
-Classifier: Topic :: Utilities
-Classifier: Programming Language :: Python :: 2
-Classifier: Programming Language :: Python :: 2.6
-Classifier: Programming Language :: Python :: 2.7
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.3
-Classifier: Programming Language :: Python :: 3.4
-Classifier: Programming Language :: Python :: 3.5
-
-
-Plugin registration and hook calling for Python
-===============================================
-
-This is the plugin manager as used by pytest but stripped
-of pytest specific details.
-
-During the 0.x series this plugin does not have much documentation
-except extensive docstrings in the pluggy.py module.
-
-
diff --git a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/RECORD b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/RECORD
deleted file mode 100644
index 3003a3bf2b..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/RECORD
+++ /dev/null
@@ -1,9 +0,0 @@
-pluggy.py,sha256=u0oG9cv-oLOkNvEBlwnnu8pp1AyxpoERgUO00S3rvpQ,31543
-pluggy-0.4.0.dist-info/DESCRIPTION.rst,sha256=ltvjkFd40LW_xShthp6RRVM6OB_uACYDFR3kTpKw7o4,307
-pluggy-0.4.0.dist-info/LICENSE.txt,sha256=ruwhUOyV1HgE9F35JVL9BCZ9vMSALx369I4xq9rhpkM,1134
-pluggy-0.4.0.dist-info/METADATA,sha256=pe2hbsqKFaLHC6wAQPpFPn0KlpcPfLBe_BnS4O70bfk,1364
-pluggy-0.4.0.dist-info/RECORD,,
-pluggy-0.4.0.dist-info/WHEEL,sha256=9Z5Xm-eel1bTS7e6ogYiKz0zmPEqDwIypurdHN1hR40,116
-pluggy-0.4.0.dist-info/metadata.json,sha256=T3go5L2qOa_-H-HpCZi3EoVKb8sZ3R-fOssbkWo2nvM,1119
-pluggy-0.4.0.dist-info/top_level.txt,sha256=xKSCRhai-v9MckvMuWqNz16c1tbsmOggoMSwTgcpYHE,7
-pluggy-0.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
diff --git a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/WHEEL b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/WHEEL
deleted file mode 100644
index 8b6dd1b5a8..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/WHEEL
+++ /dev/null
@@ -1,6 +0,0 @@
-Wheel-Version: 1.0
-Generator: bdist_wheel (0.29.0)
-Root-Is-Purelib: true
-Tag: py2-none-any
-Tag: py3-none-any
-
diff --git a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/metadata.json b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/metadata.json
deleted file mode 100644
index cde22aff02..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/metadata.json
+++ /dev/null
@@ -1 +0,0 @@
-{"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: POSIX", "Operating System :: Microsoft :: Windows", "Operating System :: MacOS :: MacOS X", "Topic :: Software Development :: Testing", "Topic :: Software Development :: Libraries", "Topic :: Utilities", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5"], "extensions": {"python.details": {"contacts": [{"email": "holger at merlinux.eu", "name": "Holger Krekel", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "https://github.com/pytest-dev/pluggy"}}}, "generator": "bdist_wheel (0.29.0)", "license": "MIT license", "metadata_version": "2.0", "name": "pluggy", "platform": "unix", "summary": "plugin and hook calling mechanisms for python", "version": "0.4.0"} \ No newline at end of file
diff --git a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/top_level.txt b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/top_level.txt
deleted file mode 100644
index 11bdb5c1f5..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-pluggy
diff --git a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy.py b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy.py
deleted file mode 100644
index 6f26552d73..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy.py
+++ /dev/null
@@ -1,782 +0,0 @@
-"""
-PluginManager, basic initialization and tracing.
-
-pluggy is the crystallized core of plugin management as used
-by some 150 plugins for pytest.
-
-Pluggy uses semantic versioning. Breaking changes are only foreseen for
-Major releases (incremented X in "X.Y.Z"). If you want to use pluggy in
-your project you should thus use a dependency restriction like
-"pluggy>=0.1.0,<1.0" to avoid surprises.
-
-pluggy is concerned with hook specification, hook implementations and hook
-calling. For any given hook specification a hook call invokes up to N implementations.
-A hook implementation can influence its position and type of execution:
-if attributed "tryfirst" or "trylast" it will try to execute
-first or last. However, if attributed "hookwrapper" an implementation
-can wrap all calls to non-hookwrapper implementations. A hookwrapper
-can thus execute some code ahead and after the execution of other hooks.
-
-Hook specification is done by way of a regular python function where
-both the function name and the names of all its arguments are significant.
-Each hook implementation function is verified against the original specification
-function, including the names of all its arguments. To allow hook
-specifications to evolve over the lifetime of a project, hook
-implementations may accept fewer arguments. One can thus add new arguments
-and semantics to a hook specification, typically without breaking existing
-hook implementations.
-
-The chosen approach is meant to let a hook designer think carefully about
-which objects are needed by an extension writer. By contrast, subclass-based
-extension mechanisms often expose a lot more state and behaviour than needed,
-thus restricting future developments.
-
-Pluggy currently consists of functionality for:
-
-- a way to register new hook specifications. Without a hook
- specification no hook calling can be performed.
-
-- a registry of plugins which contain hook implementation functions. It
- is possible to register plugins for which a hook specification is not yet
- known and validate all hooks when the system is in a more referentially
-  consistent state. Setting an "optionalhook" attribute on a hook
-  implementation avoids a PluginValidationError if a specification
-  is missing. This allows optional integration between plugins.
-
-- a "hook" relay object from which you can launch 1:N calls to
- registered hook implementation functions
-
-- a mechanism for ordering hook implementation functions
-
-- mechanisms for two different types of 1:N calls: "firstresult", where
-  the call stops as soon as the first implementation returns a non-None
-  result, and the (default) mode, which guarantees that all hook
-  implementations are called and their non-None results collected.
-
-- mechanisms for "historic" extension points such that all newly
- registered functions will receive all hook calls that happened
- before their registration.
-
-- a mechanism for discovering plugin objects that are registered
-  through setuptools entry points.
-
-- a simple tracing mechanism, including tracing of plugin calls and
- their arguments.
-
-"""
-import sys
-import inspect
-
-__version__ = '0.4.0'
-
-__all__ = ["PluginManager", "PluginValidationError", "HookCallError",
- "HookspecMarker", "HookimplMarker"]
-
-_py3 = sys.version_info > (3, 0)
-
-
-class HookspecMarker:
- """ Decorator helper class for marking functions as hook specifications.
-
- You can instantiate it with a project_name to get a decorator.
- Calling PluginManager.add_hookspecs later will discover all marked functions
- if the PluginManager uses the same project_name.
- """
-
- def __init__(self, project_name):
- self.project_name = project_name
-
- def __call__(self, function=None, firstresult=False, historic=False):
- """ if passed a function, directly sets attributes on the function
- which will make it discoverable to add_hookspecs(). If passed no
- function, returns a decorator which can be applied to a function
- later using the attributes supplied.
-
- If firstresult is True the 1:N hook call (N being the number of registered
- hook implementation functions) will stop at I<=N when the I'th function
- returns a non-None result.
-
- If historic is True calls to a hook will be memorized and replayed
- on later registered plugins.
-
- """
- def setattr_hookspec_opts(func):
- if historic and firstresult:
- raise ValueError("cannot have a historic firstresult hook")
- setattr(func, self.project_name + "_spec",
- dict(firstresult=firstresult, historic=historic))
- return func
-
- if function is not None:
- return setattr_hookspec_opts(function)
- else:
- return setattr_hookspec_opts
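As an illustrative sketch of the marker just shown (the project name "myproject" and the hook names are hypothetical, not part of pluggy itself):

    hookspec = HookspecMarker("myproject")

    class MySpecs:
        @hookspec
        def myhook(self, arg1, arg2):
            "a hook whose name and argument names form the specification"

        @hookspec(firstresult=True)
        def choose_backend(self, config):
            "a 1:N call that stops at the first non-None result"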
-
-
-class HookimplMarker:
- """ Decorator helper class for marking functions as hook implementations.
-
-    You can instantiate it with a project_name to get a decorator.
- Calling PluginManager.register later will discover all marked functions
- if the PluginManager uses the same project_name.
- """
- def __init__(self, project_name):
- self.project_name = project_name
-
- def __call__(self, function=None, hookwrapper=False, optionalhook=False,
- tryfirst=False, trylast=False):
-
- """ if passed a function, directly sets attributes on the function
- which will make it discoverable to register(). If passed no function,
- returns a decorator which can be applied to a function later using
- the attributes supplied.
-
- If optionalhook is True a missing matching hook specification will not result
- in an error (by default it is an error if no matching spec is found).
-
- If tryfirst is True this hook implementation will run as early as possible
-        in the chain of N hook implementations for a specification.
-
- If trylast is True this hook implementation will run as late as possible
- in the chain of N hook implementations.
-
-        If hookwrapper is True the hook implementation needs to execute
-        exactly one "yield". The code before the yield is run early, before
-        any non-hookwrapper function is run. The code after the yield is run
-        after all non-hookwrapper functions have run. The yield receives an
-        ``_CallOutcome`` object representing
- the exception or result outcome of the inner calls (including other hookwrapper
- calls).
-
- """
- def setattr_hookimpl_opts(func):
- setattr(func, self.project_name + "_impl",
- dict(hookwrapper=hookwrapper, optionalhook=optionalhook,
- tryfirst=tryfirst, trylast=trylast))
- return func
-
- if function is None:
- return setattr_hookimpl_opts
- else:
- return setattr_hookimpl_opts(function)
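A matching sketch for implementations, including a hookwrapper (again using the hypothetical "myproject" name):

    hookimpl = HookimplMarker("myproject")

    class MyPlugin:
        @hookimpl
        def myhook(self, arg1, arg2):
            return arg1 + arg2

    class MyWrapper:
        @hookimpl(hookwrapper=True)
        def myhook(self, arg1, arg2):
            # runs before the non-hookwrapper implementations
            outcome = yield
            # runs after them; outcome is the _CallOutcome of the inner calls
            print("inner result:", outcome.get_result())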
-
-
-def normalize_hookimpl_opts(opts):
- opts.setdefault("tryfirst", False)
- opts.setdefault("trylast", False)
- opts.setdefault("hookwrapper", False)
- opts.setdefault("optionalhook", False)
-
-
-class _TagTracer:
- def __init__(self):
- self._tag2proc = {}
- self.writer = None
- self.indent = 0
-
- def get(self, name):
- return _TagTracerSub(self, (name,))
-
- def format_message(self, tags, args):
- if isinstance(args[-1], dict):
- extra = args[-1]
- args = args[:-1]
- else:
- extra = {}
-
- content = " ".join(map(str, args))
- indent = " " * self.indent
-
- lines = [
- "%s%s [%s]\n" % (indent, content, ":".join(tags))
- ]
-
- for name, value in extra.items():
- lines.append("%s %s: %s\n" % (indent, name, value))
- return lines
-
- def processmessage(self, tags, args):
- if self.writer is not None and args:
- lines = self.format_message(tags, args)
- self.writer(''.join(lines))
- try:
- self._tag2proc[tags](tags, args)
- except KeyError:
- pass
-
- def setwriter(self, writer):
- self.writer = writer
-
- def setprocessor(self, tags, processor):
- if isinstance(tags, str):
- tags = tuple(tags.split(":"))
- else:
- assert isinstance(tags, tuple)
- self._tag2proc[tags] = processor
-
-
-class _TagTracerSub:
- def __init__(self, root, tags):
- self.root = root
- self.tags = tags
-
- def __call__(self, *args):
- self.root.processmessage(self.tags, args)
-
- def setmyprocessor(self, processor):
- self.root.setprocessor(self.tags, processor)
-
- def get(self, name):
- return self.__class__(self.root, self.tags + (name,))
-
-
-def _raise_wrapfail(wrap_controller, msg):
- co = wrap_controller.gi_code
- raise RuntimeError("wrap_controller at %r %s:%d %s" %
- (co.co_name, co.co_filename, co.co_firstlineno, msg))
-
-
-def _wrapped_call(wrap_controller, func):
-    """ Wrap a call to a function with a generator which needs to yield
- exactly once. The yield point will trigger calling the wrapped function
- and return its _CallOutcome to the yield point. The generator then needs
- to finish (raise StopIteration) in order for the wrapped call to complete.
- """
- try:
- next(wrap_controller) # first yield
- except StopIteration:
- _raise_wrapfail(wrap_controller, "did not yield")
- call_outcome = _CallOutcome(func)
- try:
- wrap_controller.send(call_outcome)
- _raise_wrapfail(wrap_controller, "has second yield")
- except StopIteration:
- pass
- return call_outcome.get_result()
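A wrap controller is simply a generator that yields exactly once; a minimal sketch of how _wrapped_call drives one:

    def controller():
        print("before the inner call")
        outcome = yield                    # receives the _CallOutcome
        print("after:", outcome.get_result())

    _wrapped_call(controller(), lambda: 40 + 2)   # prints both lines, returns 42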
-
-
-class _CallOutcome:
- """ Outcome of a function call, either an exception or a proper result.
- Calling the ``get_result`` method will return the result or reraise
- the exception raised when the function was called. """
- excinfo = None
-
- def __init__(self, func):
- try:
- self.result = func()
- except BaseException:
- self.excinfo = sys.exc_info()
-
- def force_result(self, result):
- self.result = result
- self.excinfo = None
-
- def get_result(self):
- if self.excinfo is None:
- return self.result
- else:
- ex = self.excinfo
- if _py3:
- raise ex[1].with_traceback(ex[2])
- _reraise(*ex) # noqa
-
-if not _py3:
- exec("""
-def _reraise(cls, val, tb):
- raise cls, val, tb
-""")
-
-
-class _TracedHookExecution:
- def __init__(self, pluginmanager, before, after):
- self.pluginmanager = pluginmanager
- self.before = before
- self.after = after
- self.oldcall = pluginmanager._inner_hookexec
- assert not isinstance(self.oldcall, _TracedHookExecution)
- self.pluginmanager._inner_hookexec = self
-
- def __call__(self, hook, hook_impls, kwargs):
- self.before(hook.name, hook_impls, kwargs)
- outcome = _CallOutcome(lambda: self.oldcall(hook, hook_impls, kwargs))
- self.after(outcome, hook.name, hook_impls, kwargs)
- return outcome.get_result()
-
- def undo(self):
- self.pluginmanager._inner_hookexec = self.oldcall
-
-
-class PluginManager(object):
-    """ Core PluginManager class which manages registration
-    of plugin objects and 1:N hook calling.
-
-    You can register new hooks by calling ``add_hookspecs(module_or_class)``.
-    You can register plugin objects (which contain hooks) by calling
-    ``register(plugin)``. The PluginManager is initialized with a
-    prefix that is searched for in the names of the registered plugin
-    objects. An optional excludefunc allows blacklisting names that
-    should not be considered hooks despite a matching prefix.
-
- For debugging purposes you can call ``enable_tracing()``
- which will subsequently send debug information to the trace helper.
- """
-
- def __init__(self, project_name, implprefix=None):
-        """ if implprefix is given, implementation functions
-        will be recognized if their name matches the implprefix. """
- self.project_name = project_name
- self._name2plugin = {}
- self._plugin2hookcallers = {}
- self._plugin_distinfo = []
- self.trace = _TagTracer().get("pluginmanage")
- self.hook = _HookRelay(self.trace.root.get("hook"))
- self._implprefix = implprefix
- self._inner_hookexec = lambda hook, methods, kwargs: \
- _MultiCall(methods, kwargs, hook.spec_opts).execute()
-
- def _hookexec(self, hook, methods, kwargs):
- # called from all hookcaller instances.
- # enable_tracing will set its own wrapping function at self._inner_hookexec
- return self._inner_hookexec(hook, methods, kwargs)
-
- def register(self, plugin, name=None):
- """ Register a plugin and return its canonical name or None if the name
- is blocked from registering. Raise a ValueError if the plugin is already
- registered. """
- plugin_name = name or self.get_canonical_name(plugin)
-
- if plugin_name in self._name2plugin or plugin in self._plugin2hookcallers:
- if self._name2plugin.get(plugin_name, -1) is None:
- return # blocked plugin, return None to indicate no registration
- raise ValueError("Plugin already registered: %s=%s\n%s" %
- (plugin_name, plugin, self._name2plugin))
-
- # XXX if an error happens we should make sure no state has been
- # changed at point of return
- self._name2plugin[plugin_name] = plugin
-
- # register matching hook implementations of the plugin
- self._plugin2hookcallers[plugin] = hookcallers = []
- for name in dir(plugin):
- hookimpl_opts = self.parse_hookimpl_opts(plugin, name)
- if hookimpl_opts is not None:
- normalize_hookimpl_opts(hookimpl_opts)
- method = getattr(plugin, name)
- hookimpl = HookImpl(plugin, plugin_name, method, hookimpl_opts)
- hook = getattr(self.hook, name, None)
- if hook is None:
- hook = _HookCaller(name, self._hookexec)
- setattr(self.hook, name, hook)
- elif hook.has_spec():
- self._verify_hook(hook, hookimpl)
- hook._maybe_apply_history(hookimpl)
- hook._add_hookimpl(hookimpl)
- hookcallers.append(hook)
- return plugin_name
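Tying register() together with the marker sketches above, a hypothetical end-to-end use could look like this (all names are illustrative):

    pm = PluginManager("myproject")
    pm.add_hookspecs(MySpecs)                  # declare the hook signatures
    pm.register(MyPlugin())                    # picks up @hookimpl-marked methods
    pm.register(MyWrapper())
    results = pm.hook.myhook(arg1=1, arg2=2)   # 1:N call, returns [3]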
-
- def parse_hookimpl_opts(self, plugin, name):
- method = getattr(plugin, name)
- try:
- res = getattr(method, self.project_name + "_impl", None)
- except Exception:
- res = {}
- if res is not None and not isinstance(res, dict):
- # false positive
- res = None
- elif res is None and self._implprefix and name.startswith(self._implprefix):
- res = {}
- return res
-
- def unregister(self, plugin=None, name=None):
- """ unregister a plugin object and all its contained hook implementations
- from internal data structures. """
- if name is None:
- assert plugin is not None, "one of name or plugin needs to be specified"
- name = self.get_name(plugin)
-
- if plugin is None:
- plugin = self.get_plugin(name)
-
- # if self._name2plugin[name] == None registration was blocked: ignore
- if self._name2plugin.get(name):
- del self._name2plugin[name]
-
- for hookcaller in self._plugin2hookcallers.pop(plugin, []):
- hookcaller._remove_plugin(plugin)
-
- return plugin
-
- def set_blocked(self, name):
- """ block registrations of the given name, unregister if already registered. """
- self.unregister(name=name)
- self._name2plugin[name] = None
-
- def is_blocked(self, name):
-        """ return True if the given name blocks registering plugins of that name. """
- return name in self._name2plugin and self._name2plugin[name] is None
-
- def add_hookspecs(self, module_or_class):
- """ add new hook specifications defined in the given module_or_class.
- Functions are recognized if they have been decorated accordingly. """
- names = []
- for name in dir(module_or_class):
- spec_opts = self.parse_hookspec_opts(module_or_class, name)
- if spec_opts is not None:
- hc = getattr(self.hook, name, None)
- if hc is None:
- hc = _HookCaller(name, self._hookexec, module_or_class, spec_opts)
- setattr(self.hook, name, hc)
- else:
- # plugins registered this hook without knowing the spec
- hc.set_specification(module_or_class, spec_opts)
- for hookfunction in (hc._wrappers + hc._nonwrappers):
- self._verify_hook(hc, hookfunction)
- names.append(name)
-
- if not names:
- raise ValueError("did not find any %r hooks in %r" %
- (self.project_name, module_or_class))
-
- def parse_hookspec_opts(self, module_or_class, name):
- method = getattr(module_or_class, name)
- return getattr(method, self.project_name + "_spec", None)
-
- def get_plugins(self):
- """ return the set of registered plugins. """
- return set(self._plugin2hookcallers)
-
- def is_registered(self, plugin):
- """ Return True if the plugin is already registered. """
- return plugin in self._plugin2hookcallers
-
- def get_canonical_name(self, plugin):
- """ Return canonical name for a plugin object. Note that a plugin
- may be registered under a different name which was specified
- by the caller of register(plugin, name). To obtain the name
-        of a registered plugin use ``get_name(plugin)`` instead."""
- return getattr(plugin, "__name__", None) or str(id(plugin))
-
- def get_plugin(self, name):
- """ Return a plugin or None for the given name. """
- return self._name2plugin.get(name)
-
- def has_plugin(self, name):
- """ Return True if a plugin with the given name is registered. """
- return self.get_plugin(name) is not None
-
- def get_name(self, plugin):
- """ Return name for registered plugin or None if not registered. """
- for name, val in self._name2plugin.items():
- if plugin == val:
- return name
-
- def _verify_hook(self, hook, hookimpl):
- if hook.is_historic() and hookimpl.hookwrapper:
- raise PluginValidationError(
-                "Plugin %r\nhook %r\nhistoric incompatible with hookwrapper" %
- (hookimpl.plugin_name, hook.name))
-
- for arg in hookimpl.argnames:
- if arg not in hook.argnames:
- raise PluginValidationError(
- "Plugin %r\nhook %r\nargument %r not available\n"
- "plugin definition: %s\n"
- "available hookargs: %s" %
- (hookimpl.plugin_name, hook.name, arg,
- _formatdef(hookimpl.function), ", ".join(hook.argnames)))
-
- def check_pending(self):
- """ Verify that all hooks which have not been verified against
- a hook specification are optional, otherwise raise PluginValidationError"""
- for name in self.hook.__dict__:
- if name[0] != "_":
- hook = getattr(self.hook, name)
- if not hook.has_spec():
- for hookimpl in (hook._wrappers + hook._nonwrappers):
- if not hookimpl.optionalhook:
- raise PluginValidationError(
- "unknown hook %r in plugin %r" %
- (name, hookimpl.plugin))
-
- def list_plugin_distinfo(self):
-        """ return a list of (distinfo, plugin) tuples for all
-        setuptools-registered plugins. """
- return list(self._plugin_distinfo)
-
- def list_name_plugin(self):
- """ return list of name/plugin pairs. """
- return list(self._name2plugin.items())
-
- def get_hookcallers(self, plugin):
- """ get all hook callers for the specified plugin. """
- return self._plugin2hookcallers.get(plugin)
-
- def add_hookcall_monitoring(self, before, after):
- """ add before/after tracing functions for all hooks
- and return an undo function which, when called,
- will remove the added tracers.
-
-        ``before(hook_name, hook_impls, kwargs)`` will be called ahead
-        of all hook calls and receives the hook name, a list of HookImpl
-        instances, and the keyword arguments for the hook call.
-
- ``after(outcome, hook_name, hook_impls, kwargs)`` receives the
- same arguments as ``before`` but also a :py:class:`_CallOutcome <_pytest.vendored_packages.pluggy._CallOutcome>` object
- which represents the result of the overall hook call.
- """
- return _TracedHookExecution(self, before, after).undo
-
- def enable_tracing(self):
- """ enable tracing of hook calls and return an undo function. """
- hooktrace = self.hook._trace
-
- def before(hook_name, methods, kwargs):
- hooktrace.root.indent += 1
- hooktrace(hook_name, kwargs)
-
- def after(outcome, hook_name, methods, kwargs):
- if outcome.excinfo is None:
- hooktrace("finish", hook_name, "-->", outcome.result)
- hooktrace.root.indent -= 1
-
- return self.add_hookcall_monitoring(before, after)
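For example (a sketch, reusing the hypothetical pm object from earlier):

    import sys

    pm.trace.root.setwriter(sys.stdout.write)   # emit trace lines to stdout
    undo = pm.enable_tracing()
    pm.hook.myhook(arg1=1, arg2=2)              # the call and its result are now traced
    undo()                                      # detach the tracers again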
-
- def subset_hook_caller(self, name, remove_plugins):
- """ Return a new _HookCaller instance for the named method
- which manages calls to all registered plugins except the
- ones from remove_plugins. """
- orig = getattr(self.hook, name)
- plugins_to_remove = [plug for plug in remove_plugins if hasattr(plug, name)]
- if plugins_to_remove:
- hc = _HookCaller(orig.name, orig._hookexec, orig._specmodule_or_class,
- orig.spec_opts)
- for hookimpl in (orig._wrappers + orig._nonwrappers):
- plugin = hookimpl.plugin
- if plugin not in plugins_to_remove:
- hc._add_hookimpl(hookimpl)
- # we also keep track of this hook caller so it
- # gets properly removed on plugin unregistration
- self._plugin2hookcallers.setdefault(plugin, []).append(hc)
- return hc
- return orig
-
-
-class _MultiCall:
- """ execute a call into multiple python functions/methods. """
-
- # XXX note that the __multicall__ argument is supported only
- # for pytest compatibility reasons. It was never officially
-    # supported there and has been explicitly deprecated since 2.8,
-    # so we can remove it soon, allowing us to avoid the recursion
-    # below in execute() and to simplify/speed up the execute loop.
-
- def __init__(self, hook_impls, kwargs, specopts={}):
- self.hook_impls = hook_impls
- self.kwargs = kwargs
- self.kwargs["__multicall__"] = self
- self.specopts = specopts
-
- def execute(self):
- all_kwargs = self.kwargs
- self.results = results = []
- firstresult = self.specopts.get("firstresult")
-
- while self.hook_impls:
- hook_impl = self.hook_impls.pop()
- try:
- args = [all_kwargs[argname] for argname in hook_impl.argnames]
- except KeyError:
- for argname in hook_impl.argnames:
- if argname not in all_kwargs:
- raise HookCallError(
- "hook call must provide argument %r" % (argname,))
- if hook_impl.hookwrapper:
- return _wrapped_call(hook_impl.function(*args), self.execute)
- res = hook_impl.function(*args)
- if res is not None:
- if firstresult:
- return res
- results.append(res)
-
- if not firstresult:
- return results
-
- def __repr__(self):
- status = "%d meths" % (len(self.hook_impls),)
- if hasattr(self, "results"):
- status = ("%d results, " % len(self.results)) + status
- return "<_MultiCall %s, kwargs=%r>" % (status, self.kwargs)
-
-
-def varnames(func, startindex=None):
- """ return argument name tuple for a function, method, class or callable.
-
- In case of a class, its "__init__" method is considered.
-    For methods the "self" parameter is not included, unless you are passing
-    an unbound method with Python 3 (which has no support for unbound methods).
- """
- cache = getattr(func, "__dict__", {})
- try:
- return cache["_varnames"]
- except KeyError:
- pass
- if inspect.isclass(func):
- try:
- func = func.__init__
- except AttributeError:
- return ()
- startindex = 1
- else:
- if not inspect.isfunction(func) and not inspect.ismethod(func):
- try:
- func = getattr(func, '__call__', func)
- except Exception:
- return ()
- if startindex is None:
- startindex = int(inspect.ismethod(func))
-
- try:
- rawcode = func.__code__
- except AttributeError:
- return ()
- try:
- x = rawcode.co_varnames[startindex:rawcode.co_argcount]
- except AttributeError:
- x = ()
- else:
- defaults = func.__defaults__
- if defaults:
- x = x[:-len(defaults)]
- try:
- cache["_varnames"] = x
- except TypeError:
- pass
- return x
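A couple of illustrative calls, with the results expected from the logic above:

    def f(x, y, z=0):
        pass

    varnames(f)   # -> ('x', 'y'); trailing defaulted names are stripped

    class C(object):
        def __init__(self, a, b):
            pass

    varnames(C)   # -> ('a', 'b'); __init__ is inspected and 'self' skipped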
-
-
-class _HookRelay:
- """ hook holder object for performing 1:N hook calls where N is the number
- of registered plugins.
-
- """
-
- def __init__(self, trace):
- self._trace = trace
-
-
-class _HookCaller(object):
- def __init__(self, name, hook_execute, specmodule_or_class=None, spec_opts=None):
- self.name = name
- self._wrappers = []
- self._nonwrappers = []
- self._hookexec = hook_execute
- if specmodule_or_class is not None:
- assert spec_opts is not None
- self.set_specification(specmodule_or_class, spec_opts)
-
- def has_spec(self):
- return hasattr(self, "_specmodule_or_class")
-
- def set_specification(self, specmodule_or_class, spec_opts):
- assert not self.has_spec()
- self._specmodule_or_class = specmodule_or_class
- specfunc = getattr(specmodule_or_class, self.name)
- argnames = varnames(specfunc, startindex=inspect.isclass(specmodule_or_class))
- assert "self" not in argnames # sanity check
- self.argnames = ["__multicall__"] + list(argnames)
- self.spec_opts = spec_opts
- if spec_opts.get("historic"):
- self._call_history = []
-
- def is_historic(self):
- return hasattr(self, "_call_history")
-
- def _remove_plugin(self, plugin):
- def remove(wrappers):
- for i, method in enumerate(wrappers):
- if method.plugin == plugin:
- del wrappers[i]
- return True
- if remove(self._wrappers) is None:
- if remove(self._nonwrappers) is None:
- raise ValueError("plugin %r not found" % (plugin,))
-
- def _add_hookimpl(self, hookimpl):
- if hookimpl.hookwrapper:
- methods = self._wrappers
- else:
- methods = self._nonwrappers
-
- if hookimpl.trylast:
- methods.insert(0, hookimpl)
- elif hookimpl.tryfirst:
- methods.append(hookimpl)
- else:
- # find last non-tryfirst method
- i = len(methods) - 1
- while i >= 0 and methods[i].tryfirst:
- i -= 1
- methods.insert(i + 1, hookimpl)
-
- def __repr__(self):
- return "<_HookCaller %r>" % (self.name,)
-
- def __call__(self, **kwargs):
- assert not self.is_historic()
- return self._hookexec(self, self._nonwrappers + self._wrappers, kwargs)
-
- def call_historic(self, proc=None, kwargs=None):
- self._call_history.append((kwargs or {}, proc))
- # historizing hooks don't return results
- self._hookexec(self, self._nonwrappers + self._wrappers, kwargs)
-
- def call_extra(self, methods, kwargs):
- """ Call the hook with some additional temporarily participating
- methods using the specified kwargs as call parameters. """
- old = list(self._nonwrappers), list(self._wrappers)
- for method in methods:
- opts = dict(hookwrapper=False, trylast=False, tryfirst=False)
- hookimpl = HookImpl(None, "<temp>", method, opts)
- self._add_hookimpl(hookimpl)
- try:
- return self(**kwargs)
- finally:
- self._nonwrappers, self._wrappers = old
-
- def _maybe_apply_history(self, method):
- if self.is_historic():
- for kwargs, proc in self._call_history:
- res = self._hookexec(self, [method], kwargs)
- if res and proc is not None:
- proc(res[0])
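A sketch of the historic mechanism (assuming a hypothetical spec named "announce" declared with @hookspec(historic=True) and the pm object from earlier):

    pm.hook.announce.call_historic(kwargs=dict(message="hello"))

    class LatePlugin:
        @hookimpl
        def announce(self, message):
            print("late plugin got:", message)

    pm.register(LatePlugin())   # replays the earlier call: prints "hello"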
-
-
-class HookImpl:
- def __init__(self, plugin, plugin_name, function, hook_impl_opts):
- self.function = function
- self.argnames = varnames(self.function)
- self.plugin = plugin
- self.opts = hook_impl_opts
- self.plugin_name = plugin_name
- self.__dict__.update(hook_impl_opts)
-
-
-class PluginValidationError(Exception):
- """ plugin failed validation. """
-
-
-class HookCallError(Exception):
- """ Hook was called wrongly. """
-
-
-if hasattr(inspect, 'signature'):
- def _formatdef(func):
- return "%s%s" % (
- func.__name__,
- str(inspect.signature(func))
- )
-else:
- def _formatdef(func):
- return "%s%s" % (
- func.__name__,
- inspect.formatargspec(*inspect.getargspec(func))
- )
diff --git a/lib/spack/external/pytest-fallback/_pytest/warnings.py b/lib/spack/external/pytest-fallback/_pytest/warnings.py
deleted file mode 100644
index 926b1f5811..0000000000
--- a/lib/spack/external/pytest-fallback/_pytest/warnings.py
+++ /dev/null
@@ -1,94 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-import warnings
-from contextlib import contextmanager
-
-import pytest
-
-from _pytest import compat
-
-
-def _setoption(wmod, arg):
- """
-    Copy of the warnings._setoption function but does not escape arguments.
- """
- parts = arg.split(':')
- if len(parts) > 5:
- raise wmod._OptionError("too many fields (max 5): %r" % (arg,))
- while len(parts) < 5:
- parts.append('')
- action, message, category, module, lineno = [s.strip()
- for s in parts]
- action = wmod._getaction(action)
- category = wmod._getcategory(category)
- if lineno:
- try:
- lineno = int(lineno)
- if lineno < 0:
- raise ValueError
- except (ValueError, OverflowError):
- raise wmod._OptionError("invalid lineno %r" % (lineno,))
- else:
- lineno = 0
- wmod.filterwarnings(action, message, category, module, lineno)
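The argument follows the action:message:category:module:lineno layout of Python's own -W option; for instance (illustrative values only):

    import warnings

    # roughly equivalent to running python with -W "error::DeprecationWarning:mymod"
    _setoption(warnings, "error::DeprecationWarning:mymod")

    # trailing fields may be omitted; they default to empty strings and lineno 0
    _setoption(warnings, "ignore::UserWarning")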
-
-
-def pytest_addoption(parser):
- group = parser.getgroup("pytest-warnings")
- group.addoption(
- '-W', '--pythonwarnings', action='append',
- help="set which warnings to report, see -W option of python itself.")
- parser.addini("filterwarnings", type="linelist",
- help="Each line specifies a pattern for "
- "warnings.filterwarnings. "
- "Processed after -W and --pythonwarnings.")
-
-
-@contextmanager
-def catch_warnings_for_item(item):
- """
-    catches the warnings generated during the setup/call/teardown execution
-    of the given item and, after it is done, posts them as warnings to
-    this item.
- """
- args = item.config.getoption('pythonwarnings') or []
- inifilters = item.config.getini("filterwarnings")
- with warnings.catch_warnings(record=True) as log:
- for arg in args:
- warnings._setoption(arg)
-
- for arg in inifilters:
- _setoption(warnings, arg)
-
- mark = item.get_marker('filterwarnings')
- if mark:
- for arg in mark.args:
- warnings._setoption(arg)
-
- yield
-
- for warning in log:
- warn_msg = warning.message
- unicode_warning = False
-
- if compat._PY2 and any(isinstance(m, compat.UNICODE_TYPES) for m in warn_msg.args):
- new_args = [compat.safe_str(m) for m in warn_msg.args]
- unicode_warning = warn_msg.args != new_args
- warn_msg.args = new_args
-
- msg = warnings.formatwarning(
- warn_msg, warning.category,
- warning.filename, warning.lineno, warning.line)
- item.warn("unused", msg)
-
- if unicode_warning:
- warnings.warn(
-                "Warning uses unicode that is not convertible to ascii, "
-                "converting to a safe representation:\n %s" % msg,
- UnicodeWarning)
-
-
-@pytest.hookimpl(hookwrapper=True)
-def pytest_runtest_protocol(item):
- with catch_warnings_for_item(item):
- yield
diff --git a/lib/spack/external/pytest-fallback/py/__init__.py b/lib/spack/external/pytest-fallback/py/__init__.py
deleted file mode 100644
index 85af650f5c..0000000000
--- a/lib/spack/external/pytest-fallback/py/__init__.py
+++ /dev/null
@@ -1,152 +0,0 @@
-"""
-pylib: rapid testing and development utils
-
-This module uses apipkg.py for lazy-loading submodules
-and classes. The initpkg dictionary below specifies
-name->value mappings where value can be another namespace
-dictionary or an import path.
-
-(c) Holger Krekel and others, 2004-2014
-"""
-__version__ = '1.4.34'
-
-from py import _apipkg
-
-# so that py.error.* instances are picklable
-import sys
-sys.modules['py.error'] = _apipkg.AliasModule("py.error", "py._error", 'error')
-import py.error # "Dereference" it now just to be safe (issue110)
-
-
-_apipkg.initpkg(__name__, attr={'_apipkg': _apipkg}, exportdefs={
- # access to all standard lib modules
- 'std': '._std:std',
- # access to all posix errno's as classes
- 'error': '._error:error',
-
- '_pydir' : '.__metainfo:pydir',
- 'version': 'py:__version__', # backward compatibility
-
- # pytest-2.0 has a flat namespace, we use alias modules
- # to keep old references compatible
- 'test' : 'pytest',
- 'test.collect' : 'pytest',
- 'test.cmdline' : 'pytest',
-
- # hook into the top-level standard library
- 'process' : {
- '__doc__' : '._process:__doc__',
- 'cmdexec' : '._process.cmdexec:cmdexec',
- 'kill' : '._process.killproc:kill',
- 'ForkedFunc' : '._process.forkedfunc:ForkedFunc',
- },
-
- 'apipkg' : {
- 'initpkg' : '._apipkg:initpkg',
- 'ApiModule' : '._apipkg:ApiModule',
- },
-
- 'iniconfig' : {
- 'IniConfig' : '._iniconfig:IniConfig',
- 'ParseError' : '._iniconfig:ParseError',
- },
-
- 'path' : {
- '__doc__' : '._path:__doc__',
- 'svnwc' : '._path.svnwc:SvnWCCommandPath',
- 'svnurl' : '._path.svnurl:SvnCommandPath',
- 'local' : '._path.local:LocalPath',
- 'SvnAuth' : '._path.svnwc:SvnAuth',
- },
-
- # python inspection/code-generation API
- 'code' : {
- '__doc__' : '._code:__doc__',
- 'compile' : '._code.source:compile_',
- 'Source' : '._code.source:Source',
- 'Code' : '._code.code:Code',
- 'Frame' : '._code.code:Frame',
- 'ExceptionInfo' : '._code.code:ExceptionInfo',
- 'Traceback' : '._code.code:Traceback',
- 'getfslineno' : '._code.source:getfslineno',
- 'getrawcode' : '._code.code:getrawcode',
- 'patch_builtins' : '._code.code:patch_builtins',
- 'unpatch_builtins' : '._code.code:unpatch_builtins',
- '_AssertionError' : '._code.assertion:AssertionError',
- '_reinterpret_old' : '._code.assertion:reinterpret_old',
- '_reinterpret' : '._code.assertion:reinterpret',
- '_reprcompare' : '._code.assertion:_reprcompare',
- '_format_explanation' : '._code.assertion:_format_explanation',
- },
-
- # backports and additions of builtins
- 'builtin' : {
- '__doc__' : '._builtin:__doc__',
- 'enumerate' : '._builtin:enumerate',
- 'reversed' : '._builtin:reversed',
- 'sorted' : '._builtin:sorted',
- 'any' : '._builtin:any',
- 'all' : '._builtin:all',
- 'set' : '._builtin:set',
- 'frozenset' : '._builtin:frozenset',
- 'BaseException' : '._builtin:BaseException',
- 'GeneratorExit' : '._builtin:GeneratorExit',
- '_sysex' : '._builtin:_sysex',
- 'print_' : '._builtin:print_',
- '_reraise' : '._builtin:_reraise',
- '_tryimport' : '._builtin:_tryimport',
- 'exec_' : '._builtin:exec_',
- '_basestring' : '._builtin:_basestring',
- '_totext' : '._builtin:_totext',
- '_isbytes' : '._builtin:_isbytes',
- '_istext' : '._builtin:_istext',
- '_getimself' : '._builtin:_getimself',
- '_getfuncdict' : '._builtin:_getfuncdict',
- '_getcode' : '._builtin:_getcode',
- 'builtins' : '._builtin:builtins',
- 'execfile' : '._builtin:execfile',
- 'callable' : '._builtin:callable',
- 'bytes' : '._builtin:bytes',
- 'text' : '._builtin:text',
- },
-
- # input-output helping
- 'io' : {
- '__doc__' : '._io:__doc__',
- 'dupfile' : '._io.capture:dupfile',
- 'TextIO' : '._io.capture:TextIO',
- 'BytesIO' : '._io.capture:BytesIO',
- 'FDCapture' : '._io.capture:FDCapture',
- 'StdCapture' : '._io.capture:StdCapture',
- 'StdCaptureFD' : '._io.capture:StdCaptureFD',
- 'TerminalWriter' : '._io.terminalwriter:TerminalWriter',
- 'ansi_print' : '._io.terminalwriter:ansi_print',
- 'get_terminal_width' : '._io.terminalwriter:get_terminal_width',
- 'saferepr' : '._io.saferepr:saferepr',
- },
-
- # small and mean xml/html generation
- 'xml' : {
- '__doc__' : '._xmlgen:__doc__',
- 'html' : '._xmlgen:html',
- 'Tag' : '._xmlgen:Tag',
- 'raw' : '._xmlgen:raw',
- 'Namespace' : '._xmlgen:Namespace',
- 'escape' : '._xmlgen:escape',
- },
-
- 'log' : {
- # logging API ('producers' and 'consumers' connected via keywords)
- '__doc__' : '._log:__doc__',
- '_apiwarn' : '._log.warning:_apiwarn',
- 'Producer' : '._log.log:Producer',
- 'setconsumer' : '._log.log:setconsumer',
- '_setstate' : '._log.log:setstate',
- '_getstate' : '._log.log:getstate',
- 'Path' : '._log.log:Path',
- 'STDOUT' : '._log.log:STDOUT',
- 'STDERR' : '._log.log:STDERR',
- 'Syslog' : '._log.log:Syslog',
- },
-
-})
diff --git a/lib/spack/external/pytest-fallback/py/__metainfo.py b/lib/spack/external/pytest-fallback/py/__metainfo.py
deleted file mode 100644
index 12581eb7af..0000000000
--- a/lib/spack/external/pytest-fallback/py/__metainfo.py
+++ /dev/null
@@ -1,2 +0,0 @@
-import py
-pydir = py.path.local(py.__file__).dirpath()
diff --git a/lib/spack/external/pytest-fallback/py/_apipkg.py b/lib/spack/external/pytest-fallback/py/_apipkg.py
deleted file mode 100644
index a73b8f6d0b..0000000000
--- a/lib/spack/external/pytest-fallback/py/_apipkg.py
+++ /dev/null
@@ -1,181 +0,0 @@
-"""
-apipkg: control the exported namespace of a python package.
-
-see http://pypi.python.org/pypi/apipkg
-
-(c) holger krekel, 2009 - MIT license
-"""
-import os
-import sys
-from types import ModuleType
-
-__version__ = '1.3.dev'
-
-def _py_abspath(path):
- """
- special version of abspath
- that will leave paths from jython jars alone
- """
-    if path.startswith('__pyclasspath__'):
-        return path
- else:
- return os.path.abspath(path)
-
-def initpkg(pkgname, exportdefs, attr=dict()):
- """ initialize given package from the export definitions. """
- oldmod = sys.modules.get(pkgname)
- d = {}
- f = getattr(oldmod, '__file__', None)
- if f:
- f = _py_abspath(f)
- d['__file__'] = f
- if hasattr(oldmod, '__version__'):
- d['__version__'] = oldmod.__version__
- if hasattr(oldmod, '__loader__'):
- d['__loader__'] = oldmod.__loader__
- if hasattr(oldmod, '__path__'):
- d['__path__'] = [_py_abspath(p) for p in oldmod.__path__]
- if '__doc__' not in exportdefs and getattr(oldmod, '__doc__', None):
- d['__doc__'] = oldmod.__doc__
- d.update(attr)
- if hasattr(oldmod, "__dict__"):
- oldmod.__dict__.update(d)
- mod = ApiModule(pkgname, exportdefs, implprefix=pkgname, attr=d)
- sys.modules[pkgname] = mod
-
-def importobj(modpath, attrname):
- module = __import__(modpath, None, None, ['__doc__'])
- if not attrname:
- return module
-
- retval = module
- names = attrname.split(".")
- for x in names:
- retval = getattr(retval, x)
- return retval
-
-class ApiModule(ModuleType):
- def __docget(self):
- try:
- return self.__doc
- except AttributeError:
- if '__doc__' in self.__map__:
- return self.__makeattr('__doc__')
- def __docset(self, value):
- self.__doc = value
- __doc__ = property(__docget, __docset)
-
- def __init__(self, name, importspec, implprefix=None, attr=None):
- self.__name__ = name
- self.__all__ = [x for x in importspec if x != '__onfirstaccess__']
- self.__map__ = {}
- self.__implprefix__ = implprefix or name
- if attr:
- for name, val in attr.items():
- # print "setting", self.__name__, name, val
- setattr(self, name, val)
- for name, importspec in importspec.items():
- if isinstance(importspec, dict):
- subname = '%s.%s' % (self.__name__, name)
- apimod = ApiModule(subname, importspec, implprefix)
- sys.modules[subname] = apimod
- setattr(self, name, apimod)
- else:
- parts = importspec.split(':')
- modpath = parts.pop(0)
- attrname = parts and parts[0] or ""
- if modpath[0] == '.':
- modpath = implprefix + modpath
-
- if not attrname:
- subname = '%s.%s' % (self.__name__, name)
- apimod = AliasModule(subname, modpath)
- sys.modules[subname] = apimod
- if '.' not in name:
- setattr(self, name, apimod)
- else:
- self.__map__[name] = (modpath, attrname)
-
- def __repr__(self):
- l = []
- if hasattr(self, '__version__'):
- l.append("version=" + repr(self.__version__))
- if hasattr(self, '__file__'):
- l.append('from ' + repr(self.__file__))
- if l:
- return '<ApiModule %r %s>' % (self.__name__, " ".join(l))
- return '<ApiModule %r>' % (self.__name__,)
-
- def __makeattr(self, name):
- """lazily compute value for name or raise AttributeError if unknown."""
- # print "makeattr", self.__name__, name
- target = None
- if '__onfirstaccess__' in self.__map__:
- target = self.__map__.pop('__onfirstaccess__')
- importobj(*target)()
- try:
- modpath, attrname = self.__map__[name]
- except KeyError:
- if target is not None and name != '__onfirstaccess__':
- # retry, onfirstaccess might have set attrs
- return getattr(self, name)
- raise AttributeError(name)
- else:
- result = importobj(modpath, attrname)
- setattr(self, name, result)
- try:
- del self.__map__[name]
- except KeyError:
- pass # in a recursive-import situation a double-del can happen
- return result
-
- __getattr__ = __makeattr
-
- def __dict__(self):
- # force all the content of the module to be loaded when __dict__ is read
- dictdescr = ModuleType.__dict__['__dict__']
- dict = dictdescr.__get__(self)
- if dict is not None:
- hasattr(self, 'some')
- for name in self.__all__:
- try:
- self.__makeattr(name)
- except AttributeError:
- pass
- return dict
- __dict__ = property(__dict__)
-
-
-def AliasModule(modname, modpath, attrname=None):
- mod = []
-
- def getmod():
- if not mod:
- x = importobj(modpath, None)
- if attrname is not None:
- x = getattr(x, attrname)
- mod.append(x)
- return mod[0]
-
- class AliasModule(ModuleType):
-
- def __repr__(self):
- x = modpath
- if attrname:
- x += "." + attrname
- return '<AliasModule %r for %r>' % (modname, x)
-
- def __getattribute__(self, name):
- try:
- return getattr(getmod(), name)
- except ImportError:
- return None
-
- def __setattr__(self, name, value):
- setattr(getmod(), name, value)
-
- def __delattr__(self, name):
- delattr(getmod(), name)
-
- return AliasModule(str(modname))
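A minimal sketch of how a package could use these helpers, mirroring the py/__init__.py usage shown earlier (the package and module names are hypothetical):

    # in mypkg/__init__.py
    from mypkg import _apipkg

    _apipkg.initpkg(__name__, {
        'util': {
            'helper': '._impl:helper',   # resolves to mypkg._impl.helper
        },
    })

    # the real import of mypkg._impl only happens on first attribute access:
    #     import mypkg
    #     mypkg.util.helper()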
diff --git a/lib/spack/external/pytest-fallback/py/_builtin.py b/lib/spack/external/pytest-fallback/py/_builtin.py
deleted file mode 100644
index 52ee9d79ca..0000000000
--- a/lib/spack/external/pytest-fallback/py/_builtin.py
+++ /dev/null
@@ -1,248 +0,0 @@
-import sys
-
-try:
- reversed = reversed
-except NameError:
- def reversed(sequence):
- """reversed(sequence) -> reverse iterator over values of the sequence
-
- Return a reverse iterator
- """
- if hasattr(sequence, '__reversed__'):
- return sequence.__reversed__()
- if not hasattr(sequence, '__getitem__'):
- raise TypeError("argument to reversed() must be a sequence")
- return reversed_iterator(sequence)
-
- class reversed_iterator(object):
-
- def __init__(self, seq):
- self.seq = seq
- self.remaining = len(seq)
-
- def __iter__(self):
- return self
-
- def next(self):
- i = self.remaining
- if i > 0:
- i -= 1
- item = self.seq[i]
- self.remaining = i
- return item
- raise StopIteration
-
- def __length_hint__(self):
- return self.remaining
-
-try:
- any = any
-except NameError:
- def any(iterable):
- for x in iterable:
- if x:
- return True
- return False
-
-try:
- all = all
-except NameError:
- def all(iterable):
- for x in iterable:
- if not x:
- return False
- return True
-
-try:
- sorted = sorted
-except NameError:
- builtin_cmp = cmp # need to use cmp as keyword arg
-
- def sorted(iterable, cmp=None, key=None, reverse=0):
- use_cmp = None
- if key is not None:
- if cmp is None:
- def use_cmp(x, y):
- return builtin_cmp(x[0], y[0])
- else:
- def use_cmp(x, y):
- return cmp(x[0], y[0])
- l = [(key(element), element) for element in iterable]
- else:
- if cmp is not None:
- use_cmp = cmp
- l = list(iterable)
- if use_cmp is not None:
- l.sort(use_cmp)
- else:
- l.sort()
- if reverse:
- l.reverse()
- if key is not None:
- return [element for (_, element) in l]
- return l
-
-try:
- set, frozenset = set, frozenset
-except NameError:
- from sets import set, frozenset
-
-# pass through
-enumerate = enumerate
-
-try:
- BaseException = BaseException
-except NameError:
- BaseException = Exception
-
-try:
- GeneratorExit = GeneratorExit
-except NameError:
- class GeneratorExit(Exception):
-        """ This exception is never raised; it is there to make it
-        possible to write code compatible with CPython 2.5 even on
-        lower CPython versions."""
- pass
- GeneratorExit.__module__ = 'exceptions'
-
-_sysex = (KeyboardInterrupt, SystemExit, MemoryError, GeneratorExit)
-
-try:
- callable = callable
-except NameError:
- def callable(obj):
- return hasattr(obj, "__call__")
-
-if sys.version_info >= (3, 0):
- exec ("print_ = print ; exec_=exec")
- import builtins
-
- # some backward compatibility helpers
- _basestring = str
- def _totext(obj, encoding=None, errors=None):
- if isinstance(obj, bytes):
- if errors is None:
- obj = obj.decode(encoding)
- else:
- obj = obj.decode(encoding, errors)
- elif not isinstance(obj, str):
- obj = str(obj)
- return obj
-
- def _isbytes(x):
- return isinstance(x, bytes)
- def _istext(x):
- return isinstance(x, str)
-
- text = str
- bytes = bytes
-
-
- def _getimself(function):
- return getattr(function, '__self__', None)
-
- def _getfuncdict(function):
- return getattr(function, "__dict__", None)
-
- def _getcode(function):
- return getattr(function, "__code__", None)
-
- def execfile(fn, globs=None, locs=None):
- if globs is None:
- back = sys._getframe(1)
- globs = back.f_globals
- locs = back.f_locals
- del back
- elif locs is None:
- locs = globs
- fp = open(fn, "r")
- try:
- source = fp.read()
- finally:
- fp.close()
- co = compile(source, fn, "exec", dont_inherit=True)
- exec_(co, globs, locs)
-
-else:
- import __builtin__ as builtins
- _totext = unicode
- _basestring = basestring
- text = unicode
- bytes = str
- execfile = execfile
- callable = callable
- def _isbytes(x):
- return isinstance(x, str)
- def _istext(x):
- return isinstance(x, unicode)
-
- def _getimself(function):
- return getattr(function, 'im_self', None)
-
- def _getfuncdict(function):
- return getattr(function, "__dict__", None)
-
- def _getcode(function):
- try:
- return getattr(function, "__code__")
- except AttributeError:
- return getattr(function, "func_code", None)
-
- def print_(*args, **kwargs):
-        """ minimal backport of the py3k print function. """
- sep = ' '
- if 'sep' in kwargs:
- sep = kwargs.pop('sep')
- end = '\n'
- if 'end' in kwargs:
- end = kwargs.pop('end')
- file = 'file' in kwargs and kwargs.pop('file') or sys.stdout
- if kwargs:
- args = ", ".join([str(x) for x in kwargs])
- raise TypeError("invalid keyword arguments: %s" % args)
- at_start = True
- for x in args:
- if not at_start:
- file.write(sep)
- file.write(str(x))
- at_start = False
- file.write(end)
-
- def exec_(obj, globals=None, locals=None):
-        """ minimal backport of the py3k exec function. """
- __tracebackhide__ = True
- if globals is None:
- frame = sys._getframe(1)
- globals = frame.f_globals
- if locals is None:
- locals = frame.f_locals
- elif locals is None:
- locals = globals
- exec2(obj, globals, locals)
-
-if sys.version_info >= (3, 0):
- def _reraise(cls, val, tb):
- __tracebackhide__ = True
- assert hasattr(val, '__traceback__')
- raise cls.with_traceback(val, tb)
-else:
- exec ("""
-def _reraise(cls, val, tb):
- __tracebackhide__ = True
- raise cls, val, tb
-def exec2(obj, globals, locals):
- __tracebackhide__ = True
- exec obj in globals, locals
-""")
-
-def _tryimport(*names):
- """ return the first successfully imported module. """
- assert names
- for name in names:
- try:
- __import__(name)
- except ImportError:
- excinfo = sys.exc_info()
- else:
- return sys.modules[name]
- _reraise(*excinfo)
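For instance, the classic fallback-import pattern (illustrative only):

    json = _tryimport('simplejson', 'json')   # first importable name wins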
diff --git a/lib/spack/external/pytest-fallback/py/_code/__init__.py b/lib/spack/external/pytest-fallback/py/_code/__init__.py
deleted file mode 100644
index f15acf8513..0000000000
--- a/lib/spack/external/pytest-fallback/py/_code/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-""" python inspection/code generation API """
diff --git a/lib/spack/external/pytest-fallback/py/_code/_assertionnew.py b/lib/spack/external/pytest-fallback/py/_code/_assertionnew.py
deleted file mode 100644
index afb1b31ff0..0000000000
--- a/lib/spack/external/pytest-fallback/py/_code/_assertionnew.py
+++ /dev/null
@@ -1,339 +0,0 @@
-"""
-Find intermediate evaluation results in assert statements through the
-builtin ast module. This should replace _assertionold.py eventually.
-"""
-
-import sys
-import ast
-
-import py
-from py._code.assertion import _format_explanation, BuiltinAssertionError
-
-
-if sys.platform.startswith("java") and sys.version_info < (2, 5, 2):
- # See http://bugs.jython.org/issue1497
- _exprs = ("BoolOp", "BinOp", "UnaryOp", "Lambda", "IfExp", "Dict",
- "ListComp", "GeneratorExp", "Yield", "Compare", "Call",
- "Repr", "Num", "Str", "Attribute", "Subscript", "Name",
- "List", "Tuple")
- _stmts = ("FunctionDef", "ClassDef", "Return", "Delete", "Assign",
- "AugAssign", "Print", "For", "While", "If", "With", "Raise",
- "TryExcept", "TryFinally", "Assert", "Import", "ImportFrom",
- "Exec", "Global", "Expr", "Pass", "Break", "Continue")
- _expr_nodes = set(getattr(ast, name) for name in _exprs)
- _stmt_nodes = set(getattr(ast, name) for name in _stmts)
- def _is_ast_expr(node):
- return node.__class__ in _expr_nodes
- def _is_ast_stmt(node):
- return node.__class__ in _stmt_nodes
-else:
- def _is_ast_expr(node):
- return isinstance(node, ast.expr)
- def _is_ast_stmt(node):
- return isinstance(node, ast.stmt)
-
-
-class Failure(Exception):
- """Error found while interpreting AST."""
-
- def __init__(self, explanation=""):
- self.cause = sys.exc_info()
- self.explanation = explanation
-
-
-def interpret(source, frame, should_fail=False):
- mod = ast.parse(source)
- visitor = DebugInterpreter(frame)
- try:
- visitor.visit(mod)
- except Failure:
- failure = sys.exc_info()[1]
- return getfailure(failure)
- if should_fail:
- return ("(assertion failed, but when it was re-run for "
- "printing intermediate values, it did not fail. Suggestions: "
- "compute assert expression before the assert or use --no-assert)")
-
-def run(offending_line, frame=None):
- if frame is None:
- frame = py.code.Frame(sys._getframe(1))
- return interpret(offending_line, frame)
-
-def getfailure(failure):
- explanation = _format_explanation(failure.explanation)
- value = failure.cause[1]
- if str(value):
- lines = explanation.splitlines()
- if not lines:
- lines.append("")
- lines[0] += " << %s" % (value,)
- explanation = "\n".join(lines)
- text = "%s: %s" % (failure.cause[0].__name__, explanation)
- if text.startswith("AssertionError: assert "):
- text = text[16:]
- return text
-
-
-operator_map = {
- ast.BitOr : "|",
- ast.BitXor : "^",
- ast.BitAnd : "&",
- ast.LShift : "<<",
- ast.RShift : ">>",
- ast.Add : "+",
- ast.Sub : "-",
- ast.Mult : "*",
- ast.Div : "/",
- ast.FloorDiv : "//",
- ast.Mod : "%",
- ast.Eq : "==",
- ast.NotEq : "!=",
- ast.Lt : "<",
- ast.LtE : "<=",
- ast.Gt : ">",
- ast.GtE : ">=",
- ast.Pow : "**",
- ast.Is : "is",
- ast.IsNot : "is not",
- ast.In : "in",
- ast.NotIn : "not in"
-}
-
-unary_map = {
- ast.Not : "not %s",
- ast.Invert : "~%s",
- ast.USub : "-%s",
- ast.UAdd : "+%s"
-}
-
-
-class DebugInterpreter(ast.NodeVisitor):
-    """Interpret AST nodes to glean useful debugging information. """
-
- def __init__(self, frame):
- self.frame = frame
-
- def generic_visit(self, node):
- # Fallback when we don't have a special implementation.
- if _is_ast_expr(node):
- mod = ast.Expression(node)
- co = self._compile(mod)
- try:
- result = self.frame.eval(co)
- except Exception:
- raise Failure()
- explanation = self.frame.repr(result)
- return explanation, result
- elif _is_ast_stmt(node):
- mod = ast.Module([node])
- co = self._compile(mod, "exec")
- try:
- self.frame.exec_(co)
- except Exception:
- raise Failure()
- return None, None
- else:
- raise AssertionError("can't handle %s" %(node,))
-
- def _compile(self, source, mode="eval"):
- return compile(source, "<assertion interpretation>", mode)
-
- def visit_Expr(self, expr):
- return self.visit(expr.value)
-
- def visit_Module(self, mod):
- for stmt in mod.body:
- self.visit(stmt)
-
- def visit_Name(self, name):
- explanation, result = self.generic_visit(name)
- # See if the name is local.
- source = "%r in locals() is not globals()" % (name.id,)
- co = self._compile(source)
- try:
- local = self.frame.eval(co)
- except Exception:
- # have to assume it isn't
- local = False
- if not local:
- return name.id, result
- return explanation, result
-
- def visit_Compare(self, comp):
- left = comp.left
- left_explanation, left_result = self.visit(left)
- for op, next_op in zip(comp.ops, comp.comparators):
- next_explanation, next_result = self.visit(next_op)
- op_symbol = operator_map[op.__class__]
- explanation = "%s %s %s" % (left_explanation, op_symbol,
- next_explanation)
- source = "__exprinfo_left %s __exprinfo_right" % (op_symbol,)
- co = self._compile(source)
- try:
- result = self.frame.eval(co, __exprinfo_left=left_result,
- __exprinfo_right=next_result)
- except Exception:
- raise Failure(explanation)
- try:
- if not result:
- break
- except KeyboardInterrupt:
- raise
- except:
- break
- left_explanation, left_result = next_explanation, next_result
-
- rcomp = py.code._reprcompare
- if rcomp:
- res = rcomp(op_symbol, left_result, next_result)
- if res:
- explanation = res
- return explanation, result
-
- def visit_BoolOp(self, boolop):
- is_or = isinstance(boolop.op, ast.Or)
- explanations = []
- for operand in boolop.values:
- explanation, result = self.visit(operand)
- explanations.append(explanation)
- if result == is_or:
- break
- name = is_or and " or " or " and "
- explanation = "(" + name.join(explanations) + ")"
- return explanation, result
-
- def visit_UnaryOp(self, unary):
- pattern = unary_map[unary.op.__class__]
- operand_explanation, operand_result = self.visit(unary.operand)
- explanation = pattern % (operand_explanation,)
- co = self._compile(pattern % ("__exprinfo_expr",))
- try:
- result = self.frame.eval(co, __exprinfo_expr=operand_result)
- except Exception:
- raise Failure(explanation)
- return explanation, result
-
- def visit_BinOp(self, binop):
- left_explanation, left_result = self.visit(binop.left)
- right_explanation, right_result = self.visit(binop.right)
- symbol = operator_map[binop.op.__class__]
- explanation = "(%s %s %s)" % (left_explanation, symbol,
- right_explanation)
- source = "__exprinfo_left %s __exprinfo_right" % (symbol,)
- co = self._compile(source)
- try:
- result = self.frame.eval(co, __exprinfo_left=left_result,
- __exprinfo_right=right_result)
- except Exception:
- raise Failure(explanation)
- return explanation, result
-
- def visit_Call(self, call):
- func_explanation, func = self.visit(call.func)
- arg_explanations = []
- ns = {"__exprinfo_func" : func}
- arguments = []
- for arg in call.args:
- arg_explanation, arg_result = self.visit(arg)
- arg_name = "__exprinfo_%s" % (len(ns),)
- ns[arg_name] = arg_result
- arguments.append(arg_name)
- arg_explanations.append(arg_explanation)
- for keyword in call.keywords:
- arg_explanation, arg_result = self.visit(keyword.value)
- arg_name = "__exprinfo_%s" % (len(ns),)
- ns[arg_name] = arg_result
- keyword_source = "%s=%%s" % (keyword.arg)
- arguments.append(keyword_source % (arg_name,))
- arg_explanations.append(keyword_source % (arg_explanation,))
- if call.starargs:
- arg_explanation, arg_result = self.visit(call.starargs)
- arg_name = "__exprinfo_star"
- ns[arg_name] = arg_result
- arguments.append("*%s" % (arg_name,))
- arg_explanations.append("*%s" % (arg_explanation,))
- if call.kwargs:
- arg_explanation, arg_result = self.visit(call.kwargs)
- arg_name = "__exprinfo_kwds"
- ns[arg_name] = arg_result
- arguments.append("**%s" % (arg_name,))
- arg_explanations.append("**%s" % (arg_explanation,))
- args_explained = ", ".join(arg_explanations)
- explanation = "%s(%s)" % (func_explanation, args_explained)
- args = ", ".join(arguments)
- source = "__exprinfo_func(%s)" % (args,)
- co = self._compile(source)
- try:
- result = self.frame.eval(co, **ns)
- except Exception:
- raise Failure(explanation)
- pattern = "%s\n{%s = %s\n}"
- rep = self.frame.repr(result)
- explanation = pattern % (rep, rep, explanation)
- return explanation, result
-
- def _is_builtin_name(self, name):
- pattern = "%r not in globals() and %r not in locals()"
- source = pattern % (name.id, name.id)
- co = self._compile(source)
- try:
- return self.frame.eval(co)
- except Exception:
- return False
-
- def visit_Attribute(self, attr):
- if not isinstance(attr.ctx, ast.Load):
- return self.generic_visit(attr)
- source_explanation, source_result = self.visit(attr.value)
- explanation = "%s.%s" % (source_explanation, attr.attr)
- source = "__exprinfo_expr.%s" % (attr.attr,)
- co = self._compile(source)
- try:
- result = self.frame.eval(co, __exprinfo_expr=source_result)
- except Exception:
- raise Failure(explanation)
- explanation = "%s\n{%s = %s.%s\n}" % (self.frame.repr(result),
- self.frame.repr(result),
- source_explanation, attr.attr)
- # Check if the attr is from an instance.
- source = "%r in getattr(__exprinfo_expr, '__dict__', {})"
- source = source % (attr.attr,)
- co = self._compile(source)
- try:
- from_instance = self.frame.eval(co, __exprinfo_expr=source_result)
- except Exception:
- from_instance = True
- if from_instance:
- rep = self.frame.repr(result)
- pattern = "%s\n{%s = %s\n}"
- explanation = pattern % (rep, rep, explanation)
- return explanation, result
-
- def visit_Assert(self, assrt):
- test_explanation, test_result = self.visit(assrt.test)
- if test_explanation.startswith("False\n{False =") and \
- test_explanation.endswith("\n"):
- test_explanation = test_explanation[15:-2]
- explanation = "assert %s" % (test_explanation,)
- if not test_result:
- try:
- raise BuiltinAssertionError
- except Exception:
- raise Failure(explanation)
- return explanation, test_result
-
- def visit_Assign(self, assign):
- value_explanation, value_result = self.visit(assign.value)
- explanation = "... = %s" % (value_explanation,)
- name = ast.Name("__exprinfo_expr", ast.Load(),
- lineno=assign.value.lineno,
- col_offset=assign.value.col_offset)
- new_assign = ast.Assign(assign.targets, name, lineno=assign.lineno,
- col_offset=assign.col_offset)
- mod = ast.Module([new_assign])
- co = self._compile(mod, "exec")
- try:
- self.frame.exec_(co, __exprinfo_expr=value_result)
- except Exception:
- raise Failure(explanation)
- return explanation, value_result
diff --git a/lib/spack/external/pytest-fallback/py/_code/_assertionold.py b/lib/spack/external/pytest-fallback/py/_code/_assertionold.py
deleted file mode 100644
index 4e81fb3ef6..0000000000
--- a/lib/spack/external/pytest-fallback/py/_code/_assertionold.py
+++ /dev/null
@@ -1,555 +0,0 @@
-import py
-import sys, inspect
-from compiler import parse, ast, pycodegen
-from py._code.assertion import BuiltinAssertionError, _format_explanation
-
-passthroughex = py.builtin._sysex
-
-class Failure:
- def __init__(self, node):
- self.exc, self.value, self.tb = sys.exc_info()
- self.node = node
-
-class View(object):
- """View base class.
-
- If C is a subclass of View, then C(x) creates a proxy object around
- the object x. The actual class of the proxy is not C in general,
-    but a *subclass* of C determined by the rules below. To avoid confusion,
-    we call the class of the proxy the view class (a subclass of C, so of
-    View) and the class of x the object class.
-
- Attributes and methods not found in the proxy are automatically read on x.
- Other operations like setting attributes are performed on the proxy, as
- determined by its view class. The object x is available from the proxy
- as its __obj__ attribute.
-
- The view class selection is determined by the __view__ tuples and the
- optional __viewkey__ method. By default, the selected view class is the
- most specific subclass of C whose __view__ mentions the class of x.
- If no such subclass is found, the search proceeds with the parent
- object classes. For example, C(True) will first look for a subclass
- of C with __view__ = (..., bool, ...) and only if it doesn't find any
- look for one with __view__ = (..., int, ...), and then ..., object,...
- If everything fails the class C itself is considered to be the default.
-
- Alternatively, the view class selection can be driven by another aspect
- of the object x, instead of the class of x, by overriding __viewkey__.
- See last example at the end of this module.
- """
-
- _viewcache = {}
- __view__ = ()
-
- def __new__(rootclass, obj, *args, **kwds):
- self = object.__new__(rootclass)
- self.__obj__ = obj
- self.__rootclass__ = rootclass
- key = self.__viewkey__()
- try:
- self.__class__ = self._viewcache[key]
- except KeyError:
- self.__class__ = self._selectsubclass(key)
- return self
-
- def __getattr__(self, attr):
- # attributes not found in the normal hierarchy rooted on View
- # are looked up in the object's real class
- return getattr(self.__obj__, attr)
-
- def __viewkey__(self):
- return self.__obj__.__class__
-
- def __matchkey__(self, key, subclasses):
- if inspect.isclass(key):
- keys = inspect.getmro(key)
- else:
- keys = [key]
- for key in keys:
- result = [C for C in subclasses if key in C.__view__]
- if result:
- return result
- return []
-
- def _selectsubclass(self, key):
- subclasses = list(enumsubclasses(self.__rootclass__))
- for C in subclasses:
- if not isinstance(C.__view__, tuple):
- C.__view__ = (C.__view__,)
- choices = self.__matchkey__(key, subclasses)
- if not choices:
- return self.__rootclass__
- elif len(choices) == 1:
- return choices[0]
- else:
- # combine the multiple choices
- return type('?', tuple(choices), {})
-
- def __repr__(self):
- return '%s(%r)' % (self.__rootclass__.__name__, self.__obj__)
-
-
-def enumsubclasses(cls):
- for subcls in cls.__subclasses__():
- for subsubclass in enumsubclasses(subcls):
- yield subsubclass
- yield cls
-
-
-class Interpretable(View):
- """A parse tree node with a few extra methods."""
- explanation = None
-
- def is_builtin(self, frame):
- return False
-
- def eval(self, frame):
- # fall-back for unknown expression nodes
- try:
- expr = ast.Expression(self.__obj__)
- expr.filename = '<eval>'
- self.__obj__.filename = '<eval>'
- co = pycodegen.ExpressionCodeGenerator(expr).getCode()
- result = frame.eval(co)
- except passthroughex:
- raise
- except:
- raise Failure(self)
- self.result = result
- self.explanation = self.explanation or frame.repr(self.result)
-
- def run(self, frame):
- # fall-back for unknown statement nodes
- try:
- expr = ast.Module(None, ast.Stmt([self.__obj__]))
- expr.filename = '<run>'
- co = pycodegen.ModuleCodeGenerator(expr).getCode()
- frame.exec_(co)
- except passthroughex:
- raise
- except:
- raise Failure(self)
-
- def nice_explanation(self):
- return _format_explanation(self.explanation)
-
-
-class Name(Interpretable):
- __view__ = ast.Name
-
- def is_local(self, frame):
- source = '%r in locals() is not globals()' % self.name
- try:
- return frame.is_true(frame.eval(source))
- except passthroughex:
- raise
- except:
- return False
-
- def is_global(self, frame):
- source = '%r in globals()' % self.name
- try:
- return frame.is_true(frame.eval(source))
- except passthroughex:
- raise
- except:
- return False
-
- def is_builtin(self, frame):
- source = '%r not in locals() and %r not in globals()' % (
- self.name, self.name)
- try:
- return frame.is_true(frame.eval(source))
- except passthroughex:
- raise
- except:
- return False
-
- def eval(self, frame):
- super(Name, self).eval(frame)
- if not self.is_local(frame):
- self.explanation = self.name
-
-class Compare(Interpretable):
- __view__ = ast.Compare
-
- def eval(self, frame):
- expr = Interpretable(self.expr)
- expr.eval(frame)
- for operation, expr2 in self.ops:
- if hasattr(self, 'result'):
- # shortcutting in chained expressions
- if not frame.is_true(self.result):
- break
- expr2 = Interpretable(expr2)
- expr2.eval(frame)
- self.explanation = "%s %s %s" % (
- expr.explanation, operation, expr2.explanation)
- source = "__exprinfo_left %s __exprinfo_right" % operation
- try:
- self.result = frame.eval(source,
- __exprinfo_left=expr.result,
- __exprinfo_right=expr2.result)
- except passthroughex:
- raise
- except:
- raise Failure(self)
- expr = expr2
-
-class And(Interpretable):
- __view__ = ast.And
-
- def eval(self, frame):
- explanations = []
- for expr in self.nodes:
- expr = Interpretable(expr)
- expr.eval(frame)
- explanations.append(expr.explanation)
- self.result = expr.result
- if not frame.is_true(expr.result):
- break
- self.explanation = '(' + ' and '.join(explanations) + ')'
-
-class Or(Interpretable):
- __view__ = ast.Or
-
- def eval(self, frame):
- explanations = []
- for expr in self.nodes:
- expr = Interpretable(expr)
- expr.eval(frame)
- explanations.append(expr.explanation)
- self.result = expr.result
- if frame.is_true(expr.result):
- break
- self.explanation = '(' + ' or '.join(explanations) + ')'
-
-
-# == Unary operations ==
-keepalive = []
-for astclass, astpattern in {
- ast.Not : 'not __exprinfo_expr',
- ast.Invert : '(~__exprinfo_expr)',
- }.items():
-
- class UnaryArith(Interpretable):
- __view__ = astclass
-
- def eval(self, frame, astpattern=astpattern):
- expr = Interpretable(self.expr)
- expr.eval(frame)
- self.explanation = astpattern.replace('__exprinfo_expr',
- expr.explanation)
- try:
- self.result = frame.eval(astpattern,
- __exprinfo_expr=expr.result)
- except passthroughex:
- raise
- except:
- raise Failure(self)
-
- keepalive.append(UnaryArith)
-
-# == Binary operations ==
-for astclass, astpattern in {
- ast.Add : '(__exprinfo_left + __exprinfo_right)',
- ast.Sub : '(__exprinfo_left - __exprinfo_right)',
- ast.Mul : '(__exprinfo_left * __exprinfo_right)',
- ast.Div : '(__exprinfo_left / __exprinfo_right)',
- ast.Mod : '(__exprinfo_left % __exprinfo_right)',
- ast.Power : '(__exprinfo_left ** __exprinfo_right)',
- }.items():
-
- class BinaryArith(Interpretable):
- __view__ = astclass
-
- def eval(self, frame, astpattern=astpattern):
- left = Interpretable(self.left)
- left.eval(frame)
- right = Interpretable(self.right)
- right.eval(frame)
- self.explanation = (astpattern
- .replace('__exprinfo_left', left .explanation)
- .replace('__exprinfo_right', right.explanation))
- try:
- self.result = frame.eval(astpattern,
- __exprinfo_left=left.result,
- __exprinfo_right=right.result)
- except passthroughex:
- raise
- except:
- raise Failure(self)
-
- keepalive.append(BinaryArith)
-
-
-class CallFunc(Interpretable):
- __view__ = ast.CallFunc
-
- def is_bool(self, frame):
- source = 'isinstance(__exprinfo_value, bool)'
- try:
- return frame.is_true(frame.eval(source,
- __exprinfo_value=self.result))
- except passthroughex:
- raise
- except:
- return False
-
- def eval(self, frame):
- node = Interpretable(self.node)
- node.eval(frame)
- explanations = []
- vars = {'__exprinfo_fn': node.result}
- source = '__exprinfo_fn('
- for a in self.args:
- if isinstance(a, ast.Keyword):
- keyword = a.name
- a = a.expr
- else:
- keyword = None
- a = Interpretable(a)
- a.eval(frame)
- argname = '__exprinfo_%d' % len(vars)
- vars[argname] = a.result
- if keyword is None:
- source += argname + ','
- explanations.append(a.explanation)
- else:
- source += '%s=%s,' % (keyword, argname)
- explanations.append('%s=%s' % (keyword, a.explanation))
- if self.star_args:
- star_args = Interpretable(self.star_args)
- star_args.eval(frame)
- argname = '__exprinfo_star'
- vars[argname] = star_args.result
- source += '*' + argname + ','
- explanations.append('*' + star_args.explanation)
- if self.dstar_args:
- dstar_args = Interpretable(self.dstar_args)
- dstar_args.eval(frame)
- argname = '__exprinfo_kwds'
- vars[argname] = dstar_args.result
- source += '**' + argname + ','
- explanations.append('**' + dstar_args.explanation)
- self.explanation = "%s(%s)" % (
- node.explanation, ', '.join(explanations))
- if source.endswith(','):
- source = source[:-1]
- source += ')'
- try:
- self.result = frame.eval(source, **vars)
- except passthroughex:
- raise
- except:
- raise Failure(self)
- if not node.is_builtin(frame) or not self.is_bool(frame):
- r = frame.repr(self.result)
- self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
-
-class Getattr(Interpretable):
- __view__ = ast.Getattr
-
- def eval(self, frame):
- expr = Interpretable(self.expr)
- expr.eval(frame)
- source = '__exprinfo_expr.%s' % self.attrname
- try:
- self.result = frame.eval(source, __exprinfo_expr=expr.result)
- except passthroughex:
- raise
- except:
- raise Failure(self)
- self.explanation = '%s.%s' % (expr.explanation, self.attrname)
- # if the attribute comes from the instance, its value is interesting
- source = ('hasattr(__exprinfo_expr, "__dict__") and '
- '%r in __exprinfo_expr.__dict__' % self.attrname)
- try:
- from_instance = frame.is_true(
- frame.eval(source, __exprinfo_expr=expr.result))
- except passthroughex:
- raise
- except:
- from_instance = True
- if from_instance:
- r = frame.repr(self.result)
- self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
-
-# == Re-interpretation of full statements ==
-
-class Assert(Interpretable):
- __view__ = ast.Assert
-
- def run(self, frame):
- test = Interpretable(self.test)
- test.eval(frame)
- # simplify 'assert False where False = ...'
- if (test.explanation.startswith('False\n{False = ') and
- test.explanation.endswith('\n}')):
- test.explanation = test.explanation[15:-2]
- # print the result as 'assert <explanation>'
- self.result = test.result
- self.explanation = 'assert ' + test.explanation
- if not frame.is_true(test.result):
- try:
- raise BuiltinAssertionError
- except passthroughex:
- raise
- except:
- raise Failure(self)
-
-class Assign(Interpretable):
- __view__ = ast.Assign
-
- def run(self, frame):
- expr = Interpretable(self.expr)
- expr.eval(frame)
- self.result = expr.result
- self.explanation = '... = ' + expr.explanation
- # fall-back-run the rest of the assignment
- ass = ast.Assign(self.nodes, ast.Name('__exprinfo_expr'))
- mod = ast.Module(None, ast.Stmt([ass]))
- mod.filename = '<run>'
- co = pycodegen.ModuleCodeGenerator(mod).getCode()
- try:
- frame.exec_(co, __exprinfo_expr=expr.result)
- except passthroughex:
- raise
- except:
- raise Failure(self)
-
-class Discard(Interpretable):
- __view__ = ast.Discard
-
- def run(self, frame):
- expr = Interpretable(self.expr)
- expr.eval(frame)
- self.result = expr.result
- self.explanation = expr.explanation
-
-class Stmt(Interpretable):
- __view__ = ast.Stmt
-
- def run(self, frame):
- for stmt in self.nodes:
- stmt = Interpretable(stmt)
- stmt.run(frame)
-
-
-def report_failure(e):
- explanation = e.node.nice_explanation()
- if explanation:
- explanation = ", in: " + explanation
- else:
- explanation = ""
- sys.stdout.write("%s: %s%s\n" % (e.exc.__name__, e.value, explanation))
-
-def check(s, frame=None):
- if frame is None:
- frame = sys._getframe(1)
- frame = py.code.Frame(frame)
- expr = parse(s, 'eval')
- assert isinstance(expr, ast.Expression)
- node = Interpretable(expr.node)
- try:
- node.eval(frame)
- except passthroughex:
- raise
- except Failure:
- e = sys.exc_info()[1]
- report_failure(e)
- else:
- if not frame.is_true(node.result):
- sys.stderr.write("assertion failed: %s\n" % node.nice_explanation())
-
-
-###########################################################
-# API / Entry points
-# #########################################################
-
-def interpret(source, frame, should_fail=False):
- module = Interpretable(parse(source, 'exec').node)
- #print "got module", module
- if isinstance(frame, py.std.types.FrameType):
- frame = py.code.Frame(frame)
- try:
- module.run(frame)
- except Failure:
- e = sys.exc_info()[1]
- return getfailure(e)
- except passthroughex:
- raise
- except:
- import traceback
- traceback.print_exc()
- if should_fail:
- return ("(assertion failed, but when it was re-run for "
- "printing intermediate values, it did not fail. Suggestions: "
- "compute assert expression before the assert or use --nomagic)")
- else:
- return None
-
-def getmsg(excinfo):
- if isinstance(excinfo, tuple):
- excinfo = py.code.ExceptionInfo(excinfo)
- #frame, line = gettbline(tb)
- #frame = py.code.Frame(frame)
- #return interpret(line, frame)
-
- tb = excinfo.traceback[-1]
- source = str(tb.statement).strip()
- x = interpret(source, tb.frame, should_fail=True)
- if not isinstance(x, str):
- raise TypeError("interpret returned non-string %r" % (x,))
- return x
-
-def getfailure(e):
- explanation = e.node.nice_explanation()
- if str(e.value):
- lines = explanation.split('\n')
- lines[0] += " << %s" % (e.value,)
- explanation = '\n'.join(lines)
- text = "%s: %s" % (e.exc.__name__, explanation)
- if text.startswith('AssertionError: assert '):
- text = text[16:]
- return text
-
-def run(s, frame=None):
- if frame is None:
- frame = sys._getframe(1)
- frame = py.code.Frame(frame)
- module = Interpretable(parse(s, 'exec').node)
- try:
- module.run(frame)
- except Failure:
- e = sys.exc_info()[1]
- report_failure(e)
-
-
-if __name__ == '__main__':
- # example:
- def f():
- return 5
- def g():
- return 3
- def h(x):
- return 'never'
- check("f() * g() == 5")
- check("not f()")
- check("not (f() and g() or 0)")
- check("f() == g()")
- i = 4
- check("i == f()")
- check("len(f()) == 0")
- check("isinstance(2+3+4, float)")
-
- run("x = i")
- check("x == 5")
-
- run("assert not f(), 'oops'")
- run("a, b, c = 1, 2")
- run("a, b, c = f()")
-
- check("max([f(),g()]) == 4")
- check("'hello'[g()] == 'h'")
- run("'guk%d' % h(f())")
diff --git a/lib/spack/external/pytest-fallback/py/_code/_py2traceback.py b/lib/spack/external/pytest-fallback/py/_code/_py2traceback.py
deleted file mode 100644
index d65e27cb73..0000000000
--- a/lib/spack/external/pytest-fallback/py/_code/_py2traceback.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# copied from python-2.7.3's traceback.py
-# CHANGES:
-# - some_str is replaced, trying to create unicode strings
-#
-import types
-
-def format_exception_only(etype, value):
- """Format the exception part of a traceback.
-
- The arguments are the exception type and value such as given by
- sys.last_type and sys.last_value. The return value is a list of
- strings, each ending in a newline.
-
- Normally, the list contains a single string; however, for
- SyntaxError exceptions, it contains several lines that (when
- printed) display detailed information about where the syntax
- error occurred.
-
- The message indicating which exception occurred is always the last
- string in the list.
-
- """
-
- # An instance should not have a meaningful value parameter, but
- # sometimes does, particularly for string exceptions, such as
- # >>> raise string1, string2 # deprecated
- #
- # Clear these out first because issubclass(string1, SyntaxError)
- # would throw another exception and mask the original problem.
- if (isinstance(etype, BaseException) or
- isinstance(etype, types.InstanceType) or
- etype is None or type(etype) is str):
- return [_format_final_exc_line(etype, value)]
-
- stype = etype.__name__
-
- if not issubclass(etype, SyntaxError):
- return [_format_final_exc_line(stype, value)]
-
- # It was a syntax error; show exactly where the problem was found.
- lines = []
- try:
- msg, (filename, lineno, offset, badline) = value.args
- except Exception:
- pass
- else:
- filename = filename or "<string>"
- lines.append(' File "%s", line %d\n' % (filename, lineno))
- if badline is not None:
- lines.append(' %s\n' % badline.strip())
- if offset is not None:
- caretspace = badline.rstrip('\n')[:offset].lstrip()
- # non-space whitespace (like tabs) must be kept for alignment
- caretspace = ((c.isspace() and c or ' ') for c in caretspace)
- # only three spaces to account for offset1 == pos 0
- lines.append(' %s^\n' % ''.join(caretspace))
- value = msg
-
- lines.append(_format_final_exc_line(stype, value))
- return lines
-
-def _format_final_exc_line(etype, value):
- """Return a list of a single line -- normal case for format_exception_only"""
- valuestr = _some_str(value)
- if value is None or not valuestr:
- line = "%s\n" % etype
- else:
- line = "%s: %s\n" % (etype, valuestr)
- return line
-
-def _some_str(value):
- try:
- return unicode(value)
- except Exception:
- try:
- return str(value)
- except Exception:
- pass
- return '<unprintable %s object>' % type(value).__name__
diff --git a/lib/spack/external/pytest-fallback/py/_code/assertion.py b/lib/spack/external/pytest-fallback/py/_code/assertion.py
deleted file mode 100644
index 4ce80c75b1..0000000000
--- a/lib/spack/external/pytest-fallback/py/_code/assertion.py
+++ /dev/null
@@ -1,94 +0,0 @@
-import sys
-import py
-
-BuiltinAssertionError = py.builtin.builtins.AssertionError
-
-_reprcompare = None # if set, will be called by assert reinterp for comparison ops
-
-def _format_explanation(explanation):
- """This formats an explanation
-
- Normally all embedded newlines are escaped, however there are
- three exceptions: \n{, \n} and \n~. The first two are intended to
- cover nested explanations; see the function and attribute explanations
- for examples (.visit_Call(), visit_Attribute()). The last one is
- for when one explanation needs to span multiple lines, e.g. when
- displaying diffs.
- """
- raw_lines = (explanation or '').split('\n')
- # escape newlines not followed by {, } and ~
- lines = [raw_lines[0]]
- for l in raw_lines[1:]:
- if l.startswith('{') or l.startswith('}') or l.startswith('~'):
- lines.append(l)
- else:
- lines[-1] += '\\n' + l
-
- result = lines[:1]
- stack = [0]
- stackcnt = [0]
- for line in lines[1:]:
- if line.startswith('{'):
- if stackcnt[-1]:
- s = 'and '
- else:
- s = 'where '
- stack.append(len(result))
- stackcnt[-1] += 1
- stackcnt.append(0)
- result.append(' +' + ' '*(len(stack)-1) + s + line[1:])
- elif line.startswith('}'):
- assert line.startswith('}')
- stack.pop()
- stackcnt.pop()
- result[stack[-1]] += line[1:]
- else:
- assert line.startswith('~')
- result.append(' '*len(stack) + line[1:])
- assert len(stack) == 1
- return '\n'.join(result)
-
-
-class AssertionError(BuiltinAssertionError):
- def __init__(self, *args):
- BuiltinAssertionError.__init__(self, *args)
- if args:
- try:
- self.msg = str(args[0])
- except py.builtin._sysex:
- raise
- except:
- self.msg = "<[broken __repr__] %s at %0xd>" %(
- args[0].__class__, id(args[0]))
- else:
- f = py.code.Frame(sys._getframe(1))
- try:
- source = f.code.fullsource
- if source is not None:
- try:
- source = source.getstatement(f.lineno, assertion=True)
- except IndexError:
- source = None
- else:
- source = str(source.deindent()).strip()
- except py.error.ENOENT:
- source = None
- # this can also occur during reinterpretation, when the
- # co_filename is set to "<run>".
- if source:
- self.msg = reinterpret(source, f, should_fail=True)
- else:
- self.msg = "<could not determine information>"
- if not self.args:
- self.args = (self.msg,)
-
-if sys.version_info > (3, 0):
- AssertionError.__module__ = "builtins"
- reinterpret_old = "old reinterpretation not available for py3"
-else:
- from py._code._assertionold import interpret as reinterpret_old
-if sys.version_info >= (2, 6) or (sys.platform.startswith("java")):
- from py._code._assertionnew import interpret as reinterpret
-else:
- reinterpret = reinterpret_old
-
diff --git a/lib/spack/external/pytest-fallback/py/_code/code.py b/lib/spack/external/pytest-fallback/py/_code/code.py
deleted file mode 100644
index 20fd965c97..0000000000
--- a/lib/spack/external/pytest-fallback/py/_code/code.py
+++ /dev/null
@@ -1,787 +0,0 @@
-import py
-import sys
-from inspect import CO_VARARGS, CO_VARKEYWORDS
-
-builtin_repr = repr
-
-reprlib = py.builtin._tryimport('repr', 'reprlib')
-
-if sys.version_info[0] >= 3:
- from traceback import format_exception_only
-else:
- from py._code._py2traceback import format_exception_only
-
-class Code(object):
- """ wrapper around Python code objects """
- def __init__(self, rawcode):
- if not hasattr(rawcode, "co_filename"):
- rawcode = py.code.getrawcode(rawcode)
- try:
- self.filename = rawcode.co_filename
- self.firstlineno = rawcode.co_firstlineno - 1
- self.name = rawcode.co_name
- except AttributeError:
- raise TypeError("not a code object: %r" %(rawcode,))
- self.raw = rawcode
-
- def __eq__(self, other):
- return self.raw == other.raw
-
- def __ne__(self, other):
- return not self == other
-
- @property
- def path(self):
- """ return a path object pointing to source code (note that it
- might not point to an actually existing file). """
- p = py.path.local(self.raw.co_filename)
- # maybe don't try this checking
- if not p.check():
- # XXX maybe try harder like the weird logic
- # in the standard lib [linecache.updatecache] does?
- p = self.raw.co_filename
- return p
-
- @property
- def fullsource(self):
- """ return a py.code.Source object for the full source file of the code
- """
- from py._code import source
- full, _ = source.findsource(self.raw)
- return full
-
- def source(self):
- """ return a py.code.Source object for the code object's source only
- """
- # return source only for that part of code
- return py.code.Source(self.raw)
-
- def getargs(self, var=False):
- """ return a tuple with the argument names for the code object
-
- if 'var' is set True also return the names of the variable and
- keyword arguments when present
- """
- # handy shortcut for getting args
- raw = self.raw
- argcount = raw.co_argcount
- if var:
- argcount += raw.co_flags & CO_VARARGS
- argcount += raw.co_flags & CO_VARKEYWORDS
- return raw.co_varnames[:argcount]
-
-class Frame(object):
- """Wrapper around a Python frame holding f_locals and f_globals
- in which expressions can be evaluated."""
-
- def __init__(self, frame):
- self.lineno = frame.f_lineno - 1
- self.f_globals = frame.f_globals
- self.f_locals = frame.f_locals
- self.raw = frame
- self.code = py.code.Code(frame.f_code)
-
- @property
- def statement(self):
- """ statement this frame is at """
- if self.code.fullsource is None:
- return py.code.Source("")
- return self.code.fullsource.getstatement(self.lineno)
-
- def eval(self, code, **vars):
- """ evaluate 'code' in the frame
-
- 'vars' are optional additional local variables
-
- returns the result of the evaluation
- """
- f_locals = self.f_locals.copy()
- f_locals.update(vars)
- return eval(code, self.f_globals, f_locals)
-
- def exec_(self, code, **vars):
- """ exec 'code' in the frame
-
- 'vars' are optional additional local variables
- """
- f_locals = self.f_locals.copy()
- f_locals.update(vars)
- py.builtin.exec_(code, self.f_globals, f_locals)
-
- def repr(self, object):
- """ return a 'safe' (non-recursive, one-line) string repr for 'object'
- """
- return py.io.saferepr(object)
-
- def is_true(self, object):
- return object
-
- def getargs(self, var=False):
- """ return a list of tuples (name, value) for all arguments
-
- if 'var' is set True also include the variable and keyword
- arguments when present
- """
- retval = []
- for arg in self.code.getargs(var):
- try:
- retval.append((arg, self.f_locals[arg]))
- except KeyError:
- pass # this can occur when using Psyco
- return retval
-
-class TracebackEntry(object):
- """ a single entry in a traceback """
-
- _repr_style = None
- exprinfo = None
-
- def __init__(self, rawentry):
- self._rawentry = rawentry
- self.lineno = rawentry.tb_lineno - 1
-
- def set_repr_style(self, mode):
- assert mode in ("short", "long")
- self._repr_style = mode
-
- @property
- def frame(self):
- return py.code.Frame(self._rawentry.tb_frame)
-
- @property
- def relline(self):
- return self.lineno - self.frame.code.firstlineno
-
- def __repr__(self):
- return "<TracebackEntry %s:%d>" %(self.frame.code.path, self.lineno+1)
-
- @property
- def statement(self):
- """ py.code.Source object for the current statement """
- source = self.frame.code.fullsource
- return source.getstatement(self.lineno)
-
- @property
- def path(self):
- """ path to the source code """
- return self.frame.code.path
-
- def getlocals(self):
- return self.frame.f_locals
- locals = property(getlocals, None, None, "locals of underlying frame")
-
- def reinterpret(self):
- """Reinterpret the failing statement and returns a detailed information
- about what operations are performed."""
- if self.exprinfo is None:
- source = str(self.statement).strip()
- x = py.code._reinterpret(source, self.frame, should_fail=True)
- if not isinstance(x, str):
- raise TypeError("interpret returned non-string %r" % (x,))
- self.exprinfo = x
- return self.exprinfo
-
- def getfirstlinesource(self):
- # on Jython this firstlineno can be -1 apparently
- return max(self.frame.code.firstlineno, 0)
-
- def getsource(self, astcache=None):
- """ return failing source code. """
- # we use the passed-in astcache to avoid re-parsing AST trees
- # within exception info printing
- from py._code.source import getstatementrange_ast
- source = self.frame.code.fullsource
- if source is None:
- return None
- key = astnode = None
- if astcache is not None:
- key = self.frame.code.path
- if key is not None:
- astnode = astcache.get(key, None)
- start = self.getfirstlinesource()
- try:
- astnode, _, end = getstatementrange_ast(self.lineno, source,
- astnode=astnode)
- except SyntaxError:
- end = self.lineno + 1
- else:
- if key is not None:
- astcache[key] = astnode
- return source[start:end]
-
- source = property(getsource)
-
- def ishidden(self):
- """ return True if the current frame has a var __tracebackhide__
- resolving to True
-
- mostly for internal use
- """
- try:
- return self.frame.f_locals['__tracebackhide__']
- except KeyError:
- try:
- return self.frame.f_globals['__tracebackhide__']
- except KeyError:
- return False
-
- def __str__(self):
- try:
- fn = str(self.path)
- except py.error.Error:
- fn = '???'
- name = self.frame.code.name
- try:
- line = str(self.statement).lstrip()
- except KeyboardInterrupt:
- raise
- except:
- line = "???"
- return " File %r:%d in %s\n %s\n" %(fn, self.lineno+1, name, line)
-
- def name(self):
- return self.frame.code.raw.co_name
- name = property(name, None, None, "co_name of underlying code")
-
-class Traceback(list):
- """ Traceback objects encapsulate and offer higher level
- access to Traceback entries.
- """
- Entry = TracebackEntry
- def __init__(self, tb):
- """ initialize from given python traceback object. """
- if hasattr(tb, 'tb_next'):
- def f(cur):
- while cur is not None:
- yield self.Entry(cur)
- cur = cur.tb_next
- list.__init__(self, f(tb))
- else:
- list.__init__(self, tb)
-
- def cut(self, path=None, lineno=None, firstlineno=None, excludepath=None):
- """ return a Traceback instance wrapping part of this Traceback
-
- by providing any combination of path, lineno and firstlineno, the
- first frame of the returned traceback is determined
-
- this allows cutting the first part of a Traceback instance e.g.
- for formatting reasons (removing some uninteresting bits that deal
- with handling of the exception/traceback)
- """
- for x in self:
- code = x.frame.code
- codepath = code.path
- if ((path is None or codepath == path) and
- (excludepath is None or not hasattr(codepath, 'relto') or
- not codepath.relto(excludepath)) and
- (lineno is None or x.lineno == lineno) and
- (firstlineno is None or x.frame.code.firstlineno == firstlineno)):
- return Traceback(x._rawentry)
- return self
-
- def __getitem__(self, key):
- val = super(Traceback, self).__getitem__(key)
- if isinstance(key, type(slice(0))):
- val = self.__class__(val)
- return val
-
- def filter(self, fn=lambda x: not x.ishidden()):
- """ return a Traceback instance with certain items removed
-
- fn is a function that gets a single argument, a TracebackItem
- instance, and should return True when the item should be added
- to the Traceback, False when not
-
- by default this removes all the TracebackItems which are hidden
- (see ishidden() above)
- """
- return Traceback(filter(fn, self))
-
- def getcrashentry(self):
- """ return last non-hidden traceback entry that lead
- to the exception of a traceback.
- """
- for i in range(-1, -len(self)-1, -1):
- entry = self[i]
- if not entry.ishidden():
- return entry
- return self[-1]
-
- def recursionindex(self):
- """ return the index of the frame/TracebackItem where recursion
- originates if appropriate, None if no recursion occurred
- """
- cache = {}
- for i, entry in enumerate(self):
- # id for the code.raw is needed to work around
- # the strange metaprogramming in the decorator lib from pypi
- # which generates code objects that have hash/value equality
- #XXX needs a test
- key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno
- #print "checking for recursion at", key
- l = cache.setdefault(key, [])
- if l:
- f = entry.frame
- loc = f.f_locals
- for otherloc in l:
- if f.is_true(f.eval(co_equal,
- __recursioncache_locals_1=loc,
- __recursioncache_locals_2=otherloc)):
- return i
- l.append(entry.frame.f_locals)
- return None
-
-co_equal = compile('__recursioncache_locals_1 == __recursioncache_locals_2',
- '?', 'eval')
-
-class ExceptionInfo(object):
- """ wraps sys.exc_info() objects and offers
- help for navigating the traceback.
- """
- _striptext = ''
- def __init__(self, tup=None, exprinfo=None):
- if tup is None:
- tup = sys.exc_info()
- if exprinfo is None and isinstance(tup[1], AssertionError):
- exprinfo = getattr(tup[1], 'msg', None)
- if exprinfo is None:
- exprinfo = str(tup[1])
- if exprinfo and exprinfo.startswith('assert '):
- self._striptext = 'AssertionError: '
- self._excinfo = tup
- #: the exception class
- self.type = tup[0]
- #: the exception instance
- self.value = tup[1]
- #: the exception raw traceback
- self.tb = tup[2]
- #: the exception type name
- self.typename = self.type.__name__
- #: the exception traceback (py.code.Traceback instance)
- self.traceback = py.code.Traceback(self.tb)
-
- def __repr__(self):
- return "<ExceptionInfo %s tblen=%d>" % (self.typename, len(self.traceback))
-
- def exconly(self, tryshort=False):
- """ return the exception as a string
-
- when 'tryshort' resolves to True, and the exception is a
- py.code._AssertionError, only the actual exception part of
- the exception representation is returned (so 'AssertionError: ' is
- removed from the beginning)
- """
- lines = format_exception_only(self.type, self.value)
- text = ''.join(lines)
- text = text.rstrip()
- if tryshort:
- if text.startswith(self._striptext):
- text = text[len(self._striptext):]
- return text
-
- def errisinstance(self, exc):
- """ return True if the exception is an instance of exc """
- return isinstance(self.value, exc)
-
- def _getreprcrash(self):
- exconly = self.exconly(tryshort=True)
- entry = self.traceback.getcrashentry()
- path, lineno = entry.frame.code.raw.co_filename, entry.lineno
- return ReprFileLocation(path, lineno+1, exconly)
-
- def getrepr(self, showlocals=False, style="long",
- abspath=False, tbfilter=True, funcargs=False):
- """ return str()able representation of this exception info.
- showlocals: show locals per traceback entry
- style: long|short|no|native traceback style
- tbfilter: hide entries (where __tracebackhide__ is true)
-
- in case of style==native, tbfilter and showlocals are ignored.
- """
- if style == 'native':
- return ReprExceptionInfo(ReprTracebackNative(
- py.std.traceback.format_exception(
- self.type,
- self.value,
- self.traceback[0]._rawentry,
- )), self._getreprcrash())
-
- fmt = FormattedExcinfo(showlocals=showlocals, style=style,
- abspath=abspath, tbfilter=tbfilter, funcargs=funcargs)
- return fmt.repr_excinfo(self)
-
- def __str__(self):
- entry = self.traceback[-1]
- loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
- return str(loc)
-
- def __unicode__(self):
- entry = self.traceback[-1]
- loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
- return loc.__unicode__()
-
-
-class FormattedExcinfo(object):
- """ presenting information about failing Functions and Generators. """
- # for traceback entries
- flow_marker = ">"
- fail_marker = "E"
-
- def __init__(self, showlocals=False, style="long", abspath=True, tbfilter=True, funcargs=False):
- self.showlocals = showlocals
- self.style = style
- self.tbfilter = tbfilter
- self.funcargs = funcargs
- self.abspath = abspath
- self.astcache = {}
-
- def _getindent(self, source):
- # figure out indent for given source
- try:
- s = str(source.getstatement(len(source)-1))
- except KeyboardInterrupt:
- raise
- except:
- try:
- s = str(source[-1])
- except KeyboardInterrupt:
- raise
- except:
- return 0
- return 4 + (len(s) - len(s.lstrip()))
-
- def _getentrysource(self, entry):
- source = entry.getsource(self.astcache)
- if source is not None:
- source = source.deindent()
- return source
-
- def _saferepr(self, obj):
- return py.io.saferepr(obj)
-
- def repr_args(self, entry):
- if self.funcargs:
- args = []
- for argname, argvalue in entry.frame.getargs(var=True):
- args.append((argname, self._saferepr(argvalue)))
- return ReprFuncArgs(args)
-
- def get_source(self, source, line_index=-1, excinfo=None, short=False):
- """ return formatted and marked up source lines. """
- lines = []
- if source is None or line_index >= len(source.lines):
- source = py.code.Source("???")
- line_index = 0
- if line_index < 0:
- line_index += len(source)
- space_prefix = " "
- if short:
- lines.append(space_prefix + source.lines[line_index].strip())
- else:
- for line in source.lines[:line_index]:
- lines.append(space_prefix + line)
- lines.append(self.flow_marker + " " + source.lines[line_index])
- for line in source.lines[line_index+1:]:
- lines.append(space_prefix + line)
- if excinfo is not None:
- indent = 4 if short else self._getindent(source)
- lines.extend(self.get_exconly(excinfo, indent=indent, markall=True))
- return lines
-
- def get_exconly(self, excinfo, indent=4, markall=False):
- lines = []
- indent = " " * indent
- # get the real exception information out
- exlines = excinfo.exconly(tryshort=True).split('\n')
- failindent = self.fail_marker + indent[1:]
- for line in exlines:
- lines.append(failindent + line)
- if not markall:
- failindent = indent
- return lines
-
- def repr_locals(self, locals):
- if self.showlocals:
- lines = []
- keys = [loc for loc in locals if loc[0] != "@"]
- keys.sort()
- for name in keys:
- value = locals[name]
- if name == '__builtins__':
- lines.append("__builtins__ = <builtins>")
- else:
- # This formatting could all be handled by the
- # _repr() function, which is only reprlib.Repr in
- # disguise, so is very configurable.
- str_repr = self._saferepr(value)
- #if len(str_repr) < 70 or not isinstance(value,
- # (list, tuple, dict)):
- lines.append("%-10s = %s" %(name, str_repr))
- #else:
- # self._line("%-10s =\\" % (name,))
- # # XXX
- # py.std.pprint.pprint(value, stream=self.excinfowriter)
- return ReprLocals(lines)
-
- def repr_traceback_entry(self, entry, excinfo=None):
- source = self._getentrysource(entry)
- if source is None:
- source = py.code.Source("???")
- line_index = 0
- else:
- # entry.getfirstlinesource() can be -1, should be 0 on jython
- line_index = entry.lineno - max(entry.getfirstlinesource(), 0)
-
- lines = []
- style = entry._repr_style
- if style is None:
- style = self.style
- if style in ("short", "long"):
- short = style == "short"
- reprargs = self.repr_args(entry) if not short else None
- s = self.get_source(source, line_index, excinfo, short=short)
- lines.extend(s)
- if short:
- message = "in %s" %(entry.name)
- else:
- message = excinfo and excinfo.typename or ""
- path = self._makepath(entry.path)
- filelocrepr = ReprFileLocation(path, entry.lineno+1, message)
- localsrepr = None
- if not short:
- localsrepr = self.repr_locals(entry.locals)
- return ReprEntry(lines, reprargs, localsrepr, filelocrepr, style)
- if excinfo:
- lines.extend(self.get_exconly(excinfo, indent=4))
- return ReprEntry(lines, None, None, None, style)
-
- def _makepath(self, path):
- if not self.abspath:
- try:
- np = py.path.local().bestrelpath(path)
- except OSError:
- return path
- if len(np) < len(str(path)):
- path = np
- return path
-
- def repr_traceback(self, excinfo):
- traceback = excinfo.traceback
- if self.tbfilter:
- traceback = traceback.filter()
- recursionindex = None
- if excinfo.errisinstance(RuntimeError):
- if "maximum recursion depth exceeded" in str(excinfo.value):
- recursionindex = traceback.recursionindex()
- last = traceback[-1]
- entries = []
- extraline = None
- for index, entry in enumerate(traceback):
- einfo = (last == entry) and excinfo or None
- reprentry = self.repr_traceback_entry(entry, einfo)
- entries.append(reprentry)
- if index == recursionindex:
- extraline = "!!! Recursion detected (same locals & position)"
- break
- return ReprTraceback(entries, extraline, style=self.style)
-
- def repr_excinfo(self, excinfo):
- reprtraceback = self.repr_traceback(excinfo)
- reprcrash = excinfo._getreprcrash()
- return ReprExceptionInfo(reprtraceback, reprcrash)
-
-class TerminalRepr:
- def __str__(self):
- s = self.__unicode__()
- if sys.version_info[0] < 3:
- s = s.encode('utf-8')
- return s
-
- def __unicode__(self):
- # FYI this is called from pytest-xdist's serialization of exception
- # information.
- io = py.io.TextIO()
- tw = py.io.TerminalWriter(file=io)
- self.toterminal(tw)
- return io.getvalue().strip()
-
- def __repr__(self):
- return "<%s instance at %0x>" %(self.__class__, id(self))
-
-
-class ReprExceptionInfo(TerminalRepr):
- def __init__(self, reprtraceback, reprcrash):
- self.reprtraceback = reprtraceback
- self.reprcrash = reprcrash
- self.sections = []
-
- def addsection(self, name, content, sep="-"):
- self.sections.append((name, content, sep))
-
- def toterminal(self, tw):
- self.reprtraceback.toterminal(tw)
- for name, content, sep in self.sections:
- tw.sep(sep, name)
- tw.line(content)
-
-class ReprTraceback(TerminalRepr):
- entrysep = "_ "
-
- def __init__(self, reprentries, extraline, style):
- self.reprentries = reprentries
- self.extraline = extraline
- self.style = style
-
- def toterminal(self, tw):
- # the entries might have different styles
- last_style = None
- for i, entry in enumerate(self.reprentries):
- if entry.style == "long":
- tw.line("")
- entry.toterminal(tw)
- if i < len(self.reprentries) - 1:
- next_entry = self.reprentries[i+1]
- if entry.style == "long" or \
- entry.style == "short" and next_entry.style == "long":
- tw.sep(self.entrysep)
-
- if self.extraline:
- tw.line(self.extraline)
-
-class ReprTracebackNative(ReprTraceback):
- def __init__(self, tblines):
- self.style = "native"
- self.reprentries = [ReprEntryNative(tblines)]
- self.extraline = None
-
-class ReprEntryNative(TerminalRepr):
- style = "native"
-
- def __init__(self, tblines):
- self.lines = tblines
-
- def toterminal(self, tw):
- tw.write("".join(self.lines))
-
-class ReprEntry(TerminalRepr):
- localssep = "_ "
-
- def __init__(self, lines, reprfuncargs, reprlocals, filelocrepr, style):
- self.lines = lines
- self.reprfuncargs = reprfuncargs
- self.reprlocals = reprlocals
- self.reprfileloc = filelocrepr
- self.style = style
-
- def toterminal(self, tw):
- if self.style == "short":
- self.reprfileloc.toterminal(tw)
- for line in self.lines:
- red = line.startswith("E ")
- tw.line(line, bold=True, red=red)
- #tw.line("")
- return
- if self.reprfuncargs:
- self.reprfuncargs.toterminal(tw)
- for line in self.lines:
- red = line.startswith("E ")
- tw.line(line, bold=True, red=red)
- if self.reprlocals:
- #tw.sep(self.localssep, "Locals")
- tw.line("")
- self.reprlocals.toterminal(tw)
- if self.reprfileloc:
- if self.lines:
- tw.line("")
- self.reprfileloc.toterminal(tw)
-
- def __str__(self):
- return "%s\n%s\n%s" % ("\n".join(self.lines),
- self.reprlocals,
- self.reprfileloc)
-
-class ReprFileLocation(TerminalRepr):
- def __init__(self, path, lineno, message):
- self.path = str(path)
- self.lineno = lineno
- self.message = message
-
- def toterminal(self, tw):
- # filename and lineno output for each entry,
- # using an output format that most editors understand
- msg = self.message
- i = msg.find("\n")
- if i != -1:
- msg = msg[:i]
- tw.line("%s:%s: %s" %(self.path, self.lineno, msg))
-
-class ReprLocals(TerminalRepr):
- def __init__(self, lines):
- self.lines = lines
-
- def toterminal(self, tw):
- for line in self.lines:
- tw.line(line)
-
-class ReprFuncArgs(TerminalRepr):
- def __init__(self, args):
- self.args = args
-
- def toterminal(self, tw):
- if self.args:
- linesofar = ""
- for name, value in self.args:
- ns = "%s = %s" %(name, value)
- if len(ns) + len(linesofar) + 2 > tw.fullwidth:
- if linesofar:
- tw.line(linesofar)
- linesofar = ns
- else:
- if linesofar:
- linesofar += ", " + ns
- else:
- linesofar = ns
- if linesofar:
- tw.line(linesofar)
- tw.line("")
-
-
-
-oldbuiltins = {}
-
-def patch_builtins(assertion=True, compile=True):
- """ put compile and AssertionError builtins to Python's builtins. """
- if assertion:
- from py._code import assertion
- l = oldbuiltins.setdefault('AssertionError', [])
- l.append(py.builtin.builtins.AssertionError)
- py.builtin.builtins.AssertionError = assertion.AssertionError
- if compile:
- l = oldbuiltins.setdefault('compile', [])
- l.append(py.builtin.builtins.compile)
- py.builtin.builtins.compile = py.code.compile
-
-def unpatch_builtins(assertion=True, compile=True):
- """ remove compile and AssertionError builtins from Python builtins. """
- if assertion:
- py.builtin.builtins.AssertionError = oldbuiltins['AssertionError'].pop()
- if compile:
- py.builtin.builtins.compile = oldbuiltins['compile'].pop()
-
-def getrawcode(obj, trycall=True):
- """ return code object for given function. """
- try:
- return obj.__code__
- except AttributeError:
- obj = getattr(obj, 'im_func', obj)
- obj = getattr(obj, 'func_code', obj)
- obj = getattr(obj, 'f_code', obj)
- obj = getattr(obj, '__code__', obj)
- if trycall and not hasattr(obj, 'co_firstlineno'):
- if hasattr(obj, '__call__') and not py.std.inspect.isclass(obj):
- x = getrawcode(obj.__call__, trycall=False)
- if hasattr(x, 'co_firstlineno'):
- return x
- return obj
-
diff --git a/lib/spack/external/pytest-fallback/py/_code/source.py b/lib/spack/external/pytest-fallback/py/_code/source.py
deleted file mode 100644
index c8b668b2fb..0000000000
--- a/lib/spack/external/pytest-fallback/py/_code/source.py
+++ /dev/null
@@ -1,411 +0,0 @@
-from __future__ import generators
-
-from bisect import bisect_right
-import sys
-import inspect, tokenize
-import py
-from types import ModuleType
-cpy_compile = compile
-
-try:
- import _ast
- from _ast import PyCF_ONLY_AST as _AST_FLAG
-except ImportError:
- _AST_FLAG = 0
- _ast = None
-
-
-class Source(object):
- """ a immutable object holding a source code fragment,
- possibly deindenting it.
- """
- _compilecounter = 0
- def __init__(self, *parts, **kwargs):
- self.lines = lines = []
- de = kwargs.get('deindent', True)
- rstrip = kwargs.get('rstrip', True)
- for part in parts:
- if not part:
- partlines = []
- if isinstance(part, Source):
- partlines = part.lines
- elif isinstance(part, (tuple, list)):
- partlines = [x.rstrip("\n") for x in part]
- elif isinstance(part, py.builtin._basestring):
- partlines = part.split('\n')
- if rstrip:
- while partlines:
- if partlines[-1].strip():
- break
- partlines.pop()
- else:
- partlines = getsource(part, deindent=de).lines
- if de:
- partlines = deindent(partlines)
- lines.extend(partlines)
-
- def __eq__(self, other):
- try:
- return self.lines == other.lines
- except AttributeError:
- if isinstance(other, str):
- return str(self) == other
- return False
-
- def __getitem__(self, key):
- if isinstance(key, int):
- return self.lines[key]
- else:
- if key.step not in (None, 1):
- raise IndexError("cannot slice a Source with a step")
- return self.__getslice__(key.start, key.stop)
-
- def __len__(self):
- return len(self.lines)
-
- def __getslice__(self, start, end):
- newsource = Source()
- newsource.lines = self.lines[start:end]
- return newsource
-
- def strip(self):
- """ return new source object with trailing
- and leading blank lines removed.
- """
- start, end = 0, len(self)
- while start < end and not self.lines[start].strip():
- start += 1
- while end > start and not self.lines[end-1].strip():
- end -= 1
- source = Source()
- source.lines[:] = self.lines[start:end]
- return source
-
- def putaround(self, before='', after='', indent=' ' * 4):
- """ return a copy of the source object with
- 'before' and 'after' wrapped around it.
- """
- before = Source(before)
- after = Source(after)
- newsource = Source()
- lines = [ (indent + line) for line in self.lines]
- newsource.lines = before.lines + lines + after.lines
- return newsource
-
- def indent(self, indent=' ' * 4):
- """ return a copy of the source object with
- all lines indented by the given indent-string.
- """
- newsource = Source()
- newsource.lines = [(indent+line) for line in self.lines]
- return newsource
-
- def getstatement(self, lineno, assertion=False):
- """ return Source statement which contains the
- given linenumber (counted from 0).
- """
- start, end = self.getstatementrange(lineno, assertion)
- return self[start:end]
-
- def getstatementrange(self, lineno, assertion=False):
- """ return (start, end) tuple which spans the minimal
- statement region which containing the given lineno.
- """
- if not (0 <= lineno < len(self)):
- raise IndexError("lineno out of range")
- ast, start, end = getstatementrange_ast(lineno, self)
- return start, end
-
- def deindent(self, offset=None):
- """ return a new source object deindented by offset.
- If offset is None then guess an indentation offset from
- the first non-blank line. Subsequent lines which have a
- lower indentation offset will be copied verbatim, as
- they are assumed to be part of multiline constructs.
- """
- # XXX maybe use the tokenizer to properly handle multiline
- # strings etc.pp?
- newsource = Source()
- newsource.lines[:] = deindent(self.lines, offset)
- return newsource
-
- def isparseable(self, deindent=True):
- """ return True if source is parseable, heuristically
- deindenting it by default.
- """
- try:
- import parser
- except ImportError:
- syntax_checker = lambda x: compile(x, 'asd', 'exec')
- else:
- syntax_checker = parser.suite
-
- if deindent:
- source = str(self.deindent())
- else:
- source = str(self)
- try:
- #compile(source+'\n', "x", "exec")
- syntax_checker(source+'\n')
- except KeyboardInterrupt:
- raise
- except Exception:
- return False
- else:
- return True
-
- def __str__(self):
- return "\n".join(self.lines)
-
- def compile(self, filename=None, mode='exec',
- flag=generators.compiler_flag,
- dont_inherit=0, _genframe=None):
- """ return compiled code object. if filename is None
- invent an artificial filename which displays
- the source/line position of the caller frame.
- """
- if not filename or py.path.local(filename).check(file=0):
- if _genframe is None:
- _genframe = sys._getframe(1) # the caller
- fn,lineno = _genframe.f_code.co_filename, _genframe.f_lineno
- base = "<%d-codegen " % self._compilecounter
- self.__class__._compilecounter += 1
- if not filename:
- filename = base + '%s:%d>' % (fn, lineno)
- else:
- filename = base + '%r %s:%d>' % (filename, fn, lineno)
- source = "\n".join(self.lines) + '\n'
- try:
- co = cpy_compile(source, filename, mode, flag)
- except SyntaxError:
- ex = sys.exc_info()[1]
- # re-represent syntax errors from parsing python strings
- msglines = self.lines[:ex.lineno]
- if ex.offset:
- msglines.append(" "*ex.offset + '^')
- msglines.append("(code was compiled probably from here: %s)" % filename)
- newex = SyntaxError('\n'.join(msglines))
- newex.offset = ex.offset
- newex.lineno = ex.lineno
- newex.text = ex.text
- raise newex
- else:
- if flag & _AST_FLAG:
- return co
- lines = [(x + "\n") for x in self.lines]
- py.std.linecache.cache[filename] = (1, None, lines, filename)
- return co
-
-#
-# public API shortcut functions
-#
-
-def compile_(source, filename=None, mode='exec', flags=
- generators.compiler_flag, dont_inherit=0):
- """ compile the given source to a raw code object,
- and maintain an internal cache which allows later
- retrieval of the source code for the code object
- and any recursively created code objects.
- """
- if _ast is not None and isinstance(source, _ast.AST):
- # XXX should Source support having AST?
- return cpy_compile(source, filename, mode, flags, dont_inherit)
- _genframe = sys._getframe(1) # the caller
- s = Source(source)
- co = s.compile(filename, mode, flags, _genframe=_genframe)
- return co
-
-
-def getfslineno(obj):
- """ Return source location (path, lineno) for the given object.
- If the source cannot be determined return ("", -1)
- """
- try:
- code = py.code.Code(obj)
- except TypeError:
- try:
- fn = (py.std.inspect.getsourcefile(obj) or
- py.std.inspect.getfile(obj))
- except TypeError:
- return "", -1
-
- fspath = fn and py.path.local(fn) or None
- lineno = -1
- if fspath:
- try:
- _, lineno = findsource(obj)
- except IOError:
- pass
- else:
- fspath = code.path
- lineno = code.firstlineno
- assert isinstance(lineno, int)
- return fspath, lineno
-
-#
-# helper functions
-#
-
-def findsource(obj):
- try:
- sourcelines, lineno = py.std.inspect.findsource(obj)
- except py.builtin._sysex:
- raise
- except:
- return None, -1
- source = Source()
- source.lines = [line.rstrip() for line in sourcelines]
- return source, lineno
-
-def getsource(obj, **kwargs):
- obj = py.code.getrawcode(obj)
- try:
- strsrc = inspect.getsource(obj)
- except IndentationError:
- strsrc = "\"Buggy python version consider upgrading, cannot get source\""
- assert isinstance(strsrc, str)
- return Source(strsrc, **kwargs)
-
-def deindent(lines, offset=None):
- if offset is None:
- for line in lines:
- line = line.expandtabs()
- s = line.lstrip()
- if s:
- offset = len(line)-len(s)
- break
- else:
- offset = 0
- if offset == 0:
- return list(lines)
- newlines = []
- def readline_generator(lines):
- for line in lines:
- yield line + '\n'
- while True:
- yield ''
-
- it = readline_generator(lines)
-
- try:
- for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)):
- if sline > len(lines):
- break # End of input reached
- if sline > len(newlines):
- line = lines[sline - 1].expandtabs()
- if line.lstrip() and line[:offset].isspace():
- line = line[offset:] # Deindent
- newlines.append(line)
-
- for i in range(sline, eline):
- # Don't deindent continuing lines of
- # multiline tokens (i.e. multiline strings)
- newlines.append(lines[i])
- except (IndentationError, tokenize.TokenError):
- pass
- # Add any lines we didn't see. E.g. if an exception was raised.
- newlines.extend(lines[len(newlines):])
- return newlines
-
-
-def get_statement_startend2(lineno, node):
- import ast
- # flatten all statements and except handlers into one lineno-list
- # AST's line numbers start indexing at 1
- l = []
- for x in ast.walk(node):
- if isinstance(x, _ast.stmt) or isinstance(x, _ast.ExceptHandler):
- l.append(x.lineno - 1)
- for name in "finalbody", "orelse":
- val = getattr(x, name, None)
- if val:
- # treat the finally/orelse part as its own statement
- l.append(val[0].lineno - 1 - 1)
- l.sort()
- insert_index = bisect_right(l, lineno)
- start = l[insert_index - 1]
- if insert_index >= len(l):
- end = None
- else:
- end = l[insert_index]
- return start, end
-
-
-def getstatementrange_ast(lineno, source, assertion=False, astnode=None):
- if astnode is None:
- content = str(source)
- if sys.version_info < (2,7):
- content += "\n"
- try:
- astnode = compile(content, "source", "exec", 1024) # 1024 for AST
- except ValueError:
- start, end = getstatementrange_old(lineno, source, assertion)
- return None, start, end
- start, end = get_statement_startend2(lineno, astnode)
- # we need to correct the end:
- # - ast-parsing strips comments
- # - there might be empty lines
- # - we might have lesser indented code blocks at the end
- if end is None:
- end = len(source.lines)
-
- if end > start + 1:
- # make sure we don't span differently indented code blocks
- # by using the BlockFinder helper which inspect.getsource() itself uses
- block_finder = inspect.BlockFinder()
- # if we start with an indented line, put blockfinder to "started" mode
- block_finder.started = source.lines[start][0].isspace()
- it = ((x + "\n") for x in source.lines[start:end])
- try:
- for tok in tokenize.generate_tokens(lambda: next(it)):
- block_finder.tokeneater(*tok)
- except (inspect.EndOfBlock, IndentationError):
- end = block_finder.last + start
- except Exception:
- pass
-
- # the end might still point to a comment or empty line, correct it
- while end:
- line = source.lines[end - 1].lstrip()
- if line.startswith("#") or not line:
- end -= 1
- else:
- break
- return astnode, start, end
-
-
-def getstatementrange_old(lineno, source, assertion=False):
- """ return (start, end) tuple which spans the minimal
- statement region which containing the given lineno.
- raise an IndexError if no such statementrange can be found.
- """
- # XXX this logic is only used on python2.4 and below
- # 1. find the start of the statement
- from codeop import compile_command
- for start in range(lineno, -1, -1):
- if assertion:
- line = source.lines[start]
- # the following lines are not fully tested, change with care
- if 'super' in line and 'self' in line and '__init__' in line:
- raise IndexError("likely a subclass")
- if "assert" not in line and "raise" not in line:
- continue
- trylines = source.lines[start:lineno+1]
- # quick hack to prepare parsing an indented line with
- # compile_command() (which errors on "return" outside defs)
- trylines.insert(0, 'def xxx():')
- trysource = '\n '.join(trylines)
- # ^ space here
- try:
- compile_command(trysource)
- except (SyntaxError, OverflowError, ValueError):
- continue
-
- # 2. find the end of the statement
- for end in range(lineno+1, len(source)+1):
- trysource = source[start:end]
- if trysource.isparseable():
- return start, end
- raise SyntaxError("no valid source range around line %d " % (lineno,))
-
-
diff --git a/lib/spack/external/pytest-fallback/py/_error.py b/lib/spack/external/pytest-fallback/py/_error.py
deleted file mode 100644
index 8ca339beba..0000000000
--- a/lib/spack/external/pytest-fallback/py/_error.py
+++ /dev/null
@@ -1,89 +0,0 @@
-"""
-create errno-specific classes for IO or os calls.
-
-"""
-import sys, os, errno
-
-class Error(EnvironmentError):
- def __repr__(self):
- return "%s.%s %r: %s " %(self.__class__.__module__,
- self.__class__.__name__,
- self.__class__.__doc__,
- " ".join(map(str, self.args)),
- #repr(self.args)
- )
-
- def __str__(self):
- s = "[%s]: %s" %(self.__class__.__doc__,
- " ".join(map(str, self.args)),
- )
- return s
-
-_winerrnomap = {
- 2: errno.ENOENT,
- 3: errno.ENOENT,
- 17: errno.EEXIST,
- 18: errno.EXDEV,
- 13: errno.EBUSY, # empty cd drive, but ENOMEDIUM seems unavailable
- 22: errno.ENOTDIR,
- 20: errno.ENOTDIR,
- 267: errno.ENOTDIR,
- 5: errno.EACCES, # anything better?
-}
-
-class ErrorMaker(object):
- """ lazily provides Exception classes for each possible POSIX errno
- (as defined by the 'errno' module). All such instances
- subclass EnvironmentError.
- """
- Error = Error
- _errno2class = {}
-
- def __getattr__(self, name):
- if name[0] == "_":
- raise AttributeError(name)
- eno = getattr(errno, name)
- cls = self._geterrnoclass(eno)
- setattr(self, name, cls)
- return cls
-
- def _geterrnoclass(self, eno):
- try:
- return self._errno2class[eno]
- except KeyError:
- clsname = errno.errorcode.get(eno, "UnknownErrno%d" %(eno,))
- errorcls = type(Error)(clsname, (Error,),
- {'__module__':'py.error',
- '__doc__': os.strerror(eno)})
- self._errno2class[eno] = errorcls
- return errorcls
-
- def checked_call(self, func, *args, **kwargs):
- """ call a function and raise an errno-exception if applicable. """
- __tracebackhide__ = True
- try:
- return func(*args, **kwargs)
- except self.Error:
- raise
- except (OSError, EnvironmentError):
- cls, value, tb = sys.exc_info()
- if not hasattr(value, 'errno'):
- raise
- __tracebackhide__ = False
- errno = value.errno
- try:
- if not isinstance(value, WindowsError):
- raise NameError
- except NameError:
- # we are not on Windows, or we got a proper OSError
- cls = self._geterrnoclass(errno)
- else:
- try:
- cls = self._geterrnoclass(_winerrnomap[errno])
- except KeyError:
- raise value
- raise cls("%s%r" % (func.__name__, args))
- __tracebackhide__ = True
-
-
-error = ErrorMaker()
diff --git a/lib/spack/external/pytest-fallback/py/_iniconfig.py b/lib/spack/external/pytest-fallback/py/_iniconfig.py
deleted file mode 100644
index 92b50bd853..0000000000
--- a/lib/spack/external/pytest-fallback/py/_iniconfig.py
+++ /dev/null
@@ -1,162 +0,0 @@
-""" brain-dead simple parser for ini-style files.
-(C) Ronny Pfannschmidt, Holger Krekel -- MIT licensed
-"""
-__version__ = "0.2.dev2"
-
-__all__ = ['IniConfig', 'ParseError']
-
-COMMENTCHARS = "#;"
-
-class ParseError(Exception):
- def __init__(self, path, lineno, msg):
- Exception.__init__(self, path, lineno, msg)
- self.path = path
- self.lineno = lineno
- self.msg = msg
-
- def __str__(self):
- return "%s:%s: %s" %(self.path, self.lineno+1, self.msg)
-
-class SectionWrapper(object):
- def __init__(self, config, name):
- self.config = config
- self.name = name
-
- def lineof(self, name):
- return self.config.lineof(self.name, name)
-
- def get(self, key, default=None, convert=str):
- return self.config.get(self.name, key, convert=convert, default=default)
-
- def __getitem__(self, key):
- return self.config.sections[self.name][key]
-
- def __iter__(self):
- section = self.config.sections.get(self.name, [])
- def lineof(key):
- return self.config.lineof(self.name, key)
- for name in sorted(section, key=lineof):
- yield name
-
- def items(self):
- for name in self:
- yield name, self[name]
-
-
-class IniConfig(object):
- def __init__(self, path, data=None):
- self.path = str(path) # convenience
- if data is None:
- f = open(self.path)
- try:
- tokens = self._parse(iter(f))
- finally:
- f.close()
- else:
- tokens = self._parse(data.splitlines(True))
-
- self._sources = {}
- self.sections = {}
-
- for lineno, section, name, value in tokens:
- if section is None:
- self._raise(lineno, 'no section header defined')
- self._sources[section, name] = lineno
- if name is None:
- if section in self.sections:
- self._raise(lineno, 'duplicate section %r'%(section, ))
- self.sections[section] = {}
- else:
- if name in self.sections[section]:
- self._raise(lineno, 'duplicate name %r'%(name, ))
- self.sections[section][name] = value
-
- def _raise(self, lineno, msg):
- raise ParseError(self.path, lineno, msg)
-
- def _parse(self, line_iter):
- result = []
- section = None
- for lineno, line in enumerate(line_iter):
- name, data = self._parseline(line, lineno)
- # new value
- if name is not None and data is not None:
- result.append((lineno, section, name, data))
- # new section
- elif name is not None and data is None:
- if not name:
- self._raise(lineno, 'empty section name')
- section = name
- result.append((lineno, section, None, None))
- # continuation
- elif name is None and data is not None:
- if not result:
- self._raise(lineno, 'unexpected value continuation')
- last = result.pop()
- last_name, last_data = last[-2:]
- if last_name is None:
- self._raise(lineno, 'unexpected value continuation')
-
- if last_data:
- data = '%s\n%s' % (last_data, data)
- result.append(last[:-1] + (data,))
- return result
-
- def _parseline(self, line, lineno):
- # blank lines
- if iscommentline(line):
- line = ""
- else:
- line = line.rstrip()
- if not line:
- return None, None
- # section
- if line[0] == '[':
- realline = line
- for c in COMMENTCHARS:
- line = line.split(c)[0].rstrip()
- if line[-1] == "]":
- return line[1:-1], None
- return None, realline.strip()
- # value
- elif not line[0].isspace():
- try:
- name, value = line.split('=', 1)
- if ":" in name:
- raise ValueError()
- except ValueError:
- try:
- name, value = line.split(":", 1)
- except ValueError:
- self._raise(lineno, 'unexpected line: %r' % line)
- return name.strip(), value.strip()
- # continuation
- else:
- return None, line.strip()
-
- def lineof(self, section, name=None):
- lineno = self._sources.get((section, name))
- if lineno is not None:
- return lineno + 1
-
- def get(self, section, name, default=None, convert=str):
- try:
- return convert(self.sections[section][name])
- except KeyError:
- return default
-
- def __getitem__(self, name):
- if name not in self.sections:
- raise KeyError(name)
- return SectionWrapper(self, name)
-
- def __iter__(self):
- for name in sorted(self.sections, key=self.lineof):
- yield SectionWrapper(self, name)
-
- def __contains__(self, arg):
- return arg in self.sections
-
-def iscommentline(line):
- c = line.lstrip()[:1]
- return c in COMMENTCHARS
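
The parser deleted above accepts either a file path or an in-memory string via the `data` argument. A short sketch of its API, assuming the vendored module could be imported as `py._iniconfig`:

    from py._iniconfig import IniConfig  # path inside the vendored copy

    SAMPLE = "[pytest]\naddopts = -q\ntimeout = 30\n"

    cfg = IniConfig("sample.ini", data=SAMPLE)   # the path only labels errors
    section = cfg["pytest"]                      # -> SectionWrapper
    print(section.get("timeout", convert=int))   # -> 30
    print(cfg.lineof("pytest", "addopts"))       # -> 2 (1-based line number)
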
diff --git a/lib/spack/external/pytest-fallback/py/_io/__init__.py b/lib/spack/external/pytest-fallback/py/_io/__init__.py
deleted file mode 100644
index 835f01f3ab..0000000000
--- a/lib/spack/external/pytest-fallback/py/_io/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-""" input/output helping """
diff --git a/lib/spack/external/pytest-fallback/py/_io/capture.py b/lib/spack/external/pytest-fallback/py/_io/capture.py
deleted file mode 100644
index bc157ed978..0000000000
--- a/lib/spack/external/pytest-fallback/py/_io/capture.py
+++ /dev/null
@@ -1,371 +0,0 @@
-import os
-import sys
-import py
-import tempfile
-
-try:
- from io import StringIO
-except ImportError:
- from StringIO import StringIO
-
-if sys.version_info < (3,0):
- class TextIO(StringIO):
- def write(self, data):
- if not isinstance(data, unicode):
- data = unicode(data, getattr(self, '_encoding', 'UTF-8'), 'replace')
- StringIO.write(self, data)
-else:
- TextIO = StringIO
-
-try:
- from io import BytesIO
-except ImportError:
- class BytesIO(StringIO):
- def write(self, data):
- if isinstance(data, unicode):
- raise TypeError("not a byte value: %r" %(data,))
- StringIO.write(self, data)
-
-patchsysdict = {0: 'stdin', 1: 'stdout', 2: 'stderr'}
-
-class FDCapture:
- """ Capture IO to/from a given os-level filedescriptor. """
-
- def __init__(self, targetfd, tmpfile=None, now=True, patchsys=False):
- """ save targetfd descriptor, and open a new
- temporary file there. If no tmpfile is
- specified, a tempfile.TemporaryFile() will be opened
- in text mode.
- """
- self.targetfd = targetfd
- if tmpfile is None and targetfd != 0:
- f = tempfile.TemporaryFile('wb+')
- tmpfile = dupfile(f, encoding="UTF-8")
- f.close()
- self.tmpfile = tmpfile
- self._savefd = os.dup(self.targetfd)
- if patchsys:
- self._oldsys = getattr(sys, patchsysdict[targetfd])
- if now:
- self.start()
-
- def start(self):
- try:
- os.fstat(self._savefd)
- except OSError:
- raise ValueError("saved filedescriptor not valid, "
- "did you call start() twice?")
- if self.targetfd == 0 and not self.tmpfile:
- fd = os.open(devnullpath, os.O_RDONLY)
- os.dup2(fd, 0)
- os.close(fd)
- if hasattr(self, '_oldsys'):
- setattr(sys, patchsysdict[self.targetfd], DontReadFromInput())
- else:
- os.dup2(self.tmpfile.fileno(), self.targetfd)
- if hasattr(self, '_oldsys'):
- setattr(sys, patchsysdict[self.targetfd], self.tmpfile)
-
- def done(self):
- """ unpatch and clean up, returns the self.tmpfile (file object)
- """
- os.dup2(self._savefd, self.targetfd)
- os.close(self._savefd)
- if self.targetfd != 0:
- self.tmpfile.seek(0)
- if hasattr(self, '_oldsys'):
- setattr(sys, patchsysdict[self.targetfd], self._oldsys)
- return self.tmpfile
-
- def writeorg(self, data):
- """ write a string to the original file descriptor
- """
- tempfp = tempfile.TemporaryFile()
- try:
- os.dup2(self._savefd, tempfp.fileno())
- tempfp.write(data)
- finally:
- tempfp.close()
-
-
-def dupfile(f, mode=None, buffering=0, raising=False, encoding=None):
- """ return a new open file object that's a duplicate of f
-
- mode is duplicated if not given, 'buffering' controls
- buffer size (defaulting to no buffering) and 'raising'
- defines whether an exception is raised when an incompatible
- file object is passed in (if raising is False, the file
- object itself will be returned)
- """
- try:
- fd = f.fileno()
- mode = mode or f.mode
- except AttributeError:
- if raising:
- raise
- return f
- newfd = os.dup(fd)
- if sys.version_info >= (3,0):
- if encoding is not None:
- mode = mode.replace("b", "")
- buffering = True
- return os.fdopen(newfd, mode, buffering, encoding, closefd=True)
- else:
- f = os.fdopen(newfd, mode, buffering)
- if encoding is not None:
- return EncodedFile(f, encoding)
- return f
-
-class EncodedFile(object):
- def __init__(self, _stream, encoding):
- self._stream = _stream
- self.encoding = encoding
-
- def write(self, obj):
- if isinstance(obj, unicode):
- obj = obj.encode(self.encoding)
- elif isinstance(obj, str):
- pass
- else:
- obj = str(obj)
- self._stream.write(obj)
-
- def writelines(self, linelist):
- data = ''.join(linelist)
- self.write(data)
-
- def __getattr__(self, name):
- return getattr(self._stream, name)
-
-class Capture(object):
- def call(cls, func, *args, **kwargs):
- """ return a (res, out, err) tuple where
- out and err represent the output/error output
- during function execution.
- call the given function with args/kwargs
- and capture output/error during its execution.
- """
- so = cls()
- try:
- res = func(*args, **kwargs)
- finally:
- out, err = so.reset()
- return res, out, err
- call = classmethod(call)
-
- def reset(self):
- """ reset sys.stdout/stderr and return captured output as strings. """
- if hasattr(self, '_reset'):
- raise ValueError("was already reset")
- self._reset = True
- outfile, errfile = self.done(save=False)
- out, err = "", ""
- if outfile and not outfile.closed:
- out = outfile.read()
- outfile.close()
- if errfile and errfile != outfile and not errfile.closed:
- err = errfile.read()
- errfile.close()
- return out, err
-
- def suspend(self):
- """ return current snapshot captures, memorize tempfiles. """
- outerr = self.readouterr()
- outfile, errfile = self.done()
- return outerr
-
-
-class StdCaptureFD(Capture):
- """ This class allows to capture writes to FD1 and FD2
- and may connect a NULL file to FD0 (and prevent
- reads from sys.stdin). If any of the 0,1,2 file descriptors
- is invalid it will not be captured.
- """
- def __init__(self, out=True, err=True, mixed=False,
- in_=True, patchsys=True, now=True):
- self._options = {
- "out": out,
- "err": err,
- "mixed": mixed,
- "in_": in_,
- "patchsys": patchsys,
- "now": now,
- }
- self._save()
- if now:
- self.startall()
-
- def _save(self):
- in_ = self._options['in_']
- out = self._options['out']
- err = self._options['err']
- mixed = self._options['mixed']
- patchsys = self._options['patchsys']
- if in_:
- try:
- self.in_ = FDCapture(0, tmpfile=None, now=False,
- patchsys=patchsys)
- except OSError:
- pass
- if out:
- tmpfile = None
- if hasattr(out, 'write'):
- tmpfile = out
- try:
- self.out = FDCapture(1, tmpfile=tmpfile,
- now=False, patchsys=patchsys)
- self._options['out'] = self.out.tmpfile
- except OSError:
- pass
- if err:
- if out and mixed:
- tmpfile = self.out.tmpfile
- elif hasattr(err, 'write'):
- tmpfile = err
- else:
- tmpfile = None
- try:
- self.err = FDCapture(2, tmpfile=tmpfile,
- now=False, patchsys=patchsys)
- self._options['err'] = self.err.tmpfile
- except OSError:
- pass
-
- def startall(self):
- if hasattr(self, 'in_'):
- self.in_.start()
- if hasattr(self, 'out'):
- self.out.start()
- if hasattr(self, 'err'):
- self.err.start()
-
- def resume(self):
- """ resume capturing with original temp files. """
- self.startall()
-
- def done(self, save=True):
- """ return (outfile, errfile) and stop capturing. """
- outfile = errfile = None
- if hasattr(self, 'out') and not self.out.tmpfile.closed:
- outfile = self.out.done()
- if hasattr(self, 'err') and not self.err.tmpfile.closed:
- errfile = self.err.done()
- if hasattr(self, 'in_'):
- tmpfile = self.in_.done()
- if save:
- self._save()
- return outfile, errfile
-
- def readouterr(self):
- """ return snapshot value of stdout/stderr capturings. """
- if hasattr(self, "out"):
- out = self._readsnapshot(self.out.tmpfile)
- else:
- out = ""
- if hasattr(self, "err"):
- err = self._readsnapshot(self.err.tmpfile)
- else:
- err = ""
- return [out, err]
-
- def _readsnapshot(self, f):
- f.seek(0)
- res = f.read()
- enc = getattr(f, "encoding", None)
- if enc:
- res = py.builtin._totext(res, enc, "replace")
- f.truncate(0)
- f.seek(0)
- return res
-
-
-class StdCapture(Capture):
- """ This class allows to capture writes to sys.stdout|stderr "in-memory"
- and will raise errors on tries to read from sys.stdin. It only
- modifies sys.stdout|stderr|stdin attributes and does not
- touch underlying File Descriptors (use StdCaptureFD for that).
- """
- def __init__(self, out=True, err=True, in_=True, mixed=False, now=True):
- self._oldout = sys.stdout
- self._olderr = sys.stderr
- self._oldin = sys.stdin
- if out and not hasattr(out, 'file'):
- out = TextIO()
- self.out = out
- if err:
- if mixed:
- err = out
- elif not hasattr(err, 'write'):
- err = TextIO()
- self.err = err
- self.in_ = in_
- if now:
- self.startall()
-
- def startall(self):
- if self.out:
- sys.stdout = self.out
- if self.err:
- sys.stderr = self.err
- if self.in_:
- sys.stdin = self.in_ = DontReadFromInput()
-
- def done(self, save=True):
- """ return (outfile, errfile) and stop capturing. """
- outfile = errfile = None
- if self.out and not self.out.closed:
- sys.stdout = self._oldout
- outfile = self.out
- outfile.seek(0)
- if self.err and not self.err.closed:
- sys.stderr = self._olderr
- errfile = self.err
- errfile.seek(0)
- if self.in_:
- sys.stdin = self._oldin
- return outfile, errfile
-
- def resume(self):
- """ resume capturing with original temp files. """
- self.startall()
-
- def readouterr(self):
- """ return snapshot value of stdout/stderr capturings. """
- out = err = ""
- if self.out:
- out = self.out.getvalue()
- self.out.truncate(0)
- self.out.seek(0)
- if self.err:
- err = self.err.getvalue()
- self.err.truncate(0)
- self.err.seek(0)
- return out, err
-
-class DontReadFromInput:
- """Temporary stub class. Ideally when stdin is accessed, the
- capturing should be turned off, with possibly all data captured
- so far sent to the screen. This should be configurable, though,
- because in automated test runs it is better to crash than
- hang indefinitely.
- """
- def read(self, *args):
- raise IOError("reading from stdin while output is captured")
- readline = read
- readlines = read
- __iter__ = read
-
- def fileno(self):
- raise ValueError("redirected Stdin is pseudofile, has no fileno()")
- def isatty(self):
- return False
- def close(self):
- pass
-
-try:
- devnullpath = os.devnull
-except AttributeError:
- if os.name == 'nt':
- devnullpath = 'NUL'
- else:
- devnullpath = '/dev/null'
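
The capture classes deleted above share the `Capture.call` helper, which runs a function and returns its result together with whatever was written to stdout/stderr. A minimal sketch using the in-memory `StdCapture`, assuming the module could be imported as `py._io.capture`:

    import sys
    from py._io.capture import StdCapture

    def noisy():
        sys.stdout.write("to stdout\n")
        sys.stderr.write("to stderr\n")
        return 42

    # call() instantiates the capture, runs the function, then resets it
    res, out, err = StdCapture.call(noisy)
    assert res == 42
    assert out == "to stdout\n" and err == "to stderr\n"
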
diff --git a/lib/spack/external/pytest-fallback/py/_io/saferepr.py b/lib/spack/external/pytest-fallback/py/_io/saferepr.py
deleted file mode 100644
index 8518290efd..0000000000
--- a/lib/spack/external/pytest-fallback/py/_io/saferepr.py
+++ /dev/null
@@ -1,71 +0,0 @@
-import py
-import sys
-
-builtin_repr = repr
-
-reprlib = py.builtin._tryimport('repr', 'reprlib')
-
-class SafeRepr(reprlib.Repr):
- """ subclass of repr.Repr that limits the resulting size of repr()
- and includes information on exceptions raised during the call.
- """
- def repr(self, x):
- return self._callhelper(reprlib.Repr.repr, self, x)
-
- def repr_unicode(self, x, level):
- # Strictly speaking wrong on narrow builds
- def repr(u):
- if "'" not in u:
- return py.builtin._totext("'%s'") % u
- elif '"' not in u:
- return py.builtin._totext('"%s"') % u
- else:
- return py.builtin._totext("'%s'") % u.replace("'", r"\'")
- s = repr(x[:self.maxstring])
- if len(s) > self.maxstring:
- i = max(0, (self.maxstring-3)//2)
- j = max(0, self.maxstring-3-i)
- s = repr(x[:i] + x[len(x)-j:])
- s = s[:i] + '...' + s[len(s)-j:]
- return s
-
- def repr_instance(self, x, level):
- return self._callhelper(builtin_repr, x)
-
- def _callhelper(self, call, x, *args):
- try:
- # Try the vanilla repr and make sure that the result is a string
- s = call(x, *args)
- except py.builtin._sysex:
- raise
- except:
- cls, e, tb = sys.exc_info()
- exc_name = getattr(cls, '__name__', 'unknown')
- try:
- exc_info = str(e)
- except py.builtin._sysex:
- raise
- except:
- exc_info = 'unknown'
- return '<[%s("%s") raised in repr()] %s object at 0x%x>' % (
- exc_name, exc_info, x.__class__.__name__, id(x))
- else:
- if len(s) > self.maxsize:
- i = max(0, (self.maxsize-3)//2)
- j = max(0, self.maxsize-3-i)
- s = s[:i] + '...' + s[len(s)-j:]
- return s
-
-def saferepr(obj, maxsize=240):
- """ return a size-limited safe repr-string for the given object.
- Failing __repr__ functions of user instances will be represented
- with a short exception info and 'saferepr' generally takes
- care to never raise exceptions itself. This function is a wrapper
- around the Repr/reprlib functionality of the standard 2.6 lib.
- """
- # review exception handling
- srepr = SafeRepr()
- srepr.maxstring = maxsize
- srepr.maxsize = maxsize
- srepr.maxother = 160
- return srepr.repr(obj)
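
`saferepr` (deleted above) limits the size of a repr and never lets a failing user `__repr__` propagate. A small sketch, assuming the vendored import path:

    from py._io.saferepr import saferepr

    class Broken(object):
        def __repr__(self):
            raise RuntimeError("boom")

    # the exception is caught and summarized instead of propagating
    print(saferepr(Broken()))
    # long reprs are shortened to maxsize with '...' in the middle
    print(saferepr("x" * 1000, maxsize=40))
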
diff --git a/lib/spack/external/pytest-fallback/py/_io/terminalwriter.py b/lib/spack/external/pytest-fallback/py/_io/terminalwriter.py
deleted file mode 100644
index 390e8ca7b9..0000000000
--- a/lib/spack/external/pytest-fallback/py/_io/terminalwriter.py
+++ /dev/null
@@ -1,357 +0,0 @@
-"""
-
-Helper functions for writing to terminals and files.
-
-"""
-
-
-import sys, os
-import py
-py3k = sys.version_info[0] >= 3
-from py.builtin import text, bytes
-
-win32_and_ctypes = False
-colorama = None
-if sys.platform == "win32":
- try:
- import colorama
- except ImportError:
- try:
- import ctypes
- win32_and_ctypes = True
- except ImportError:
- pass
-
-
-def _getdimensions():
- import termios,fcntl,struct
- call = fcntl.ioctl(1,termios.TIOCGWINSZ,"\000"*8)
- height,width = struct.unpack( "hhhh", call ) [:2]
- return height, width
-
-
-def get_terminal_width():
- height = width = 0
- try:
- height, width = _getdimensions()
- except py.builtin._sysex:
- raise
- except:
- # pass to fallback below
- pass
-
- if width == 0:
- # FALLBACK:
- # * some exception happened
- # * or this is emacs terminal which reports (0,0)
- width = int(os.environ.get('COLUMNS', 80))
-
- # XXX the windows getdimensions may be bogus, sanity-check the value
- if width < 40:
- width = 80
- return width
-
-terminal_width = get_terminal_width()
-
-# XXX unify with _escaped func below
-def ansi_print(text, esc, file=None, newline=True, flush=False):
- if file is None:
- file = sys.stderr
- text = text.rstrip()
- if esc and not isinstance(esc, tuple):
- esc = (esc,)
- if esc and sys.platform != "win32" and file.isatty():
- text = (''.join(['\x1b[%sm' % cod for cod in esc]) +
- text +
- '\x1b[0m') # ANSI color code "reset"
- if newline:
- text += '\n'
-
- if esc and win32_and_ctypes and file.isatty():
- if 1 in esc:
- bold = True
- esc = tuple([x for x in esc if x != 1])
- else:
- bold = False
- esctable = {() : FOREGROUND_WHITE, # normal
- (31,): FOREGROUND_RED, # red
- (32,): FOREGROUND_GREEN, # green
- (33,): FOREGROUND_GREEN|FOREGROUND_RED, # yellow
- (34,): FOREGROUND_BLUE, # blue
- (35,): FOREGROUND_BLUE|FOREGROUND_RED, # purple
- (36,): FOREGROUND_BLUE|FOREGROUND_GREEN, # cyan
- (37,): FOREGROUND_WHITE, # white
- (39,): FOREGROUND_WHITE, # reset
- }
- attr = esctable.get(esc, FOREGROUND_WHITE)
- if bold:
- attr |= FOREGROUND_INTENSITY
- STD_OUTPUT_HANDLE = -11
- STD_ERROR_HANDLE = -12
- if file is sys.stderr:
- handle = GetStdHandle(STD_ERROR_HANDLE)
- else:
- handle = GetStdHandle(STD_OUTPUT_HANDLE)
- oldcolors = GetConsoleInfo(handle).wAttributes
- attr |= (oldcolors & 0x0f0)
- SetConsoleTextAttribute(handle, attr)
- while len(text) > 32768:
- file.write(text[:32768])
- text = text[32768:]
- if text:
- file.write(text)
- SetConsoleTextAttribute(handle, oldcolors)
- else:
- file.write(text)
-
- if flush:
- file.flush()
-
-def should_do_markup(file):
- if os.environ.get('PY_COLORS') == '1':
- return True
- if os.environ.get('PY_COLORS') == '0':
- return False
- return hasattr(file, 'isatty') and file.isatty() \
- and os.environ.get('TERM') != 'dumb' \
- and not (sys.platform.startswith('java') and os._name == 'nt')
-
-class TerminalWriter(object):
- _esctable = dict(black=30, red=31, green=32, yellow=33,
- blue=34, purple=35, cyan=36, white=37,
- Black=40, Red=41, Green=42, Yellow=43,
- Blue=44, Purple=45, Cyan=46, White=47,
- bold=1, light=2, blink=5, invert=7)
-
- # XXX deprecate stringio argument
- def __init__(self, file=None, stringio=False, encoding=None):
- if file is None:
- if stringio:
- self.stringio = file = py.io.TextIO()
- else:
- file = py.std.sys.stdout
- elif py.builtin.callable(file) and not (
- hasattr(file, "write") and hasattr(file, "flush")):
- file = WriteFile(file, encoding=encoding)
- if hasattr(file, "isatty") and file.isatty() and colorama:
- file = colorama.AnsiToWin32(file).stream
- self.encoding = encoding or getattr(file, 'encoding', "utf-8")
- self._file = file
- self.hasmarkup = should_do_markup(file)
- self._lastlen = 0
-
- @property
- def fullwidth(self):
- if hasattr(self, '_terminal_width'):
- return self._terminal_width
- return get_terminal_width()
-
- @fullwidth.setter
- def fullwidth(self, value):
- self._terminal_width = value
-
- def _escaped(self, text, esc):
- if esc and self.hasmarkup:
- text = (''.join(['\x1b[%sm' % cod for cod in esc]) +
- text +'\x1b[0m')
- return text
-
- def markup(self, text, **kw):
- esc = []
- for name in kw:
- if name not in self._esctable:
- raise ValueError("unknown markup: %r" %(name,))
- if kw[name]:
- esc.append(self._esctable[name])
- return self._escaped(text, tuple(esc))
-
- def sep(self, sepchar, title=None, fullwidth=None, **kw):
- if fullwidth is None:
- fullwidth = self.fullwidth
- # the goal is to have the line be as long as possible
- # under the condition that len(line) <= fullwidth
- if sys.platform == "win32":
- # if we print in the last column on windows we are on a
- # new line but there is no way to verify/neutralize this
- # (we may not know the exact line width)
- # so let's be defensive to avoid empty lines in the output
- fullwidth -= 1
- if title is not None:
- # we want 2 + 2*len(fill) + len(title) <= fullwidth
- # i.e. 2 + 2*len(sepchar)*N + len(title) <= fullwidth
- # 2*len(sepchar)*N <= fullwidth - len(title) - 2
- # N <= (fullwidth - len(title) - 2) // (2*len(sepchar))
- N = (fullwidth - len(title) - 2) // (2*len(sepchar))
- fill = sepchar * N
- line = "%s %s %s" % (fill, title, fill)
- else:
- # we want len(sepchar)*N <= fullwidth
- # i.e. N <= fullwidth // len(sepchar)
- line = sepchar * (fullwidth // len(sepchar))
- # in some situations there is room for an extra sepchar at the right,
- # in particular if we consider that with a sepchar like "_ " the
- # trailing space is not important at the end of the line
- if len(line) + len(sepchar.rstrip()) <= fullwidth:
- line += sepchar.rstrip()
-
- self.line(line, **kw)
-
- def write(self, msg, **kw):
- if msg:
- if not isinstance(msg, (bytes, text)):
- msg = text(msg)
- if self.hasmarkup and kw:
- markupmsg = self.markup(msg, **kw)
- else:
- markupmsg = msg
- write_out(self._file, markupmsg)
-
- def line(self, s='', **kw):
- self.write(s, **kw)
- self._checkfill(s)
- self.write('\n')
-
- def reline(self, line, **kw):
- if not self.hasmarkup:
- raise ValueError("cannot use rewrite-line without terminal")
- self.write(line, **kw)
- self._checkfill(line)
- self.write('\r')
- self._lastlen = len(line)
-
- def _checkfill(self, line):
- diff2last = self._lastlen - len(line)
- if diff2last > 0:
- self.write(" " * diff2last)
-
-class Win32ConsoleWriter(TerminalWriter):
- def write(self, msg, **kw):
- if msg:
- if not isinstance(msg, (bytes, text)):
- msg = text(msg)
- oldcolors = None
- if self.hasmarkup and kw:
- handle = GetStdHandle(STD_OUTPUT_HANDLE)
- oldcolors = GetConsoleInfo(handle).wAttributes
- default_bg = oldcolors & 0x00F0
- attr = default_bg
- if kw.pop('bold', False):
- attr |= FOREGROUND_INTENSITY
-
- if kw.pop('red', False):
- attr |= FOREGROUND_RED
- elif kw.pop('blue', False):
- attr |= FOREGROUND_BLUE
- elif kw.pop('green', False):
- attr |= FOREGROUND_GREEN
- elif kw.pop('yellow', False):
- attr |= FOREGROUND_GREEN|FOREGROUND_RED
- else:
- attr |= oldcolors & 0x0007
-
- SetConsoleTextAttribute(handle, attr)
- write_out(self._file, msg)
- if oldcolors:
- SetConsoleTextAttribute(handle, oldcolors)
-
-class WriteFile(object):
- def __init__(self, writemethod, encoding=None):
- self.encoding = encoding
- self._writemethod = writemethod
-
- def write(self, data):
- if self.encoding:
- data = data.encode(self.encoding, "replace")
- self._writemethod(data)
-
- def flush(self):
- return
-
-
-if win32_and_ctypes:
- TerminalWriter = Win32ConsoleWriter
- import ctypes
- from ctypes import wintypes
-
- # ctypes access to the Windows console
- STD_OUTPUT_HANDLE = -11
- STD_ERROR_HANDLE = -12
- FOREGROUND_BLACK = 0x0000 # black text
- FOREGROUND_BLUE = 0x0001 # text color contains blue.
- FOREGROUND_GREEN = 0x0002 # text color contains green.
- FOREGROUND_RED = 0x0004 # text color contains red.
- FOREGROUND_WHITE = 0x0007
- FOREGROUND_INTENSITY = 0x0008 # text color is intensified.
- BACKGROUND_BLACK = 0x0000 # background color black
- BACKGROUND_BLUE = 0x0010 # background color contains blue.
- BACKGROUND_GREEN = 0x0020 # background color contains green.
- BACKGROUND_RED = 0x0040 # background color contains red.
- BACKGROUND_WHITE = 0x0070
- BACKGROUND_INTENSITY = 0x0080 # background color is intensified.
-
- SHORT = ctypes.c_short
- class COORD(ctypes.Structure):
- _fields_ = [('X', SHORT),
- ('Y', SHORT)]
- class SMALL_RECT(ctypes.Structure):
- _fields_ = [('Left', SHORT),
- ('Top', SHORT),
- ('Right', SHORT),
- ('Bottom', SHORT)]
- class CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
- _fields_ = [('dwSize', COORD),
- ('dwCursorPosition', COORD),
- ('wAttributes', wintypes.WORD),
- ('srWindow', SMALL_RECT),
- ('dwMaximumWindowSize', COORD)]
-
- _GetStdHandle = ctypes.windll.kernel32.GetStdHandle
- _GetStdHandle.argtypes = [wintypes.DWORD]
- _GetStdHandle.restype = wintypes.HANDLE
- def GetStdHandle(kind):
- return _GetStdHandle(kind)
-
- SetConsoleTextAttribute = ctypes.windll.kernel32.SetConsoleTextAttribute
- SetConsoleTextAttribute.argtypes = [wintypes.HANDLE, wintypes.WORD]
- SetConsoleTextAttribute.restype = wintypes.BOOL
-
- _GetConsoleScreenBufferInfo = \
- ctypes.windll.kernel32.GetConsoleScreenBufferInfo
- _GetConsoleScreenBufferInfo.argtypes = [wintypes.HANDLE,
- ctypes.POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
- _GetConsoleScreenBufferInfo.restype = wintypes.BOOL
- def GetConsoleInfo(handle):
- info = CONSOLE_SCREEN_BUFFER_INFO()
- _GetConsoleScreenBufferInfo(handle, ctypes.byref(info))
- return info
-
- def _getdimensions():
- handle = GetStdHandle(STD_OUTPUT_HANDLE)
- info = GetConsoleInfo(handle)
- # Subtract one from the width, otherwise the cursor wraps
- # and the ending \n causes an empty line to display.
- return info.dwSize.Y, info.dwSize.X - 1
-
-def write_out(fil, msg):
- # XXX sometimes "msg" is of type bytes, sometimes text which
- # complicates the situation. Should we try to enforce unicode?
- try:
- # on py27 and above writing out to sys.stdout with an encoding
- # should usually work for unicode messages (if the encoding is
- # capable of it)
- fil.write(msg)
- except UnicodeEncodeError:
- # on py26 it might not work because stdout expects bytes
- if fil.encoding:
- try:
- fil.write(msg.encode(fil.encoding))
- except UnicodeEncodeError:
- # it might still fail if the encoding is not capable
- pass
- else:
- fil.flush()
- return
- # fallback: escape all unicode characters
- msg = msg.encode("unicode-escape").decode("ascii")
- fil.write(msg)
- fil.flush()
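
`TerminalWriter` (deleted above) wraps a stream and only emits ANSI codes when markup is enabled (tty detection plus the PY_COLORS override). A usage sketch, assuming the vendored import path:

    from py._io.terminalwriter import TerminalWriter

    tw = TerminalWriter()                       # defaults to sys.stdout
    tw.sep("=", "test session starts")          # full-width separator line
    tw.line("all good", green=True, bold=True)  # colored only if markup is on
    print(tw.markup("plain on non-ttys", red=True))
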
diff --git a/lib/spack/external/pytest-fallback/py/_log/__init__.py b/lib/spack/external/pytest-fallback/py/_log/__init__.py
deleted file mode 100644
index fad62e960d..0000000000
--- a/lib/spack/external/pytest-fallback/py/_log/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-""" logging API ('producers' and 'consumers' connected via keywords) """
-
diff --git a/lib/spack/external/pytest-fallback/py/_log/log.py b/lib/spack/external/pytest-fallback/py/_log/log.py
deleted file mode 100644
index ce47e8c754..0000000000
--- a/lib/spack/external/pytest-fallback/py/_log/log.py
+++ /dev/null
@@ -1,186 +0,0 @@
-"""
-basic logging functionality based on a producer/consumer scheme.
-
-XXX implement this API: (maybe put it into slogger.py?)
-
- log = Logger(
- info=py.log.STDOUT,
- debug=py.log.STDOUT,
- command=None)
- log.info("hello", "world")
- log.command("hello", "world")
-
- log = Logger(info=Logger(something=...),
- debug=py.log.STDOUT,
- command=None)
-"""
-import py, sys
-
-class Message(object):
- def __init__(self, keywords, args):
- self.keywords = keywords
- self.args = args
-
- def content(self):
- return " ".join(map(str, self.args))
-
- def prefix(self):
- return "[%s] " % (":".join(self.keywords))
-
- def __str__(self):
- return self.prefix() + self.content()
-
-
-class Producer(object):
- """ (deprecated) Log producer API which sends messages to be logged
- to a 'consumer' object, which then prints them to stdout,
- stderr, files, etc. Used extensively by PyPy-1.1.
- """
-
- Message = Message # to allow later customization
- keywords2consumer = {}
-
- def __init__(self, keywords, keywordmapper=None, **kw):
- if hasattr(keywords, 'split'):
- keywords = tuple(keywords.split())
- self._keywords = keywords
- if keywordmapper is None:
- keywordmapper = default_keywordmapper
- self._keywordmapper = keywordmapper
-
- def __repr__(self):
- return "<py.log.Producer %s>" % ":".join(self._keywords)
-
- def __getattr__(self, name):
- if '_' in name:
- raise AttributeError(name)
- producer = self.__class__(self._keywords + (name,))
- setattr(self, name, producer)
- return producer
-
- def __call__(self, *args):
- """ write a message to the appropriate consumer(s) """
- func = self._keywordmapper.getconsumer(self._keywords)
- if func is not None:
- func(self.Message(self._keywords, args))
-
-class KeywordMapper:
- def __init__(self):
- self.keywords2consumer = {}
-
- def getstate(self):
- return self.keywords2consumer.copy()
- def setstate(self, state):
- self.keywords2consumer.clear()
- self.keywords2consumer.update(state)
-
- def getconsumer(self, keywords):
- """ return a consumer matching the given keywords.
-
- walks the list of keywords from the back and returns the first
- consumer matching a keyword (falling back to py.log.default)
- """
- for i in range(len(keywords), 0, -1):
- try:
- return self.keywords2consumer[keywords[:i]]
- except KeyError:
- continue
- return self.keywords2consumer.get('default', default_consumer)
-
- def setconsumer(self, keywords, consumer):
- """ set a consumer for a set of keywords. """
- # normalize to tuples
- if isinstance(keywords, str):
- keywords = tuple(filter(None, keywords.split()))
- elif hasattr(keywords, '_keywords'):
- keywords = keywords._keywords
- elif not isinstance(keywords, tuple):
- raise TypeError("key %r is not a string or tuple" % (keywords,))
- if consumer is not None and not py.builtin.callable(consumer):
- if not hasattr(consumer, 'write'):
- raise TypeError(
- "%r should be None, callable or file-like" % (consumer,))
- consumer = File(consumer)
- self.keywords2consumer[keywords] = consumer
-
-def default_consumer(msg):
- """ the default consumer, prints the message to stdout (using 'print') """
- sys.stderr.write(str(msg)+"\n")
-
-default_keywordmapper = KeywordMapper()
-
-def setconsumer(keywords, consumer):
- default_keywordmapper.setconsumer(keywords, consumer)
-
-def setstate(state):
- default_keywordmapper.setstate(state)
-def getstate():
- return default_keywordmapper.getstate()
-
-#
-# Consumers
-#
-
-class File(object):
- """ log consumer wrapping a file(-like) object """
- def __init__(self, f):
- assert hasattr(f, 'write')
- #assert isinstance(f, file) or not hasattr(f, 'open')
- self._file = f
-
- def __call__(self, msg):
- """ write a message to the log """
- self._file.write(str(msg) + "\n")
- if hasattr(self._file, 'flush'):
- self._file.flush()
-
-class Path(object):
- """ log consumer that opens and writes to a Path """
- def __init__(self, filename, append=False,
- delayed_create=False, buffering=False):
- self._append = append
- self._filename = str(filename)
- self._buffering = buffering
- if not delayed_create:
- self._openfile()
-
- def _openfile(self):
- mode = self._append and 'a' or 'w'
- f = open(self._filename, mode)
- self._file = f
-
- def __call__(self, msg):
- """ write a message to the log """
- if not hasattr(self, "_file"):
- self._openfile()
- self._file.write(str(msg) + "\n")
- if not self._buffering:
- self._file.flush()
-
-def STDOUT(msg):
- """ consumer that writes to sys.stdout """
- sys.stdout.write(str(msg)+"\n")
-
-def STDERR(msg):
- """ consumer that writes to sys.stderr """
- sys.stderr.write(str(msg)+"\n")
-
-class Syslog:
- """ consumer that writes to the syslog daemon """
-
- def __init__(self, priority = None):
- if priority is None:
- priority = self.LOG_INFO
- self.priority = priority
-
- def __call__(self, msg):
- """ write a message to the log """
- py.std.syslog.syslog(self.priority, str(msg))
-
-for _prio in "EMERG ALERT CRIT ERR WARNING NOTICE INFO DEBUG".split():
- _prio = "LOG_" + _prio
- try:
- setattr(Syslog, _prio, getattr(py.std.syslog, _prio))
- except AttributeError:
- pass
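
The producer/consumer scheme deleted above routes messages by keyword tuples: the most specific registered consumer wins, and `None` silences a channel. A sketch, assuming the public names were re-exported as `py.log.*` (as in the upstream py library):

    import py

    log = py.log.Producer("myapp")
    py.log.setconsumer("myapp", py.log.STDOUT)  # route "myapp ..." to stdout
    py.log.setconsumer("myapp debug", None)     # silence the debug sub-channel

    log("starting", "up")    # prints: [myapp] starting up
    log.debug("hidden")      # matched by the None consumer, dropped
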
diff --git a/lib/spack/external/pytest-fallback/py/_log/warning.py b/lib/spack/external/pytest-fallback/py/_log/warning.py
deleted file mode 100644
index 722e31e910..0000000000
--- a/lib/spack/external/pytest-fallback/py/_log/warning.py
+++ /dev/null
@@ -1,76 +0,0 @@
-import py, sys
-
-class DeprecationWarning(DeprecationWarning):
- def __init__(self, msg, path, lineno):
- self.msg = msg
- self.path = path
- self.lineno = lineno
- def __repr__(self):
- return "%s:%d: %s" %(self.path, self.lineno+1, self.msg)
- def __str__(self):
- return self.msg
-
-def _apiwarn(startversion, msg, stacklevel=2, function=None):
- # below is mostly COPIED from python2.4/warnings.py's def warn()
- # Get context information
- if isinstance(stacklevel, str):
- frame = sys._getframe(1)
- level = 1
- found = frame.f_code.co_filename.find(stacklevel) != -1
- while frame:
- co = frame.f_code
- if co.co_filename.find(stacklevel) == -1:
- if found:
- stacklevel = level
- break
- else:
- found = True
- level += 1
- frame = frame.f_back
- else:
- stacklevel = 1
- msg = "%s (since version %s)" %(msg, startversion)
- warn(msg, stacklevel=stacklevel+1, function=function)
-
-def warn(msg, stacklevel=1, function=None):
- if function is not None:
- filename = py.std.inspect.getfile(function)
- lineno = py.code.getrawcode(function).co_firstlineno
- else:
- try:
- caller = sys._getframe(stacklevel)
- except ValueError:
- globals = sys.__dict__
- lineno = 1
- else:
- globals = caller.f_globals
- lineno = caller.f_lineno
- if '__name__' in globals:
- module = globals['__name__']
- else:
- module = "<string>"
- filename = globals.get('__file__')
- if filename:
- fnl = filename.lower()
- if fnl.endswith(".pyc") or fnl.endswith(".pyo"):
- filename = filename[:-1]
- elif fnl.endswith("$py.class"):
- filename = filename.replace('$py.class', '.py')
- else:
- if module == "__main__":
- try:
- filename = sys.argv[0]
- except AttributeError:
- # embedded interpreters don't have sys.argv, see bug #839151
- filename = '__main__'
- if not filename:
- filename = module
- path = py.path.local(filename)
- warning = DeprecationWarning(msg, path, lineno)
- py.std.warnings.warn_explicit(warning, category=Warning,
- filename=str(warning.path),
- lineno=warning.lineno,
- registry=py.std.warnings.__dict__.setdefault(
- "__warningsregistry__", {})
- )
-
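
`_apiwarn` (deleted above) wrapped the stdlib warnings machinery so that deprecation messages carry the version they appeared in and point at the caller rather than at the library frame. A sketch, assuming it was re-exported as `py.log._apiwarn`:

    import py

    def old_api():
        # appends "(since version 1.0)" and attributes the warning
        # to whoever called old_api(), not to this frame
        py.log._apiwarn("1.0", "old_api() is deprecated, use new_api()")

    old_api()
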
diff --git a/lib/spack/external/pytest-fallback/py/_path/__init__.py b/lib/spack/external/pytest-fallback/py/_path/__init__.py
deleted file mode 100644
index 51f3246f80..0000000000
--- a/lib/spack/external/pytest-fallback/py/_path/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-""" unified file system api """
diff --git a/lib/spack/external/pytest-fallback/py/_path/cacheutil.py b/lib/spack/external/pytest-fallback/py/_path/cacheutil.py
deleted file mode 100644
index 9922504750..0000000000
--- a/lib/spack/external/pytest-fallback/py/_path/cacheutil.py
+++ /dev/null
@@ -1,114 +0,0 @@
-"""
-This module contains multithread-safe cache implementations.
-
-All Caches have
-
- getorbuild(key, builder)
- delentry(key)
-
-methods and allow configuration when instantiating the cache class.
-"""
-from time import time as gettime
-
-class BasicCache(object):
- def __init__(self, maxentries=128):
- self.maxentries = maxentries
- self.prunenum = int(maxentries - maxentries/8)
- self._dict = {}
-
- def clear(self):
- self._dict.clear()
-
- def _getentry(self, key):
- return self._dict[key]
-
- def _putentry(self, key, entry):
- self._prunelowestweight()
- self._dict[key] = entry
-
- def delentry(self, key, raising=False):
- try:
- del self._dict[key]
- except KeyError:
- if raising:
- raise
-
- def getorbuild(self, key, builder):
- try:
- entry = self._getentry(key)
- except KeyError:
- entry = self._build(key, builder)
- self._putentry(key, entry)
- return entry.value
-
- def _prunelowestweight(self):
- """ prune out entries with lowest weight. """
- numentries = len(self._dict)
- if numentries >= self.maxentries:
- # evict according to entry's weight
- items = [(entry.weight, key)
- for key, entry in self._dict.items()]
- items.sort()
- index = numentries - self.prunenum
- if index > 0:
- for weight, key in items[:index]:
- # in MT situations the element might be gone
- self.delentry(key, raising=False)
-
-class BuildcostAccessCache(BasicCache):
- """ A BuildTime/Access-counting cache implementation.
- the weight of a value is computed as the product of
-
- num-accesses-of-a-value * time-to-build-the-value
-
- The values with the least such weights are evicted
- if the cache maxentries threshold is exceeded.
- For implementation flexibility, more than one object
- might be evicted at a time.
- """
- # time function to use for measuring build-times
-
- def _build(self, key, builder):
- start = gettime()
- val = builder()
- end = gettime()
- return WeightedCountingEntry(val, end-start)
-
-
-class WeightedCountingEntry(object):
- def __init__(self, value, oneweight):
- self._value = value
- self.weight = self._oneweight = oneweight
-
- def value(self):
- self.weight += self._oneweight
- return self._value
- value = property(value)
-
-class AgingCache(BasicCache):
- """ This cache prunes out cache entries that are too old.
- """
- def __init__(self, maxentries=128, maxseconds=10.0):
- super(AgingCache, self).__init__(maxentries)
- self.maxseconds = maxseconds
-
- def _getentry(self, key):
- entry = self._dict[key]
- if entry.isexpired():
- self.delentry(key)
- raise KeyError(key)
- return entry
-
- def _build(self, key, builder):
- val = builder()
- entry = AgingEntry(val, gettime() + self.maxseconds)
- return entry
-
-class AgingEntry(object):
- def __init__(self, value, expirationtime):
- self.value = value
- self.weight = expirationtime
-
- def isexpired(self):
- t = gettime()
- return t >= self.weight
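
Both cache flavors deleted above are driven through `getorbuild`, which either returns a cached value or invokes the builder and stores the entry. A sketch, assuming the vendored import path:

    import time
    from py._path.cacheutil import BuildcostAccessCache, AgingCache

    cache = BuildcostAccessCache(maxentries=128)

    def expensive():
        time.sleep(0.01)      # build time becomes the entry's starting weight
        return "computed"

    assert cache.getorbuild("key", expensive) == "computed"  # builds
    assert cache.getorbuild("key", expensive) == "computed"  # cached hit

    aging = AgingCache(maxentries=16, maxseconds=0.05)
    aging.getorbuild("k", lambda: 1)
    time.sleep(0.1)           # entry expires after maxseconds
    assert aging.getorbuild("k", lambda: 2) == 2
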
diff --git a/lib/spack/external/pytest-fallback/py/_path/common.py b/lib/spack/external/pytest-fallback/py/_path/common.py
deleted file mode 100644
index 5512e51efe..0000000000
--- a/lib/spack/external/pytest-fallback/py/_path/common.py
+++ /dev/null
@@ -1,445 +0,0 @@
-"""
-"""
-import os, sys, posixpath
-import fnmatch
-import py
-
-# Moved from local.py.
-iswin32 = sys.platform == "win32" or (getattr(os, '_name', False) == 'nt')
-
-try:
- from os import fspath
-except ImportError:
- def fspath(path):
- """
- Return the string representation of the path.
- If str or bytes is passed in, it is returned unchanged.
- This code comes from PEP 519, modified to support earlier versions of
- python.
-
- This is required for python < 3.6.
- """
- if isinstance(path, (py.builtin.text, py.builtin.bytes)):
- return path
-
- # Work from the object's type to match method resolution of other magic
- # methods.
- path_type = type(path)
- try:
- return path_type.__fspath__(path)
- except AttributeError:
- if hasattr(path_type, '__fspath__'):
- raise
- try:
- import pathlib
- except ImportError:
- pass
- else:
- if isinstance(path, pathlib.PurePath):
- return py.builtin.text(path)
-
- raise TypeError("expected str, bytes or os.PathLike object, not "
- + path_type.__name__)
-
-class Checkers:
- _depend_on_existence = 'exists', 'link', 'dir', 'file'
-
- def __init__(self, path):
- self.path = path
-
- def dir(self):
- raise NotImplementedError
-
- def file(self):
- raise NotImplementedError
-
- def dotfile(self):
- return self.path.basename.startswith('.')
-
- def ext(self, arg):
- if not arg.startswith('.'):
- arg = '.' + arg
- return self.path.ext == arg
-
- def exists(self):
- raise NotImplementedError
-
- def basename(self, arg):
- return self.path.basename == arg
-
- def basestarts(self, arg):
- return self.path.basename.startswith(arg)
-
- def relto(self, arg):
- return self.path.relto(arg)
-
- def fnmatch(self, arg):
- return self.path.fnmatch(arg)
-
- def endswith(self, arg):
- return str(self.path).endswith(arg)
-
- def _evaluate(self, kw):
- for name, value in kw.items():
- invert = False
- meth = None
- try:
- meth = getattr(self, name)
- except AttributeError:
- if name[:3] == 'not':
- invert = True
- try:
- meth = getattr(self, name[3:])
- except AttributeError:
- pass
- if meth is None:
- raise TypeError(
- "no %r checker available for %r" % (name, self.path))
- try:
- if py.code.getrawcode(meth).co_argcount > 1:
- if (not meth(value)) ^ invert:
- return False
- else:
- if bool(value) ^ bool(meth()) ^ invert:
- return False
- except (py.error.ENOENT, py.error.ENOTDIR, py.error.EBUSY):
- # EBUSY feels not entirely correct,
- # but it's kind of necessary since ENOMEDIUM
- # is not accessible in python
- for name in self._depend_on_existence:
- if name in kw:
- if kw.get(name):
- return False
- name = 'not' + name
- if name in kw:
- if not kw.get(name):
- return False
- return True
-
-class NeverRaised(Exception):
- pass
-
-class PathBase(object):
- """ shared implementation for filesystem path objects."""
- Checkers = Checkers
-
- def __div__(self, other):
- return self.join(fspath(other))
- __truediv__ = __div__ # py3k
-
- def basename(self):
- """ basename part of path. """
- return self._getbyspec('basename')[0]
- basename = property(basename, None, None, basename.__doc__)
-
- def dirname(self):
- """ dirname part of path. """
- return self._getbyspec('dirname')[0]
- dirname = property(dirname, None, None, dirname.__doc__)
-
- def purebasename(self):
- """ pure base name of the path."""
- return self._getbyspec('purebasename')[0]
- purebasename = property(purebasename, None, None, purebasename.__doc__)
-
- def ext(self):
- """ extension of the path (including the '.')."""
- return self._getbyspec('ext')[0]
- ext = property(ext, None, None, ext.__doc__)
-
- def dirpath(self, *args, **kwargs):
- """ return the directory path joined with any given path arguments. """
- return self.new(basename='').join(*args, **kwargs)
-
- def read_binary(self):
- """ read and return a bytestring from reading the path. """
- with self.open('rb') as f:
- return f.read()
-
- def read_text(self, encoding):
- """ read and return a Unicode string from reading the path. """
- with self.open("r", encoding=encoding) as f:
- return f.read()
-
-
- def read(self, mode='r'):
- """ read and return a bytestring from reading the path. """
- with self.open(mode) as f:
- return f.read()
-
- def readlines(self, cr=1):
- """ read and return a list of lines from the path. if cr is False, the
-newline will be removed from the end of each line. """
- if sys.version_info < (3, ):
- mode = 'rU'
- else: # python 3 deprecates mode "U" in favor of "newline" option
- mode = 'r'
-
- if not cr:
- content = self.read(mode)
- return content.split('\n')
- else:
- f = self.open(mode)
- try:
- return f.readlines()
- finally:
- f.close()
-
- def load(self):
- """ (deprecated) return object unpickled from self.read() """
- f = self.open('rb')
- try:
- return py.error.checked_call(py.std.pickle.load, f)
- finally:
- f.close()
-
- def move(self, target):
- """ move this path to target. """
- if target.relto(self):
- raise py.error.EINVAL(target,
- "cannot move path into a subdirectory of itself")
- try:
- self.rename(target)
- except py.error.EXDEV: # invalid cross-device link
- self.copy(target)
- self.remove()
-
- def __repr__(self):
- """ return a string representation of this path. """
- return repr(str(self))
-
- def check(self, **kw):
- """ check a path for existence and properties.
-
- Without arguments, return True if the path exists, otherwise False.
-
- valid checkers::
-
- file=1 # is a file
- file=0 # is not a file (may not even exist)
- dir=1 # is a dir
- link=1 # is a link
- exists=1 # exists
-
- You can specify multiple checker definitions, for example::
-
- path.check(file=1, link=1) # a link pointing to a file
- """
- if not kw:
- kw = {'exists' : 1}
- return self.Checkers(self)._evaluate(kw)
-
- def fnmatch(self, pattern):
- """return true if the basename/fullname matches the glob-'pattern'.
-
- valid pattern characters::
-
- * matches everything
- ? matches any single character
- [seq] matches any character in seq
- [!seq] matches any char not in seq
-
- If the pattern contains a path-separator then the full path
- is used for pattern matching and a '*' is prepended to the
- pattern.
-
- if the pattern doesn't contain a path-separator the pattern
- is only matched against the basename.
- """
- return FNMatcher(pattern)(self)
-
- def relto(self, relpath):
- """ return a string which is the relative part of the path
- to the given 'relpath'.
- """
- if not isinstance(relpath, (str, PathBase)):
- raise TypeError("%r: not a string or path object" %(relpath,))
- strrelpath = str(relpath)
- if strrelpath and strrelpath[-1] != self.sep:
- strrelpath += self.sep
- #assert strrelpath[-1] == self.sep
- #assert strrelpath[-2] != self.sep
- strself = self.strpath
- if sys.platform == "win32" or getattr(os, '_name', None) == 'nt':
- if os.path.normcase(strself).startswith(
- os.path.normcase(strrelpath)):
- return strself[len(strrelpath):]
- elif strself.startswith(strrelpath):
- return strself[len(strrelpath):]
- return ""
-
- def ensure_dir(self, *args):
- """ ensure the path joined with args is a directory. """
- return self.ensure(*args, **{"dir": True})
-
- def bestrelpath(self, dest):
- """ return a string which is a relative path from self
- (assumed to be a directory) to dest such that
- self.join(bestrelpath) == dest; if no such
- path can be determined, return dest.
- """
- try:
- if self == dest:
- return os.curdir
- base = self.common(dest)
- if not base: # can be the case on windows
- return str(dest)
- self2base = self.relto(base)
- reldest = dest.relto(base)
- if self2base:
- n = self2base.count(self.sep) + 1
- else:
- n = 0
- l = [os.pardir] * n
- if reldest:
- l.append(reldest)
- target = dest.sep.join(l)
- return target
- except AttributeError:
- return str(dest)
-
- def exists(self):
- return self.check()
-
- def isdir(self):
- return self.check(dir=1)
-
- def isfile(self):
- return self.check(file=1)
-
- def parts(self, reverse=False):
- """ return a root-first list of all ancestor directories
- plus the path itself.
- """
- current = self
- l = [self]
- while 1:
- last = current
- current = current.dirpath()
- if last == current:
- break
- l.append(current)
- if not reverse:
- l.reverse()
- return l
-
- def common(self, other):
- """ return the common part shared with the other path
- or None if there is no common part.
- """
- last = None
- for x, y in zip(self.parts(), other.parts()):
- if x != y:
- return last
- last = x
- return last
-
- def __add__(self, other):
- """ return new path object with 'other' added to the basename"""
- return self.new(basename=self.basename+str(other))
-
- def __cmp__(self, other):
- """ return sort value (-1, 0, +1). """
- try:
- return cmp(self.strpath, other.strpath)
- except AttributeError:
- return cmp(str(self), str(other)) # self.path, other.path)
-
- def __lt__(self, other):
- try:
- return self.strpath < other.strpath
- except AttributeError:
- return str(self) < str(other)
-
- def visit(self, fil=None, rec=None, ignore=NeverRaised, bf=False, sort=False):
- """ yields all paths below the current one
-
- fil is a filter (glob pattern or callable), if not matching the
- path will not be yielded, defaulting to None (everything is
- returned)
-
- rec is a filter (glob pattern or callable) that controls whether
- a node is descended, defaulting to None
-
- ignore is an Exception class that is ignored when calling listdir()
- on any of the paths (by default, all exceptions are reported)
-
- bf if True will cause a breadthfirst search instead of the
- default depthfirst. Default: False
-
- sort if True will sort entries within each directory level.
- """
- for x in Visitor(fil, rec, ignore, bf, sort).gen(self):
- yield x
-
- def _sortlist(self, res, sort):
- if sort:
- if hasattr(sort, '__call__'):
- res.sort(sort)
- else:
- res.sort()
-
- def samefile(self, other):
- """ return True if other refers to the same stat object as self. """
- return self.strpath == str(other)
-
- def __fspath__(self):
- return self.strpath
-
-class Visitor:
- def __init__(self, fil, rec, ignore, bf, sort):
- if isinstance(fil, py.builtin._basestring):
- fil = FNMatcher(fil)
- if isinstance(rec, py.builtin._basestring):
- self.rec = FNMatcher(rec)
- elif not hasattr(rec, '__call__') and rec:
- self.rec = lambda path: True
- else:
- self.rec = rec
- self.fil = fil
- self.ignore = ignore
- self.breadthfirst = bf
- self.optsort = sort and sorted or (lambda x: x)
-
- def gen(self, path):
- try:
- entries = path.listdir()
- except self.ignore:
- return
- rec = self.rec
- dirs = self.optsort([p for p in entries
- if p.check(dir=1) and (rec is None or rec(p))])
- if not self.breadthfirst:
- for subdir in dirs:
- for p in self.gen(subdir):
- yield p
- for p in self.optsort(entries):
- if self.fil is None or self.fil(p):
- yield p
- if self.breadthfirst:
- for subdir in dirs:
- for p in self.gen(subdir):
- yield p
-
-class FNMatcher:
- def __init__(self, pattern):
- self.pattern = pattern
-
- def __call__(self, path):
- pattern = self.pattern
-
- if (pattern.find(path.sep) == -1 and
- iswin32 and
- pattern.find(posixpath.sep) != -1):
- # Running on Windows, the pattern has no Windows path separators,
- # and the pattern has one or more Posix path separators. Replace
- # the Posix path separators with the Windows path separator.
- pattern = pattern.replace(posixpath.sep, path.sep)
-
- if pattern.find(path.sep) == -1:
- name = path.basename
- else:
- name = str(path) # path.strpath # XXX svn?
- if not os.path.isabs(pattern):
- pattern = '*' + path.sep + pattern
- return fnmatch.fnmatch(name, pattern)
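
`PathBase` (deleted above) supplies the keyword-driven `check()` protocol and the `visit()` tree walker that `LocalPath` below inherits. A sketch against the concrete `py.path.local` front end, assuming the upstream py entry points:

    import py

    p = py.path.local(".")    # LocalPath, built on PathBase above
    assert p.check(dir=1)     # dispatched through Checkers._evaluate
    # walk the tree: fil filters yielded paths, rec controls descent
    for x in p.visit(fil="*.py", rec=lambda d: d.basename != ".git", sort=True):
        print(x.relto(p))
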
diff --git a/lib/spack/external/pytest-fallback/py/_path/local.py b/lib/spack/external/pytest-fallback/py/_path/local.py
deleted file mode 100644
index d2f16b993e..0000000000
--- a/lib/spack/external/pytest-fallback/py/_path/local.py
+++ /dev/null
@@ -1,930 +0,0 @@
-"""
-local path implementation.
-"""
-from __future__ import with_statement
-
-from contextlib import contextmanager
-import sys, os, re, atexit, io
-import py
-from py._path import common
-from py._path.common import iswin32, fspath
-from stat import S_ISLNK, S_ISDIR, S_ISREG
-
-from os.path import abspath, normpath, isabs, exists, isdir, isfile, islink, dirname
-
-if sys.version_info > (3,0):
- def map_as_list(func, iter):
- return list(map(func, iter))
-else:
- map_as_list = map
-
-class Stat(object):
- def __getattr__(self, name):
- return getattr(self._osstatresult, "st_" + name)
-
- def __init__(self, path, osstatresult):
- self.path = path
- self._osstatresult = osstatresult
-
- @property
- def owner(self):
- if iswin32:
- raise NotImplementedError("XXX win32")
- import pwd
- entry = py.error.checked_call(pwd.getpwuid, self.uid)
- return entry[0]
-
- @property
- def group(self):
- """ return group name of file. """
- if iswin32:
- raise NotImplementedError("XXX win32")
- import grp
- entry = py.error.checked_call(grp.getgrgid, self.gid)
- return entry[0]
-
- def isdir(self):
- return S_ISDIR(self._osstatresult.st_mode)
-
- def isfile(self):
- return S_ISREG(self._osstatresult.st_mode)
-
- def islink(self):
- st = self.path.lstat()
- return S_ISLNK(self._osstatresult.st_mode)
-
-class PosixPath(common.PathBase):
- def chown(self, user, group, rec=0):
- """ change ownership to the given user and group.
- user and group may be specified by a number or
- by a name. if rec is True change ownership
- recursively.
- """
- uid = getuserid(user)
- gid = getgroupid(group)
- if rec:
- for x in self.visit(rec=lambda x: x.check(link=0)):
- if x.check(link=0):
- py.error.checked_call(os.chown, str(x), uid, gid)
- py.error.checked_call(os.chown, str(self), uid, gid)
-
- def readlink(self):
- """ return value of a symbolic link. """
- return py.error.checked_call(os.readlink, self.strpath)
-
- def mklinkto(self, oldname):
- """ posix style hard link to another name. """
- py.error.checked_call(os.link, str(oldname), str(self))
-
- def mksymlinkto(self, value, absolute=1):
- """ create a symbolic link with the given value (pointing to another name). """
- if absolute:
- py.error.checked_call(os.symlink, str(value), self.strpath)
- else:
- base = self.common(value)
- # with posix local paths '/' is always a common base
- relsource = self.__class__(value).relto(base)
- reldest = self.relto(base)
- n = reldest.count(self.sep)
- target = self.sep.join(('..', )*n + (relsource, ))
- py.error.checked_call(os.symlink, target, self.strpath)
-
-def getuserid(user):
- import pwd
- if not isinstance(user, int):
- user = pwd.getpwnam(user)[2]
- return user
-
-def getgroupid(group):
- import grp
- if not isinstance(group, int):
- group = grp.getgrnam(group)[2]
- return group
-
-FSBase = not iswin32 and PosixPath or common.PathBase
-
-class LocalPath(FSBase):
- """ object oriented interface to os.path and other local filesystem
- related information.
- """
- class ImportMismatchError(ImportError):
- """ raised on pyimport() if there is a mismatch of __file__'s"""
-
- sep = os.sep
- class Checkers(common.Checkers):
- def _stat(self):
- try:
- return self._statcache
- except AttributeError:
- try:
- self._statcache = self.path.stat()
- except py.error.ELOOP:
- self._statcache = self.path.lstat()
- return self._statcache
-
- def dir(self):
- return S_ISDIR(self._stat().mode)
-
- def file(self):
- return S_ISREG(self._stat().mode)
-
- def exists(self):
- return self._stat()
-
- def link(self):
- st = self.path.lstat()
- return S_ISLNK(st.mode)
-
- def __init__(self, path=None, expanduser=False):
- """ Initialize and return a local Path instance.
-
- Path can be relative to the current directory.
- If path is None it defaults to the current working directory.
- If expanduser is True, tilde-expansion is performed.
- Note that Path instances always carry an absolute path.
- Note also that passing in a local path object will simply return
- the exact same path object. Use new() to get a new copy.
- """
- if path is None:
- self.strpath = py.error.checked_call(os.getcwd)
- else:
- try:
- path = fspath(path)
- except TypeError:
- raise ValueError("can only pass None, Path instances "
- "or non-empty strings to LocalPath")
- if expanduser:
- path = os.path.expanduser(path)
- self.strpath = abspath(path)
-
- def __hash__(self):
- return hash(self.strpath)
-
- def __eq__(self, other):
- s1 = fspath(self)
- try:
- s2 = fspath(other)
- except TypeError:
- return False
- if iswin32:
- s1 = s1.lower()
- try:
- s2 = s2.lower()
- except AttributeError:
- return False
- return s1 == s2
-
- def __ne__(self, other):
- return not (self == other)
-
- def __lt__(self, other):
- return fspath(self) < fspath(other)
-
- def __gt__(self, other):
- return fspath(self) > fspath(other)
-
- def samefile(self, other):
- """ return True if 'other' references the same file as 'self'.
- """
- other = fspath(other)
- if not isabs(other):
- other = abspath(other)
- if self == other:
- return True
- if iswin32:
- return False # there is no samefile
- return py.error.checked_call(
- os.path.samefile, self.strpath, other)
-
- def remove(self, rec=1, ignore_errors=False):
- """ remove a file or directory (or a directory tree if rec=1).
- if ignore_errors is True, errors while removing directories will
- be ignored.
- """
- if self.check(dir=1, link=0):
- if rec:
- # force remove of readonly files on windows
- if iswin32:
- self.chmod(0o700, rec=1)
- py.error.checked_call(py.std.shutil.rmtree, self.strpath,
- ignore_errors=ignore_errors)
- else:
- py.error.checked_call(os.rmdir, self.strpath)
- else:
- if iswin32:
- self.chmod(0o700)
- py.error.checked_call(os.remove, self.strpath)
-
- def computehash(self, hashtype="md5", chunksize=524288):
- """ return hexdigest of hashvalue for this file. """
- try:
- try:
- import hashlib as mod
- except ImportError:
- if hashtype == "sha1":
- hashtype = "sha"
- mod = __import__(hashtype)
- hash = getattr(mod, hashtype)()
- except (AttributeError, ImportError):
- raise ValueError("Don't know how to compute %r hash" %(hashtype,))
- f = self.open('rb')
- try:
- while 1:
- buf = f.read(chunksize)
- if not buf:
- return hash.hexdigest()
- hash.update(buf)
- finally:
- f.close()
-
- def new(self, **kw):
- """ create a modified version of this path.
- the following keyword arguments modify various path parts::
-
- a:/some/path/to/a/file.ext
- xx drive
- xxxxxxxxxxxxxxxxx dirname
- xxxxxxxx basename
- xxxx purebasename
- xxx ext
- """
- obj = object.__new__(self.__class__)
- if not kw:
- obj.strpath = self.strpath
- return obj
- drive, dirname, basename, purebasename,ext = self._getbyspec(
- "drive,dirname,basename,purebasename,ext")
- if 'basename' in kw:
- if 'purebasename' in kw or 'ext' in kw:
- raise ValueError("invalid specification %r" % kw)
- else:
- pb = kw.setdefault('purebasename', purebasename)
- try:
- ext = kw['ext']
- except KeyError:
- pass
- else:
- if ext and not ext.startswith('.'):
- ext = '.' + ext
- kw['basename'] = pb + ext
-
- if ('dirname' in kw and not kw['dirname']):
- kw['dirname'] = drive
- else:
- kw.setdefault('dirname', dirname)
- kw.setdefault('sep', self.sep)
- obj.strpath = normpath(
- "%(dirname)s%(sep)s%(basename)s" % kw)
- return obj
-
- def _getbyspec(self, spec):
- """ see new for what 'spec' can be. """
- res = []
- parts = self.strpath.split(self.sep)
-
- args = filter(None, spec.split(',') )
- append = res.append
- for name in args:
- if name == 'drive':
- append(parts[0])
- elif name == 'dirname':
- append(self.sep.join(parts[:-1]))
- else:
- basename = parts[-1]
- if name == 'basename':
- append(basename)
- else:
- i = basename.rfind('.')
- if i == -1:
- purebasename, ext = basename, ''
- else:
- purebasename, ext = basename[:i], basename[i:]
- if name == 'purebasename':
- append(purebasename)
- elif name == 'ext':
- append(ext)
- else:
- raise ValueError("invalid part specification %r" % name)
- return res
-
- def dirpath(self, *args, **kwargs):
- """ return the directory path joined with any given path arguments. """
- if not kwargs:
- path = object.__new__(self.__class__)
- path.strpath = dirname(self.strpath)
- if args:
- path = path.join(*args)
- return path
- return super(LocalPath, self).dirpath(*args, **kwargs)
-
- def join(self, *args, **kwargs):
- """ return a new path by appending all 'args' as path
- components. if abs=1 is used restart from root if any
- of the args is an absolute path.
- """
- sep = self.sep
- strargs = [fspath(arg) for arg in args]
- strpath = self.strpath
- if kwargs.get('abs'):
- newargs = []
- for arg in reversed(strargs):
- if isabs(arg):
- strpath = arg
- strargs = newargs
- break
- newargs.insert(0, arg)
- for arg in strargs:
- arg = arg.strip(sep)
- if iswin32:
- # allow unix style paths even on windows.
- arg = arg.strip('/')
- arg = arg.replace('/', sep)
- strpath = strpath + sep + arg
- obj = object.__new__(self.__class__)
- obj.strpath = normpath(strpath)
- return obj
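-    # Usage sketch (POSIX paths, illustrative): relative components are
-    # appended; with abs=1 an absolute component restarts from root:
-    #
-    #   >>> py.path.local('/tmp').join('a', 'b').strpath
-    #   '/tmp/a/b'
-    #   >>> py.path.local('/tmp').join('/etc', 'hosts', abs=1).strpath
-    #   '/etc/hosts'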
-
- def open(self, mode='r', ensure=False, encoding=None):
- """ return an opened file with the given mode.
-
- If ensure is True, create parent directories if needed.
- """
- if ensure:
- self.dirpath().ensure(dir=1)
- if encoding:
- return py.error.checked_call(io.open, self.strpath, mode, encoding=encoding)
- return py.error.checked_call(open, self.strpath, mode)
-
- def _fastjoin(self, name):
- child = object.__new__(self.__class__)
- child.strpath = self.strpath + self.sep + name
- return child
-
- def islink(self):
- return islink(self.strpath)
-
- def check(self, **kw):
- if not kw:
- return exists(self.strpath)
- if len(kw) == 1:
- if "dir" in kw:
- return not kw["dir"] ^ isdir(self.strpath)
- if "file" in kw:
- return not kw["file"] ^ isfile(self.strpath)
- return super(LocalPath, self).check(**kw)
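-    # Sketch (POSIX example): check() tests the path kind via keyword
-    # flags; a false value negates the test:
-    #
-    #   >>> py.path.local('/etc').check(dir=1)    # an existing directory
-    #   True
-    #   >>> py.path.local('/etc').check(file=1)   # ... and not a file
-    #   False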
-
- _patternchars = set("*?[" + os.path.sep)
- def listdir(self, fil=None, sort=None):
- """ list directory contents, possibly filter by the given fil func
- and possibly sorted.
- """
- if fil is None and sort is None:
- names = py.error.checked_call(os.listdir, self.strpath)
- return map_as_list(self._fastjoin, names)
- if isinstance(fil, py.builtin._basestring):
- if not self._patternchars.intersection(fil):
- child = self._fastjoin(fil)
- if exists(child.strpath):
- return [child]
- return []
- fil = common.FNMatcher(fil)
- names = py.error.checked_call(os.listdir, self.strpath)
- res = []
- for name in names:
- child = self._fastjoin(name)
- if fil is None or fil(child):
- res.append(child)
- self._sortlist(res, sort)
- return res
-
- def size(self):
- """ return size of the underlying file object """
- return self.stat().size
-
- def mtime(self):
- """ return last modification time of the path. """
- return self.stat().mtime
-
- def copy(self, target, mode=False, stat=False):
- """ copy path to target.
-
-            If mode is True, will copy permission from path to target.
- If stat is True, copy permission, last modification
- time, last access time, and flags from path to target.
- """
- if self.check(file=1):
- if target.check(dir=1):
- target = target.join(self.basename)
- assert self!=target
- copychunked(self, target)
- if mode:
- copymode(self.strpath, target.strpath)
- if stat:
- copystat(self, target)
- else:
- def rec(p):
- return p.check(link=0)
- for x in self.visit(rec=rec):
- relpath = x.relto(self)
- newx = target.join(relpath)
- newx.dirpath().ensure(dir=1)
- if x.check(link=1):
- newx.mksymlinkto(x.readlink())
- continue
- elif x.check(file=1):
- copychunked(x, newx)
- elif x.check(dir=1):
- newx.ensure(dir=1)
- if mode:
- copymode(x.strpath, newx.strpath)
- if stat:
- copystat(x, newx)
-
- def rename(self, target):
- """ rename this path to target. """
- target = fspath(target)
- return py.error.checked_call(os.rename, self.strpath, target)
-
- def dump(self, obj, bin=1):
- """ pickle object into path location"""
- f = self.open('wb')
- try:
- py.error.checked_call(py.std.pickle.dump, obj, f, bin)
- finally:
- f.close()
-
- def mkdir(self, *args):
- """ create & return the directory joined with args. """
- p = self.join(*args)
- py.error.checked_call(os.mkdir, fspath(p))
- return p
-
- def write_binary(self, data, ensure=False):
- """ write binary data into path. If ensure is True create
- missing parent directories.
- """
- if ensure:
- self.dirpath().ensure(dir=1)
- with self.open('wb') as f:
- f.write(data)
-
- def write_text(self, data, encoding, ensure=False):
- """ write text data into path using the specified encoding.
- If ensure is True create missing parent directories.
- """
- if ensure:
- self.dirpath().ensure(dir=1)
- with self.open('w', encoding=encoding) as f:
- f.write(data)
-
- def write(self, data, mode='w', ensure=False):
- """ write data into path. If ensure is True create
- missing parent directories.
- """
- if ensure:
- self.dirpath().ensure(dir=1)
- if 'b' in mode:
- if not py.builtin._isbytes(data):
- raise ValueError("can only process bytes")
- else:
- if not py.builtin._istext(data):
- if not py.builtin._isbytes(data):
- data = str(data)
- else:
- data = py.builtin._totext(data, sys.getdefaultencoding())
- f = self.open(mode)
- try:
- f.write(data)
- finally:
- f.close()
-
- def _ensuredirs(self):
- parent = self.dirpath()
- if parent == self:
- return self
- if parent.check(dir=0):
- parent._ensuredirs()
- if self.check(dir=0):
- try:
- self.mkdir()
- except py.error.EEXIST:
- # race condition: file/dir created by another thread/process.
- # complain if it is not a dir
- if self.check(dir=0):
- raise
- return self
-
- def ensure(self, *args, **kwargs):
- """ ensure that an args-joined path exists (by default as
- a file). if you specify a keyword argument 'dir=True'
- then the path is forced to be a directory path.
- """
- p = self.join(*args)
- if kwargs.get('dir', 0):
- return p._ensuredirs()
- else:
- p.dirpath()._ensuredirs()
- if not p.check(file=1):
- p.open('w').close()
- return p
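-    # Usage sketch (scratch directory, illustrative): ensure() creates any
-    # missing parents and, unless dir=True, an empty file at the leaf:
-    #
-    #   >>> tmp = py.path.local.mkdtemp()
-    #   >>> tmp.ensure('a', 'b', 'c.txt').check(file=1)
-    #   True
-    #   >>> tmp.ensure('sub', dir=True).check(dir=1)
-    #   True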
-
- def stat(self, raising=True):
- """ Return an os.stat() tuple. """
-        if raising:
- return Stat(self, py.error.checked_call(os.stat, self.strpath))
- try:
- return Stat(self, os.stat(self.strpath))
- except KeyboardInterrupt:
- raise
- except Exception:
- return None
-
- def lstat(self):
- """ Return an os.lstat() tuple. """
- return Stat(self, py.error.checked_call(os.lstat, self.strpath))
-
- def setmtime(self, mtime=None):
- """ set modification time for the given path. if 'mtime' is None
- (the default) then the file's mtime is set to current time.
-
- Note that the resolution for 'mtime' is platform dependent.
- """
- if mtime is None:
- return py.error.checked_call(os.utime, self.strpath, mtime)
- try:
- return py.error.checked_call(os.utime, self.strpath, (-1, mtime))
- except py.error.EINVAL:
- return py.error.checked_call(os.utime, self.strpath, (self.atime(), mtime))
-
- def chdir(self):
- """ change directory to self and return old current directory """
- try:
- old = self.__class__()
- except py.error.ENOENT:
- old = None
- py.error.checked_call(os.chdir, self.strpath)
- return old
-
-
- @contextmanager
- def as_cwd(self):
- """ return context manager which changes to current dir during the
- managed "with" context. On __enter__ it returns the old dir.
- """
- old = self.chdir()
- try:
- yield old
- finally:
-            # 'old' is None if the previous cwd no longer existed
-            if old is not None:
-                old.chdir()
-
- def realpath(self):
- """ return a new path which contains no symbolic links."""
- return self.__class__(os.path.realpath(self.strpath))
-
- def atime(self):
- """ return last access time of the path. """
- return self.stat().atime
-
- def __repr__(self):
- return 'local(%r)' % self.strpath
-
- def __str__(self):
- """ return string representation of the Path. """
- return self.strpath
-
- def chmod(self, mode, rec=0):
- """ change permissions to the given mode. If mode is an
- integer it directly encodes the os-specific modes.
- if rec is True perform recursively.
- """
- if not isinstance(mode, int):
- raise TypeError("mode %r must be an integer" % (mode,))
- if rec:
- for x in self.visit(rec=rec):
- py.error.checked_call(os.chmod, str(x), mode)
- py.error.checked_call(os.chmod, self.strpath, mode)
-
- def pypkgpath(self):
- """ return the Python package path by looking for the last
- directory upwards which still contains an __init__.py.
- Return None if a pkgpath can not be determined.
- """
- pkgpath = None
- for parent in self.parts(reverse=True):
- if parent.isdir():
- if not parent.join('__init__.py').exists():
- break
- if not isimportable(parent.basename):
- break
- pkgpath = parent
- return pkgpath
-
- def _ensuresyspath(self, ensuremode, path):
- if ensuremode:
- s = str(path)
- if ensuremode == "append":
- if s not in sys.path:
- sys.path.append(s)
- else:
- if s != sys.path[0]:
- sys.path.insert(0, s)
-
- def pyimport(self, modname=None, ensuresyspath=True):
- """ return path as an imported python module.
-
- If modname is None, look for the containing package
- and construct an according module name.
- The module will be put/looked up in sys.modules.
- if ensuresyspath is True then the root dir for importing
- the file (taking __init__.py files into account) will
- be prepended to sys.path if it isn't there already.
- If ensuresyspath=="append" the root dir will be appended
- if it isn't already contained in sys.path.
- if ensuresyspath is False no modification of syspath happens.
- """
- if not self.check():
- raise py.error.ENOENT(self)
-
- pkgpath = None
- if modname is None:
- pkgpath = self.pypkgpath()
- if pkgpath is not None:
- pkgroot = pkgpath.dirpath()
- names = self.new(ext="").relto(pkgroot).split(self.sep)
- if names[-1] == "__init__":
- names.pop()
- modname = ".".join(names)
- else:
- pkgroot = self.dirpath()
- modname = self.purebasename
-
- self._ensuresyspath(ensuresyspath, pkgroot)
- __import__(modname)
- mod = sys.modules[modname]
- if self.basename == "__init__.py":
-            return mod # we don't check anything as we might
-                       # be in a namespace package ... too icky to check
- modfile = mod.__file__
- if modfile[-4:] in ('.pyc', '.pyo'):
- modfile = modfile[:-1]
- elif modfile.endswith('$py.class'):
- modfile = modfile[:-9] + '.py'
- if modfile.endswith(os.path.sep + "__init__.py"):
- if self.basename != "__init__.py":
- modfile = modfile[:-12]
- try:
- issame = self.samefile(modfile)
- except py.error.ENOENT:
- issame = False
- if not issame:
- raise self.ImportMismatchError(modname, modfile, self)
- return mod
- else:
- try:
- return sys.modules[modname]
- except KeyError:
- # we have a custom modname, do a pseudo-import
- mod = py.std.types.ModuleType(modname)
- mod.__file__ = str(self)
- sys.modules[modname] = mod
- try:
- py.builtin.execfile(str(self), mod.__dict__)
- except:
- del sys.modules[modname]
- raise
- return mod
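-    # Sketch (hypothetical layout): for /src/pkg/mod.py where 'pkg'
-    # contains an __init__.py, the module name is derived automatically:
-    #
-    #   >>> mod = py.path.local('/src/pkg/mod.py').pyimport()
-    #   >>> mod.__name__
-    #   'pkg.mod'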
-
- def sysexec(self, *argv, **popen_opts):
- """ return stdout text from executing a system child process,
- where the 'self' path points to executable.
- The process is directly invoked and not through a system shell.
- """
- from subprocess import Popen, PIPE
- argv = map_as_list(str, argv)
- popen_opts['stdout'] = popen_opts['stderr'] = PIPE
- proc = Popen([str(self)] + argv, **popen_opts)
- stdout, stderr = proc.communicate()
- ret = proc.wait()
- if py.builtin._isbytes(stdout):
- stdout = py.builtin._totext(stdout, sys.getdefaultencoding())
- if ret != 0:
- if py.builtin._isbytes(stderr):
- stderr = py.builtin._totext(stderr, sys.getdefaultencoding())
- raise py.process.cmdexec.Error(ret, ret, str(self),
- stdout, stderr,)
- return stdout
-
- def sysfind(cls, name, checker=None, paths=None):
- """ return a path object found by looking at the systems
- underlying PATH specification. If the checker is not None
- it will be invoked to filter matching paths. If a binary
- cannot be found, None is returned
- Note: This is probably not working on plain win32 systems
- but may work on cygwin.
- """
- if isabs(name):
- p = py.path.local(name)
- if p.check(file=1):
- return p
- else:
- if paths is None:
- if iswin32:
- paths = py.std.os.environ['Path'].split(';')
- if '' not in paths and '.' not in paths:
- paths.append('.')
- try:
- systemroot = os.environ['SYSTEMROOT']
- except KeyError:
- pass
- else:
- paths = [re.sub('%SystemRoot%', systemroot, path)
- for path in paths]
- else:
- paths = py.std.os.environ['PATH'].split(':')
- tryadd = []
- if iswin32:
- tryadd += os.environ['PATHEXT'].split(os.pathsep)
- tryadd.append("")
-
- for x in paths:
- for addext in tryadd:
- p = py.path.local(x).join(name, abs=True) + addext
- try:
- if p.check(file=1):
- if checker:
- if not checker(p):
- continue
- return p
- except py.error.EACCES:
- pass
- return None
- sysfind = classmethod(sysfind)
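-    # Sketch: sysfind() resolves a program name against PATH much like a
-    # shell would; the result depends on the host system:
-    #
-    #   >>> svn = py.path.local.sysfind('svn')
-    #   >>> svn is None or svn.check(file=1)
-    #   True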
-
- def _gethomedir(cls):
- try:
- x = os.environ['HOME']
- except KeyError:
- try:
- x = os.environ["HOMEDRIVE"] + os.environ['HOMEPATH']
- except KeyError:
- return None
- return cls(x)
- _gethomedir = classmethod(_gethomedir)
-
- #"""
- #special class constructors for local filesystem paths
- #"""
- def get_temproot(cls):
- """ return the system's temporary directory
- (where tempfiles are usually created in)
- """
- return py.path.local(py.std.tempfile.gettempdir())
- get_temproot = classmethod(get_temproot)
-
- def mkdtemp(cls, rootdir=None):
- """ return a Path object pointing to a fresh new temporary directory
- (which we created ourself).
- """
- import tempfile
- if rootdir is None:
- rootdir = cls.get_temproot()
- return cls(py.error.checked_call(tempfile.mkdtemp, dir=str(rootdir)))
- mkdtemp = classmethod(mkdtemp)
-
- def make_numbered_dir(cls, prefix='session-', rootdir=None, keep=3,
-                          lock_timeout=172800):  # two days
- """ return unique directory with a number greater than the current
- maximum one. The number is assumed to start directly after prefix.
- if keep is true directories with a number less than (maxnum-keep)
- will be removed.
- """
- if rootdir is None:
- rootdir = cls.get_temproot()
-
- nprefix = prefix.lower()
- def parse_num(path):
- """ parse the number out of a path (if it matches the prefix) """
- nbasename = path.basename.lower()
- if nbasename.startswith(nprefix):
- try:
- return int(nbasename[len(nprefix):])
- except ValueError:
- pass
-
- # compute the maximum number currently in use with the
- # prefix
- lastmax = None
- while True:
- maxnum = -1
- for path in rootdir.listdir():
- num = parse_num(path)
- if num is not None:
- maxnum = max(maxnum, num)
-
- # make the new directory
- try:
- udir = rootdir.mkdir(prefix + str(maxnum+1))
- except py.error.EEXIST:
- # race condition: another thread/process created the dir
- # in the meantime. Try counting again
- if lastmax == maxnum:
- raise
- lastmax = maxnum
- continue
- break
-
- # put a .lock file in the new directory that will be removed at
- # process exit
- if lock_timeout:
- lockfile = udir.join('.lock')
- mypid = os.getpid()
- if hasattr(lockfile, 'mksymlinkto'):
- lockfile.mksymlinkto(str(mypid))
- else:
- lockfile.write(str(mypid))
- def try_remove_lockfile():
- # in a fork() situation, only the last process should
- # remove the .lock, otherwise the other processes run the
- # risk of seeing their temporary dir disappear. For now
- # we remove the .lock in the parent only (i.e. we assume
- # that the children finish before the parent).
- if os.getpid() != mypid:
- return
- try:
- lockfile.remove()
- except py.error.Error:
- pass
- atexit.register(try_remove_lockfile)
-
- # prune old directories
- if keep:
- for path in rootdir.listdir():
- num = parse_num(path)
- if num is not None and num <= (maxnum - keep):
- lf = path.join('.lock')
- try:
- t1 = lf.lstat().mtime
- t2 = lockfile.lstat().mtime
- if not lock_timeout or abs(t2-t1) < lock_timeout:
- continue # skip directories still locked
- except py.error.Error:
- pass # assume that it means that there is no 'lf'
- try:
- path.remove(rec=1)
- except KeyboardInterrupt:
- raise
- except: # this might be py.error.Error, WindowsError ...
- pass
-
- # make link...
- try:
- username = os.environ['USER'] #linux, et al
- except KeyError:
- try:
- username = os.environ['USERNAME'] #windows
- except KeyError:
- username = 'current'
-
- src = str(udir)
- dest = src[:src.rfind('-')] + '-' + username
- try:
- os.unlink(dest)
- except OSError:
- pass
- try:
- os.symlink(src, dest)
- except (OSError, AttributeError, NotImplementedError):
- pass
-
- return udir
- make_numbered_dir = classmethod(make_numbered_dir)
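-    # Sketch (directory names are illustrative): successive calls create
-    # run-0, run-1, ... below rootdir; older ones are pruned per 'keep':
-    #
-    #   >>> root = py.path.local.mkdtemp()
-    #   >>> py.path.local.make_numbered_dir(prefix='run-', rootdir=root).basename
-    #   'run-0'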
-
-
-def copymode(src, dest):
- """ copy permission from src to dst. """
- py.std.shutil.copymode(src, dest)
-
-def copystat(src, dest):
- """ copy permission, last modification time, last access time, and flags from src to dst."""
- py.std.shutil.copystat(str(src), str(dest))
-
-def copychunked(src, dest):
-    chunksize = 524288  # 512 KiB
- fsrc = src.open('rb')
- try:
- fdest = dest.open('wb')
- try:
- while 1:
- buf = fsrc.read(chunksize)
- if not buf:
- break
- fdest.write(buf)
- finally:
- fdest.close()
- finally:
- fsrc.close()
-
-def isimportable(name):
- if name and (name[0].isalpha() or name[0] == '_'):
- name = name.replace("_", '')
- return not name or name.isalnum()
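-
-# Sketch: isimportable() accepts identifier-like names, ignoring
-# underscores (and returns a falsy value for anything else):
-#
-#   >>> [bool(isimportable(n)) for n in ('my_pkg', 'ok2', '2bad', 'no-dash')]
-#   [True, True, False, False]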
diff --git a/lib/spack/external/pytest-fallback/py/_path/svnurl.py b/lib/spack/external/pytest-fallback/py/_path/svnurl.py
deleted file mode 100644
index 6589a71d09..0000000000
--- a/lib/spack/external/pytest-fallback/py/_path/svnurl.py
+++ /dev/null
@@ -1,380 +0,0 @@
-"""
-module defining a subversion path object based on the external
-command 'svn'. This module aims to work with svn 1.3 and higher
-but might also interact well with earlier versions.
-"""
-
-import os, sys, time, re
-import py
-from py import path, process
-from py._path import common
-from py._path import svnwc as svncommon
-from py._path.cacheutil import BuildcostAccessCache, AgingCache
-
-DEBUG = False
-
-class SvnCommandPath(svncommon.SvnPathBase):
- """ path implementation that offers access to (possibly remote) subversion
- repositories. """
-
- _lsrevcache = BuildcostAccessCache(maxentries=128)
- _lsnorevcache = AgingCache(maxentries=1000, maxseconds=60.0)
-
- def __new__(cls, path, rev=None, auth=None):
- self = object.__new__(cls)
- if isinstance(path, cls):
- rev = path.rev
- auth = path.auth
- path = path.strpath
- svncommon.checkbadchars(path)
- path = path.rstrip('/')
- self.strpath = path
- self.rev = rev
- self.auth = auth
- return self
-
- def __repr__(self):
- if self.rev == -1:
- return 'svnurl(%r)' % self.strpath
- else:
- return 'svnurl(%r, %r)' % (self.strpath, self.rev)
-
- def _svnwithrev(self, cmd, *args):
- """ execute an svn command, append our own url and revision """
- if self.rev is None:
- return self._svnwrite(cmd, *args)
- else:
- args = ['-r', self.rev] + list(args)
- return self._svnwrite(cmd, *args)
-
- def _svnwrite(self, cmd, *args):
- """ execute an svn command, append our own url """
- l = ['svn %s' % cmd]
- args = ['"%s"' % self._escape(item) for item in args]
- l.extend(args)
- l.append('"%s"' % self._encodedurl())
-        # the locale gets fixed in _svncmdexecauth so the output can be parsed
- string = " ".join(l)
- if DEBUG:
- print("execing %s" % string)
- out = self._svncmdexecauth(string)
- return out
-
- def _svncmdexecauth(self, cmd):
- """ execute an svn command 'as is' """
- cmd = svncommon.fixlocale() + cmd
- if self.auth is not None:
- cmd += ' ' + self.auth.makecmdoptions()
- return self._cmdexec(cmd)
-
- def _cmdexec(self, cmd):
- try:
- out = process.cmdexec(cmd)
- except py.process.cmdexec.Error:
- e = sys.exc_info()[1]
- if (e.err.find('File Exists') != -1 or
- e.err.find('File already exists') != -1):
- raise py.error.EEXIST(self)
- raise
- return out
-
- def _svnpopenauth(self, cmd):
- """ execute an svn command, return a pipe for reading stdin """
- cmd = svncommon.fixlocale() + cmd
- if self.auth is not None:
- cmd += ' ' + self.auth.makecmdoptions()
- return self._popen(cmd)
-
- def _popen(self, cmd):
- return os.popen(cmd)
-
- def _encodedurl(self):
- return self._escape(self.strpath)
-
- def _norev_delentry(self, path):
- auth = self.auth and self.auth.makecmdoptions() or None
- self._lsnorevcache.delentry((str(path), auth))
-
- def open(self, mode='r'):
- """ return an opened file with the given mode. """
- if mode not in ("r", "rU",):
- raise ValueError("mode %r not supported" % (mode,))
- assert self.check(file=1) # svn cat returns an empty file otherwise
- if self.rev is None:
- return self._svnpopenauth('svn cat "%s"' % (
- self._escape(self.strpath), ))
- else:
- return self._svnpopenauth('svn cat -r %s "%s"' % (
- self.rev, self._escape(self.strpath)))
-
- def dirpath(self, *args, **kwargs):
- """ return the directory path of the current path joined
- with any given path arguments.
- """
- l = self.strpath.split(self.sep)
- if len(l) < 4:
- raise py.error.EINVAL(self, "base is not valid")
- elif len(l) == 4:
- return self.join(*args, **kwargs)
- else:
- return self.new(basename='').join(*args, **kwargs)
-
- # modifying methods (cache must be invalidated)
- def mkdir(self, *args, **kwargs):
- """ create & return the directory joined with args.
- pass a 'msg' keyword argument to set the commit message.
- """
- commit_msg = kwargs.get('msg', "mkdir by py lib invocation")
- createpath = self.join(*args)
- createpath._svnwrite('mkdir', '-m', commit_msg)
- self._norev_delentry(createpath.dirpath())
- return createpath
-
- def copy(self, target, msg='copied by py lib invocation'):
- """ copy path to target with checkin message msg."""
- if getattr(target, 'rev', None) is not None:
- raise py.error.EINVAL(target, "revisions are immutable")
- self._svncmdexecauth('svn copy -m "%s" "%s" "%s"' %(msg,
- self._escape(self), self._escape(target)))
- self._norev_delentry(target.dirpath())
-
- def rename(self, target, msg="renamed by py lib invocation"):
- """ rename this path to target with checkin message msg. """
- if getattr(self, 'rev', None) is not None:
- raise py.error.EINVAL(self, "revisions are immutable")
- self._svncmdexecauth('svn move -m "%s" --force "%s" "%s"' %(
- msg, self._escape(self), self._escape(target)))
- self._norev_delentry(self.dirpath())
- self._norev_delentry(self)
-
- def remove(self, rec=1, msg='removed by py lib invocation'):
- """ remove a file or directory (or a directory tree if rec=1) with
-checkin message msg."""
- if self.rev is not None:
- raise py.error.EINVAL(self, "revisions are immutable")
- self._svncmdexecauth('svn rm -m "%s" "%s"' %(msg, self._escape(self)))
- self._norev_delentry(self.dirpath())
-
- def export(self, topath):
- """ export to a local path
-
- topath should not exist prior to calling this, returns a
- py.path.local instance
- """
- topath = py.path.local(topath)
- args = ['"%s"' % (self._escape(self),),
- '"%s"' % (self._escape(topath),)]
- if self.rev is not None:
- args = ['-r', str(self.rev)] + args
- self._svncmdexecauth('svn export %s' % (' '.join(args),))
- return topath
-
- def ensure(self, *args, **kwargs):
- """ ensure that an args-joined path exists (by default as
- a file). If you specify a keyword argument 'dir=True'
- then the path is forced to be a directory path.
- """
- if getattr(self, 'rev', None) is not None:
- raise py.error.EINVAL(self, "revisions are immutable")
- target = self.join(*args)
- dir = kwargs.get('dir', 0)
- for x in target.parts(reverse=True):
- if x.check():
- break
- else:
- raise py.error.ENOENT(target, "has not any valid base!")
- if x == target:
- if not x.check(dir=dir):
- raise dir and py.error.ENOTDIR(x) or py.error.EISDIR(x)
- return x
- tocreate = target.relto(x)
- basename = tocreate.split(self.sep, 1)[0]
- tempdir = py.path.local.mkdtemp()
- try:
- tempdir.ensure(tocreate, dir=dir)
- cmd = 'svn import -m "%s" "%s" "%s"' % (
- "ensure %s" % self._escape(tocreate),
- self._escape(tempdir.join(basename)),
- x.join(basename)._encodedurl())
- self._svncmdexecauth(cmd)
- self._norev_delentry(x)
- finally:
- tempdir.remove()
- return target
-
- # end of modifying methods
- def _propget(self, name):
- res = self._svnwithrev('propget', name)
- return res[:-1] # strip trailing newline
-
- def _proplist(self):
- res = self._svnwithrev('proplist')
- lines = res.split('\n')
- lines = [x.strip() for x in lines[1:]]
- return svncommon.PropListDict(self, lines)
-
- def info(self):
- """ return an Info structure with svn-provided information. """
- parent = self.dirpath()
- nameinfo_seq = parent._listdir_nameinfo()
- bn = self.basename
- for name, info in nameinfo_seq:
- if name == bn:
- return info
- raise py.error.ENOENT(self)
-
-
- def _listdir_nameinfo(self):
- """ return sequence of name-info directory entries of self """
- def builder():
- try:
- res = self._svnwithrev('ls', '-v')
- except process.cmdexec.Error:
- e = sys.exc_info()[1]
- if e.err.find('non-existent in that revision') != -1:
- raise py.error.ENOENT(self, e.err)
- elif e.err.find("E200009:") != -1:
- raise py.error.ENOENT(self, e.err)
- elif e.err.find('File not found') != -1:
- raise py.error.ENOENT(self, e.err)
- elif e.err.find('not part of a repository')!=-1:
- raise py.error.ENOENT(self, e.err)
- elif e.err.find('Unable to open')!=-1:
- raise py.error.ENOENT(self, e.err)
- elif e.err.lower().find('method not allowed')!=-1:
- raise py.error.EACCES(self, e.err)
- raise py.error.Error(e.err)
- lines = res.split('\n')
- nameinfo_seq = []
- for lsline in lines:
- if lsline:
- info = InfoSvnCommand(lsline)
- if info._name != '.': # svn 1.5 produces '.' dirs,
- nameinfo_seq.append((info._name, info))
- nameinfo_seq.sort()
- return nameinfo_seq
- auth = self.auth and self.auth.makecmdoptions() or None
- if self.rev is not None:
- return self._lsrevcache.getorbuild((self.strpath, self.rev, auth),
- builder)
- else:
- return self._lsnorevcache.getorbuild((self.strpath, auth),
- builder)
-
- def listdir(self, fil=None, sort=None):
- """ list directory contents, possibly filter by the given fil func
- and possibly sorted.
- """
- if isinstance(fil, str):
- fil = common.FNMatcher(fil)
- nameinfo_seq = self._listdir_nameinfo()
- if len(nameinfo_seq) == 1:
- name, info = nameinfo_seq[0]
- if name == self.basename and info.kind == 'file':
- #if not self.check(dir=1):
- raise py.error.ENOTDIR(self)
- paths = [self.join(name) for (name, info) in nameinfo_seq]
- if fil:
- paths = [x for x in paths if fil(x)]
- self._sortlist(paths, sort)
- return paths
-
-
- def log(self, rev_start=None, rev_end=1, verbose=False):
- """ return a list of LogEntry instances for this path.
-rev_start is the starting revision (defaulting to the first one).
-rev_end is the last revision (defaulting to HEAD).
-if verbose is True, then the LogEntry instances also know which files changed.
-"""
- assert self.check() #make it simpler for the pipe
- rev_start = rev_start is None and "HEAD" or rev_start
- rev_end = rev_end is None and "HEAD" or rev_end
-
- if rev_start == "HEAD" and rev_end == 1:
- rev_opt = ""
- else:
- rev_opt = "-r %s:%s" % (rev_start, rev_end)
- verbose_opt = verbose and "-v" or ""
- xmlpipe = self._svnpopenauth('svn log --xml %s %s "%s"' %
- (rev_opt, verbose_opt, self.strpath))
- from xml.dom import minidom
- tree = minidom.parse(xmlpipe)
- result = []
- for logentry in filter(None, tree.firstChild.childNodes):
- if logentry.nodeType == logentry.ELEMENT_NODE:
- result.append(svncommon.LogEntry(logentry))
- return result
-
-#012345678901234567890123456789012345678901234567
-# 2256 hpk 165 Nov 24 17:55 __init__.py
-# XXX spotted by Guido, SVN 1.3.0 has different aligning, breaks the code!!!
-# 1312 johnny 1627 May 05 14:32 test_decorators.py
-#
-class InfoSvnCommand:
- # the '0?' part in the middle is an indication of whether the resource is
- # locked, see 'svn help ls'
- lspattern = re.compile(
- r'^ *(?P<rev>\d+) +(?P<author>.+?) +(0? *(?P<size>\d+))? '
- r'*(?P<date>\w+ +\d{2} +[\d:]+) +(?P<file>.*)$')
- def __init__(self, line):
- # this is a typical line from 'svn ls http://...'
- #_ 1127 jum 0 Jul 13 15:28 branch/
- match = self.lspattern.match(line)
- data = match.groupdict()
- self._name = data['file']
- if self._name[-1] == '/':
- self._name = self._name[:-1]
- self.kind = 'dir'
- else:
- self.kind = 'file'
- #self.has_props = l.pop(0) == 'P'
- self.created_rev = int(data['rev'])
- self.last_author = data['author']
- self.size = data['size'] and int(data['size']) or 0
- self.mtime = parse_time_with_missing_year(data['date'])
- self.time = self.mtime * 1000000
-
- def __eq__(self, other):
- return self.__dict__ == other.__dict__
-
-
-#____________________________________________________
-#
-# helper functions
-#____________________________________________________
-def parse_time_with_missing_year(timestr):
- """ analyze the time part from a single line of "svn ls -v"
- the svn output doesn't show the year makes the 'timestr'
- ambigous.
- """
- import calendar
- t_now = time.gmtime()
-
- tparts = timestr.split()
- month = time.strptime(tparts.pop(0), '%b')[1]
- day = time.strptime(tparts.pop(0), '%d')[2]
- last = tparts.pop(0) # year or hour:minute
- try:
- if ":" in last:
- raise ValueError()
- year = time.strptime(last, '%Y')[0]
- hour = minute = 0
- except ValueError:
- hour, minute = time.strptime(last, '%H:%M')[3:5]
- year = t_now[0]
-
- t_result = (year, month, day, hour, minute, 0,0,0,0)
- if t_result > t_now:
- year -= 1
- t_result = (year, month, day, hour, minute, 0,0,0,0)
- return calendar.timegm(t_result)
-
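-# Sketch: the timestamp either ends in 'HH:MM' (year omitted: assume the
-# current year, or the previous one if that date would lie in the future)
-# or carries an explicit year, e.g.:
-#
-#   parse_time_with_missing_year('Jul 13 2006')   # epoch for 2006-07-13
-#   parse_time_with_missing_year('Jul 13 15:28')  # current/previous year
-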
-class PathEntry:
- def __init__(self, ppart):
- self.strpath = ppart.firstChild.nodeValue.encode('UTF-8')
- self.action = ppart.getAttribute('action').encode('UTF-8')
- if self.action == 'A':
- self.copyfrom_path = ppart.getAttribute('copyfrom-path').encode('UTF-8')
- if self.copyfrom_path:
- self.copyfrom_rev = int(ppart.getAttribute('copyfrom-rev'))
-
diff --git a/lib/spack/external/pytest-fallback/py/_path/svnwc.py b/lib/spack/external/pytest-fallback/py/_path/svnwc.py
deleted file mode 100644
index 992223c04a..0000000000
--- a/lib/spack/external/pytest-fallback/py/_path/svnwc.py
+++ /dev/null
@@ -1,1240 +0,0 @@
-"""
-svn-Command based Implementation of a Subversion WorkingCopy Path.
-
- SvnWCCommandPath is the main class.
-
-"""
-
-import os, sys, time, re, calendar, string  # string.printable is used in _check_path
-import py
-import subprocess
-from py._path import common
-
-#-----------------------------------------------------------
-# Caching latest repository revision and repo-paths
-# (getting them is slow with the current implementations)
-#
-# XXX make mt-safe
-#-----------------------------------------------------------
-
-class cache:
- proplist = {}
- info = {}
- entries = {}
- prop = {}
-
-class RepoEntry:
- def __init__(self, url, rev, timestamp):
- self.url = url
- self.rev = rev
- self.timestamp = timestamp
-
- def __str__(self):
- return "repo: %s;%s %s" %(self.url, self.rev, self.timestamp)
-
-class RepoCache:
- """ The Repocache manages discovered repository paths
- and their revisions. If inside a timeout the cache
- will even return the revision of the root.
- """
- timeout = 20 # seconds after which we forget that we know the last revision
-
- def __init__(self):
- self.repos = []
-
- def clear(self):
- self.repos = []
-
- def put(self, url, rev, timestamp=None):
- if rev is None:
- return
- if timestamp is None:
- timestamp = time.time()
-
- for entry in self.repos:
- if url == entry.url:
- entry.timestamp = timestamp
- entry.rev = rev
- #print "set repo", entry
- break
- else:
- entry = RepoEntry(url, rev, timestamp)
- self.repos.append(entry)
- #print "appended repo", entry
-
- def get(self, url):
- now = time.time()
- for entry in self.repos:
- if url.startswith(entry.url):
- if now < entry.timestamp + self.timeout:
- #print "returning immediate Etrny", entry
- return entry.url, entry.rev
- return entry.url, -1
- return url, -1
-
-repositories = RepoCache()
-
-
-# svn support code
-
-ALLOWED_CHARS = "_ -/\\=$.~+%" #add characters as necessary when tested
-if sys.platform == "win32":
- ALLOWED_CHARS += ":"
-ALLOWED_CHARS_HOST = ALLOWED_CHARS + '@:'
-
-def _getsvnversion(ver=[]):
- try:
- return ver[0]
- except IndexError:
- v = py.process.cmdexec("svn -q --version")
-        v = v.strip()  # keep the stripped value; the bare call discarded it
- v = '.'.join(v.split('.')[:2])
- ver.append(v)
- return v
-
-def _escape_helper(text):
- text = str(text)
- if py.std.sys.platform != 'win32':
- text = str(text).replace('$', '\\$')
- return text
-
-def _check_for_bad_chars(text, allowed_chars=ALLOWED_CHARS):
- for c in str(text):
- if c.isalnum():
- continue
- if c in allowed_chars:
- continue
- return True
- return False
-
-def checkbadchars(url):
- # (hpk) not quite sure about the exact purpose, guido w.?
- proto, uri = url.split("://", 1)
- if proto != "file":
- host, uripath = uri.split('/', 1)
- # only check for bad chars in the non-protocol parts
- if (_check_for_bad_chars(host, ALLOWED_CHARS_HOST) \
- or _check_for_bad_chars(uripath, ALLOWED_CHARS)):
- raise ValueError("bad char in %r" % (url, ))
-
-
-#_______________________________________________________________
-
-class SvnPathBase(common.PathBase):
- """ Base implementation for SvnPath implementations. """
- sep = '/'
-
- def _geturl(self):
- return self.strpath
- url = property(_geturl, None, None, "url of this svn-path.")
-
- def __str__(self):
- """ return a string representation (including rev-number) """
- return self.strpath
-
- def __hash__(self):
- return hash(self.strpath)
-
- def new(self, **kw):
- """ create a modified version of this path. A 'rev' argument
- indicates a new revision.
- the following keyword arguments modify various path parts::
-
- http://host.com/repo/path/file.ext
- |-----------------------| dirname
- |------| basename
- |--| purebasename
- |--| ext
- """
- obj = object.__new__(self.__class__)
- obj.rev = kw.get('rev', self.rev)
- obj.auth = kw.get('auth', self.auth)
- dirname, basename, purebasename, ext = self._getbyspec(
- "dirname,basename,purebasename,ext")
- if 'basename' in kw:
- if 'purebasename' in kw or 'ext' in kw:
- raise ValueError("invalid specification %r" % kw)
- else:
- pb = kw.setdefault('purebasename', purebasename)
- ext = kw.setdefault('ext', ext)
- if ext and not ext.startswith('.'):
- ext = '.' + ext
- kw['basename'] = pb + ext
-
- kw.setdefault('dirname', dirname)
- kw.setdefault('sep', self.sep)
- if kw['basename']:
- obj.strpath = "%(dirname)s%(sep)s%(basename)s" % kw
- else:
- obj.strpath = "%(dirname)s" % kw
- return obj
-
- def _getbyspec(self, spec):
- """ get specified parts of the path. 'arg' is a string
- with comma separated path parts. The parts are returned
- in exactly the order of the specification.
-
- you may specify the following parts:
-
- http://host.com/repo/path/file.ext
- |-----------------------| dirname
- |------| basename
- |--| purebasename
- |--| ext
- """
- res = []
- parts = self.strpath.split(self.sep)
- for name in spec.split(','):
- name = name.strip()
- if name == 'dirname':
- res.append(self.sep.join(parts[:-1]))
- elif name == 'basename':
- res.append(parts[-1])
- else:
- basename = parts[-1]
- i = basename.rfind('.')
- if i == -1:
- purebasename, ext = basename, ''
- else:
- purebasename, ext = basename[:i], basename[i:]
- if name == 'purebasename':
- res.append(purebasename)
- elif name == 'ext':
- res.append(ext)
- else:
- raise NameError("Don't know part %r" % name)
- return res
-
- def __eq__(self, other):
- """ return true if path and rev attributes each match """
-        return (str(self) == str(other) and
-                self.rev == other.rev)
-
- def __ne__(self, other):
- return not self == other
-
- def join(self, *args):
- """ return a new Path (with the same revision) which is composed
- of the self Path followed by 'args' path components.
- """
- if not args:
- return self
-
- args = tuple([arg.strip(self.sep) for arg in args])
- parts = (self.strpath, ) + args
- newpath = self.__class__(self.sep.join(parts), self.rev, self.auth)
- return newpath
-
- def propget(self, name):
- """ return the content of the given property. """
- value = self._propget(name)
- return value
-
- def proplist(self):
- """ list all property names. """
- content = self._proplist()
- return content
-
- def size(self):
- """ Return the size of the file content of the Path. """
- return self.info().size
-
- def mtime(self):
- """ Return the last modification time of the file. """
- return self.info().mtime
-
- # shared help methods
-
- def _escape(self, cmd):
- return _escape_helper(cmd)
-
-
- #def _childmaxrev(self):
- # """ return maximum revision number of childs (or self.rev if no childs) """
- # rev = self.rev
- # for name, info in self._listdir_nameinfo():
- # rev = max(rev, info.created_rev)
- # return rev
-
- #def _getlatestrevision(self):
- # """ return latest repo-revision for this path. """
- # url = self.strpath
- # path = self.__class__(url, None)
- #
- # # we need a long walk to find the root-repo and revision
- # while 1:
- # try:
- # rev = max(rev, path._childmaxrev())
- # previous = path
- # path = path.dirpath()
- # except (IOError, process.cmdexec.Error):
- # break
- # if rev is None:
- # raise IOError, "could not determine newest repo revision for %s" % self
- # return rev
-
- class Checkers(common.Checkers):
- def dir(self):
- try:
- return self.path.info().kind == 'dir'
- except py.error.Error:
- return self._listdirworks()
-
- def _listdirworks(self):
- try:
- self.path.listdir()
- except py.error.ENOENT:
- return False
- else:
- return True
-
- def file(self):
- try:
- return self.path.info().kind == 'file'
- except py.error.ENOENT:
- return False
-
- def exists(self):
- try:
- return self.path.info()
- except py.error.ENOENT:
- return self._listdirworks()
-
-def parse_apr_time(timestr):
- i = timestr.rfind('.')
- if i == -1:
- raise ValueError("could not parse %s" % timestr)
- timestr = timestr[:i]
- parsedtime = time.strptime(timestr, "%Y-%m-%dT%H:%M:%S")
- return time.mktime(parsedtime)
-
-class PropListDict(dict):
- """ a Dictionary which fetches values (InfoSvnCommand instances) lazily"""
- def __init__(self, path, keynames):
- dict.__init__(self, [(x, None) for x in keynames])
- self.path = path
-
- def __getitem__(self, key):
- value = dict.__getitem__(self, key)
- if value is None:
- value = self.path.propget(key)
- dict.__setitem__(self, key, value)
- return value
-
-def fixlocale():
- if sys.platform != 'win32':
- return 'LC_ALL=C '
- return ''
-
-# some nasty chunk of code to solve path and url conversion and quoting issues
-ILLEGAL_CHARS = '* | \\ / : < > ? \t \n \x0b \x0c \r'.split(' ')
-if os.sep in ILLEGAL_CHARS:
- ILLEGAL_CHARS.remove(os.sep)
-ISWINDOWS = sys.platform == 'win32'
-_reg_allow_disk = re.compile(r'^([a-z]\:\\)?[^:]+$', re.I)
-def _check_path(path):
- illegal = ILLEGAL_CHARS[:]
- sp = path.strpath
- if ISWINDOWS:
- illegal.remove(':')
- if not _reg_allow_disk.match(sp):
- raise ValueError('path may not contain a colon (:)')
- for char in sp:
- if char not in string.printable or char in illegal:
- raise ValueError('illegal character %r in path' % (char,))
-
-def path_to_fspath(path, addat=True):
- _check_path(path)
- sp = path.strpath
- if addat and path.rev != -1:
- sp = '%s@%s' % (sp, path.rev)
- elif addat:
- sp = '%s@HEAD' % (sp,)
- return sp
-
-def url_from_path(path):
- fspath = path_to_fspath(path, False)
- quote = py.std.urllib.quote
- if ISWINDOWS:
- match = _reg_allow_disk.match(fspath)
- fspath = fspath.replace('\\', '/')
- if match.group(1):
- fspath = '/%s%s' % (match.group(1).replace('\\', '/'),
- quote(fspath[len(match.group(1)):]))
- else:
- fspath = quote(fspath)
- else:
- fspath = quote(fspath)
- if path.rev != -1:
- fspath = '%s@%s' % (fspath, path.rev)
- else:
- fspath = '%s@HEAD' % (fspath,)
- return 'file://%s' % (fspath,)
-
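-# Sketch (hypothetical POSIX working copy): url_from_path() quotes the
-# filesystem path and appends a peg revision, e.g. for a path object p
-# with p.strpath == '/wc/dir' and p.rev == 5:
-#
-#   url_from_path(p)   # -> 'file:///wc/dir@5'
-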
-class SvnAuth(object):
- """ container for auth information for Subversion """
- def __init__(self, username, password, cache_auth=True, interactive=True):
- self.username = username
- self.password = password
- self.cache_auth = cache_auth
- self.interactive = interactive
-
- def makecmdoptions(self):
- uname = self.username.replace('"', '\\"')
- passwd = self.password.replace('"', '\\"')
- ret = []
- if uname:
- ret.append('--username="%s"' % (uname,))
- if passwd:
- ret.append('--password="%s"' % (passwd,))
- if not self.cache_auth:
- ret.append('--no-auth-cache')
- if not self.interactive:
- ret.append('--non-interactive')
- return ' '.join(ret)
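-    # Sketch (made-up credentials): the resulting option string is
-    # appended to every svn command issued through this auth object:
-    #
-    #   >>> SvnAuth('alice', 's3cret', cache_auth=False).makecmdoptions()
-    #   '--username="alice" --password="s3cret" --no-auth-cache'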
-
- def __str__(self):
- return "<SvnAuth username=%s ...>" %(self.username,)
-
-rex_blame = re.compile(r'\s*(\d+)\s*(\S+) (.*)')
-
-class SvnWCCommandPath(common.PathBase):
- """ path implementation offering access/modification to svn working copies.
- It has methods similar to the functions in os.path and similar to the
- commands of the svn client.
- """
- sep = os.sep
-
- def __new__(cls, wcpath=None, auth=None):
- self = object.__new__(cls)
- if isinstance(wcpath, cls):
- if wcpath.__class__ == cls:
- return wcpath
- wcpath = wcpath.localpath
- if _check_for_bad_chars(str(wcpath),
- ALLOWED_CHARS):
- raise ValueError("bad char in wcpath %s" % (wcpath, ))
- self.localpath = py.path.local(wcpath)
- self.auth = auth
- return self
-
- strpath = property(lambda x: str(x.localpath), None, None, "string path")
- rev = property(lambda x: x.info(usecache=0).rev, None, None, "revision")
-
- def __eq__(self, other):
- return self.localpath == getattr(other, 'localpath', None)
-
- def _geturl(self):
- if getattr(self, '_url', None) is None:
- info = self.info()
- self._url = info.url #SvnPath(info.url, info.rev)
- assert isinstance(self._url, py.builtin._basestring)
- return self._url
-
- url = property(_geturl, None, None, "url of this WC item")
-
- def _escape(self, cmd):
- return _escape_helper(cmd)
-
- def dump(self, obj):
- """ pickle object into path location"""
- return self.localpath.dump(obj)
-
- def svnurl(self):
- """ return current SvnPath for this WC-item. """
- info = self.info()
- return py.path.svnurl(info.url)
-
- def __repr__(self):
- return "svnwc(%r)" % (self.strpath) # , self._url)
-
- def __str__(self):
- return str(self.localpath)
-
- def _makeauthoptions(self):
- if self.auth is None:
- return ''
- return self.auth.makecmdoptions()
-
- def _authsvn(self, cmd, args=None):
- args = args and list(args) or []
- args.append(self._makeauthoptions())
- return self._svn(cmd, *args)
-
- def _svn(self, cmd, *args):
- l = ['svn %s' % cmd]
- args = [self._escape(item) for item in args]
- l.extend(args)
- l.append('"%s"' % self._escape(self.strpath))
-        # try fixing the locale because we can't otherwise parse the output
- string = fixlocale() + " ".join(l)
- try:
- try:
- key = 'LC_MESSAGES'
- hold = os.environ.get(key)
- os.environ[key] = 'C'
- out = py.process.cmdexec(string)
- finally:
- if hold:
- os.environ[key] = hold
- else:
- del os.environ[key]
- except py.process.cmdexec.Error:
- e = sys.exc_info()[1]
- strerr = e.err.lower()
- if strerr.find('not found') != -1:
- raise py.error.ENOENT(self)
- elif strerr.find("E200009:") != -1:
- raise py.error.ENOENT(self)
- if (strerr.find('file exists') != -1 or
- strerr.find('file already exists') != -1 or
- strerr.find('w150002:') != -1 or
- strerr.find("can't create directory") != -1):
- raise py.error.EEXIST(strerr) #self)
- raise
- return out
-
- def switch(self, url):
- """ switch to given URL. """
- self._authsvn('switch', [url])
-
- def checkout(self, url=None, rev=None):
- """ checkout from url to local wcpath. """
- args = []
- if url is None:
- url = self.url
- if rev is None or rev == -1:
- if (py.std.sys.platform != 'win32' and
- _getsvnversion() == '1.3'):
- url += "@HEAD"
- else:
- if _getsvnversion() == '1.3':
- url += "@%d" % rev
- else:
- args.append('-r' + str(rev))
- args.append(url)
- self._authsvn('co', args)
-
- def update(self, rev='HEAD', interactive=True):
- """ update working copy item to given revision. (None -> HEAD). """
- opts = ['-r', rev]
- if not interactive:
- opts.append("--non-interactive")
- self._authsvn('up', opts)
-
- def write(self, content, mode='w'):
- """ write content into local filesystem wc. """
- self.localpath.write(content, mode)
-
- def dirpath(self, *args):
- """ return the directory Path of the current Path. """
- return self.__class__(self.localpath.dirpath(*args), auth=self.auth)
-
- def _ensuredirs(self):
- parent = self.dirpath()
- if parent.check(dir=0):
- parent._ensuredirs()
- if self.check(dir=0):
- self.mkdir()
- return self
-
- def ensure(self, *args, **kwargs):
- """ ensure that an args-joined path exists (by default as
- a file). if you specify a keyword argument 'directory=True'
- then the path is forced to be a directory path.
- """
- p = self.join(*args)
- if p.check():
- if p.check(versioned=False):
- p.add()
- return p
- if kwargs.get('dir', 0):
- return p._ensuredirs()
- parent = p.dirpath()
- parent._ensuredirs()
- p.write("")
- p.add()
- return p
-
- def mkdir(self, *args):
- """ create & return the directory joined with args. """
- if args:
- return self.join(*args).mkdir()
- else:
- self._svn('mkdir')
- return self
-
- def add(self):
- """ add ourself to svn """
- self._svn('add')
-
- def remove(self, rec=1, force=1):
- """ remove a file or a directory tree. 'rec'ursive is
- ignored and considered always true (because of
- underlying svn semantics.
- """
- assert rec, "svn cannot remove non-recursively"
- if not self.check(versioned=True):
- # not added to svn (anymore?), just remove
- py.path.local(self).remove()
- return
- flags = []
- if force:
- flags.append('--force')
- self._svn('remove', *flags)
-
- def copy(self, target):
- """ copy path to target."""
- py.process.cmdexec("svn copy %s %s" %(str(self), str(target)))
-
- def rename(self, target):
- """ rename this path to target. """
- py.process.cmdexec("svn move --force %s %s" %(str(self), str(target)))
-
- def lock(self):
- """ set a lock (exclusive) on the resource """
- out = self._authsvn('lock').strip()
- if not out:
- # warning or error, raise exception
- raise ValueError("unknown error in svn lock command")
-
- def unlock(self):
- """ unset a previously set lock """
- out = self._authsvn('unlock').strip()
- if out.startswith('svn:'):
- # warning or error, raise exception
- raise Exception(out[4:])
-
- def cleanup(self):
- """ remove any locks from the resource """
- # XXX should be fixed properly!!!
- try:
- self.unlock()
- except:
- pass
-
- def status(self, updates=0, rec=0, externals=0):
- """ return (collective) Status object for this file. """
- # http://svnbook.red-bean.com/book.html#svn-ch-3-sect-4.3.1
- # 2201 2192 jum test
- # XXX
- if externals:
- raise ValueError("XXX cannot perform status() "
- "on external items yet")
- else:
- #1.2 supports: externals = '--ignore-externals'
- externals = ''
- if rec:
- rec= ''
- else:
- rec = '--non-recursive'
-
- # XXX does not work on all subversion versions
- #if not externals:
- # externals = '--ignore-externals'
-
- if updates:
- updates = '-u'
- else:
- updates = ''
-
- try:
- cmd = 'status -v --xml --no-ignore %s %s %s' % (
- updates, rec, externals)
- out = self._authsvn(cmd)
- except py.process.cmdexec.Error:
- cmd = 'status -v --no-ignore %s %s %s' % (
- updates, rec, externals)
- out = self._authsvn(cmd)
- rootstatus = WCStatus(self).fromstring(out, self)
- else:
- rootstatus = XMLWCStatus(self).fromstring(out, self)
- return rootstatus
-
- def diff(self, rev=None):
- """ return a diff of the current path against revision rev (defaulting
- to the last one).
- """
- args = []
- if rev is not None:
- args.append("-r %d" % rev)
- out = self._authsvn('diff', args)
- return out
-
- def blame(self):
- """ return a list of tuples of three elements:
-            (revision, committer, line)
- """
- out = self._svn('blame')
- result = []
- blamelines = out.splitlines()
- reallines = py.path.svnurl(self.url).readlines()
- for i, (blameline, line) in enumerate(
- zip(blamelines, reallines)):
- m = rex_blame.match(blameline)
- if not m:
- raise ValueError("output line %r of svn blame does not match "
- "expected format" % (line, ))
- rev, name, _ = m.groups()
- result.append((int(rev), name, line))
- return result
-
- _rex_commit = re.compile(r'.*Committed revision (\d+)\.$', re.DOTALL)
- def commit(self, msg='', rec=1):
- """ commit with support for non-recursive commits """
- # XXX i guess escaping should be done better here?!?
- cmd = 'commit -m "%s" --force-log' % (msg.replace('"', '\\"'),)
- if not rec:
- cmd += ' -N'
- out = self._authsvn(cmd)
- try:
- del cache.info[self]
- except KeyError:
- pass
- if out:
- m = self._rex_commit.match(out)
- return int(m.group(1))
-
- def propset(self, name, value, *args):
- """ set property name to value on this path. """
- d = py.path.local.mkdtemp()
- try:
- p = d.join('value')
- p.write(value)
- self._svn('propset', name, '--file', str(p), *args)
- finally:
- d.remove()
-
- def propget(self, name):
- """ get property name on this path. """
- res = self._svn('propget', name)
- return res[:-1] # strip trailing newline
-
- def propdel(self, name):
- """ delete property name on this path. """
- res = self._svn('propdel', name)
- return res[:-1] # strip trailing newline
-
- def proplist(self, rec=0):
- """ return a mapping of property names to property values.
-If rec is True, then return a dictionary mapping sub-paths to such mappings.
-"""
- if rec:
- res = self._svn('proplist -R')
- return make_recursive_propdict(self, res)
- else:
- res = self._svn('proplist')
- lines = res.split('\n')
- lines = [x.strip() for x in lines[1:]]
- return PropListDict(self, lines)
-
- def revert(self, rec=0):
- """ revert the local changes of this path. if rec is True, do so
-recursively. """
- if rec:
- result = self._svn('revert -R')
- else:
- result = self._svn('revert')
- return result
-
- def new(self, **kw):
- """ create a modified version of this path. A 'rev' argument
- indicates a new revision.
- the following keyword arguments modify various path parts:
-
- http://host.com/repo/path/file.ext
- |-----------------------| dirname
- |------| basename
- |--| purebasename
- |--| ext
- """
- if kw:
- localpath = self.localpath.new(**kw)
- else:
- localpath = self.localpath
- return self.__class__(localpath, auth=self.auth)
-
- def join(self, *args, **kwargs):
- """ return a new Path (with the same revision) which is composed
- of the self Path followed by 'args' path components.
- """
- if not args:
- return self
- localpath = self.localpath.join(*args, **kwargs)
- return self.__class__(localpath, auth=self.auth)
-
- def info(self, usecache=1):
- """ return an Info structure with svn-provided information. """
- info = usecache and cache.info.get(self)
- if not info:
- try:
- output = self._svn('info')
- except py.process.cmdexec.Error:
- e = sys.exc_info()[1]
- if e.err.find('Path is not a working copy directory') != -1:
- raise py.error.ENOENT(self, e.err)
- elif e.err.find("is not under version control") != -1:
- raise py.error.ENOENT(self, e.err)
- raise
-            # XXX SVN 1.3 prints this output to stderr instead of stdout
-            # (while still returning 0!), which is a bit nasty; we assume
-            # nothing else gets written to stderr here...
- if (output.strip() == '' or
- output.lower().find('not a versioned resource') != -1):
- raise py.error.ENOENT(self, output)
- info = InfoSvnWCCommand(output)
-
- # Can't reliably compare on Windows without access to win32api
- if py.std.sys.platform != 'win32':
- if info.path != self.localpath:
- raise py.error.ENOENT(self, "not a versioned resource:" +
- " %s != %s" % (info.path, self.localpath))
- cache.info[self] = info
- return info
-
- def listdir(self, fil=None, sort=None):
- """ return a sequence of Paths.
-
- listdir will return either a tuple or a list of paths
- depending on implementation choices.
- """
- if isinstance(fil, str):
- fil = common.FNMatcher(fil)
- # XXX unify argument naming with LocalPath.listdir
- def notsvn(path):
- return path.basename != '.svn'
-
- paths = []
- for localpath in self.localpath.listdir(notsvn):
- p = self.__class__(localpath, auth=self.auth)
- if notsvn(p) and (not fil or fil(p)):
- paths.append(p)
- self._sortlist(paths, sort)
- return paths
-
- def open(self, mode='r'):
- """ return an opened file with the given mode. """
- return open(self.strpath, mode)
-
- def _getbyspec(self, spec):
- return self.localpath._getbyspec(spec)
-
- class Checkers(py.path.local.Checkers):
- def __init__(self, path):
- self.svnwcpath = path
- self.path = path.localpath
- def versioned(self):
- try:
- s = self.svnwcpath.info()
- except (py.error.ENOENT, py.error.EEXIST):
- return False
- except py.process.cmdexec.Error:
- e = sys.exc_info()[1]
- if e.err.find('is not a working copy')!=-1:
- return False
- if e.err.lower().find('not a versioned resource') != -1:
- return False
- raise
- else:
- return True
-
- def log(self, rev_start=None, rev_end=1, verbose=False):
- """ return a list of LogEntry instances for this path.
-rev_start is the starting revision (defaulting to the first one).
-rev_end is the last revision (defaulting to HEAD).
-if verbose is True, then the LogEntry instances also know which files changed.
-"""
- assert self.check() # make it simpler for the pipe
- rev_start = rev_start is None and "HEAD" or rev_start
- rev_end = rev_end is None and "HEAD" or rev_end
- if rev_start == "HEAD" and rev_end == 1:
- rev_opt = ""
- else:
- rev_opt = "-r %s:%s" % (rev_start, rev_end)
- verbose_opt = verbose and "-v" or ""
- locale_env = fixlocale()
- # some blather on stderr
- auth_opt = self._makeauthoptions()
- #stdin, stdout, stderr = os.popen3(locale_env +
- # 'svn log --xml %s %s %s "%s"' % (
- # rev_opt, verbose_opt, auth_opt,
- # self.strpath))
- cmd = locale_env + 'svn log --xml %s %s %s "%s"' % (
- rev_opt, verbose_opt, auth_opt, self.strpath)
-
- popen = subprocess.Popen(cmd,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- shell=True,
- )
- stdout, stderr = popen.communicate()
- stdout = py.builtin._totext(stdout, sys.getdefaultencoding())
-        minidom, ExpatError = importxml()
- try:
- tree = minidom.parseString(stdout)
- except ExpatError:
- raise ValueError('no such revision')
- result = []
- for logentry in filter(None, tree.firstChild.childNodes):
- if logentry.nodeType == logentry.ELEMENT_NODE:
- result.append(LogEntry(logentry))
- return result
-
- def size(self):
- """ Return the size of the file content of the Path. """
- return self.info().size
-
- def mtime(self):
- """ Return the last modification time of the file. """
- return self.info().mtime
-
- def __hash__(self):
- return hash((self.strpath, self.__class__, self.auth))
-
-
-class WCStatus:
- attrnames = ('modified','added', 'conflict', 'unchanged', 'external',
- 'deleted', 'prop_modified', 'unknown', 'update_available',
- 'incomplete', 'kindmismatch', 'ignored', 'locked', 'replaced'
- )
-
- def __init__(self, wcpath, rev=None, modrev=None, author=None):
- self.wcpath = wcpath
- self.rev = rev
- self.modrev = modrev
- self.author = author
-
- for name in self.attrnames:
- setattr(self, name, [])
-
- def allpath(self, sort=True, **kw):
- d = {}
- for name in self.attrnames:
- if name not in kw or kw[name]:
- for path in getattr(self, name):
- d[path] = 1
-        l = list(d.keys())
- if sort:
- l.sort()
- return l
-
- # XXX a bit scary to assume there's always 2 spaces between username and
- # path, however with win32 allowing spaces in user names there doesn't
- # seem to be a more solid approach :(
- _rex_status = re.compile(r'\s+(\d+|-)\s+(\S+)\s+(.+?)\s{2,}(.*)')
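-    # Sketch (made-up status line): the regex splits rev, modrev, author
-    # and filename out of the non-flag part of a verbose status line:
-    #
-    #   >>> WCStatus._rex_status.match('  8   7 alice  foo.txt').groups()
-    #   ('8', '7', 'alice', 'foo.txt')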
-
- def fromstring(data, rootwcpath, rev=None, modrev=None, author=None):
- """ return a new WCStatus object from data 's'
- """
- rootstatus = WCStatus(rootwcpath, rev, modrev, author)
- update_rev = None
- for line in data.split('\n'):
- if not line.strip():
- continue
- #print "processing %r" % line
- flags, rest = line[:8], line[8:]
- # first column
- c0,c1,c2,c3,c4,c5,x6,c7 = flags
- #if '*' in line:
- # print "flags", repr(flags), "rest", repr(rest)
-
- if c0 in '?XI':
- fn = line.split(None, 1)[1]
- if c0 == '?':
- wcpath = rootwcpath.join(fn, abs=1)
- rootstatus.unknown.append(wcpath)
- elif c0 == 'X':
- wcpath = rootwcpath.__class__(
- rootwcpath.localpath.join(fn, abs=1),
- auth=rootwcpath.auth)
- rootstatus.external.append(wcpath)
- elif c0 == 'I':
- wcpath = rootwcpath.join(fn, abs=1)
- rootstatus.ignored.append(wcpath)
-
- continue
-
- #elif c0 in '~!' or c4 == 'S':
- # raise NotImplementedError("received flag %r" % c0)
-
- m = WCStatus._rex_status.match(rest)
- if not m:
- if c7 == '*':
- fn = rest.strip()
- wcpath = rootwcpath.join(fn, abs=1)
- rootstatus.update_available.append(wcpath)
- continue
- if line.lower().find('against revision:')!=-1:
- update_rev = int(rest.split(':')[1].strip())
- continue
- if line.lower().find('status on external') > -1:
- # XXX not sure what to do here... perhaps we want to
- # store some state instead of just continuing, as right
- # now it makes the top-level external get added twice
- # (once as external, once as 'normal' unchanged item)
- # because of the way SVN presents external items
- continue
- # keep trying
- raise ValueError("could not parse line %r" % line)
- else:
- rev, modrev, author, fn = m.groups()
- wcpath = rootwcpath.join(fn, abs=1)
- #assert wcpath.check()
- if c0 == 'M':
- assert wcpath.check(file=1), "didn't expect a directory with changed content here"
- rootstatus.modified.append(wcpath)
- elif c0 == 'A' or c3 == '+' :
- rootstatus.added.append(wcpath)
- elif c0 == 'D':
- rootstatus.deleted.append(wcpath)
- elif c0 == 'C':
- rootstatus.conflict.append(wcpath)
- elif c0 == '~':
- rootstatus.kindmismatch.append(wcpath)
- elif c0 == '!':
- rootstatus.incomplete.append(wcpath)
- elif c0 == 'R':
- rootstatus.replaced.append(wcpath)
- elif not c0.strip():
- rootstatus.unchanged.append(wcpath)
- else:
- raise NotImplementedError("received flag %r" % c0)
-
- if c1 == 'M':
- rootstatus.prop_modified.append(wcpath)
- # XXX do we cover all client versions here?
- if c2 == 'L' or c5 == 'K':
- rootstatus.locked.append(wcpath)
- if c7 == '*':
- rootstatus.update_available.append(wcpath)
-
- if wcpath == rootwcpath:
- rootstatus.rev = rev
- rootstatus.modrev = modrev
- rootstatus.author = author
- if update_rev:
- rootstatus.update_rev = update_rev
- continue
- return rootstatus
- fromstring = staticmethod(fromstring)
-
-class XMLWCStatus(WCStatus):
- def fromstring(data, rootwcpath, rev=None, modrev=None, author=None):
-        """ parse 'data' (XML string as output by 'svn st') into a status object
- """
- # XXX for externals, the path is shown twice: once
- # with external information, and once with full info as if
- # the item was a normal non-external... the current way of
- # dealing with this issue is by ignoring it - this does make
- # externals appear as external items as well as 'normal',
- # unchanged ones in the status object so this is far from ideal
- rootstatus = WCStatus(rootwcpath, rev, modrev, author)
- update_rev = None
- minidom, ExpatError = importxml()
- try:
- doc = minidom.parseString(data)
- except ExpatError:
- e = sys.exc_info()[1]
- raise ValueError(str(e))
- urevels = doc.getElementsByTagName('against')
- if urevels:
- rootstatus.update_rev = urevels[-1].getAttribute('revision')
- for entryel in doc.getElementsByTagName('entry'):
- path = entryel.getAttribute('path')
- statusel = entryel.getElementsByTagName('wc-status')[0]
- itemstatus = statusel.getAttribute('item')
-
- if itemstatus == 'unversioned':
- wcpath = rootwcpath.join(path, abs=1)
- rootstatus.unknown.append(wcpath)
- continue
- elif itemstatus == 'external':
- wcpath = rootwcpath.__class__(
- rootwcpath.localpath.join(path, abs=1),
- auth=rootwcpath.auth)
- rootstatus.external.append(wcpath)
- continue
- elif itemstatus == 'ignored':
- wcpath = rootwcpath.join(path, abs=1)
- rootstatus.ignored.append(wcpath)
- continue
- elif itemstatus == 'incomplete':
- wcpath = rootwcpath.join(path, abs=1)
- rootstatus.incomplete.append(wcpath)
- continue
-
- rev = statusel.getAttribute('revision')
- if itemstatus == 'added' or itemstatus == 'none':
- rev = '0'
- modrev = '?'
- author = '?'
- date = ''
- elif itemstatus == "replaced":
- pass
- else:
- #print entryel.toxml()
- commitel = entryel.getElementsByTagName('commit')[0]
- if commitel:
- modrev = commitel.getAttribute('revision')
- author = ''
- author_els = commitel.getElementsByTagName('author')
- if author_els:
- for c in author_els[0].childNodes:
- author += c.nodeValue
- date = ''
- for c in commitel.getElementsByTagName('date')[0]\
- .childNodes:
- date += c.nodeValue
-
- wcpath = rootwcpath.join(path, abs=1)
-
- assert itemstatus != 'modified' or wcpath.check(file=1), (
-                'didn\'t expect a directory with changed content here')
-
- itemattrname = {
- 'normal': 'unchanged',
- 'unversioned': 'unknown',
- 'conflicted': 'conflict',
- 'none': 'added',
- }.get(itemstatus, itemstatus)
-
- attr = getattr(rootstatus, itemattrname)
- attr.append(wcpath)
-
- propsstatus = statusel.getAttribute('props')
- if propsstatus not in ('none', 'normal'):
- rootstatus.prop_modified.append(wcpath)
-
- if wcpath == rootwcpath:
- rootstatus.rev = rev
- rootstatus.modrev = modrev
- rootstatus.author = author
- rootstatus.date = date
-
- # handle repos-status element (remote info)
- rstatusels = entryel.getElementsByTagName('repos-status')
- if rstatusels:
- rstatusel = rstatusels[0]
- ritemstatus = rstatusel.getAttribute('item')
- if ritemstatus in ('added', 'modified'):
- rootstatus.update_available.append(wcpath)
-
- lockels = entryel.getElementsByTagName('lock')
- if len(lockels):
- rootstatus.locked.append(wcpath)
-
- return rootstatus
- fromstring = staticmethod(fromstring)
-
-class InfoSvnWCCommand:
- def __init__(self, output):
- # Path: test
- # URL: http://codespeak.net/svn/std.path/trunk/dist/std.path/test
- # Repository UUID: fd0d7bf2-dfb6-0310-8d31-b7ecfe96aada
- # Revision: 2151
- # Node Kind: directory
- # Schedule: normal
- # Last Changed Author: hpk
- # Last Changed Rev: 2100
- # Last Changed Date: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)
- # Properties Last Updated: 2003-11-03 14:47:48 +0100 (Mon, 03 Nov 2003)
-
- d = {}
- for line in output.split('\n'):
- if not line.strip():
- continue
- key, value = line.split(':', 1)
- key = key.lower().replace(' ', '')
- value = value.strip()
- d[key] = value
- try:
- self.url = d['url']
- except KeyError:
- raise ValueError("Not a versioned resource")
- #raise ValueError, "Not a versioned resource %r" % path
- self.kind = d['nodekind'] == 'directory' and 'dir' or d['nodekind']
- try:
- self.rev = int(d['revision'])
- except KeyError:
- self.rev = None
-
- self.path = py.path.local(d['path'])
- self.size = self.path.size()
- if 'lastchangedrev' in d:
- self.created_rev = int(d['lastchangedrev'])
- if 'lastchangedauthor' in d:
- self.last_author = d['lastchangedauthor']
- if 'lastchangeddate' in d:
- self.mtime = parse_wcinfotime(d['lastchangeddate'])
- self.time = self.mtime * 1000000
-
- def __eq__(self, other):
- return self.__dict__ == other.__dict__
-
-def parse_wcinfotime(timestr):
- """ Returns seconds since epoch, UTC. """
- # example: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)
- m = re.match(r'(\d+-\d+-\d+ \d+:\d+:\d+) ([+-]\d+) .*', timestr)
- if not m:
- raise ValueError("timestring %r does not match" % timestr)
- timestr, timezone = m.groups()
- # do not handle timezone specially, return value should be UTC
- parsedtime = time.strptime(timestr, "%Y-%m-%d %H:%M:%S")
- return calendar.timegm(parsedtime)
-
-def make_recursive_propdict(wcroot,
- output,
- rex = re.compile("Properties on '(.*)':")):
- """ Return a dictionary of path->PropListDict mappings. """
- lines = [x for x in output.split('\n') if x]
- pdict = {}
- while lines:
- line = lines.pop(0)
- m = rex.match(line)
- if not m:
- raise ValueError("could not parse propget-line: %r" % line)
- path = m.groups()[0]
- wcpath = wcroot.join(path, abs=1)
- propnames = []
- while lines and lines[0].startswith(' '):
- propname = lines.pop(0).strip()
- propnames.append(propname)
- assert propnames, "must have found properties!"
- pdict[wcpath] = PropListDict(wcpath, propnames)
- return pdict
-
-
-def importxml(cache=[]):
- if cache:
- return cache
- from xml.dom import minidom
- from xml.parsers.expat import ExpatError
- cache.extend([minidom, ExpatError])
- return cache
-
-class LogEntry:
- def __init__(self, logentry):
- self.rev = int(logentry.getAttribute('revision'))
- for lpart in filter(None, logentry.childNodes):
- if lpart.nodeType == lpart.ELEMENT_NODE:
- if lpart.nodeName == 'author':
- self.author = lpart.firstChild.nodeValue
- elif lpart.nodeName == 'msg':
- if lpart.firstChild:
- self.msg = lpart.firstChild.nodeValue
- else:
- self.msg = ''
- elif lpart.nodeName == 'date':
- #2003-07-29T20:05:11.598637Z
- timestr = lpart.firstChild.nodeValue
- self.date = parse_apr_time(timestr)
- elif lpart.nodeName == 'paths':
- self.strpaths = []
- for ppart in filter(None, lpart.childNodes):
- if ppart.nodeType == ppart.ELEMENT_NODE:
- self.strpaths.append(PathEntry(ppart))
- def __repr__(self):
- return '<Logentry rev=%d author=%s date=%s>' % (
- self.rev, self.author, self.date)
-
-
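The deleted log() method near the top of this hunk shells out to `svn log --xml` and walks the minidom tree for `logentry` elements, exactly what the LogEntry class above consumes. A minimal standalone sketch of that technique, assuming an `svn` client on PATH and Python 3.7+ (the helper name is illustrative); passing an argument list instead of a shell string also sidesteps the quoting the deleted code had to assemble by hand:

    import subprocess
    from xml.dom import minidom

    def svn_log_entries(url, limit=5):
        # No shell: the argument list avoids the quoting of the
        # string-assembled `svn log` command in the deleted code.
        out = subprocess.run(
            ["svn", "log", "--xml", "--limit", str(limit), url],
            check=True, capture_output=True, text=True,
        ).stdout
        doc = minidom.parseString(out)
        entries = []
        for node in doc.getElementsByTagName("logentry"):
            rev = int(node.getAttribute("revision"))
            authors = node.getElementsByTagName("author")
            author = ""
            if authors and authors[0].firstChild is not None:
                author = authors[0].firstChild.nodeValue
            entries.append((rev, author))
        return entries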
diff --git a/lib/spack/external/pytest-fallback/py/_process/__init__.py b/lib/spack/external/pytest-fallback/py/_process/__init__.py
deleted file mode 100644
index 86c714ad1a..0000000000
--- a/lib/spack/external/pytest-fallback/py/_process/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-""" high-level sub-process handling """
diff --git a/lib/spack/external/pytest-fallback/py/_process/cmdexec.py b/lib/spack/external/pytest-fallback/py/_process/cmdexec.py
deleted file mode 100644
index f83a249402..0000000000
--- a/lib/spack/external/pytest-fallback/py/_process/cmdexec.py
+++ /dev/null
@@ -1,49 +0,0 @@
-import sys
-import subprocess
-import py
-from subprocess import Popen, PIPE
-
-def cmdexec(cmd):
- """ return unicode output of executing 'cmd' in a separate process.
-
-    raise cmdexec.Error exception if the command failed.
- the exception will provide an 'err' attribute containing
- the error-output from the command.
-    if the subprocess module does not provide proper encoding/unicode strings,
-    sys.getdefaultencoding() will be used; if that does not exist, 'UTF-8'.
- """
- process = subprocess.Popen(cmd, shell=True,
- universal_newlines=True,
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- out, err = process.communicate()
- if sys.version_info[0] < 3: # on py3 we get unicode strings, on py2 not
- try:
- default_encoding = sys.getdefaultencoding() # jython may not have it
- except AttributeError:
- default_encoding = sys.stdout.encoding or 'UTF-8'
- out = unicode(out, process.stdout.encoding or default_encoding)
- err = unicode(err, process.stderr.encoding or default_encoding)
- status = process.poll()
- if status:
- raise ExecutionFailed(status, status, cmd, out, err)
- return out
-
-class ExecutionFailed(py.error.Error):
- def __init__(self, status, systemstatus, cmd, out, err):
- Exception.__init__(self)
- self.status = status
- self.systemstatus = systemstatus
- self.cmd = cmd
- self.err = err
- self.out = out
-
- def __str__(self):
- return "ExecutionFailed: %d %s\n%s" %(self.status, self.cmd, self.err)
-
-# export the exception under the name 'py.process.cmdexec.Error'
-cmdexec.Error = ExecutionFailed
-try:
- ExecutionFailed.__module__ = 'py.process.cmdexec'
- ExecutionFailed.__name__ = 'Error'
-except (AttributeError, TypeError):
- pass
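The cmdexec() helper above predates subprocess.run(); its ExecutionFailed bookkeeping maps almost one-to-one onto CalledProcessError. A rough modern equivalent, assuming Python 3.7+ (the function name mirrors the deleted one for comparison only):

    import subprocess

    def cmdexec(cmd):
        # Run `cmd` through the shell and return its stdout as text.
        # A non-zero exit status raises CalledProcessError, whose
        # stderr attribute plays the role of ExecutionFailed.err.
        result = subprocess.run(cmd, shell=True, capture_output=True, text=True)
        if result.returncode:
            raise subprocess.CalledProcessError(
                result.returncode, cmd, result.stdout, result.stderr
            )
        return result.stdout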
diff --git a/lib/spack/external/pytest-fallback/py/_process/forkedfunc.py b/lib/spack/external/pytest-fallback/py/_process/forkedfunc.py
deleted file mode 100644
index 1c28530688..0000000000
--- a/lib/spack/external/pytest-fallback/py/_process/forkedfunc.py
+++ /dev/null
@@ -1,120 +0,0 @@
-
-"""
- ForkedFunc provides a way to run a function in a forked process
- and get at its return value, stdout and stderr output as well
-    as signals and exit statuses.
-"""
-
-import py
-import os
-import sys
-import marshal
-
-
-def get_unbuffered_io(fd, filename):
- f = open(str(filename), "w")
- if fd != f.fileno():
- os.dup2(f.fileno(), fd)
- class AutoFlush:
- def write(self, data):
- f.write(data)
- f.flush()
- def __getattr__(self, name):
- return getattr(f, name)
- return AutoFlush()
-
-
-class ForkedFunc:
- EXITSTATUS_EXCEPTION = 3
-
-
- def __init__(self, fun, args=None, kwargs=None, nice_level=0,
- child_on_start=None, child_on_exit=None):
- if args is None:
- args = []
- if kwargs is None:
- kwargs = {}
- self.fun = fun
- self.args = args
- self.kwargs = kwargs
- self.tempdir = tempdir = py.path.local.mkdtemp()
- self.RETVAL = tempdir.ensure('retval')
- self.STDOUT = tempdir.ensure('stdout')
- self.STDERR = tempdir.ensure('stderr')
-
- pid = os.fork()
- if pid: # in parent process
- self.pid = pid
- else: # in child process
- self.pid = None
- self._child(nice_level, child_on_start, child_on_exit)
-
- def _child(self, nice_level, child_on_start, child_on_exit):
- # right now we need to call a function, but first we need to
- # map all IO that might happen
- sys.stdout = stdout = get_unbuffered_io(1, self.STDOUT)
- sys.stderr = stderr = get_unbuffered_io(2, self.STDERR)
- retvalf = self.RETVAL.open("wb")
- EXITSTATUS = 0
- try:
- if nice_level:
- os.nice(nice_level)
- try:
- if child_on_start is not None:
- child_on_start()
- retval = self.fun(*self.args, **self.kwargs)
- retvalf.write(marshal.dumps(retval))
- if child_on_exit is not None:
- child_on_exit()
- except:
- excinfo = py.code.ExceptionInfo()
- stderr.write(str(excinfo._getreprcrash()))
- EXITSTATUS = self.EXITSTATUS_EXCEPTION
- finally:
- stdout.close()
- stderr.close()
- retvalf.close()
- os.close(1)
- os.close(2)
- os._exit(EXITSTATUS)
-
- def waitfinish(self, waiter=os.waitpid):
- pid, systemstatus = waiter(self.pid, 0)
- if systemstatus:
- if os.WIFSIGNALED(systemstatus):
- exitstatus = os.WTERMSIG(systemstatus) + 128
- else:
- exitstatus = os.WEXITSTATUS(systemstatus)
- else:
- exitstatus = 0
- signal = systemstatus & 0x7f
- if not exitstatus and not signal:
- retval = self.RETVAL.open('rb')
- try:
- retval_data = retval.read()
- finally:
- retval.close()
- retval = marshal.loads(retval_data)
- else:
- retval = None
- stdout = self.STDOUT.read()
- stderr = self.STDERR.read()
- self._removetemp()
- return Result(exitstatus, signal, retval, stdout, stderr)
-
- def _removetemp(self):
- if self.tempdir.check():
- self.tempdir.remove()
-
- def __del__(self):
- if self.pid is not None: # only clean up in main process
- self._removetemp()
-
-
-class Result(object):
- def __init__(self, exitstatus, signal, retval, stdout, stderr):
- self.exitstatus = exitstatus
- self.signal = signal
- self.retval = retval
- self.out = stdout
- self.err = stderr
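ForkedFunc combines four ingredients: os.fork(), per-child temp files for stdout/stderr and the return value, marshal for serialization, and os.waitpid() to decode the exit status. A compressed, POSIX-only sketch of the core round trip, with illustrative names and none of the IO redirection or error handling of the original:

    import marshal
    import os
    import tempfile

    def run_forked(fun, *args):
        fd, path = tempfile.mkstemp()
        os.close(fd)
        pid = os.fork()
        if pid == 0:  # child: serialize the result, exit without cleanup
            with open(path, "wb") as f:
                f.write(marshal.dumps(fun(*args)))
            os._exit(0)
        os.waitpid(pid, 0)  # parent: reap the child, then read its result
        with open(path, "rb") as f:
            data = f.read()
        os.remove(path)
        return marshal.loads(data)

    assert run_forked(sum, [1, 2, 3]) == 6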
diff --git a/lib/spack/external/pytest-fallback/py/_process/killproc.py b/lib/spack/external/pytest-fallback/py/_process/killproc.py
deleted file mode 100644
index 18e8310b5f..0000000000
--- a/lib/spack/external/pytest-fallback/py/_process/killproc.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import py
-import os, sys
-
-if sys.platform == "win32" or getattr(os, '_name', '') == 'nt':
- try:
- import ctypes
- except ImportError:
- def dokill(pid):
- py.process.cmdexec("taskkill /F /PID %d" %(pid,))
- else:
- def dokill(pid):
- PROCESS_TERMINATE = 1
- handle = ctypes.windll.kernel32.OpenProcess(
- PROCESS_TERMINATE, False, pid)
- ctypes.windll.kernel32.TerminateProcess(handle, -1)
- ctypes.windll.kernel32.CloseHandle(handle)
-else:
- def dokill(pid):
- os.kill(pid, 15)
-
-def kill(pid):
- """ kill process by id. """
- dokill(pid)
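The platform switch above is mostly historical: on Python 3, os.kill() exists on Windows too, where any signal value other than the CTRL_* events terminates the process via TerminateProcess. A sketch of the portable replacement:

    import os
    import signal

    def kill(pid):
        # SIGTERM on POSIX; on Windows this falls through to
        # TerminateProcess, covering both branches of the deleted code.
        os.kill(pid, signal.SIGTERM)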
diff --git a/lib/spack/external/pytest-fallback/py/_std.py b/lib/spack/external/pytest-fallback/py/_std.py
deleted file mode 100644
index 97a9853323..0000000000
--- a/lib/spack/external/pytest-fallback/py/_std.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import sys
-
-class Std(object):
-    """ makes top-level python modules available as attributes,
- importing them on first access.
- """
-
- def __init__(self):
- self.__dict__ = sys.modules
-
- def __getattr__(self, name):
- try:
- m = __import__(name)
- except ImportError:
- raise AttributeError("py.std: could not import %s" % name)
- return m
-
-std = Std()
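Std binds its instance __dict__ to sys.modules so that attribute access imports modules on demand. Python 3.7+ offers the same lazy-import behavior through PEP 562 module-level __getattr__; a sketch, placed in a hypothetical module of your own (say std.py):

    import importlib

    def __getattr__(name):
        # Called only when normal attribute lookup fails (PEP 562);
        # translate ImportError so hasattr() keeps working, as
        # Std.__getattr__ does above.
        try:
            return importlib.import_module(name)
        except ImportError:
            raise AttributeError("std: could not import %s" % name)

With this in place, `import std; std.json.dumps([1])` imports json on first access.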
diff --git a/lib/spack/external/pytest-fallback/py/_xmlgen.py b/lib/spack/external/pytest-fallback/py/_xmlgen.py
deleted file mode 100644
index 1c83545884..0000000000
--- a/lib/spack/external/pytest-fallback/py/_xmlgen.py
+++ /dev/null
@@ -1,255 +0,0 @@
-"""
-module for generating and serializing xml and html structures
-by using simple python objects.
-
-(c) holger krekel, holger at merlinux eu. 2009
-"""
-import sys, re
-
-if sys.version_info >= (3,0):
- def u(s):
- return s
- def unicode(x, errors=None):
- if hasattr(x, '__unicode__'):
- return x.__unicode__()
- return str(x)
-else:
- def u(s):
- return unicode(s)
- unicode = unicode
-
-
-class NamespaceMetaclass(type):
- def __getattr__(self, name):
- if name[:1] == '_':
- raise AttributeError(name)
- if self == Namespace:
- raise ValueError("Namespace class is abstract")
- tagspec = self.__tagspec__
- if tagspec is not None and name not in tagspec:
- raise AttributeError(name)
- classattr = {}
- if self.__stickyname__:
- classattr['xmlname'] = name
- cls = type(name, (self.__tagclass__,), classattr)
- setattr(self, name, cls)
- return cls
-
-class Tag(list):
- class Attr(object):
- def __init__(self, **kwargs):
- self.__dict__.update(kwargs)
-
- def __init__(self, *args, **kwargs):
- super(Tag, self).__init__(args)
- self.attr = self.Attr(**kwargs)
-
- def __unicode__(self):
- return self.unicode(indent=0)
- __str__ = __unicode__
-
- def unicode(self, indent=2):
- l = []
- SimpleUnicodeVisitor(l.append, indent).visit(self)
- return u("").join(l)
-
- def __repr__(self):
- name = self.__class__.__name__
- return "<%r tag object %d>" % (name, id(self))
-
-Namespace = NamespaceMetaclass('Namespace', (object, ), {
- '__tagspec__': None,
- '__tagclass__': Tag,
- '__stickyname__': False,
-})
-
-class HtmlTag(Tag):
- def unicode(self, indent=2):
- l = []
- HtmlVisitor(l.append, indent, shortempty=False).visit(self)
- return u("").join(l)
-
-# exported plain html namespace
-class html(Namespace):
- __tagclass__ = HtmlTag
- __stickyname__ = True
- __tagspec__ = dict([(x,1) for x in (
- 'a,abbr,acronym,address,applet,area,article,aside,audio,b,'
- 'base,basefont,bdi,bdo,big,blink,blockquote,body,br,button,'
- 'canvas,caption,center,cite,code,col,colgroup,command,comment,'
- 'datalist,dd,del,details,dfn,dir,div,dl,dt,em,embed,'
- 'fieldset,figcaption,figure,footer,font,form,frame,frameset,h1,'
- 'h2,h3,h4,h5,h6,head,header,hgroup,hr,html,i,iframe,img,input,'
- 'ins,isindex,kbd,keygen,label,legend,li,link,listing,map,mark,'
- 'marquee,menu,meta,meter,multicol,nav,nobr,noembed,noframes,'
- 'noscript,object,ol,optgroup,option,output,p,param,pre,progress,'
- 'q,rp,rt,ruby,s,samp,script,section,select,small,source,span,'
- 'strike,strong,style,sub,summary,sup,table,tbody,td,textarea,'
- 'tfoot,th,thead,time,title,tr,track,tt,u,ul,xmp,var,video,wbr'
- ).split(',') if x])
-
- class Style(object):
- def __init__(self, **kw):
- for x, y in kw.items():
- x = x.replace('_', '-')
- setattr(self, x, y)
-
-
-class raw(object):
- """just a box that can contain a unicode string that will be
- included directly in the output"""
- def __init__(self, uniobj):
- self.uniobj = uniobj
-
-class SimpleUnicodeVisitor(object):
- """ recursive visitor to write unicode. """
- def __init__(self, write, indent=0, curindent=0, shortempty=True):
- self.write = write
- self.cache = {}
- self.visited = {} # for detection of recursion
- self.indent = indent
- self.curindent = curindent
- self.parents = []
- self.shortempty = shortempty # short empty tags or not
-
- def visit(self, node):
- """ dispatcher on node's class/bases name. """
- cls = node.__class__
- try:
- visitmethod = self.cache[cls]
- except KeyError:
- for subclass in cls.__mro__:
- visitmethod = getattr(self, subclass.__name__, None)
- if visitmethod is not None:
- break
- else:
- visitmethod = self.__object
- self.cache[cls] = visitmethod
- visitmethod(node)
-
- # the default fallback handler is marked private
- # to avoid clashes with the tag name object
- def __object(self, obj):
- #self.write(obj)
- self.write(escape(unicode(obj)))
-
- def raw(self, obj):
- self.write(obj.uniobj)
-
- def list(self, obj):
- assert id(obj) not in self.visited
- self.visited[id(obj)] = 1
- for elem in obj:
- self.visit(elem)
-
- def Tag(self, tag):
- assert id(tag) not in self.visited
- try:
- tag.parent = self.parents[-1]
- except IndexError:
- tag.parent = None
- self.visited[id(tag)] = 1
- tagname = getattr(tag, 'xmlname', tag.__class__.__name__)
- if self.curindent and not self._isinline(tagname):
- self.write("\n" + u(' ') * self.curindent)
- if tag:
- self.curindent += self.indent
- self.write(u('<%s%s>') % (tagname, self.attributes(tag)))
- self.parents.append(tag)
- for x in tag:
- self.visit(x)
- self.parents.pop()
- self.write(u('</%s>') % tagname)
- self.curindent -= self.indent
- else:
- nameattr = tagname+self.attributes(tag)
- if self._issingleton(tagname):
- self.write(u('<%s/>') % (nameattr,))
- else:
- self.write(u('<%s></%s>') % (nameattr, tagname))
-
- def attributes(self, tag):
- # serialize attributes
- attrlist = dir(tag.attr)
- attrlist.sort()
- l = []
- for name in attrlist:
- res = self.repr_attribute(tag.attr, name)
- if res is not None:
- l.append(res)
- l.extend(self.getstyle(tag))
- return u("").join(l)
-
- def repr_attribute(self, attrs, name):
- if name[:2] != '__':
- value = getattr(attrs, name)
- if name.endswith('_'):
- name = name[:-1]
- if isinstance(value, raw):
- insert = value.uniobj
- else:
- insert = escape(unicode(value))
- return ' %s="%s"' % (name, insert)
-
- def getstyle(self, tag):
- """ return attribute list suitable for styling. """
- try:
- styledict = tag.style.__dict__
- except AttributeError:
- return []
- else:
- stylelist = [x+': ' + y for x,y in styledict.items()]
- return [u(' style="%s"') % u('; ').join(stylelist)]
-
- def _issingleton(self, tagname):
- """can (and will) be overridden in subclasses"""
- return self.shortempty
-
- def _isinline(self, tagname):
- """can (and will) be overridden in subclasses"""
- return False
-
-class HtmlVisitor(SimpleUnicodeVisitor):
-
- single = dict([(x, 1) for x in
- ('br,img,area,param,col,hr,meta,link,base,'
- 'input,frame').split(',')])
- inline = dict([(x, 1) for x in
- ('a abbr acronym b basefont bdo big br cite code dfn em font '
- 'i img input kbd label q s samp select small span strike '
- 'strong sub sup textarea tt u var'.split(' '))])
-
- def repr_attribute(self, attrs, name):
- if name == 'class_':
- value = getattr(attrs, name)
- if value is None:
- return
- return super(HtmlVisitor, self).repr_attribute(attrs, name)
-
- def _issingleton(self, tagname):
- return tagname in self.single
-
- def _isinline(self, tagname):
- return tagname in self.inline
-
-
-class _escape:
- def __init__(self):
- self.escape = {
- u('"') : u('&quot;'), u('<') : u('&lt;'), u('>') : u('&gt;'),
- u('&') : u('&amp;'), u("'") : u('&apos;'),
- }
- self.charef_rex = re.compile(u("|").join(self.escape.keys()))
-
- def _replacer(self, match):
- return self.escape[match.group(0)]
-
- def __call__(self, ustring):
- """ xml-escape the given unicode string. """
- try:
- ustring = unicode(ustring)
- except UnicodeDecodeError:
- ustring = unicode(ustring, 'utf-8', errors='replace')
- return self.charef_rex.sub(self._replacer, ustring)
-
-escape = _escape()
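The heart of _xmlgen is NamespaceMetaclass: tag classes such as html.div do not exist until first attribute access, when the metaclass manufactures a subclass and caches it. A minimal standalone illustration of that pattern (no escaping, nesting, or styling, unlike the real module):

    class _TagMeta(type):
        def __getattr__(cls, name):
            # Manufacture a subclass named after the accessed attribute
            # and cache it on the class, as NamespaceMetaclass does.
            sub = type(name, (cls,), {"xmlname": name})
            setattr(cls, name, sub)
            return sub

    class tag(metaclass=_TagMeta):
        def __init__(self, text="", **attrs):
            self.text, self.attrs = text, attrs

        def __str__(self):
            a = "".join(' %s="%s"' % kv for kv in sorted(self.attrs.items()))
            return "<%s%s>%s</%s>" % (self.xmlname, a, self.text, self.xmlname)

    print(tag.div("hello", id="x"))  # <div id="x">hello</div>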
diff --git a/lib/spack/external/pytest-fallback/py/test.py b/lib/spack/external/pytest-fallback/py/test.py
deleted file mode 100644
index aa5beb1789..0000000000
--- a/lib/spack/external/pytest-fallback/py/test.py
+++ /dev/null
@@ -1,10 +0,0 @@
-import sys
-if __name__ == '__main__':
- import pytest
- sys.exit(pytest.main())
-else:
- import sys, pytest
- sys.modules['py.test'] = pytest
-
-# for more API entry points see the 'tests' definition
-# in __init__.py
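The `sys.modules['py.test'] = pytest` line above is the whole trick: assigning into sys.modules aliases one import name to another, with no file on disk. The same one-liner works for any module:

    import sys
    import json

    sys.modules["jsn"] = json  # alias: `import jsn` now yields json

    import jsn
    assert jsn.loads("[1, 2]") == [1, 2]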
diff --git a/lib/spack/external/pytest-fallback/pytest.py b/lib/spack/external/pytest-fallback/pytest.py
deleted file mode 100644
index 6e124db418..0000000000
--- a/lib/spack/external/pytest-fallback/pytest.py
+++ /dev/null
@@ -1,100 +0,0 @@
-# The MIT License (MIT)
-#
-# Copyright (c) 2004-2017 Holger Krekel and others
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy of
-# this software and associated documentation files (the "Software"), to deal in
-# the Software without restriction, including without limitation the rights to
-# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-# of the Software, and to permit persons to whom the Software is furnished to do
-# so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in all
-# copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-
-# PYTHON_ARGCOMPLETE_OK
-"""
-pytest: unit and functional testing with Python.
-"""
-
-
-# else we are imported
-
-from _pytest.config import (
- main, UsageError, _preloadplugins, cmdline,
- hookspec, hookimpl
-)
-from _pytest.fixtures import fixture, yield_fixture
-from _pytest.assertion import register_assert_rewrite
-from _pytest.freeze_support import freeze_includes
-from _pytest import __version__
-from _pytest.debugging import pytestPDB as __pytestPDB
-from _pytest.recwarn import warns, deprecated_call
-from _pytest.outcomes import fail, skip, importorskip, exit, xfail
-from _pytest.mark import MARK_GEN as mark, param
-from _pytest.main import Item, Collector, File, Session
-from _pytest.fixtures import fillfixtures as _fillfuncargs
-from _pytest.python import (
- Module, Class, Instance, Function, Generator,
-)
-
-from _pytest.python_api import approx, raises
-
-set_trace = __pytestPDB.set_trace
-
-__all__ = [
- 'main',
- 'UsageError',
- 'cmdline',
- 'hookspec',
- 'hookimpl',
- '__version__',
- 'register_assert_rewrite',
- 'freeze_includes',
- 'set_trace',
- 'warns',
- 'deprecated_call',
- 'fixture',
- 'yield_fixture',
- 'fail',
- 'skip',
- 'xfail',
- 'importorskip',
- 'exit',
- 'mark',
- 'param',
- 'approx',
- '_fillfuncargs',
-
- 'Item',
- 'File',
- 'Collector',
- 'Session',
- 'Module',
- 'Class',
- 'Instance',
- 'Function',
- 'Generator',
- 'raises',
-
-
-]
-
-if __name__ == '__main__':
- # if run as a script or by 'python -m pytest'
- # we trigger the below "else" condition by the following import
- import pytest
- raise SystemExit(pytest.main())
-else:
-
- from _pytest.compat import _setup_collect_fakemodule
- _preloadplugins() # to populate pytest.* namespace so help(pytest) works
- _setup_collect_fakemodule()
diff --git a/lib/spack/external/six.py b/lib/spack/external/six.py
deleted file mode 100644
index 4e15675d8b..0000000000
--- a/lib/spack/external/six.py
+++ /dev/null
@@ -1,998 +0,0 @@
-# Copyright (c) 2010-2020 Benjamin Peterson
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in all
-# copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-
-"""Utilities for writing code that runs on Python 2 and 3"""
-
-from __future__ import absolute_import
-
-import functools
-import itertools
-import operator
-import sys
-import types
-
-__author__ = "Benjamin Peterson <benjamin@python.org>"
-__version__ = "1.16.0"
-
-
-# Useful for very coarse version differentiation.
-PY2 = sys.version_info[0] == 2
-PY3 = sys.version_info[0] == 3
-PY34 = sys.version_info[0:2] >= (3, 4)
-
-if PY3:
- string_types = str,
- integer_types = int,
- class_types = type,
- text_type = str
- binary_type = bytes
-
- MAXSIZE = sys.maxsize
-else:
- string_types = basestring,
- integer_types = (int, long)
- class_types = (type, types.ClassType)
- text_type = unicode
- binary_type = str
-
- if sys.platform.startswith("java"):
- # Jython always uses 32 bits.
- MAXSIZE = int((1 << 31) - 1)
- else:
- # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
- class X(object):
-
- def __len__(self):
- return 1 << 31
- try:
- len(X())
- except OverflowError:
- # 32-bit
- MAXSIZE = int((1 << 31) - 1)
- else:
- # 64-bit
- MAXSIZE = int((1 << 63) - 1)
- del X
-
-if PY34:
- from importlib.util import spec_from_loader
-else:
- spec_from_loader = None
-
-
-def _add_doc(func, doc):
- """Add documentation to a function."""
- func.__doc__ = doc
-
-
-def _import_module(name):
- """Import module, returning the module after the last dot."""
- __import__(name)
- return sys.modules[name]
-
-
-class _LazyDescr(object):
-
- def __init__(self, name):
- self.name = name
-
- def __get__(self, obj, tp):
- result = self._resolve()
- setattr(obj, self.name, result) # Invokes __set__.
- try:
- # This is a bit ugly, but it avoids running this again by
- # removing this descriptor.
- delattr(obj.__class__, self.name)
- except AttributeError:
- pass
- return result
-
-
-class MovedModule(_LazyDescr):
-
- def __init__(self, name, old, new=None):
- super(MovedModule, self).__init__(name)
- if PY3:
- if new is None:
- new = name
- self.mod = new
- else:
- self.mod = old
-
- def _resolve(self):
- return _import_module(self.mod)
-
- def __getattr__(self, attr):
- _module = self._resolve()
- value = getattr(_module, attr)
- setattr(self, attr, value)
- return value
-
-
-class _LazyModule(types.ModuleType):
-
- def __init__(self, name):
- super(_LazyModule, self).__init__(name)
- self.__doc__ = self.__class__.__doc__
-
- def __dir__(self):
- attrs = ["__doc__", "__name__"]
- attrs += [attr.name for attr in self._moved_attributes]
- return attrs
-
- # Subclasses should override this
- _moved_attributes = []
-
-
-class MovedAttribute(_LazyDescr):
-
- def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
- super(MovedAttribute, self).__init__(name)
- if PY3:
- if new_mod is None:
- new_mod = name
- self.mod = new_mod
- if new_attr is None:
- if old_attr is None:
- new_attr = name
- else:
- new_attr = old_attr
- self.attr = new_attr
- else:
- self.mod = old_mod
- if old_attr is None:
- old_attr = name
- self.attr = old_attr
-
- def _resolve(self):
- module = _import_module(self.mod)
- return getattr(module, self.attr)
-
-
-class _SixMetaPathImporter(object):
-
- """
- A meta path importer to import six.moves and its submodules.
-
-    This class implements a PEP 302 finder and loader. It should be compatible
-    with Python 2.5 and all existing versions of Python 3.
- """
-
- def __init__(self, six_module_name):
- self.name = six_module_name
- self.known_modules = {}
-
- def _add_module(self, mod, *fullnames):
- for fullname in fullnames:
- self.known_modules[self.name + "." + fullname] = mod
-
- def _get_module(self, fullname):
- return self.known_modules[self.name + "." + fullname]
-
- def find_module(self, fullname, path=None):
- if fullname in self.known_modules:
- return self
- return None
-
- def find_spec(self, fullname, path, target=None):
- if fullname in self.known_modules:
- return spec_from_loader(fullname, self)
- return None
-
- def __get_module(self, fullname):
- try:
- return self.known_modules[fullname]
- except KeyError:
- raise ImportError("This loader does not know module " + fullname)
-
- def load_module(self, fullname):
- try:
- # in case of a reload
- return sys.modules[fullname]
- except KeyError:
- pass
- mod = self.__get_module(fullname)
- if isinstance(mod, MovedModule):
- mod = mod._resolve()
- else:
- mod.__loader__ = self
- sys.modules[fullname] = mod
- return mod
-
- def is_package(self, fullname):
- """
-        Return True if the named module is a package.
-
- We need this method to get correct spec objects with
- Python 3.4 (see PEP451)
- """
- return hasattr(self.__get_module(fullname), "__path__")
-
- def get_code(self, fullname):
- """Return None
-
-        Required if is_package is implemented."""
- self.__get_module(fullname) # eventually raises ImportError
- return None
- get_source = get_code # same as get_code
-
- def create_module(self, spec):
- return self.load_module(spec.name)
-
- def exec_module(self, module):
- pass
-
-_importer = _SixMetaPathImporter(__name__)
-
-
-class _MovedItems(_LazyModule):
-
- """Lazy loading of moved objects"""
- __path__ = [] # mark as package
-
-
-_moved_attributes = [
- MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
- MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
- MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
- MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
- MovedAttribute("intern", "__builtin__", "sys"),
- MovedAttribute("map", "itertools", "builtins", "imap", "map"),
- MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
- MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
- MovedAttribute("getoutput", "commands", "subprocess"),
- MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
- MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
- MovedAttribute("reduce", "__builtin__", "functools"),
- MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
- MovedAttribute("StringIO", "StringIO", "io"),
- MovedAttribute("UserDict", "UserDict", "collections"),
- MovedAttribute("UserList", "UserList", "collections"),
- MovedAttribute("UserString", "UserString", "collections"),
- MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
- MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
- MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
- MovedModule("builtins", "__builtin__"),
- MovedModule("configparser", "ConfigParser"),
- MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"),
- MovedModule("copyreg", "copy_reg"),
- MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
- MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"),
- MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"),
- MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
- MovedModule("http_cookies", "Cookie", "http.cookies"),
- MovedModule("html_entities", "htmlentitydefs", "html.entities"),
- MovedModule("html_parser", "HTMLParser", "html.parser"),
- MovedModule("http_client", "httplib", "http.client"),
- MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
- MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
- MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
- MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
- MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
- MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
- MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
- MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
- MovedModule("cPickle", "cPickle", "pickle"),
- MovedModule("queue", "Queue"),
- MovedModule("reprlib", "repr"),
- MovedModule("socketserver", "SocketServer"),
- MovedModule("_thread", "thread", "_thread"),
- MovedModule("tkinter", "Tkinter"),
- MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
- MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
- MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
- MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
- MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
- MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
- MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
- MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
- MovedModule("tkinter_colorchooser", "tkColorChooser",
- "tkinter.colorchooser"),
- MovedModule("tkinter_commondialog", "tkCommonDialog",
- "tkinter.commondialog"),
- MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
- MovedModule("tkinter_font", "tkFont", "tkinter.font"),
- MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
- MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
- "tkinter.simpledialog"),
- MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
- MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
- MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
- MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
- MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
- MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
-]
-# Add windows specific modules.
-if sys.platform == "win32":
- _moved_attributes += [
- MovedModule("winreg", "_winreg"),
- ]
-
-for attr in _moved_attributes:
- setattr(_MovedItems, attr.name, attr)
- if isinstance(attr, MovedModule):
- _importer._add_module(attr, "moves." + attr.name)
-del attr
-
-_MovedItems._moved_attributes = _moved_attributes
-
-moves = _MovedItems(__name__ + ".moves")
-_importer._add_module(moves, "moves")
-
-
-class Module_six_moves_urllib_parse(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_parse"""
-
-
-_urllib_parse_moved_attributes = [
- MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
- MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
- MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
- MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
- MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
- MovedAttribute("urljoin", "urlparse", "urllib.parse"),
- MovedAttribute("urlparse", "urlparse", "urllib.parse"),
- MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
- MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
- MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
- MovedAttribute("quote", "urllib", "urllib.parse"),
- MovedAttribute("quote_plus", "urllib", "urllib.parse"),
- MovedAttribute("unquote", "urllib", "urllib.parse"),
- MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
- MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
- MovedAttribute("urlencode", "urllib", "urllib.parse"),
- MovedAttribute("splitquery", "urllib", "urllib.parse"),
- MovedAttribute("splittag", "urllib", "urllib.parse"),
- MovedAttribute("splituser", "urllib", "urllib.parse"),
- MovedAttribute("splitvalue", "urllib", "urllib.parse"),
- MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
- MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
- MovedAttribute("uses_params", "urlparse", "urllib.parse"),
- MovedAttribute("uses_query", "urlparse", "urllib.parse"),
- MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
-]
-for attr in _urllib_parse_moved_attributes:
- setattr(Module_six_moves_urllib_parse, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
- "moves.urllib_parse", "moves.urllib.parse")
-
-
-class Module_six_moves_urllib_error(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_error"""
-
-
-_urllib_error_moved_attributes = [
- MovedAttribute("URLError", "urllib2", "urllib.error"),
- MovedAttribute("HTTPError", "urllib2", "urllib.error"),
- MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
-]
-for attr in _urllib_error_moved_attributes:
- setattr(Module_six_moves_urllib_error, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
- "moves.urllib_error", "moves.urllib.error")
-
-
-class Module_six_moves_urllib_request(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_request"""
-
-
-_urllib_request_moved_attributes = [
- MovedAttribute("urlopen", "urllib2", "urllib.request"),
- MovedAttribute("install_opener", "urllib2", "urllib.request"),
- MovedAttribute("build_opener", "urllib2", "urllib.request"),
- MovedAttribute("pathname2url", "urllib", "urllib.request"),
- MovedAttribute("url2pathname", "urllib", "urllib.request"),
- MovedAttribute("getproxies", "urllib", "urllib.request"),
- MovedAttribute("Request", "urllib2", "urllib.request"),
- MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
- MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
- MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
- MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
- MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
- MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
- MovedAttribute("FileHandler", "urllib2", "urllib.request"),
- MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
- MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
- MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
- MovedAttribute("urlretrieve", "urllib", "urllib.request"),
- MovedAttribute("urlcleanup", "urllib", "urllib.request"),
- MovedAttribute("URLopener", "urllib", "urllib.request"),
- MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
- MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
- MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
- MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
-]
-for attr in _urllib_request_moved_attributes:
- setattr(Module_six_moves_urllib_request, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
- "moves.urllib_request", "moves.urllib.request")
-
-
-class Module_six_moves_urllib_response(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_response"""
-
-
-_urllib_response_moved_attributes = [
- MovedAttribute("addbase", "urllib", "urllib.response"),
- MovedAttribute("addclosehook", "urllib", "urllib.response"),
- MovedAttribute("addinfo", "urllib", "urllib.response"),
- MovedAttribute("addinfourl", "urllib", "urllib.response"),
-]
-for attr in _urllib_response_moved_attributes:
- setattr(Module_six_moves_urllib_response, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
- "moves.urllib_response", "moves.urllib.response")
-
-
-class Module_six_moves_urllib_robotparser(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_robotparser"""
-
-
-_urllib_robotparser_moved_attributes = [
- MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
-]
-for attr in _urllib_robotparser_moved_attributes:
- setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
- "moves.urllib_robotparser", "moves.urllib.robotparser")
-
-
-class Module_six_moves_urllib(types.ModuleType):
-
- """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
- __path__ = [] # mark as package
- parse = _importer._get_module("moves.urllib_parse")
- error = _importer._get_module("moves.urllib_error")
- request = _importer._get_module("moves.urllib_request")
- response = _importer._get_module("moves.urllib_response")
- robotparser = _importer._get_module("moves.urllib_robotparser")
-
- def __dir__(self):
- return ['parse', 'error', 'request', 'response', 'robotparser']
-
-_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
- "moves.urllib")
-
-
-def add_move(move):
- """Add an item to six.moves."""
- setattr(_MovedItems, move.name, move)
-
-
-def remove_move(name):
- """Remove item from six.moves."""
- try:
- delattr(_MovedItems, name)
- except AttributeError:
- try:
- del moves.__dict__[name]
- except KeyError:
- raise AttributeError("no such move, %r" % (name,))
-
-
-if PY3:
- _meth_func = "__func__"
- _meth_self = "__self__"
-
- _func_closure = "__closure__"
- _func_code = "__code__"
- _func_defaults = "__defaults__"
- _func_globals = "__globals__"
-else:
- _meth_func = "im_func"
- _meth_self = "im_self"
-
- _func_closure = "func_closure"
- _func_code = "func_code"
- _func_defaults = "func_defaults"
- _func_globals = "func_globals"
-
-
-try:
- advance_iterator = next
-except NameError:
- def advance_iterator(it):
- return it.next()
-next = advance_iterator
-
-
-try:
- callable = callable
-except NameError:
- def callable(obj):
- return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
-
-
-if PY3:
- def get_unbound_function(unbound):
- return unbound
-
- create_bound_method = types.MethodType
-
- def create_unbound_method(func, cls):
- return func
-
- Iterator = object
-else:
- def get_unbound_function(unbound):
- return unbound.im_func
-
- def create_bound_method(func, obj):
- return types.MethodType(func, obj, obj.__class__)
-
- def create_unbound_method(func, cls):
- return types.MethodType(func, None, cls)
-
- class Iterator(object):
-
- def next(self):
- return type(self).__next__(self)
-
- callable = callable
-_add_doc(get_unbound_function,
- """Get the function out of a possibly unbound function""")
-
-
-get_method_function = operator.attrgetter(_meth_func)
-get_method_self = operator.attrgetter(_meth_self)
-get_function_closure = operator.attrgetter(_func_closure)
-get_function_code = operator.attrgetter(_func_code)
-get_function_defaults = operator.attrgetter(_func_defaults)
-get_function_globals = operator.attrgetter(_func_globals)
-
-
-if PY3:
- def iterkeys(d, **kw):
- return iter(d.keys(**kw))
-
- def itervalues(d, **kw):
- return iter(d.values(**kw))
-
- def iteritems(d, **kw):
- return iter(d.items(**kw))
-
- def iterlists(d, **kw):
- return iter(d.lists(**kw))
-
- viewkeys = operator.methodcaller("keys")
-
- viewvalues = operator.methodcaller("values")
-
- viewitems = operator.methodcaller("items")
-else:
- def iterkeys(d, **kw):
- return d.iterkeys(**kw)
-
- def itervalues(d, **kw):
- return d.itervalues(**kw)
-
- def iteritems(d, **kw):
- return d.iteritems(**kw)
-
- def iterlists(d, **kw):
- return d.iterlists(**kw)
-
- viewkeys = operator.methodcaller("viewkeys")
-
- viewvalues = operator.methodcaller("viewvalues")
-
- viewitems = operator.methodcaller("viewitems")
-
-_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
-_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
-_add_doc(iteritems,
- "Return an iterator over the (key, value) pairs of a dictionary.")
-_add_doc(iterlists,
- "Return an iterator over the (key, [values]) pairs of a dictionary.")
-
-
-if PY3:
- def b(s):
- return s.encode("latin-1")
-
- def u(s):
- return s
- unichr = chr
- import struct
- int2byte = struct.Struct(">B").pack
- del struct
- byte2int = operator.itemgetter(0)
- indexbytes = operator.getitem
- iterbytes = iter
- import io
- StringIO = io.StringIO
- BytesIO = io.BytesIO
- del io
- _assertCountEqual = "assertCountEqual"
- if sys.version_info[1] <= 1:
- _assertRaisesRegex = "assertRaisesRegexp"
- _assertRegex = "assertRegexpMatches"
- _assertNotRegex = "assertNotRegexpMatches"
- else:
- _assertRaisesRegex = "assertRaisesRegex"
- _assertRegex = "assertRegex"
- _assertNotRegex = "assertNotRegex"
-else:
- def b(s):
- return s
- # Workaround for standalone backslash
-
- def u(s):
- return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
- unichr = unichr
- int2byte = chr
-
- def byte2int(bs):
- return ord(bs[0])
-
- def indexbytes(buf, i):
- return ord(buf[i])
- iterbytes = functools.partial(itertools.imap, ord)
- import StringIO
- StringIO = BytesIO = StringIO.StringIO
- _assertCountEqual = "assertItemsEqual"
- _assertRaisesRegex = "assertRaisesRegexp"
- _assertRegex = "assertRegexpMatches"
- _assertNotRegex = "assertNotRegexpMatches"
-_add_doc(b, """Byte literal""")
-_add_doc(u, """Text literal""")
-
-
-def assertCountEqual(self, *args, **kwargs):
- return getattr(self, _assertCountEqual)(*args, **kwargs)
-
-
-def assertRaisesRegex(self, *args, **kwargs):
- return getattr(self, _assertRaisesRegex)(*args, **kwargs)
-
-
-def assertRegex(self, *args, **kwargs):
- return getattr(self, _assertRegex)(*args, **kwargs)
-
-
-def assertNotRegex(self, *args, **kwargs):
- return getattr(self, _assertNotRegex)(*args, **kwargs)
-
-
-if PY3:
- exec_ = getattr(moves.builtins, "exec")
-
- def reraise(tp, value, tb=None):
- try:
- if value is None:
- value = tp()
- if value.__traceback__ is not tb:
- raise value.with_traceback(tb)
- raise value
- finally:
- value = None
- tb = None
-
-else:
- def exec_(_code_, _globs_=None, _locs_=None):
- """Execute code in a namespace."""
- if _globs_ is None:
- frame = sys._getframe(1)
- _globs_ = frame.f_globals
- if _locs_ is None:
- _locs_ = frame.f_locals
- del frame
- elif _locs_ is None:
- _locs_ = _globs_
- exec("""exec _code_ in _globs_, _locs_""")
-
- exec_("""def reraise(tp, value, tb=None):
- try:
- raise tp, value, tb
- finally:
- tb = None
-""")
-
-
-if sys.version_info[:2] > (3,):
- exec_("""def raise_from(value, from_value):
- try:
- raise value from from_value
- finally:
- value = None
-""")
-else:
- def raise_from(value, from_value):
- raise value
-
-
-print_ = getattr(moves.builtins, "print", None)
-if print_ is None:
- def print_(*args, **kwargs):
- """The new-style print function for Python 2.4 and 2.5."""
- fp = kwargs.pop("file", sys.stdout)
- if fp is None:
- return
-
- def write(data):
- if not isinstance(data, basestring):
- data = str(data)
- # If the file has an encoding, encode unicode with it.
- if (isinstance(fp, file) and
- isinstance(data, unicode) and
- fp.encoding is not None):
- errors = getattr(fp, "errors", None)
- if errors is None:
- errors = "strict"
- data = data.encode(fp.encoding, errors)
- fp.write(data)
- want_unicode = False
- sep = kwargs.pop("sep", None)
- if sep is not None:
- if isinstance(sep, unicode):
- want_unicode = True
- elif not isinstance(sep, str):
- raise TypeError("sep must be None or a string")
- end = kwargs.pop("end", None)
- if end is not None:
- if isinstance(end, unicode):
- want_unicode = True
- elif not isinstance(end, str):
- raise TypeError("end must be None or a string")
- if kwargs:
- raise TypeError("invalid keyword arguments to print()")
- if not want_unicode:
- for arg in args:
- if isinstance(arg, unicode):
- want_unicode = True
- break
- if want_unicode:
- newline = unicode("\n")
- space = unicode(" ")
- else:
- newline = "\n"
- space = " "
- if sep is None:
- sep = space
- if end is None:
- end = newline
- for i, arg in enumerate(args):
- if i:
- write(sep)
- write(arg)
- write(end)
-if sys.version_info[:2] < (3, 3):
- _print = print_
-
- def print_(*args, **kwargs):
- fp = kwargs.get("file", sys.stdout)
- flush = kwargs.pop("flush", False)
- _print(*args, **kwargs)
- if flush and fp is not None:
- fp.flush()
-
-_add_doc(reraise, """Reraise an exception.""")
-
-if sys.version_info[0:2] < (3, 4):
-    # This does exactly what the :func:`py3:functools.update_wrapper`
- # function does on Python versions after 3.2. It sets the ``__wrapped__``
- # attribute on ``wrapper`` object and it doesn't raise an error if any of
- # the attributes mentioned in ``assigned`` and ``updated`` are missing on
- # ``wrapped`` object.
- def _update_wrapper(wrapper, wrapped,
- assigned=functools.WRAPPER_ASSIGNMENTS,
- updated=functools.WRAPPER_UPDATES):
- for attr in assigned:
- try:
- value = getattr(wrapped, attr)
- except AttributeError:
- continue
- else:
- setattr(wrapper, attr, value)
- for attr in updated:
- getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
- wrapper.__wrapped__ = wrapped
- return wrapper
- _update_wrapper.__doc__ = functools.update_wrapper.__doc__
-
- def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
- updated=functools.WRAPPER_UPDATES):
- return functools.partial(_update_wrapper, wrapped=wrapped,
- assigned=assigned, updated=updated)
- wraps.__doc__ = functools.wraps.__doc__
-
-else:
- wraps = functools.wraps
-
-
-def with_metaclass(meta, *bases):
- """Create a base class with a metaclass."""
- # This requires a bit of explanation: the basic idea is to make a dummy
- # metaclass for one level of class instantiation that replaces itself with
- # the actual metaclass.
- class metaclass(type):
-
- def __new__(cls, name, this_bases, d):
- if sys.version_info[:2] >= (3, 7):
- # This version introduced PEP 560 that requires a bit
- # of extra care (we mimic what is done by __build_class__).
- resolved_bases = types.resolve_bases(bases)
- if resolved_bases is not bases:
- d['__orig_bases__'] = bases
- else:
- resolved_bases = bases
- return meta(name, resolved_bases, d)
-
- @classmethod
- def __prepare__(cls, name, this_bases):
- return meta.__prepare__(name, bases)
- return type.__new__(metaclass, 'temporary_class', (), {})
-
-
-def add_metaclass(metaclass):
- """Class decorator for creating a class with a metaclass."""
- def wrapper(cls):
- orig_vars = cls.__dict__.copy()
- slots = orig_vars.get('__slots__')
- if slots is not None:
- if isinstance(slots, str):
- slots = [slots]
- for slots_var in slots:
- orig_vars.pop(slots_var)
- orig_vars.pop('__dict__', None)
- orig_vars.pop('__weakref__', None)
- if hasattr(cls, '__qualname__'):
- orig_vars['__qualname__'] = cls.__qualname__
- return metaclass(cls.__name__, cls.__bases__, orig_vars)
- return wrapper
-
-
-def ensure_binary(s, encoding='utf-8', errors='strict'):
- """Coerce **s** to six.binary_type.
-
- For Python 2:
- - `unicode` -> encoded to `str`
- - `str` -> `str`
-
- For Python 3:
- - `str` -> encoded to `bytes`
- - `bytes` -> `bytes`
- """
- if isinstance(s, binary_type):
- return s
- if isinstance(s, text_type):
- return s.encode(encoding, errors)
- raise TypeError("not expecting type '%s'" % type(s))
-
-
-def ensure_str(s, encoding='utf-8', errors='strict'):
- """Coerce *s* to `str`.
-
- For Python 2:
- - `unicode` -> encoded to `str`
- - `str` -> `str`
-
- For Python 3:
- - `str` -> `str`
- - `bytes` -> decoded to `str`
- """
- # Optimization: Fast return for the common case.
- if type(s) is str:
- return s
- if PY2 and isinstance(s, text_type):
- return s.encode(encoding, errors)
- elif PY3 and isinstance(s, binary_type):
- return s.decode(encoding, errors)
- elif not isinstance(s, (text_type, binary_type)):
- raise TypeError("not expecting type '%s'" % type(s))
- return s
-
-
-def ensure_text(s, encoding='utf-8', errors='strict'):
- """Coerce *s* to six.text_type.
-
- For Python 2:
- - `unicode` -> `unicode`
- - `str` -> `unicode`
-
- For Python 3:
- - `str` -> `str`
- - `bytes` -> decoded to `str`
- """
- if isinstance(s, binary_type):
- return s.decode(encoding, errors)
- elif isinstance(s, text_type):
- return s
- else:
- raise TypeError("not expecting type '%s'" % type(s))
-
-
-def python_2_unicode_compatible(klass):
- """
- A class decorator that defines __unicode__ and __str__ methods under Python 2.
- Under Python 3 it does nothing.
-
- To support Python 2 and 3 with a single code base, define a __str__ method
- returning text and apply this decorator to the class.
- """
- if PY2:
- if '__str__' not in klass.__dict__:
- raise ValueError("@python_2_unicode_compatible cannot be applied "
- "to %s because it doesn't define __str__()." %
- klass.__name__)
- klass.__unicode__ = klass.__str__
- klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
- return klass
-
-
-# Complete the moves implementation.
-# This code is at the end of this module to speed up module loading.
-# Turn this module into a package.
-__path__ = [] # required for PEP 302 and PEP 451
-__package__ = __name__ # see PEP 366 @ReservedAssignment
-if globals().get("__spec__") is not None:
- __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
-# Remove other six meta path importers, since they cause problems. This can
-# happen if six is removed from sys.modules and then reloaded. (Setuptools does
-# this for some reason.)
-if sys.meta_path:
- for i, importer in enumerate(sys.meta_path):
- # Here's some real nastiness: Another "instance" of the six module might
- # be floating around. Therefore, we can't use isinstance() to check for
- # the six meta path importer, since the other six instance will have
-        # inserted an importer with a different class.
- if (type(importer).__name__ == "_SixMetaPathImporter" and
- importer.name == __name__):
- del sys.meta_path[i]
- break
- del i, importer
-# Finally, add the importer to the meta path import hook.
-sys.meta_path.append(_importer)
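Finally, _SixMetaPathImporter is a PEP 302/451 finder-and-loader that serves the six.moves.* names from an in-memory registry rather than the filesystem. The same idea expressed against today's importlib ABCs, with illustrative class and alias names:

    import importlib
    import importlib.abc
    import importlib.util
    import sys

    class AliasFinder(importlib.abc.MetaPathFinder, importlib.abc.Loader):
        def __init__(self, aliases):
            self.aliases = aliases  # alias name -> real module name

        def find_spec(self, fullname, path=None, target=None):
            if fullname in self.aliases:
                return importlib.util.spec_from_loader(fullname, self)
            return None

        def create_module(self, spec):
            return None  # use the default module creation

        def exec_module(self, module):
            real = importlib.import_module(self.aliases[module.__name__])
            # Copy the public attributes so the alias acts like its target.
            vars(module).update(
                (k, v) for k, v in vars(real).items() if not k.startswith("_")
            )

    sys.meta_path.append(AliasFinder({"compat_builtins": "builtins"}))
    import compat_builtins
    assert compat_builtins.len is len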