-rw-r--r--  .github/workflows/bootstrap.yml | 2
-rw-r--r--  .github/workflows/ci.yaml | 8
-rw-r--r--  .github/workflows/unit_tests.yaml | 11
-rw-r--r--  .github/workflows/valid-style.yml | 6
-rwxr-xr-x  bin/spack | 8
-rw-r--r--  lib/spack/docs/conf.py | 18
-rw-r--r--  lib/spack/external/py2/argparse.py | 2392
-rw-r--r--  lib/spack/external/py2/functools32/LICENSE | 289
-rw-r--r--  lib/spack/external/py2/functools32/__init__.py | 1
-rw-r--r--  lib/spack/external/py2/functools32/_dummy_thread32.py | 158
-rw-r--r--  lib/spack/external/py2/functools32/functools32.py | 423
-rw-r--r--  lib/spack/external/py2/functools32/reprlib32.py | 157
-rw-r--r--  lib/spack/external/py2/typing.py | 103
-rw-r--r--  lib/spack/llnl/util/compat.py | 39
-rw-r--r--  lib/spack/llnl/util/filesystem.py | 80
-rw-r--r--  lib/spack/llnl/util/lang.py | 48
-rw-r--r--  lib/spack/llnl/util/tty/__init__.py | 5
-rw-r--r--  lib/spack/llnl/util/tty/log.py | 26
-rw-r--r--  lib/spack/spack/audit.py | 4
-rw-r--r--  lib/spack/spack/bootstrap.py | 15
-rw-r--r--  lib/spack/spack/build_environment.py | 6
-rw-r--r--  lib/spack/spack/build_systems/cmake.py | 6
-rw-r--r--  lib/spack/spack/builder.py | 7
-rw-r--r--  lib/spack/spack/ci_needs_workaround.py | 7
-rw-r--r--  lib/spack/spack/ci_optimization.py | 28
-rw-r--r--  lib/spack/spack/cmd/list.py | 6
-rw-r--r--  lib/spack/spack/cmd/style.py | 23
-rw-r--r--  lib/spack/spack/compilers/msvc.py | 60
-rw-r--r--  lib/spack/spack/cray_manifest.py | 7
-rw-r--r--  lib/spack/spack/directives.py | 6
-rw-r--r--  lib/spack/spack/filesystem_view.py | 1
-rw-r--r--  lib/spack/spack/install_test.py | 4
-rw-r--r--  lib/spack/spack/main.py | 17
-rw-r--r--  lib/spack/spack/mirror.py | 4
-rw-r--r--  lib/spack/spack/mixins.py | 7
-rwxr-xr-x  lib/spack/spack/operating_systems/windows_os.py | 9
-rw-r--r--  lib/spack/spack/package_base.py | 18
-rw-r--r--  lib/spack/spack/repo.py | 164
-rw-r--r--  lib/spack/spack/schema/environment.py | 5
-rw-r--r--  lib/spack/spack/solver/asp.py | 5
-rw-r--r--  lib/spack/spack/spec.py | 116
-rw-r--r--  lib/spack/spack/tag.py | 7
-rw-r--r--  lib/spack/spack/test/cmd/commands.py | 3
-rw-r--r--  lib/spack/spack/test/cmd/style.py | 21
-rw-r--r--  lib/spack/spack/test/compilers/basics.py | 1
-rw-r--r--  lib/spack/spack/test/concretize.py | 14
-rw-r--r--  lib/spack/spack/test/graph.py | 1
-rw-r--r--  lib/spack/spack/test/llnl/util/filesystem.py | 8
-rw-r--r--  lib/spack/spack/test/llnl/util/tty/log.py | 7
-rw-r--r--  lib/spack/spack/test/repo.py | 4
-rw-r--r--  lib/spack/spack/test/schema.py | 4
-rw-r--r--  lib/spack/spack/test/spec_semantics.py | 3
-rw-r--r--  lib/spack/spack/test/spec_yaml.py | 7
-rw-r--r--  lib/spack/spack/test/util/executable.py | 6
-rw-r--r--  lib/spack/spack/util/crypto.py | 11
-rw-r--r--  lib/spack/spack/util/elf.py | 12
-rw-r--r--  lib/spack/spack/util/hash.py | 6
-rw-r--r--  lib/spack/spack/util/module_cmd.py | 6
-rw-r--r--  lib/spack/spack/util/pattern.py | 8
-rw-r--r--  lib/spack/spack/util/spack_yaml.py | 4
-rw-r--r--  lib/spack/spack/util/web.py | 18
-rw-r--r--  lib/spack/spack/variant.py | 5
-rw-r--r--  lib/spack/spack_installable/main.py | 31
-rw-r--r--  pyproject.toml | 2
-rw-r--r--  var/spack/repos/builtin/packages/libpng/package.py | 4
65 files changed, 267 insertions(+), 4225 deletions(-)
diff --git a/.github/workflows/bootstrap.yml b/.github/workflows/bootstrap.yml
index df2b0f346e..70935c1d6f 100644
--- a/.github/workflows/bootstrap.yml
+++ b/.github/workflows/bootstrap.yml
@@ -214,7 +214,7 @@ jobs:
- name: Bootstrap clingo
run: |
set -ex
- for ver in '2.7' '3.6' '3.7' '3.8' '3.9' '3.10' ; do
+ for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
not_found=1
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
echo "Testing $ver_dir"
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 3b57bd9bb5..ef951f341b 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -20,12 +20,6 @@ jobs:
uses: ./.github/workflows/valid-style.yml
with:
with_coverage: ${{ needs.changes.outputs.core }}
- audit-ancient-python:
- uses: ./.github/workflows/audit.yaml
- needs: [ changes ]
- with:
- with_coverage: ${{ needs.changes.outputs.core }}
- python_version: 2.7
all-prechecks:
needs: [ prechecks ]
runs-on: ubuntu-latest
@@ -85,7 +79,7 @@ jobs:
needs: [ prechecks ]
uses: ./.github/workflows/windows_python.yml
all:
- needs: [ windows, unit-tests, bootstrap, audit-ancient-python ]
+ needs: [ windows, unit-tests, bootstrap ]
runs-on: ubuntu-latest
steps:
- name: Success
diff --git a/.github/workflows/unit_tests.yaml b/.github/workflows/unit_tests.yaml
index 6a21d166f8..efb8949e23 100644
--- a/.github/workflows/unit_tests.yaml
+++ b/.github/workflows/unit_tests.yaml
@@ -14,14 +14,11 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- python-version: ['2.7', '3.6', '3.7', '3.8', '3.9', '3.10', '3.11']
+ python-version: ['3.6', '3.7', '3.8', '3.9', '3.10', '3.11']
concretizer: ['clingo']
on_develop:
- ${{ github.ref == 'refs/heads/develop' }}
include:
- - python-version: 2.7
- concretizer: original
- on_develop: ${{ github.ref == 'refs/heads/develop' }}
- python-version: '3.11'
concretizer: original
on_develop: ${{ github.ref == 'refs/heads/develop' }}
@@ -66,10 +63,6 @@ jobs:
if python -c 'import sys; sys.exit(not sys.version_info >= (3, 6))'; then
pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click==8.0.4" "black<=21.12b0"
fi
- - name: Pin pathlib for Python 2.7
- if: ${{ matrix.python-version == 2.7 }}
- run: |
- pip install -U pathlib2==2.3.6 toml
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
@@ -89,7 +82,7 @@ jobs:
SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
SPACK_TEST_PARALLEL: 2
COVERAGE: true
- UNIT_TEST_COVERAGE: ${{ (matrix.python-version == '3.11') }}
+ UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
diff --git a/.github/workflows/valid-style.yml b/.github/workflows/valid-style.yml
index a82c786b44..2d70406471 100644
--- a/.github/workflows/valid-style.yml
+++ b/.github/workflows/valid-style.yml
@@ -28,9 +28,9 @@ jobs:
pip install --upgrade pip
pip install --upgrade vermin
- name: vermin (Spack's Core)
- run: vermin --backport argparse --violations --backport typing -t=2.7- -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
+ run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
- name: vermin (Repositories)
- run: vermin --backport argparse --violations --backport typing -t=2.7- -t=3.6- -vvv var/spack/repos
+ run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos
# Run style checks on the files that have been changed
style:
runs-on: ubuntu-latest
@@ -44,7 +44,7 @@ jobs:
cache: 'pip'
- name: Install Python packages
run: |
- python3 -m pip install --upgrade pip six setuptools types-six click==8.0.2 'black==21.12b0' mypy isort clingo flake8
+ python3 -m pip install --upgrade pip six setuptools types-six black mypy isort clingo flake8
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
diff --git a/bin/spack b/bin/spack
index 08da29dfd2..d0eb8d8160 100755
--- a/bin/spack
+++ b/bin/spack
@@ -31,13 +31,11 @@ import os
import os.path
import sys
-min_python3 = (3, 5)
+min_python3 = (3, 6)
-if sys.version_info[:2] < (2, 7) or (
- sys.version_info[:2] >= (3, 0) and sys.version_info[:2] < min_python3
-):
+if sys.version_info[:2] < min_python3:
v_info = sys.version_info[:3]
- msg = "Spack requires Python 2.7 or %d.%d or higher " % min_python3
+ msg = "Spack requires Python %d.%d or higher " % min_python3
msg += "You are running spack with Python %d.%d.%d." % v_info
sys.exit(msg)
diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py
index 4fc321c72d..55848106de 100644
--- a/lib/spack/docs/conf.py
+++ b/lib/spack/docs/conf.py
@@ -37,12 +37,6 @@ if not os.path.exists(link_name):
os.symlink(os.path.abspath("../../.."), link_name, target_is_directory=True)
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external"))
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/pytest-fallback"))
-
-if sys.version_info[0] < 3:
- sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/yaml/lib"))
-else:
- sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/yaml/lib3"))
-
sys.path.append(os.path.abspath("_spack_root/lib/spack/"))
# Add the Spack bin directory to the path so that we can use its output in docs.
@@ -160,8 +154,8 @@ source_encoding = "utf-8-sig"
master_doc = "index"
# General information about the project.
-project = u"Spack"
-copyright = u"2013-2021, Lawrence Livermore National Laboratory."
+project = "Spack"
+copyright = "2013-2021, Lawrence Livermore National Laboratory."
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -350,7 +344,7 @@ latex_elements = {
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
- ("index", "Spack.tex", u"Spack Documentation", u"Todd Gamblin", "manual"),
+ ("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual"),
]
# The name of an image file (relative to this directory) to place at the top of
@@ -378,7 +372,7 @@ latex_documents = [
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
-man_pages = [("index", "spack", u"Spack Documentation", [u"Todd Gamblin"], 1)]
+man_pages = [("index", "spack", "Spack Documentation", ["Todd Gamblin"], 1)]
# If true, show URL addresses after external links.
# man_show_urls = False
@@ -393,8 +387,8 @@ texinfo_documents = [
(
"index",
"Spack",
- u"Spack Documentation",
- u"Todd Gamblin",
+ "Spack Documentation",
+ "Todd Gamblin",
"Spack",
"One line description of project.",
"Miscellaneous",
diff --git a/lib/spack/external/py2/argparse.py b/lib/spack/external/py2/argparse.py
deleted file mode 100644
index d2d232d51e..0000000000
--- a/lib/spack/external/py2/argparse.py
+++ /dev/null
@@ -1,2392 +0,0 @@
-# Author: Steven J. Bethard <steven.bethard@gmail.com>.
-# Maintainer: Thomas Waldmann <tw@waldmann-edv.de>
-
-"""Command-line parsing library
-
-This module is an optparse-inspired command-line parsing library that:
-
- - handles both optional and positional arguments
- - produces highly informative usage messages
- - supports parsers that dispatch to sub-parsers
-
-The following is a simple usage example that sums integers from the
-command-line and writes the result to a file::
-
- parser = argparse.ArgumentParser(
- description='sum the integers at the command line')
- parser.add_argument(
- 'integers', metavar='int', nargs='+', type=int,
- help='an integer to be summed')
- parser.add_argument(
- '--log', default=sys.stdout, type=argparse.FileType('w'),
- help='the file where the sum should be written')
- args = parser.parse_args()
- args.log.write('%s' % sum(args.integers))
- args.log.close()
-
-The module contains the following public classes:
-
- - ArgumentParser -- The main entry point for command-line parsing. As the
- example above shows, the add_argument() method is used to populate
- the parser with actions for optional and positional arguments. Then
- the parse_args() method is invoked to convert the args at the
- command-line into an object with attributes.
-
- - ArgumentError -- The exception raised by ArgumentParser objects when
- there are errors with the parser's actions. Errors raised while
- parsing the command-line are caught by ArgumentParser and emitted
- as command-line messages.
-
- - FileType -- A factory for defining types of files to be created. As the
- example above shows, instances of FileType are typically passed as
- the type= argument of add_argument() calls.
-
- - Action -- The base class for parser actions. Typically actions are
- selected by passing strings like 'store_true' or 'append_const' to
- the action= argument of add_argument(). However, for greater
- customization of ArgumentParser actions, subclasses of Action may
- be defined and passed as the action= argument.
-
- - HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter,
- ArgumentDefaultsHelpFormatter -- Formatter classes which
- may be passed as the formatter_class= argument to the
- ArgumentParser constructor. HelpFormatter is the default,
- RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser
- not to change the formatting for help text, and
- ArgumentDefaultsHelpFormatter adds information about argument defaults
- to the help.
-
-All other classes in this module are considered implementation details.
-(Also note that HelpFormatter and RawDescriptionHelpFormatter are only
-considered public as object names -- the API of the formatter objects is
-still considered an implementation detail.)
-"""
-
-__version__ = '1.4.0' # we use our own version number independant of the
- # one in stdlib and we release this on pypi.
-
-__external_lib__ = True # to make sure the tests really test THIS lib,
- # not the builtin one in Python stdlib
-
-__all__ = [
- 'ArgumentParser',
- 'ArgumentError',
- 'ArgumentTypeError',
- 'FileType',
- 'HelpFormatter',
- 'ArgumentDefaultsHelpFormatter',
- 'RawDescriptionHelpFormatter',
- 'RawTextHelpFormatter',
- 'Namespace',
- 'Action',
- 'ONE_OR_MORE',
- 'OPTIONAL',
- 'PARSER',
- 'REMAINDER',
- 'SUPPRESS',
- 'ZERO_OR_MORE',
-]
-
-
-import copy as _copy
-import os as _os
-import re as _re
-import sys as _sys
-import textwrap as _textwrap
-
-from llnl.util.tty.colify import colified
-
-from gettext import gettext as _
-
-try:
- set
-except NameError:
- # for python < 2.4 compatibility (sets module is there since 2.3):
- from sets import Set as set
-
-try:
- basestring
-except NameError:
- basestring = str
-
-try:
- sorted
-except NameError:
- # for python < 2.4 compatibility:
- def sorted(iterable, reverse=False):
- result = list(iterable)
- result.sort()
- if reverse:
- result.reverse()
- return result
-
-
-def _callable(obj):
- return hasattr(obj, '__call__') or hasattr(obj, '__bases__')
-
-
-SUPPRESS = '==SUPPRESS=='
-
-OPTIONAL = '?'
-ZERO_OR_MORE = '*'
-ONE_OR_MORE = '+'
-PARSER = 'A...'
-REMAINDER = '...'
-_UNRECOGNIZED_ARGS_ATTR = '_unrecognized_args'
-
-# =============================
-# Utility functions and classes
-# =============================
-
-class _AttributeHolder(object):
- """Abstract base class that provides __repr__.
-
- The __repr__ method returns a string in the format::
- ClassName(attr=name, attr=name, ...)
- The attributes are determined either by a class-level attribute,
- '_kwarg_names', or by inspecting the instance __dict__.
- """
-
- def __repr__(self):
- type_name = type(self).__name__
- arg_strings = []
- for arg in self._get_args():
- arg_strings.append(repr(arg))
- for name, value in self._get_kwargs():
- arg_strings.append('%s=%r' % (name, value))
- return '%s(%s)' % (type_name, ', '.join(arg_strings))
-
- def _get_kwargs(self):
- return sorted(self.__dict__.items())
-
- def _get_args(self):
- return []
-
-
-def _ensure_value(namespace, name, value):
- if getattr(namespace, name, None) is None:
- setattr(namespace, name, value)
- return getattr(namespace, name)
-
-
-# ===============
-# Formatting Help
-# ===============
-
-class HelpFormatter(object):
- """Formatter for generating usage messages and argument help strings.
-
- Only the name of this class is considered a public API. All the methods
- provided by the class are considered an implementation detail.
- """
-
- def __init__(self,
- prog,
- indent_increment=2,
- max_help_position=24,
- width=None):
-
- # default setting for width
- if width is None:
- try:
- width = int(_os.environ['COLUMNS'])
- except (KeyError, ValueError):
- width = 80
- width -= 2
-
- self._prog = prog
- self._indent_increment = indent_increment
- self._max_help_position = max_help_position
- self._width = width
-
- self._current_indent = 0
- self._level = 0
- self._action_max_length = 0
-
- self._root_section = self._Section(self, None)
- self._current_section = self._root_section
-
- self._whitespace_matcher = _re.compile(r'\s+')
- self._long_break_matcher = _re.compile(r'\n\n\n+')
-
- # ===============================
- # Section and indentation methods
- # ===============================
- def _indent(self):
- self._current_indent += self._indent_increment
- self._level += 1
-
- def _dedent(self):
- self._current_indent -= self._indent_increment
- assert self._current_indent >= 0, 'Indent decreased below 0.'
- self._level -= 1
-
- class _Section(object):
-
- def __init__(self, formatter, parent, heading=None):
- self.formatter = formatter
- self.parent = parent
- self.heading = heading
- self.items = []
-
- def format_help(self):
- # format the indented section
- if self.parent is not None:
- self.formatter._indent()
- join = self.formatter._join_parts
- for func, args in self.items:
- func(*args)
- item_help = join([func(*args) for func, args in self.items])
- if self.parent is not None:
- self.formatter._dedent()
-
- # return nothing if the section was empty
- if not item_help:
- return ''
-
- # add the heading if the section was non-empty
- if self.heading is not SUPPRESS and self.heading is not None:
- current_indent = self.formatter._current_indent
- heading = '%*s%s:\n' % (current_indent, '', self.heading)
- else:
- heading = ''
-
- # join the section-initial newline, the heading and the help
- return join(['\n', heading, item_help, '\n'])
-
- def _add_item(self, func, args):
- self._current_section.items.append((func, args))
-
- # ========================
- # Message building methods
- # ========================
- def start_section(self, heading):
- self._indent()
- section = self._Section(self, self._current_section, heading)
- self._add_item(section.format_help, [])
- self._current_section = section
-
- def end_section(self):
- self._current_section = self._current_section.parent
- self._dedent()
-
- def add_text(self, text):
- if text is not SUPPRESS and text is not None:
- self._add_item(self._format_text, [text])
-
- def add_usage(self, usage, actions, groups, prefix=None):
- if usage is not SUPPRESS:
- args = usage, actions, groups, prefix
- self._add_item(self._format_usage, args)
-
- def add_argument(self, action):
- if action.help is not SUPPRESS:
-
- # find all invocations
- get_invocation = self._format_action_invocation
- invocations = [get_invocation(action)]
- for subaction in self._iter_indented_subactions(action):
- invocations.append(get_invocation(subaction))
-
- # update the maximum item length
- invocation_length = max([len(s) for s in invocations])
- action_length = invocation_length + self._current_indent
- self._action_max_length = max(self._action_max_length,
- action_length)
-
- # add the item to the list
- self._add_item(self._format_action, [action])
-
- def add_arguments(self, actions):
- for action in actions:
- self.add_argument(action)
-
- # =======================
- # Help-formatting methods
- # =======================
- def format_help(self):
- help = self._root_section.format_help()
- if help:
- help = self._long_break_matcher.sub('\n\n', help)
- help = help.strip('\n') + '\n'
- return help
-
- def _join_parts(self, part_strings):
- return ''.join([part
- for part in part_strings
- if part and part is not SUPPRESS])
-
- def _format_usage(self, usage, actions, groups, prefix):
- if prefix is None:
- prefix = _('usage: ')
-
- # if usage is specified, use that
- if usage is not None:
- usage = usage % dict(prog=self._prog)
-
- # if no optionals or positionals are available, usage is just prog
- elif usage is None and not actions:
- usage = '%(prog)s' % dict(prog=self._prog)
-
- # if optionals and positionals are available, calculate usage
- elif usage is None:
- prog = '%(prog)s' % dict(prog=self._prog)
-
- # split optionals from positionals
- optionals = []
- positionals = []
- for action in actions:
- if action.option_strings:
- optionals.append(action)
- else:
- positionals.append(action)
-
- # build full usage string
- format = self._format_actions_usage
- action_usage = format(optionals + positionals, groups)
- usage = ' '.join([s for s in [prog, action_usage] if s])
-
- # wrap the usage parts if it's too long
- text_width = self._width - self._current_indent
- if len(prefix) + len(usage) > text_width:
-
- # break usage into wrappable parts
- part_regexp = r'\(.*?\)+|\[.*?\]+|\S+'
- opt_usage = format(optionals, groups)
- pos_usage = format(positionals, groups)
- opt_parts = _re.findall(part_regexp, opt_usage)
- pos_parts = _re.findall(part_regexp, pos_usage)
-
- # helper for wrapping lines
- def get_lines(parts, indent, prefix=None):
- lines = []
- line = []
- if prefix is not None:
- line_len = len(prefix) - 1
- else:
- line_len = len(indent) - 1
- for part in parts:
- if line_len + 1 + len(part) > text_width:
- lines.append(indent + ' '.join(line))
- line = []
- line_len = len(indent) - 1
- line.append(part)
- line_len += len(part) + 1
- if line:
- lines.append(indent + ' '.join(line))
- if prefix is not None:
- lines[0] = lines[0][len(indent):]
- return lines
-
- # if prog is short, follow it with optionals or positionals
- if len(prefix) + len(prog) <= 0.75 * text_width:
- indent = ' ' * (len(prefix) + len(prog) + 1)
- if opt_parts:
- lines = get_lines([prog] + opt_parts, indent, prefix)
- lines.extend(get_lines(pos_parts, indent))
- elif pos_parts:
- lines = get_lines([prog] + pos_parts, indent, prefix)
- else:
- lines = [prog]
-
- # if prog is long, put it on its own line
- else:
- indent = ' ' * len(prefix)
- parts = opt_parts + pos_parts
- lines = get_lines(parts, indent)
- if len(lines) > 1:
- lines = []
- lines.extend(get_lines(opt_parts, indent))
- lines.extend(get_lines(pos_parts, indent))
- lines = [prog] + lines
-
- # join lines into usage
- usage = '\n'.join(lines)
-
- # prefix with 'usage:'
- return '%s%s\n\n' % (prefix, usage)
-
- def _format_actions_usage(self, actions, groups):
- # find group indices and identify actions in groups
- group_actions = set()
- inserts = {}
- for group in groups:
- try:
- start = actions.index(group._group_actions[0])
- except ValueError:
- continue
- else:
- end = start + len(group._group_actions)
- if actions[start:end] == group._group_actions:
- for action in group._group_actions:
- group_actions.add(action)
- if not group.required:
- if start in inserts:
- inserts[start] += ' ['
- else:
- inserts[start] = '['
- inserts[end] = ']'
- else:
- if start in inserts:
- inserts[start] += ' ('
- else:
- inserts[start] = '('
- inserts[end] = ')'
- for i in range(start + 1, end):
- inserts[i] = '|'
-
- # collect all actions format strings
- parts = []
- for i, action in enumerate(actions):
-
- # suppressed arguments are marked with None
- # remove | separators for suppressed arguments
- if action.help is SUPPRESS:
- parts.append(None)
- if inserts.get(i) == '|':
- inserts.pop(i)
- elif inserts.get(i + 1) == '|':
- inserts.pop(i + 1)
-
- # produce all arg strings
- elif not action.option_strings:
- part = self._format_args(action, action.dest)
-
- # if it's in a group, strip the outer []
- if action in group_actions:
- if part[0] == '[' and part[-1] == ']':
- part = part[1:-1]
-
- # add the action string to the list
- parts.append(part)
-
- # produce the first way to invoke the option in brackets
- else:
- option_string = action.option_strings[0]
-
- # if the Optional doesn't take a value, format is:
- # -s or --long
- if action.nargs == 0:
- part = '%s' % option_string
-
- # if the Optional takes a value, format is:
- # -s ARGS or --long ARGS
- else:
- default = action.dest.upper()
- args_string = self._format_args(action, default)
- part = '%s %s' % (option_string, args_string)
-
- # make it look optional if it's not required or in a group
- if not action.required and action not in group_actions:
- part = '[%s]' % part
-
- # add the action string to the list
- parts.append(part)
-
- # insert things at the necessary indices
- for i in sorted(inserts, reverse=True):
- parts[i:i] = [inserts[i]]
-
- # join all the action items with spaces
- text = ' '.join([item for item in parts if item is not None])
-
- # clean up separators for mutually exclusive groups
- open = r'[\[(]'
- close = r'[\])]'
- text = _re.sub(r'(%s) ' % open, r'\1', text)
- text = _re.sub(r' (%s)' % close, r'\1', text)
- text = _re.sub(r'%s *%s' % (open, close), r'', text)
- text = _re.sub(r'\(([^|]*)\)', r'\1', text)
- text = text.strip()
-
- # return the text
- return text
-
- def _format_text(self, text):
- if '%(prog)' in text:
- text = text % dict(prog=self._prog)
- text_width = self._width - self._current_indent
- indent = ' ' * self._current_indent
- return self._fill_text(text, text_width, indent) + '\n\n'
-
- def _format_action(self, action):
- # determine the required width and the entry label
- help_position = min(self._action_max_length + 2,
- self._max_help_position)
- help_width = self._width - help_position
- action_width = help_position - self._current_indent - 2
- action_header = self._format_action_invocation(action)
-
- # ho nelp; start on same line and add a final newline
- if not action.help:
- tup = self._current_indent, '', action_header
- action_header = '%*s%s\n' % tup
-
- # short action name; start on the same line and pad two spaces
- elif len(action_header) <= action_width:
- tup = self._current_indent, '', action_width, action_header
- action_header = '%*s%-*s ' % tup
- indent_first = 0
-
- # long action name; start on the next line
- else:
- tup = self._current_indent, '', action_header
- action_header = '%*s%s\n' % tup
- indent_first = help_position
-
- # collect the pieces of the action help
- parts = [action_header]
-
- # if there was help for the action, add lines of help text
- if action.help:
- help_text = self._expand_help(action)
- help_lines = self._split_lines(help_text, help_width)
- parts.append('%*s%s\n' % (indent_first, '', help_lines[0]))
- for line in help_lines[1:]:
- parts.append('%*s%s\n' % (help_position, '', line))
-
- # or add a newline if the description doesn't end with one
- elif not action_header.endswith('\n'):
- parts.append('\n')
-
- # if there are any sub-actions, add their help as well
- for subaction in self._iter_indented_subactions(action):
- parts.append(self._format_action(subaction))
-
- # return a single string
- return self._join_parts(parts)
-
- def _format_action_invocation(self, action):
- if not action.option_strings:
- metavar, = self._metavar_formatter(action, action.dest)(1)
- return metavar
-
- else:
- parts = []
-
- # if the Optional doesn't take a value, format is:
- # -s, --long
- if action.nargs == 0:
- parts.extend(action.option_strings)
-
- # if the Optional takes a value, format is:
- # -s ARGS, --long ARGS
- else:
- default = action.dest.upper()
- args_string = self._format_args(action, default)
- for option_string in action.option_strings:
- parts.append('%s %s' % (option_string, args_string))
-
- return ', '.join(parts)
-
- def _metavar_formatter(self, action, default_metavar):
- if action.metavar is not None:
- result = action.metavar
- elif action.choices is not None:
- choice_strs = [str(choice) for choice in action.choices]
- result = '{%s}' % ','.join(choice_strs)
- else:
- result = default_metavar
-
- def format(tuple_size):
- if isinstance(result, tuple):
- return result
- else:
- return (result, ) * tuple_size
- return format
-
- def _format_args(self, action, default_metavar):
- get_metavar = self._metavar_formatter(action, default_metavar)
- if action.nargs is None:
- result = '%s' % get_metavar(1)
- elif action.nargs == OPTIONAL:
- result = '[%s]' % get_metavar(1)
- elif action.nargs == ZERO_OR_MORE:
- result = '[%s [%s ...]]' % get_metavar(2)
- elif action.nargs == ONE_OR_MORE:
- result = '%s [%s ...]' % get_metavar(2)
- elif action.nargs == REMAINDER:
- result = '...'
- elif action.nargs == PARSER:
- result = '%s ...' % get_metavar(1)
- else:
- formats = ['%s' for _ in range(action.nargs)]
- result = ' '.join(formats) % get_metavar(action.nargs)
- return result
-
- def _expand_help(self, action):
- params = dict(vars(action), prog=self._prog)
- for name in list(params):
- if params[name] is SUPPRESS:
- del params[name]
- for name in list(params):
- if hasattr(params[name], '__name__'):
- params[name] = params[name].__name__
- if params.get('choices') is not None:
- choices_str = ', '.join([str(c) for c in params['choices']])
- params['choices'] = choices_str
- return self._get_help_string(action) % params
-
- def _iter_indented_subactions(self, action):
- try:
- get_subactions = action._get_subactions
- except AttributeError:
- pass
- else:
- self._indent()
- for subaction in get_subactions():
- yield subaction
- self._dedent()
-
- def _split_lines(self, text, width):
- text = self._whitespace_matcher.sub(' ', text).strip()
- return _textwrap.wrap(text, width)
-
- def _fill_text(self, text, width, indent):
- text = self._whitespace_matcher.sub(' ', text).strip()
- return _textwrap.fill(text, width, initial_indent=indent,
- subsequent_indent=indent)
-
- def _get_help_string(self, action):
- return action.help
-
-
-class RawDescriptionHelpFormatter(HelpFormatter):
- """Help message formatter which retains any formatting in descriptions.
-
- Only the name of this class is considered a public API. All the methods
- provided by the class are considered an implementation detail.
- """
-
- def _fill_text(self, text, width, indent):
- return ''.join([indent + line for line in text.splitlines(True)])
-
-
-class RawTextHelpFormatter(RawDescriptionHelpFormatter):
- """Help message formatter which retains formatting of all help text.
-
- Only the name of this class is considered a public API. All the methods
- provided by the class are considered an implementation detail.
- """
-
- def _split_lines(self, text, width):
- return text.splitlines()
-
-
-class ArgumentDefaultsHelpFormatter(HelpFormatter):
- """Help message formatter which adds default values to argument help.
-
- Only the name of this class is considered a public API. All the methods
- provided by the class are considered an implementation detail.
- """
-
- def _get_help_string(self, action):
- help = action.help
- if '%(default)' not in action.help:
- if action.default is not SUPPRESS:
- defaulting_nargs = [OPTIONAL, ZERO_OR_MORE]
- if action.option_strings or action.nargs in defaulting_nargs:
- help += ' (default: %(default)s)'
- return help
-
-
-# =====================
-# Options and Arguments
-# =====================
-
-def _get_action_name(argument):
- if argument is None:
- return None
- elif argument.option_strings:
- return '/'.join(argument.option_strings)
- elif argument.metavar not in (None, SUPPRESS):
- return argument.metavar
- elif argument.dest not in (None, SUPPRESS):
- return argument.dest
- else:
- return None
-
-
-class ArgumentError(Exception):
- """An error from creating or using an argument (optional or positional).
-
- The string value of this exception is the message, augmented with
- information about the argument that caused it.
- """
-
- def __init__(self, argument, message):
- self.argument_name = _get_action_name(argument)
- self.message = message
-
- def __str__(self):
- if self.argument_name is None:
- format = '%(message)s'
- else:
- format = 'argument %(argument_name)s: %(message)s'
- return format % dict(message=self.message,
- argument_name=self.argument_name)
-
-
-class ArgumentTypeError(Exception):
- """An error from trying to convert a command line string to a type."""
- pass
-
-
-# ==============
-# Action classes
-# ==============
-
-class Action(_AttributeHolder):
- """Information about how to convert command line strings to Python objects.
-
- Action objects are used by an ArgumentParser to represent the information
- needed to parse a single argument from one or more strings from the
- command line. The keyword arguments to the Action constructor are also
- all attributes of Action instances.
-
- Keyword Arguments:
-
- - option_strings -- A list of command-line option strings which
- should be associated with this action.
-
- - dest -- The name of the attribute to hold the created object(s)
-
- - nargs -- The number of command-line arguments that should be
- consumed. By default, one argument will be consumed and a single
- value will be produced. Other values include:
- - N (an integer) consumes N arguments (and produces a list)
- - '?' consumes zero or one arguments
- - '*' consumes zero or more arguments (and produces a list)
- - '+' consumes one or more arguments (and produces a list)
- Note that the difference between the default and nargs=1 is that
- with the default, a single value will be produced, while with
- nargs=1, a list containing a single value will be produced.
-
- - const -- The value to be produced if the option is specified and the
- option uses an action that takes no values.
-
- - default -- The value to be produced if the option is not specified.
-
- - type -- The type which the command-line arguments should be converted
- to, should be one of 'string', 'int', 'float', 'complex' or a
- callable object that accepts a single string argument. If None,
- 'string' is assumed.
-
- - choices -- A container of values that should be allowed. If not None,
- after a command-line argument has been converted to the appropriate
- type, an exception will be raised if it is not a member of this
- collection.
-
- - required -- True if the action must always be specified at the
- command line. This is only meaningful for optional command-line
- arguments.
-
- - help -- The help string describing the argument.
-
- - metavar -- The name to be used for the option's argument with the
- help string. If None, the 'dest' value will be used as the name.
- """
-
- def __init__(self,
- option_strings,
- dest,
- nargs=None,
- const=None,
- default=None,
- type=None,
- choices=None,
- required=False,
- help=None,
- metavar=None):
- self.option_strings = option_strings
- self.dest = dest
- self.nargs = nargs
- self.const = const
- self.default = default
- self.type = type
- self.choices = choices
- self.required = required
- self.help = help
- self.metavar = metavar
-
- def _get_kwargs(self):
- names = [
- 'option_strings',
- 'dest',
- 'nargs',
- 'const',
- 'default',
- 'type',
- 'choices',
- 'help',
- 'metavar',
- ]
- return [(name, getattr(self, name)) for name in names]
-
- def __call__(self, parser, namespace, values, option_string=None):
- raise NotImplementedError(_('.__call__() not defined'))
-
-
-class _StoreAction(Action):
-
- def __init__(self,
- option_strings,
- dest,
- nargs=None,
- const=None,
- default=None,
- type=None,
- choices=None,
- required=False,
- help=None,
- metavar=None):
- if nargs == 0:
- raise ValueError('nargs for store actions must be > 0; if you '
- 'have nothing to store, actions such as store '
- 'true or store const may be more appropriate')
- if const is not None and nargs != OPTIONAL:
- raise ValueError('nargs must be %r to supply const' % OPTIONAL)
- super(_StoreAction, self).__init__(
- option_strings=option_strings,
- dest=dest,
- nargs=nargs,
- const=const,
- default=default,
- type=type,
- choices=choices,
- required=required,
- help=help,
- metavar=metavar)
-
- def __call__(self, parser, namespace, values, option_string=None):
- setattr(namespace, self.dest, values)
-
-
-class _StoreConstAction(Action):
-
- def __init__(self,
- option_strings,
- dest,
- const,
- default=None,
- required=False,
- help=None,
- metavar=None):
- super(_StoreConstAction, self).__init__(
- option_strings=option_strings,
- dest=dest,
- nargs=0,
- const=const,
- default=default,
- required=required,
- help=help)
-
- def __call__(self, parser, namespace, values, option_string=None):
- setattr(namespace, self.dest, self.const)
-
-
-class _StoreTrueAction(_StoreConstAction):
-
- def __init__(self,
- option_strings,
- dest,
- default=False,
- required=False,
- help=None):
- super(_StoreTrueAction, self).__init__(
- option_strings=option_strings,
- dest=dest,
- const=True,
- default=default,
- required=required,
- help=help)
-
-
-class _StoreFalseAction(_StoreConstAction):
-
- def __init__(self,
- option_strings,
- dest,
- default=True,
- required=False,
- help=None):
- super(_StoreFalseAction, self).__init__(
- option_strings=option_strings,
- dest=dest,
- const=False,
- default=default,
- required=required,
- help=help)
-
-
-class _AppendAction(Action):
-
- def __init__(self,
- option_strings,
- dest,
- nargs=None,
- const=None,
- default=None,
- type=None,
- choices=None,
- required=False,
- help=None,
- metavar=None):
- if nargs == 0:
- raise ValueError('nargs for append actions must be > 0; if arg '
- 'strings are not supplying the value to append, '
- 'the append const action may be more appropriate')
- if const is not None and nargs != OPTIONAL:
- raise ValueError('nargs must be %r to supply const' % OPTIONAL)
- super(_AppendAction, self).__init__(
- option_strings=option_strings,
- dest=dest,
- nargs=nargs,
- const=const,
- default=default,
- type=type,
- choices=choices,
- required=required,
- help=help,
- metavar=metavar)
-
- def __call__(self, parser, namespace, values, option_string=None):
- items = _copy.copy(_ensure_value(namespace, self.dest, []))
- items.append(values)
- setattr(namespace, self.dest, items)
-
-
-class _AppendConstAction(Action):
-
- def __init__(self,
- option_strings,
- dest,
- const,
- default=None,
- required=False,
- help=None,
- metavar=None):
- super(_AppendConstAction, self).__init__(
- option_strings=option_strings,
- dest=dest,
- nargs=0,
- const=const,
- default=default,
- required=required,
- help=help,
- metavar=metavar)
-
- def __call__(self, parser, namespace, values, option_string=None):
- items = _copy.copy(_ensure_value(namespace, self.dest, []))
- items.append(self.const)
- setattr(namespace, self.dest, items)
-
-
-class _CountAction(Action):
-
- def __init__(self,
- option_strings,
- dest,
- default=None,
- required=False,
- help=None):
- super(_CountAction, self).__init__(
- option_strings=option_strings,
- dest=dest,
- nargs=0,
- default=default,
- required=required,
- help=help)
-
- def __call__(self, parser, namespace, values, option_string=None):
- new_count = _ensure_value(namespace, self.dest, 0) + 1
- setattr(namespace, self.dest, new_count)
-
-
-class _HelpAction(Action):
-
- def __init__(self,
- option_strings,
- dest=SUPPRESS,
- default=SUPPRESS,
- help=None):
- super(_HelpAction, self).__init__(
- option_strings=option_strings,
- dest=dest,
- default=default,
- nargs=0,
- help=help)
-
- def __call__(self, parser, namespace, values, option_string=None):
- parser.print_help()
- parser.exit()
-
-
-class _VersionAction(Action):
-
- def __init__(self,
- option_strings,
- version=None,
- dest=SUPPRESS,
- default=SUPPRESS,
- help="show program's version number and exit"):
- super(_VersionAction, self).__init__(
- option_strings=option_strings,
- dest=dest,
- default=default,
- nargs=0,
- help=help)
- self.version = version
-
- def __call__(self, parser, namespace, values, option_string=None):
- version = self.version
- if version is None:
- version = parser.version
- formatter = parser._get_formatter()
- formatter.add_text(version)
- parser.exit(message=formatter.format_help())
-
-
-class _SubParsersAction(Action):
-
- class _ChoicesPseudoAction(Action):
-
- def __init__(self, name, aliases, help):
- metavar = dest = name
- if aliases:
- metavar += ' (%s)' % ', '.join(aliases)
- sup = super(_SubParsersAction._ChoicesPseudoAction, self)
- sup.__init__(option_strings=[], dest=dest, help=help,
- metavar=metavar)
-
- def __init__(self,
- option_strings,
- prog,
- parser_class,
- dest=SUPPRESS,
- help=None,
- metavar=None):
-
- self._prog_prefix = prog
- self._parser_class = parser_class
- self._name_parser_map = {}
- self._choices_actions = []
-
- super(_SubParsersAction, self).__init__(
- option_strings=option_strings,
- dest=dest,
- nargs=PARSER,
- choices=self._name_parser_map,
- help=help,
- metavar=metavar)
-
- def add_parser(self, name, **kwargs):
- # set prog from the existing prefix
- if kwargs.get('prog') is None:
- kwargs['prog'] = '%s %s' % (self._prog_prefix, name)
-
- aliases = kwargs.pop('aliases', ())
-
- # create a pseudo-action to hold the choice help
- if 'help' in kwargs:
- help = kwargs.pop('help')
- choice_action = self._ChoicesPseudoAction(name, aliases, help)
- self._choices_actions.append(choice_action)
-
- # create the parser and add it to the map
- parser = self._parser_class(**kwargs)
- self._name_parser_map[name] = parser
-
- # make parser available under aliases also
- for alias in aliases:
- self._name_parser_map[alias] = parser
-
- return parser
-
- def _get_subactions(self):
- return self._choices_actions
-
- def __call__(self, parser, namespace, values, option_string=None):
- parser_name = values[0]
- arg_strings = values[1:]
-
- # set the parser name if requested
- if self.dest is not SUPPRESS:
- setattr(namespace, self.dest, parser_name)
-
- # select the parser
- try:
- parser = self._name_parser_map[parser_name]
- except KeyError:
- tup = parser_name, ', '.join(self._name_parser_map)
- msg = _('unknown parser %r (choices: %s)' % tup)
- raise ArgumentError(self, msg)
-
- # parse all the remaining options into the namespace
- # store any unrecognized options on the object, so that the top
- # level parser can decide what to do with them
- namespace, arg_strings = parser.parse_known_args(arg_strings, namespace)
- if arg_strings:
- vars(namespace).setdefault(_UNRECOGNIZED_ARGS_ATTR, [])
- getattr(namespace, _UNRECOGNIZED_ARGS_ATTR).extend(arg_strings)
-
-
-# ==============
-# Type classes
-# ==============
-
-class FileType(object):
- """Factory for creating file object types
-
- Instances of FileType are typically passed as type= arguments to the
- ArgumentParser add_argument() method.
-
- Keyword Arguments:
- - mode -- A string indicating how the file is to be opened. Accepts the
- same values as the builtin open() function.
- - bufsize -- The file's desired buffer size. Accepts the same values as
- the builtin open() function.
- """
-
- def __init__(self, mode='r', bufsize=None):
- self._mode = mode
- self._bufsize = bufsize
-
- def __call__(self, string):
- # the special argument "-" means sys.std{in,out}
- if string == '-':
- if 'r' in self._mode:
- return _sys.stdin
- elif 'w' in self._mode:
- return _sys.stdout
- else:
- msg = _('argument "-" with mode %r' % self._mode)
- raise ValueError(msg)
-
- try:
- # all other arguments are used as file names
- if self._bufsize:
- return open(string, self._mode, self._bufsize)
- else:
- return open(string, self._mode)
- except IOError:
- err = _sys.exc_info()[1]
- message = _("can't open '%s': %s")
- raise ArgumentTypeError(message % (string, err))
-
- def __repr__(self):
- args = [self._mode, self._bufsize]
- args_str = ', '.join([repr(arg) for arg in args if arg is not None])
- return '%s(%s)' % (type(self).__name__, args_str)
-
-# ===========================
-# Optional and Positional Parsing
-# ===========================
-
-class Namespace(_AttributeHolder):
- """Simple object for storing attributes.
-
- Implements equality by attribute names and values, and provides a simple
- string representation.
- """
-
- def __init__(self, **kwargs):
- for name in kwargs:
- setattr(self, name, kwargs[name])
-
- __hash__ = None
-
- def __eq__(self, other):
- return vars(self) == vars(other)
-
- def __ne__(self, other):
- return not (self == other)
-
- def __contains__(self, key):
- return key in self.__dict__
-
-
-class _ActionsContainer(object):
-
- def __init__(self,
- description,
- prefix_chars,
- argument_default,
- conflict_handler):
- super(_ActionsContainer, self).__init__()
-
- self.description = description
- self.argument_default = argument_default
- self.prefix_chars = prefix_chars
- self.conflict_handler = conflict_handler
-
- # set up registries
- self._registries = {}
-
- # register actions
- self.register('action', None, _StoreAction)
- self.register('action', 'store', _StoreAction)
- self.register('action', 'store_const', _StoreConstAction)
- self.register('action', 'store_true', _StoreTrueAction)
- self.register('action', 'store_false', _StoreFalseAction)
- self.register('action', 'append', _AppendAction)
- self.register('action', 'append_const', _AppendConstAction)
- self.register('action', 'count', _CountAction)
- self.register('action', 'help', _HelpAction)
- self.register('action', 'version', _VersionAction)
- self.register('action', 'parsers', _SubParsersAction)
-
- # raise an exception if the conflict handler is invalid
- self._get_handler()
-
- # action storage
- self._actions = []
- self._option_string_actions = {}
-
- # groups
- self._action_groups = []
- self._mutually_exclusive_groups = []
-
- # defaults storage
- self._defaults = {}
-
- # determines whether an "option" looks like a negative number
- self._negative_number_matcher = _re.compile(r'^-\d+$|^-\d*\.\d+$')
-
- # whether or not there are any optionals that look like negative
- # numbers -- uses a list so it can be shared and edited
- self._has_negative_number_optionals = []
-
- # ====================
- # Registration methods
- # ====================
- def register(self, registry_name, value, object):
- registry = self._registries.setdefault(registry_name, {})
- registry[value] = object
-
- def _registry_get(self, registry_name, value, default=None):
- return self._registries[registry_name].get(value, default)
-
- # ==================================
- # Namespace default accessor methods
- # ==================================
- def set_defaults(self, **kwargs):
- self._defaults.update(kwargs)
-
- # if these defaults match any existing arguments, replace
- # the previous default on the object with the new one
- for action in self._actions:
- if action.dest in kwargs:
- action.default = kwargs[action.dest]
-
- def get_default(self, dest):
- for action in self._actions:
- if action.dest == dest and action.default is not None:
- return action.default
- return self._defaults.get(dest, None)
-
-
- # =======================
- # Adding argument actions
- # =======================
- def add_argument(self, *args, **kwargs):
- """
- add_argument(dest, ..., name=value, ...)
- add_argument(option_string, option_string, ..., name=value, ...)
- """
-
- # if no positional args are supplied or only one is supplied and
- # it doesn't look like an option string, parse a positional
- # argument
- chars = self.prefix_chars
- if not args or len(args) == 1 and args[0][0] not in chars:
- if args and 'dest' in kwargs:
- raise ValueError('dest supplied twice for positional argument')
- kwargs = self._get_positional_kwargs(*args, **kwargs)
-
- # otherwise, we're adding an optional argument
- else:
- kwargs = self._get_optional_kwargs(*args, **kwargs)
-
- # if no default was supplied, use the parser-level default
- if 'default' not in kwargs:
- dest = kwargs['dest']
- if dest in self._defaults:
- kwargs['default'] = self._defaults[dest]
- elif self.argument_default is not None:
- kwargs['default'] = self.argument_default
-
- # create the action object, and add it to the parser
- action_class = self._pop_action_class(kwargs)
- if not _callable(action_class):
- raise ValueError('unknown action "%s"' % action_class)
- action = action_class(**kwargs)
-
- # raise an error if the action type is not callable
- type_func = self._registry_get('type', action.type, action.type)
- if not _callable(type_func):
- raise ValueError('%r is not callable' % type_func)
-
- return self._add_action(action)
-
- def add_argument_group(self, *args, **kwargs):
- group = _ArgumentGroup(self, *args, **kwargs)
- self._action_groups.append(group)
- return group
-
- def add_mutually_exclusive_group(self, **kwargs):
- group = _MutuallyExclusiveGroup(self, **kwargs)
- self._mutually_exclusive_groups.append(group)
- return group
-
- def _add_action(self, action):
- # resolve any conflicts
- self._check_conflict(action)
-
- # add to actions list
- self._actions.append(action)
- action.container = self
-
- # index the action by any option strings it has
- for option_string in action.option_strings:
- self._option_string_actions[option_string] = action
-
- # set the flag if any option strings look like negative numbers
- for option_string in action.option_strings:
- if self._negative_number_matcher.match(option_string):
- if not self._has_negative_number_optionals:
- self._has_negative_number_optionals.append(True)
-
- # return the created action
- return action
-
- def _remove_action(self, action):
- self._actions.remove(action)
-
- def _add_container_actions(self, container):
- # collect groups by titles
- title_group_map = {}
- for group in self._action_groups:
- if group.title in title_group_map:
- msg = _('cannot merge actions - two groups are named %r')
- raise ValueError(msg % (group.title))
- title_group_map[group.title] = group
-
- # map each action to its group
- group_map = {}
- for group in container._action_groups:
-
- # if a group with the title exists, use that, otherwise
- # create a new group matching the container's group
- if group.title not in title_group_map:
- title_group_map[group.title] = self.add_argument_group(
- title=group.title,
- description=group.description,
- conflict_handler=group.conflict_handler)
-
- # map the actions to their new group
- for action in group._group_actions:
- group_map[action] = title_group_map[group.title]
-
- # add container's mutually exclusive groups
- # NOTE: if add_mutually_exclusive_group ever gains title= and
- # description= then this code will need to be expanded as above
- for group in container._mutually_exclusive_groups:
- mutex_group = self.add_mutually_exclusive_group(
- required=group.required)
-
- # map the actions to their new mutex group
- for action in group._group_actions:
- group_map[action] = mutex_group
-
- # add all actions to this container or their group
- for action in container._actions:
- group_map.get(action, self)._add_action(action)
-
- def _get_positional_kwargs(self, dest, **kwargs):
- # make sure required is not specified
- if 'required' in kwargs:
- msg = _("'required' is an invalid argument for positionals")
- raise TypeError(msg)
-
- # mark positional arguments as required if at least one is
- # always required
- if kwargs.get('nargs') not in [OPTIONAL, ZERO_OR_MORE]:
- kwargs['required'] = True
- if kwargs.get('nargs') == ZERO_OR_MORE and 'default' not in kwargs:
- kwargs['required'] = True
-
- # return the keyword arguments with no option strings
- return dict(kwargs, dest=dest, option_strings=[])
-
- def _get_optional_kwargs(self, *args, **kwargs):
- # determine short and long option strings
- option_strings = []
- long_option_strings = []
- for option_string in args:
- # error on strings that don't start with an appropriate prefix
- if not option_string[0] in self.prefix_chars:
- msg = _('invalid option string %r: '
- 'must start with a character %r')
- tup = option_string, self.prefix_chars
- raise ValueError(msg % tup)
-
- # strings starting with two prefix characters are long options
- option_strings.append(option_string)
- if option_string[0] in self.prefix_chars:
- if len(option_string) > 1:
- if option_string[1] in self.prefix_chars:
- long_option_strings.append(option_string)
-
- # infer destination, '--foo-bar' -> 'foo_bar' and '-x' -> 'x'
- dest = kwargs.pop('dest', None)
- if dest is None:
- if long_option_strings:
- dest_option_string = long_option_strings[0]
- else:
- dest_option_string = option_strings[0]
- dest = dest_option_string.lstrip(self.prefix_chars)
- if not dest:
- msg = _('dest= is required for options like %r')
- raise ValueError(msg % option_string)
- dest = dest.replace('-', '_')
-
- # return the updated keyword arguments
- return dict(kwargs, dest=dest, option_strings=option_strings)
-
- def _pop_action_class(self, kwargs, default=None):
- action = kwargs.pop('action', default)
- return self._registry_get('action', action, action)
-
- def _get_handler(self):
- # determine function from conflict handler string
- handler_func_name = '_handle_conflict_%s' % self.conflict_handler
- try:
- return getattr(self, handler_func_name)
- except AttributeError:
- msg = _('invalid conflict_resolution value: %r')
- raise ValueError(msg % self.conflict_handler)
-
- def _check_conflict(self, action):
-
- # find all options that conflict with this option
- confl_optionals = []
- for option_string in action.option_strings:
- if option_string in self._option_string_actions:
- confl_optional = self._option_string_actions[option_string]
- confl_optionals.append((option_string, confl_optional))
-
- # resolve any conflicts
- if confl_optionals:
- conflict_handler = self._get_handler()
- conflict_handler(action, confl_optionals)
-
- def _handle_conflict_error(self, action, conflicting_actions):
- message = _('conflicting option string(s): %s')
- conflict_string = ', '.join([option_string
- for option_string, action
- in conflicting_actions])
- raise ArgumentError(action, message % conflict_string)
-
- def _handle_conflict_resolve(self, action, conflicting_actions):
-
- # remove all conflicting options
- for option_string, action in conflicting_actions:
-
- # remove the conflicting option
- action.option_strings.remove(option_string)
- self._option_string_actions.pop(option_string, None)
-
- # if the option now has no option string, remove it from the
- # container holding it
- if not action.option_strings:
- action.container._remove_action(action)
-
-
-class _ArgumentGroup(_ActionsContainer):
-
- def __init__(self, container, title=None, description=None, **kwargs):
- # add any missing keyword arguments by checking the container
- update = kwargs.setdefault
- update('conflict_handler', container.conflict_handler)
- update('prefix_chars', container.prefix_chars)
- update('argument_default', container.argument_default)
- super_init = super(_ArgumentGroup, self).__init__
- super_init(description=description, **kwargs)
-
- # group attributes
- self.title = title
- self._group_actions = []
-
- # share most attributes with the container
- self._registries = container._registries
- self._actions = container._actions
- self._option_string_actions = container._option_string_actions
- self._defaults = container._defaults
- self._has_negative_number_optionals = \
- container._has_negative_number_optionals
-
- def _add_action(self, action):
- action = super(_ArgumentGroup, self)._add_action(action)
- self._group_actions.append(action)
- return action
-
- def _remove_action(self, action):
- super(_ArgumentGroup, self)._remove_action(action)
- self._group_actions.remove(action)
-
-
-class _MutuallyExclusiveGroup(_ArgumentGroup):
-
- def __init__(self, container, required=False):
- super(_MutuallyExclusiveGroup, self).__init__(container)
- self.required = required
- self._container = container
-
- def _add_action(self, action):
- if action.required:
- msg = _('mutually exclusive arguments must be optional')
- raise ValueError(msg)
- action = self._container._add_action(action)
- self._group_actions.append(action)
- return action
-
- def _remove_action(self, action):
- self._container._remove_action(action)
- self._group_actions.remove(action)
-
-
-class ArgumentParser(_AttributeHolder, _ActionsContainer):
- """Object for parsing command line strings into Python objects.
-
- Keyword Arguments:
- - prog -- The name of the program (default: sys.argv[0])
- - usage -- A usage message (default: auto-generated from arguments)
- - description -- A description of what the program does
- - epilog -- Text following the argument descriptions
- - parents -- Parsers whose arguments should be copied into this one
- - formatter_class -- HelpFormatter class for printing help messages
- - prefix_chars -- Characters that prefix optional arguments
- - fromfile_prefix_chars -- Characters that prefix files containing
- additional arguments
- - argument_default -- The default value for all arguments
- - conflict_handler -- String indicating how to handle conflicts
- - add_help -- Add a -h/-help option
- """
-
- def __init__(self,
- prog=None,
- usage=None,
- description=None,
- epilog=None,
- version=None,
- parents=[],
- formatter_class=HelpFormatter,
- prefix_chars='-',
- fromfile_prefix_chars=None,
- argument_default=None,
- conflict_handler='error',
- add_help=True):
-
- if version is not None:
- import warnings
- warnings.warn(
- """The "version" argument to ArgumentParser is deprecated. """
- """Please use """
- """"add_argument(..., action='version', version="N", ...)" """
- """instead""", DeprecationWarning)
-
- superinit = super(ArgumentParser, self).__init__
- superinit(description=description,
- prefix_chars=prefix_chars,
- argument_default=argument_default,
- conflict_handler=conflict_handler)
-
- # default setting for prog
- if prog is None:
- prog = _os.path.basename(_sys.argv[0])
-
- self.prog = prog
- self.usage = usage
- self.epilog = epilog
- self.version = version
- self.formatter_class = formatter_class
- self.fromfile_prefix_chars = fromfile_prefix_chars
- self.add_help = add_help
-
- add_group = self.add_argument_group
- self._positionals = add_group(_('positional arguments'))
- self._optionals = add_group(_('optional arguments'))
- self._subparsers = None
-
- # register types
- def identity(string):
- return string
- self.register('type', None, identity)
-
- # add help and version arguments if necessary
- # (using explicit default to override global argument_default)
- if '-' in prefix_chars:
- default_prefix = '-'
- else:
- default_prefix = prefix_chars[0]
- if self.add_help:
- self.add_argument(
- default_prefix+'h', default_prefix*2+'help',
- action='help', default=SUPPRESS,
- help=_('show this help message and exit'))
- if self.version:
- self.add_argument(
- default_prefix+'v', default_prefix*2+'version',
- action='version', default=SUPPRESS,
- version=self.version,
- help=_("show program's version number and exit"))
-
- # add parent arguments and defaults
- for parent in parents:
- self._add_container_actions(parent)
- try:
- defaults = parent._defaults
- except AttributeError:
- pass
- else:
- self._defaults.update(defaults)
-
- # =======================
- # Pretty __repr__ methods
- # =======================
- def _get_kwargs(self):
- names = [
- 'prog',
- 'usage',
- 'description',
- 'version',
- 'formatter_class',
- 'conflict_handler',
- 'add_help',
- ]
- return [(name, getattr(self, name)) for name in names]
-
- # ==================================
- # Optional/Positional adding methods
- # ==================================
- def add_subparsers(self, **kwargs):
- if self._subparsers is not None:
- self.error(_('cannot have multiple subparser arguments'))
-
- # add the parser class to the arguments if it's not present
- kwargs.setdefault('parser_class', type(self))
-
- if 'title' in kwargs or 'description' in kwargs:
- title = _(kwargs.pop('title', 'subcommands'))
- description = _(kwargs.pop('description', None))
- self._subparsers = self.add_argument_group(title, description)
- else:
- self._subparsers = self._positionals
-
- # prog defaults to the usage message of this parser, skipping
- # optional arguments and with no "usage:" prefix
- if kwargs.get('prog') is None:
- formatter = self._get_formatter()
- positionals = self._get_positional_actions()
- groups = self._mutually_exclusive_groups
- formatter.add_usage(self.usage, positionals, groups, '')
- kwargs['prog'] = formatter.format_help().strip()
-
- # create the parsers action and add it to the positionals list
- parsers_class = self._pop_action_class(kwargs, 'parsers')
- action = parsers_class(option_strings=[], **kwargs)
- self._subparsers._add_action(action)
-
- # return the created parsers action
- return action
-
- def _add_action(self, action):
- if action.option_strings:
- self._optionals._add_action(action)
- else:
- self._positionals._add_action(action)
- return action
-
- def _get_optional_actions(self):
- return [action
- for action in self._actions
- if action.option_strings]
-
- def _get_positional_actions(self):
- return [action
- for action in self._actions
- if not action.option_strings]
-
- # =====================================
- # Command line argument parsing methods
- # =====================================
- def parse_args(self, args=None, namespace=None):
- args, argv = self.parse_known_args(args, namespace)
- if argv:
- msg = _('unrecognized arguments: %s')
- self.error(msg % ' '.join(argv))
- return args
-
- def parse_known_args(self, args=None, namespace=None):
- # args default to the system args
- if args is None:
- args = _sys.argv[1:]
-
- # default Namespace built from parser defaults
- if namespace is None:
- namespace = Namespace()
-
- # add any action defaults that aren't present
- for action in self._actions:
- if action.dest is not SUPPRESS:
- if not hasattr(namespace, action.dest):
- if action.default is not SUPPRESS:
- setattr(namespace, action.dest, action.default)
-
- # add any parser defaults that aren't present
- for dest in self._defaults:
- if not hasattr(namespace, dest):
- setattr(namespace, dest, self._defaults[dest])
-
- # parse the arguments and exit if there are any errors
- try:
- namespace, args = self._parse_known_args(args, namespace)
- if hasattr(namespace, _UNRECOGNIZED_ARGS_ATTR):
- args.extend(getattr(namespace, _UNRECOGNIZED_ARGS_ATTR))
- delattr(namespace, _UNRECOGNIZED_ARGS_ATTR)
- return namespace, args
- except ArgumentError:
- err = _sys.exc_info()[1]
- self.error(str(err))
-
- def _parse_known_args(self, arg_strings, namespace):
- # replace arg strings that are file references
- if self.fromfile_prefix_chars is not None:
- arg_strings = self._read_args_from_files(arg_strings)
-
- # map all mutually exclusive arguments to the other arguments
- # they can't occur with
- action_conflicts = {}
- for mutex_group in self._mutually_exclusive_groups:
- group_actions = mutex_group._group_actions
- for i, mutex_action in enumerate(mutex_group._group_actions):
- conflicts = action_conflicts.setdefault(mutex_action, [])
- conflicts.extend(group_actions[:i])
- conflicts.extend(group_actions[i + 1:])
-
- # find all option indices, and determine the arg_string_pattern
- # which has an 'O' if there is an option at an index,
- # an 'A' if there is an argument, or a '-' if there is a '--'
- option_string_indices = {}
- arg_string_pattern_parts = []
- arg_strings_iter = iter(arg_strings)
- for i, arg_string in enumerate(arg_strings_iter):
-
- # all args after -- are non-options
- if arg_string == '--':
- arg_string_pattern_parts.append('-')
- for arg_string in arg_strings_iter:
- arg_string_pattern_parts.append('A')
-
- # otherwise, add the arg to the arg strings
- # and note the index if it was an option
- else:
- option_tuple = self._parse_optional(arg_string)
- if option_tuple is None:
- pattern = 'A'
- else:
- option_string_indices[i] = option_tuple
- pattern = 'O'
- arg_string_pattern_parts.append(pattern)
-
- # join the pieces together to form the pattern
- arg_strings_pattern = ''.join(arg_string_pattern_parts)
-
-        # converts arg strings to the appropriate type and then takes the action
- seen_actions = set()
- seen_non_default_actions = set()
-
- def take_action(action, argument_strings, option_string=None):
- seen_actions.add(action)
- argument_values = self._get_values(action, argument_strings)
-
- # error if this argument is not allowed with other previously
- # seen arguments, assuming that actions that use the default
- # value don't really count as "present"
- if argument_values is not action.default:
- seen_non_default_actions.add(action)
- for conflict_action in action_conflicts.get(action, []):
- if conflict_action in seen_non_default_actions:
- msg = _('not allowed with argument %s')
- action_name = _get_action_name(conflict_action)
- raise ArgumentError(action, msg % action_name)
-
- # take the action if we didn't receive a SUPPRESS value
- # (e.g. from a default)
- if argument_values is not SUPPRESS:
- action(self, namespace, argument_values, option_string)
-
- # function to convert arg_strings into an optional action
- def consume_optional(start_index):
-
- # get the optional identified at this index
- option_tuple = option_string_indices[start_index]
- action, option_string, explicit_arg = option_tuple
-
- # identify additional optionals in the same arg string
- # (e.g. -xyz is the same as -x -y -z if no args are required)
- match_argument = self._match_argument
- action_tuples = []
- while True:
-
- # if we found no optional action, skip it
- if action is None:
- extras.append(arg_strings[start_index])
- return start_index + 1
-
- # if there is an explicit argument, try to match the
- # optional's string arguments to only this
- if explicit_arg is not None:
- arg_count = match_argument(action, 'A')
-
- # if the action is a single-dash option and takes no
- # arguments, try to parse more single-dash options out
- # of the tail of the option string
- chars = self.prefix_chars
- if arg_count == 0 and option_string[1] not in chars:
- action_tuples.append((action, [], option_string))
- char = option_string[0]
- option_string = char + explicit_arg[0]
- new_explicit_arg = explicit_arg[1:] or None
- optionals_map = self._option_string_actions
- if option_string in optionals_map:
- action = optionals_map[option_string]
- explicit_arg = new_explicit_arg
- else:
- msg = _('ignored explicit argument %r')
- raise ArgumentError(action, msg % explicit_arg)
-
-                # if the action expects exactly one argument, we've
- # successfully matched the option; exit the loop
- elif arg_count == 1:
- stop = start_index + 1
- args = [explicit_arg]
- action_tuples.append((action, args, option_string))
- break
-
- # error if a double-dash option did not use the
- # explicit argument
- else:
- msg = _('ignored explicit argument %r')
- raise ArgumentError(action, msg % explicit_arg)
-
- # if there is no explicit argument, try to match the
- # optional's string arguments with the following strings
- # if successful, exit the loop
- else:
- start = start_index + 1
- selected_patterns = arg_strings_pattern[start:]
- arg_count = match_argument(action, selected_patterns)
- stop = start + arg_count
- args = arg_strings[start:stop]
- action_tuples.append((action, args, option_string))
- break
-
- # add the Optional to the list and return the index at which
- # the Optional's string args stopped
- assert action_tuples
- for action, args, option_string in action_tuples:
- take_action(action, args, option_string)
- return stop
-
- # the list of Positionals left to be parsed; this is modified
- # by consume_positionals()
- positionals = self._get_positional_actions()
-
- # function to convert arg_strings into positional actions
- def consume_positionals(start_index):
- # match as many Positionals as possible
- match_partial = self._match_arguments_partial
- selected_pattern = arg_strings_pattern[start_index:]
- arg_counts = match_partial(positionals, selected_pattern)
-
- # slice off the appropriate arg strings for each Positional
- # and add the Positional and its args to the list
- for action, arg_count in zip(positionals, arg_counts):
- args = arg_strings[start_index: start_index + arg_count]
- start_index += arg_count
- take_action(action, args)
-
- # slice off the Positionals that we just parsed and return the
- # index at which the Positionals' string args stopped
- positionals[:] = positionals[len(arg_counts):]
- return start_index
-
- # consume Positionals and Optionals alternately, until we have
- # passed the last option string
- extras = []
- start_index = 0
- if option_string_indices:
- max_option_string_index = max(option_string_indices)
- else:
- max_option_string_index = -1
- while start_index <= max_option_string_index:
-
- # consume any Positionals preceding the next option
- next_option_string_index = min([
- index
- for index in option_string_indices
- if index >= start_index])
- if start_index != next_option_string_index:
- positionals_end_index = consume_positionals(start_index)
-
- # only try to parse the next optional if we didn't consume
- # the option string during the positionals parsing
- if positionals_end_index > start_index:
- start_index = positionals_end_index
- continue
- else:
- start_index = positionals_end_index
-
- # if we consumed all the positionals we could and we're not
- # at the index of an option string, there were extra arguments
- if start_index not in option_string_indices:
- strings = arg_strings[start_index:next_option_string_index]
- extras.extend(strings)
- start_index = next_option_string_index
-
- # consume the next optional and any arguments for it
- start_index = consume_optional(start_index)
-
- # consume any positionals following the last Optional
- stop_index = consume_positionals(start_index)
-
- # if we didn't consume all the argument strings, there were extras
- extras.extend(arg_strings[stop_index:])
-
- # if we didn't use all the Positional objects, there were too few
- # arg strings supplied.
- if positionals:
- self.error(_('too few arguments'))
-
- # make sure all required actions were present, and convert defaults.
- for action in self._actions:
- if action not in seen_actions:
- if action.required:
- name = _get_action_name(action)
- self.error(_('argument %s is required') % name)
- else:
- # Convert action default now instead of doing it before
- # parsing arguments to avoid calling convert functions
- # twice (which may fail) if the argument was given, but
- # only if it was defined already in the namespace
- if (action.default is not None and
- isinstance(action.default, basestring) and
- hasattr(namespace, action.dest) and
- action.default is getattr(namespace, action.dest)):
- setattr(namespace, action.dest,
- self._get_value(action, action.default))
-
- # make sure all required groups had one option present
- for group in self._mutually_exclusive_groups:
- if group.required:
- for action in group._group_actions:
- if action in seen_non_default_actions:
- break
-
- # if no actions were used, report the error
- else:
- names = [_get_action_name(action)
- for action in group._group_actions
- if action.help is not SUPPRESS]
- msg = _('one of the arguments %s is required')
- self.error(msg % ' '.join(names))
-
- # return the updated namespace and the extra arguments
- return namespace, extras
-
- def _read_args_from_files(self, arg_strings):
- # expand arguments referencing files
- new_arg_strings = []
- for arg_string in arg_strings:
-
- # for regular arguments, just add them back into the list
- if arg_string[0] not in self.fromfile_prefix_chars:
- new_arg_strings.append(arg_string)
-
- # replace arguments referencing files with the file content
- else:
- try:
- args_file = open(arg_string[1:])
- try:
- arg_strings = []
- for arg_line in args_file.read().splitlines():
- for arg in self.convert_arg_line_to_args(arg_line):
- arg_strings.append(arg)
- arg_strings = self._read_args_from_files(arg_strings)
- new_arg_strings.extend(arg_strings)
- finally:
- args_file.close()
- except IOError:
- err = _sys.exc_info()[1]
- self.error(str(err))
-
- # return the modified argument list
- return new_arg_strings
-
- def convert_arg_line_to_args(self, arg_line):
- return [arg_line]
-
- def _match_argument(self, action, arg_strings_pattern):
- # match the pattern for this action to the arg strings
- nargs_pattern = self._get_nargs_pattern(action)
- match = _re.match(nargs_pattern, arg_strings_pattern)
-
- # raise an exception if we weren't able to find a match
- if match is None:
- nargs_errors = {
- None: _('expected one argument'),
- OPTIONAL: _('expected at most one argument'),
- ONE_OR_MORE: _('expected at least one argument'),
- }
- default = _('expected %s argument(s)') % action.nargs
- msg = nargs_errors.get(action.nargs, default)
- raise ArgumentError(action, msg)
-
- # return the number of arguments matched
- return len(match.group(1))
-
- def _match_arguments_partial(self, actions, arg_strings_pattern):
- # progressively shorten the actions list by slicing off the
- # final actions until we find a match
- result = []
- for i in range(len(actions), 0, -1):
- actions_slice = actions[:i]
- pattern = ''.join([self._get_nargs_pattern(action)
- for action in actions_slice])
- match = _re.match(pattern, arg_strings_pattern)
- if match is not None:
- result.extend([len(string) for string in match.groups()])
- break
-
- # return the list of arg string counts
- return result
-
- def _parse_optional(self, arg_string):
- # if it's an empty string, it was meant to be a positional
- if not arg_string:
- return None
-
- # if it doesn't start with a prefix, it was meant to be positional
- if not arg_string[0] in self.prefix_chars:
- return None
-
- # if the option string is present in the parser, return the action
- if arg_string in self._option_string_actions:
- action = self._option_string_actions[arg_string]
- return action, arg_string, None
-
- # if it's just a single character, it was meant to be positional
- if len(arg_string) == 1:
- return None
-
- # if the option string before the "=" is present, return the action
- if '=' in arg_string:
- option_string, explicit_arg = arg_string.split('=', 1)
- if option_string in self._option_string_actions:
- action = self._option_string_actions[option_string]
- return action, option_string, explicit_arg
-
- # search through all possible prefixes of the option string
- # and all actions in the parser for possible interpretations
- option_tuples = self._get_option_tuples(arg_string)
-
- # if multiple actions match, the option string was ambiguous
- if len(option_tuples) > 1:
- options = ', '.join([option_string
- for action, option_string, explicit_arg in option_tuples])
- tup = arg_string, options
- self.error(_('ambiguous option: %s could match %s') % tup)
-
- # if exactly one action matched, this segmentation is good,
- # so return the parsed action
- elif len(option_tuples) == 1:
- option_tuple, = option_tuples
- return option_tuple
-
- # if it was not found as an option, but it looks like a negative
- # number, it was meant to be positional
- # unless there are negative-number-like options
- if self._negative_number_matcher.match(arg_string):
- if not self._has_negative_number_optionals:
- return None
-
- # if it contains a space, it was meant to be a positional
- if ' ' in arg_string:
- return None
-
- # it was meant to be an optional but there is no such option
- # in this parser (though it might be a valid option in a subparser)
- return None, arg_string, None
-
- def _get_option_tuples(self, option_string):
- result = []
-
- # option strings starting with two prefix characters are only
- # split at the '='
- chars = self.prefix_chars
- if option_string[0] in chars and option_string[1] in chars:
- if '=' in option_string:
- option_prefix, explicit_arg = option_string.split('=', 1)
- else:
- option_prefix = option_string
- explicit_arg = None
- for option_string in self._option_string_actions:
- if option_string.startswith(option_prefix):
- action = self._option_string_actions[option_string]
- tup = action, option_string, explicit_arg
- result.append(tup)
-
- # single character options can be concatenated with their arguments
- # but multiple character options always have to have their argument
- # separate
- elif option_string[0] in chars and option_string[1] not in chars:
- option_prefix = option_string
- explicit_arg = None
- short_option_prefix = option_string[:2]
- short_explicit_arg = option_string[2:]
-
- for option_string in self._option_string_actions:
- if option_string == short_option_prefix:
- action = self._option_string_actions[option_string]
- tup = action, option_string, short_explicit_arg
- result.append(tup)
- elif option_string.startswith(option_prefix):
- action = self._option_string_actions[option_string]
- tup = action, option_string, explicit_arg
- result.append(tup)
-
- # shouldn't ever get here
- else:
- self.error(_('unexpected option string: %s') % option_string)
-
- # return the collected option tuples
- return result
-
- def _get_nargs_pattern(self, action):
- # in all examples below, we have to allow for '--' args
- # which are represented as '-' in the pattern
- nargs = action.nargs
-
- # the default (None) is assumed to be a single argument
- if nargs is None:
- nargs_pattern = '(-*A-*)'
-
- # allow zero or one arguments
- elif nargs == OPTIONAL:
- nargs_pattern = '(-*A?-*)'
-
- # allow zero or more arguments
- elif nargs == ZERO_OR_MORE:
- nargs_pattern = '(-*[A-]*)'
-
- # allow one or more arguments
- elif nargs == ONE_OR_MORE:
- nargs_pattern = '(-*A[A-]*)'
-
- # allow any number of options or arguments
- elif nargs == REMAINDER:
- nargs_pattern = '([-AO]*)'
-
- # allow one argument followed by any number of options or arguments
- elif nargs == PARSER:
- nargs_pattern = '(-*A[-AO]*)'
-
- # all others should be integers
- else:
- nargs_pattern = '(-*%s-*)' % '-*'.join('A' * nargs)
-
- # if this is an optional action, -- is not allowed
- if action.option_strings:
- nargs_pattern = nargs_pattern.replace('-*', '')
- nargs_pattern = nargs_pattern.replace('-', '')
-
- # return the pattern
- return nargs_pattern
-
- # ========================
- # Value conversion methods
- # ========================
- def _get_values(self, action, arg_strings):
- # for everything but PARSER args, strip out '--'
- if action.nargs not in [PARSER, REMAINDER]:
- arg_strings = [s for s in arg_strings if s != '--']
-
- # optional argument produces a default when not present
- if not arg_strings and action.nargs == OPTIONAL:
- if action.option_strings:
- value = action.const
- else:
- value = action.default
- if isinstance(value, basestring):
- value = self._get_value(action, value)
- self._check_value(action, value)
-
- # when nargs='*' on a positional, if there were no command-line
- # args, use the default if it is anything other than None
- elif (not arg_strings and action.nargs == ZERO_OR_MORE and
- not action.option_strings):
- if action.default is not None:
- value = action.default
- else:
- value = arg_strings
- self._check_value(action, value)
-
- # single argument or optional argument produces a single value
- elif len(arg_strings) == 1 and action.nargs in [None, OPTIONAL]:
- arg_string, = arg_strings
- value = self._get_value(action, arg_string)
- self._check_value(action, value)
-
- # REMAINDER arguments convert all values, checking none
- elif action.nargs == REMAINDER:
- value = [self._get_value(action, v) for v in arg_strings]
-
- # PARSER arguments convert all values, but check only the first
- elif action.nargs == PARSER:
- value = [self._get_value(action, v) for v in arg_strings]
- self._check_value(action, value[0])
-
- # all other types of nargs produce a list
- else:
- value = [self._get_value(action, v) for v in arg_strings]
- for v in value:
- self._check_value(action, v)
-
- # return the converted value
- return value
-
- def _get_value(self, action, arg_string):
- type_func = self._registry_get('type', action.type, action.type)
- if not _callable(type_func):
- msg = _('%r is not callable')
- raise ArgumentError(action, msg % type_func)
-
- # convert the value to the appropriate type
- try:
- result = type_func(arg_string)
-
- # ArgumentTypeErrors indicate errors
- except ArgumentTypeError:
- name = getattr(action.type, '__name__', repr(action.type))
- msg = str(_sys.exc_info()[1])
- raise ArgumentError(action, msg)
-
- # TypeErrors or ValueErrors also indicate errors
- except (TypeError, ValueError):
- name = getattr(action.type, '__name__', repr(action.type))
- msg = _('invalid %s value: %r')
- raise ArgumentError(action, msg % (name, arg_string))
-
- # return the converted value
- return result
-
- def _check_value(self, action, value):
- # converted value must be one of the choices (if specified)
- if action.choices is not None and value not in action.choices:
- cols = colified(sorted(action.choices), indent=4, tty=True)
- msg = _('invalid choice: %r choose from:\n%s') % (value, cols)
- raise ArgumentError(action, msg)
-
- # =======================
- # Help-formatting methods
- # =======================
- def format_usage(self):
- formatter = self._get_formatter()
- formatter.add_usage(self.usage, self._actions,
- self._mutually_exclusive_groups)
- return formatter.format_help()
-
- def format_help(self):
- formatter = self._get_formatter()
-
- # usage
- formatter.add_usage(self.usage, self._actions,
- self._mutually_exclusive_groups)
-
- # description
- formatter.add_text(self.description)
-
- # positionals, optionals and user-defined groups
- for action_group in self._action_groups:
- formatter.start_section(action_group.title)
- formatter.add_text(action_group.description)
- formatter.add_arguments(action_group._group_actions)
- formatter.end_section()
-
- # epilog
- formatter.add_text(self.epilog)
-
- # determine help from format above
- return formatter.format_help()
-
- def format_version(self):
- import warnings
- warnings.warn(
- 'The format_version method is deprecated -- the "version" '
- 'argument to ArgumentParser is no longer supported.',
- DeprecationWarning)
- formatter = self._get_formatter()
- formatter.add_text(self.version)
- return formatter.format_help()
-
- def _get_formatter(self):
- return self.formatter_class(prog=self.prog)
-
- # =====================
- # Help-printing methods
- # =====================
- def print_usage(self, file=None):
- if file is None:
- file = _sys.stdout
- self._print_message(self.format_usage(), file)
-
- def print_help(self, file=None):
- if file is None:
- file = _sys.stdout
- self._print_message(self.format_help(), file)
-
- def print_version(self, file=None):
- import warnings
- warnings.warn(
- 'The print_version method is deprecated -- the "version" '
- 'argument to ArgumentParser is no longer supported.',
- DeprecationWarning)
- self._print_message(self.format_version(), file)
-
- def _print_message(self, message, file=None):
- if message:
- if file is None:
- file = _sys.stderr
- file.write(message)
-
- # ===============
- # Exiting methods
- # ===============
- def exit(self, status=0, message=None):
- if message:
- self._print_message(message, _sys.stderr)
- _sys.exit(status)
-
- def error(self, message):
- """error(message: string)
-
- Prints a usage message incorporating the message to stderr and
- exits.
-
- If you override this in a subclass, it should not return -- it
- should either exit or raise an exception.
- """
- self.print_usage(_sys.stderr)
- self.exit(2, _('%s: error: %s\n') % (self.prog, message))
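The behavior deleted above is what Python 3's own argparse module already provides; a minimal sketch of that interface (standard library only, not Spack code; the program, argument, and option names are illustrative):

    import argparse

    parser = argparse.ArgumentParser(prog="demo", description="toy command-line front end")
    parser.add_argument("spec", help="positional argument")
    parser.add_argument("-j", "--jobs", type=int, default=1, help="parallel jobs")

    group = parser.add_mutually_exclusive_group()
    group.add_argument("--verbose", action="store_true")
    group.add_argument("--quiet", action="store_true")

    # parse_known_args() returns (namespace, leftovers); parse_args() instead
    # reports the leftovers via error(), as in the removed implementation.
    ns, extras = parser.parse_known_args(["zlib", "-j8", "--unknown-flag"])
    assert ns.spec == "zlib" and ns.jobs == 8
    assert extras == ["--unknown-flag"]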
diff --git a/lib/spack/external/py2/functools32/LICENSE b/lib/spack/external/py2/functools32/LICENSE
deleted file mode 100644
index 43388e7e13..0000000000
--- a/lib/spack/external/py2/functools32/LICENSE
+++ /dev/null
@@ -1,289 +0,0 @@
-A. HISTORY OF THE SOFTWARE
-==========================
-
-Python was created in the early 1990s by Guido van Rossum at Stichting
-Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
-as a successor of a language called ABC. Guido remains Python's
-principal author, although it includes many contributions from others.
-
-In 1995, Guido continued his work on Python at the Corporation for
-National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
-in Reston, Virginia where he released several versions of the
-software.
-
-In May 2000, Guido and the Python core development team moved to
-BeOpen.com to form the BeOpen PythonLabs team. In October of the same
-year, the PythonLabs team moved to Digital Creations (now Zope
-Corporation, see http://www.zope.com). In 2001, the Python Software
-Foundation (PSF, see http://www.python.org/psf/) was formed, a
-non-profit organization created specifically to own Python-related
-Intellectual Property. Zope Corporation is a sponsoring member of
-the PSF.
-
-All Python releases are Open Source (see http://www.opensource.org for
-the Open Source Definition). Historically, most, but not all, Python
-releases have also been GPL-compatible; the table below summarizes
-the various releases.
-
- Release Derived Year Owner GPL-
- from compatible? (1)
-
- 0.9.0 thru 1.2 1991-1995 CWI yes
- 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
- 1.6 1.5.2 2000 CNRI no
- 2.0 1.6 2000 BeOpen.com no
- 1.6.1 1.6 2001 CNRI yes (2)
- 2.1 2.0+1.6.1 2001 PSF no
- 2.0.1 2.0+1.6.1 2001 PSF yes
- 2.1.1 2.1+2.0.1 2001 PSF yes
- 2.2 2.1.1 2001 PSF yes
- 2.1.2 2.1.1 2002 PSF yes
- 2.1.3 2.1.2 2002 PSF yes
- 2.2.1 2.2 2002 PSF yes
- 2.2.2 2.2.1 2002 PSF yes
- 2.2.3 2.2.2 2003 PSF yes
- 2.3 2.2.2 2002-2003 PSF yes
- 2.3.1 2.3 2002-2003 PSF yes
- 2.3.2 2.3.1 2002-2003 PSF yes
- 2.3.3 2.3.2 2002-2003 PSF yes
- 2.3.4 2.3.3 2004 PSF yes
- 2.3.5 2.3.4 2005 PSF yes
- 2.4 2.3 2004 PSF yes
- 2.4.1 2.4 2005 PSF yes
- 2.4.2 2.4.1 2005 PSF yes
- 2.4.3 2.4.2 2006 PSF yes
- 2.4.4 2.4.3 2006 PSF yes
- 2.5 2.4 2006 PSF yes
- 2.5.1 2.5 2007 PSF yes
- 2.5.2 2.5.1 2008 PSF yes
- 2.5.3 2.5.2 2008 PSF yes
- 2.6 2.5 2008 PSF yes
- 2.6.1 2.6 2008 PSF yes
- 2.6.2 2.6.1 2009 PSF yes
- 2.6.3 2.6.2 2009 PSF yes
- 2.6.4 2.6.3 2009 PSF yes
- 2.6.5 2.6.4 2010 PSF yes
- 3.0 2.6 2008 PSF yes
- 3.0.1 3.0 2009 PSF yes
- 3.1 3.0.1 2009 PSF yes
- 3.1.1 3.1 2009 PSF yes
- 3.1.2 3.1.1 2010 PSF yes
- 3.1.3 3.1.2 2010 PSF yes
- 3.1.4 3.1.3 2011 PSF yes
- 3.2 3.1 2011 PSF yes
- 3.2.1 3.2 2011 PSF yes
- 3.2.2 3.2.1 2011 PSF yes
- 3.2.3 3.2.2 2012 PSF yes
-
-Footnotes:
-
-(1) GPL-compatible doesn't mean that we're distributing Python under
- the GPL. All Python licenses, unlike the GPL, let you distribute
- a modified version without making your changes open source. The
- GPL-compatible licenses make it possible to combine Python with
- other software that is released under the GPL; the others don't.
-
-(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
- because its license has a choice of law clause. According to
- CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
- is "not incompatible" with the GPL.
-
-Thanks to the many outside volunteers who have worked under Guido's
-direction to make these releases possible.
-
-
-B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
-===============================================================
-
-PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
---------------------------------------------
-
-1. This LICENSE AGREEMENT is between the Python Software Foundation
-("PSF"), and the Individual or Organization ("Licensee") accessing and
-otherwise using this software ("Python") in source or binary form and
-its associated documentation.
-
-2. Subject to the terms and conditions of this License Agreement, PSF hereby
-grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
-analyze, test, perform and/or display publicly, prepare derivative works,
-distribute, and otherwise use Python alone or in any derivative version,
-provided, however, that PSF's License Agreement and PSF's notice of copyright,
-i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
-2011, 2012 Python Software Foundation; All Rights Reserved" are retained in Python
-alone or in any derivative version prepared by Licensee.
-
-3. In the event Licensee prepares a derivative work that is based on
-or incorporates Python or any part thereof, and wants to make
-the derivative work available to others as provided herein, then
-Licensee hereby agrees to include in any such work a brief summary of
-the changes made to Python.
-
-4. PSF is making Python available to Licensee on an "AS IS"
-basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
-FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
-A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
-OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-6. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-7. Nothing in this License Agreement shall be deemed to create any
-relationship of agency, partnership, or joint venture between PSF and
-Licensee. This License Agreement does not grant permission to use PSF
-trademarks or trade name in a trademark sense to endorse or promote
-products or services of Licensee, or any third party.
-
-8. By copying, installing or otherwise using Python, Licensee
-agrees to be bound by the terms and conditions of this License
-Agreement.
-
-
-BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
--------------------------------------------
-
-BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
-
-1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
-office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
-Individual or Organization ("Licensee") accessing and otherwise using
-this software in source or binary form and its associated
-documentation ("the Software").
-
-2. Subject to the terms and conditions of this BeOpen Python License
-Agreement, BeOpen hereby grants Licensee a non-exclusive,
-royalty-free, world-wide license to reproduce, analyze, test, perform
-and/or display publicly, prepare derivative works, distribute, and
-otherwise use the Software alone or in any derivative version,
-provided, however, that the BeOpen Python License is retained in the
-Software, alone or in any derivative version prepared by Licensee.
-
-3. BeOpen is making the Software available to Licensee on an "AS IS"
-basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
-SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
-AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
-DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-5. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-6. This License Agreement shall be governed by and interpreted in all
-respects by the law of the State of California, excluding conflict of
-law provisions. Nothing in this License Agreement shall be deemed to
-create any relationship of agency, partnership, or joint venture
-between BeOpen and Licensee. This License Agreement does not grant
-permission to use BeOpen trademarks or trade names in a trademark
-sense to endorse or promote products or services of Licensee, or any
-third party. As an exception, the "BeOpen Python" logos available at
-http://www.pythonlabs.com/logos.html may be used according to the
-permissions granted on that web page.
-
-7. By copying, installing or otherwise using the software, Licensee
-agrees to be bound by the terms and conditions of this License
-Agreement.
-
-
-CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
----------------------------------------
-
-1. This LICENSE AGREEMENT is between the Corporation for National
-Research Initiatives, having an office at 1895 Preston White Drive,
-Reston, VA 20191 ("CNRI"), and the Individual or Organization
-("Licensee") accessing and otherwise using Python 1.6.1 software in
-source or binary form and its associated documentation.
-
-2. Subject to the terms and conditions of this License Agreement, CNRI
-hereby grants Licensee a nonexclusive, royalty-free, world-wide
-license to reproduce, analyze, test, perform and/or display publicly,
-prepare derivative works, distribute, and otherwise use Python 1.6.1
-alone or in any derivative version, provided, however, that CNRI's
-License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
-1995-2001 Corporation for National Research Initiatives; All Rights
-Reserved" are retained in Python 1.6.1 alone or in any derivative
-version prepared by Licensee. Alternately, in lieu of CNRI's License
-Agreement, Licensee may substitute the following text (omitting the
-quotes): "Python 1.6.1 is made available subject to the terms and
-conditions in CNRI's License Agreement. This Agreement together with
-Python 1.6.1 may be located on the Internet using the following
-unique, persistent identifier (known as a handle): 1895.22/1013. This
-Agreement may also be obtained from a proxy server on the Internet
-using the following URL: http://hdl.handle.net/1895.22/1013".
-
-3. In the event Licensee prepares a derivative work that is based on
-or incorporates Python 1.6.1 or any part thereof, and wants to make
-the derivative work available to others as provided herein, then
-Licensee hereby agrees to include in any such work a brief summary of
-the changes made to Python 1.6.1.
-
-4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
-basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
-1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
-A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
-OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-6. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-7. This License Agreement shall be governed by the federal
-intellectual property law of the United States, including without
-limitation the federal copyright law, and, to the extent such
-U.S. federal law does not apply, by the law of the Commonwealth of
-Virginia, excluding Virginia's conflict of law provisions.
-Notwithstanding the foregoing, with regard to derivative works based
-on Python 1.6.1 that incorporate non-separable material that was
-previously distributed under the GNU General Public License (GPL), the
-law of the Commonwealth of Virginia shall govern this License
-Agreement only as to issues arising under or with respect to
-Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
-License Agreement shall be deemed to create any relationship of
-agency, partnership, or joint venture between CNRI and Licensee. This
-License Agreement does not grant permission to use CNRI trademarks or
-trade name in a trademark sense to endorse or promote products or
-services of Licensee, or any third party.
-
-8. By clicking on the "ACCEPT" button where indicated, or by copying,
-installing or otherwise using Python 1.6.1, Licensee agrees to be
-bound by the terms and conditions of this License Agreement.
-
- ACCEPT
-
-
-CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
---------------------------------------------------
-
-Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
-The Netherlands. All rights reserved.
-
-Permission to use, copy, modify, and distribute this software and its
-documentation for any purpose and without fee is hereby granted,
-provided that the above copyright notice appear in all copies and that
-both that copyright notice and this permission notice appear in
-supporting documentation, and that the name of Stichting Mathematisch
-Centrum or CWI not be used in advertising or publicity pertaining to
-distribution of the software without specific, written prior
-permission.
-
-STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
-THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
-FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
-OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/lib/spack/external/py2/functools32/__init__.py b/lib/spack/external/py2/functools32/__init__.py
deleted file mode 100644
index 837f7fb651..0000000000
--- a/lib/spack/external/py2/functools32/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .functools32 import *
diff --git a/lib/spack/external/py2/functools32/_dummy_thread32.py b/lib/spack/external/py2/functools32/_dummy_thread32.py
deleted file mode 100644
index 8503b0e3dd..0000000000
--- a/lib/spack/external/py2/functools32/_dummy_thread32.py
+++ /dev/null
@@ -1,158 +0,0 @@
-"""Drop-in replacement for the thread module.
-
-Meant to be used as a brain-dead substitute so that threaded code does
-not need to be rewritten for when the thread module is not present.
-
-Suggested usage is::
-
- try:
- try:
- import _thread # Python >= 3
- except:
- import thread as _thread # Python < 3
- except ImportError:
- import _dummy_thread as _thread
-
-"""
-# Exports only things specified by thread documentation;
-# skipping obsolete synonyms allocate(), start_new(), exit_thread().
-__all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock',
- 'interrupt_main', 'LockType']
-
-# A dummy value
-TIMEOUT_MAX = 2**31
-
-# NOTE: this module can be imported early in the extension building process,
-# and so top level imports of other modules should be avoided. Instead, all
-# imports are done when needed on a function-by-function basis. Since threads
-# are disabled, the import lock should not be an issue anyway (??).
-
-class error(Exception):
- """Dummy implementation of _thread.error."""
-
- def __init__(self, *args):
- self.args = args
-
-def start_new_thread(function, args, kwargs={}):
- """Dummy implementation of _thread.start_new_thread().
-
- Compatibility is maintained by making sure that ``args`` is a
- tuple and ``kwargs`` is a dictionary. If an exception is raised
- and it is SystemExit (which can be done by _thread.exit()) it is
- caught and nothing is done; all other exceptions are printed out
- by using traceback.print_exc().
-
- If the executed function calls interrupt_main the KeyboardInterrupt will be
- raised when the function returns.
-
- """
- if type(args) != type(tuple()):
- raise TypeError("2nd arg must be a tuple")
- if type(kwargs) != type(dict()):
- raise TypeError("3rd arg must be a dict")
- global _main
- _main = False
- try:
- function(*args, **kwargs)
- except SystemExit:
- pass
- except:
- import traceback
- traceback.print_exc()
- _main = True
- global _interrupt
- if _interrupt:
- _interrupt = False
- raise KeyboardInterrupt
-
-def exit():
- """Dummy implementation of _thread.exit()."""
- raise SystemExit
-
-def get_ident():
- """Dummy implementation of _thread.get_ident().
-
- Since this module should only be used when _threadmodule is not
- available, it is safe to assume that the current process is the
- only thread. Thus a constant can be safely returned.
- """
- return -1
-
-def allocate_lock():
- """Dummy implementation of _thread.allocate_lock()."""
- return LockType()
-
-def stack_size(size=None):
- """Dummy implementation of _thread.stack_size()."""
- if size is not None:
- raise error("setting thread stack size not supported")
- return 0
-
-class LockType(object):
- """Class implementing dummy implementation of _thread.LockType.
-
- Compatibility is maintained by maintaining self.locked_status
- which is a boolean that stores the state of the lock. Pickling of
- the lock, though, should not be done since if the _thread module is
- then used with an unpickled ``lock()`` from here problems could
- occur from this class not having atomic methods.
-
- """
-
- def __init__(self):
- self.locked_status = False
-
- def acquire(self, waitflag=None, timeout=-1):
- """Dummy implementation of acquire().
-
- For blocking calls, self.locked_status is automatically set to
- True and returned appropriately based on value of
- ``waitflag``. If it is non-blocking, then the value is
- actually checked and not set if it is already acquired. This
- is all done so that threading.Condition's assert statements
- aren't triggered and throw a little fit.
-
- """
- if waitflag is None or waitflag:
- self.locked_status = True
- return True
- else:
- if not self.locked_status:
- self.locked_status = True
- return True
- else:
- if timeout > 0:
- import time
- time.sleep(timeout)
- return False
-
- __enter__ = acquire
-
- def __exit__(self, typ, val, tb):
- self.release()
-
- def release(self):
- """Release the dummy lock."""
- # XXX Perhaps shouldn't actually bother to test? Could lead
- # to problems for complex, threaded code.
- if not self.locked_status:
- raise error
- self.locked_status = False
- return True
-
- def locked(self):
- return self.locked_status
-
-# Used to signal that interrupt_main was called in a "thread"
-_interrupt = False
-# True when not executing in a "thread"
-_main = True
-
-def interrupt_main():
- """Set _interrupt flag to True to have start_new_thread raise
- KeyboardInterrupt upon exiting."""
- if _main:
- raise KeyboardInterrupt
- else:
- global _interrupt
- _interrupt = True
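The import fallback recommended in the removed docstring still works on Python 3, where the real _thread module is always present; a hedged sketch (note that _dummy_thread itself was dropped from the standard library in Python 3.9):

    try:
        import _thread
    except ImportError:                    # interpreter built without thread support
        import _dummy_thread as _thread    # removed from the stdlib in Python 3.9

    lock = _thread.allocate_lock()
    with lock:                             # dummy and real locks share this interface
        pass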
diff --git a/lib/spack/external/py2/functools32/functools32.py b/lib/spack/external/py2/functools32/functools32.py
deleted file mode 100644
index c44551fac0..0000000000
--- a/lib/spack/external/py2/functools32/functools32.py
+++ /dev/null
@@ -1,423 +0,0 @@
-"""functools.py - Tools for working with functions and callable objects
-"""
-# Python module wrapper for _functools C module
-# to allow utilities written in Python to be added
-# to the functools module.
-# Written by Nick Coghlan <ncoghlan at gmail.com>
-# and Raymond Hettinger <python at rcn.com>
-# Copyright (C) 2006-2010 Python Software Foundation.
-# See C source code for _functools credits/copyright
-
-__all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES',
- 'total_ordering', 'cmp_to_key', 'lru_cache', 'reduce', 'partial']
-
-from _functools import partial, reduce
-from collections import MutableMapping, namedtuple
-from .reprlib32 import recursive_repr as _recursive_repr
-from weakref import proxy as _proxy
-import sys as _sys
-try:
- from thread import allocate_lock as Lock
-except ImportError:
- from ._dummy_thread32 import allocate_lock as Lock
-
-################################################################################
-### OrderedDict
-################################################################################
-
-class _Link(object):
- __slots__ = 'prev', 'next', 'key', '__weakref__'
-
-class OrderedDict(dict):
- 'Dictionary that remembers insertion order'
- # An inherited dict maps keys to values.
- # The inherited dict provides __getitem__, __len__, __contains__, and get.
- # The remaining methods are order-aware.
- # Big-O running times for all methods are the same as regular dictionaries.
-
- # The internal self.__map dict maps keys to links in a doubly linked list.
- # The circular doubly linked list starts and ends with a sentinel element.
- # The sentinel element never gets deleted (this simplifies the algorithm).
- # The sentinel is in self.__hardroot with a weakref proxy in self.__root.
- # The prev links are weakref proxies (to prevent circular references).
- # Individual links are kept alive by the hard reference in self.__map.
- # Those hard references disappear when a key is deleted from an OrderedDict.
-
- def __init__(self, *args, **kwds):
- '''Initialize an ordered dictionary. The signature is the same as
- regular dictionaries, but keyword arguments are not recommended because
- their insertion order is arbitrary.
-
- '''
- if len(args) > 1:
- raise TypeError('expected at most 1 arguments, got %d' % len(args))
- try:
- self.__root
- except AttributeError:
- self.__hardroot = _Link()
- self.__root = root = _proxy(self.__hardroot)
- root.prev = root.next = root
- self.__map = {}
- self.__update(*args, **kwds)
-
- def __setitem__(self, key, value,
- dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link):
- 'od.__setitem__(i, y) <==> od[i]=y'
- # Setting a new item creates a new link at the end of the linked list,
- # and the inherited dictionary is updated with the new key/value pair.
- if key not in self:
- self.__map[key] = link = Link()
- root = self.__root
- last = root.prev
- link.prev, link.next, link.key = last, root, key
- last.next = link
- root.prev = proxy(link)
- dict_setitem(self, key, value)
-
- def __delitem__(self, key, dict_delitem=dict.__delitem__):
- 'od.__delitem__(y) <==> del od[y]'
- # Deleting an existing item uses self.__map to find the link which gets
- # removed by updating the links in the predecessor and successor nodes.
- dict_delitem(self, key)
- link = self.__map.pop(key)
- link_prev = link.prev
- link_next = link.next
- link_prev.next = link_next
- link_next.prev = link_prev
-
- def __iter__(self):
- 'od.__iter__() <==> iter(od)'
- # Traverse the linked list in order.
- root = self.__root
- curr = root.next
- while curr is not root:
- yield curr.key
- curr = curr.next
-
- def __reversed__(self):
- 'od.__reversed__() <==> reversed(od)'
- # Traverse the linked list in reverse order.
- root = self.__root
- curr = root.prev
- while curr is not root:
- yield curr.key
- curr = curr.prev
-
- def clear(self):
- 'od.clear() -> None. Remove all items from od.'
- root = self.__root
- root.prev = root.next = root
- self.__map.clear()
- dict.clear(self)
-
- def popitem(self, last=True):
- '''od.popitem() -> (k, v), return and remove a (key, value) pair.
- Pairs are returned in LIFO order if last is true or FIFO order if false.
-
- '''
- if not self:
- raise KeyError('dictionary is empty')
- root = self.__root
- if last:
- link = root.prev
- link_prev = link.prev
- link_prev.next = root
- root.prev = link_prev
- else:
- link = root.next
- link_next = link.next
- root.next = link_next
- link_next.prev = root
- key = link.key
- del self.__map[key]
- value = dict.pop(self, key)
- return key, value
-
- def move_to_end(self, key, last=True):
- '''Move an existing element to the end (or beginning if last==False).
-
- Raises KeyError if the element does not exist.
- When last=True, acts like a fast version of self[key]=self.pop(key).
-
- '''
- link = self.__map[key]
- link_prev = link.prev
- link_next = link.next
- link_prev.next = link_next
- link_next.prev = link_prev
- root = self.__root
- if last:
- last = root.prev
- link.prev = last
- link.next = root
- last.next = root.prev = link
- else:
- first = root.next
- link.prev = root
- link.next = first
- root.next = first.prev = link
-
- def __sizeof__(self):
- sizeof = _sys.getsizeof
- n = len(self) + 1 # number of links including root
- size = sizeof(self.__dict__) # instance dictionary
- size += sizeof(self.__map) * 2 # internal dict and inherited dict
- size += sizeof(self.__hardroot) * n # link objects
- size += sizeof(self.__root) * n # proxy objects
- return size
-
- update = __update = MutableMapping.update
- keys = MutableMapping.keys
- values = MutableMapping.values
- items = MutableMapping.items
- __ne__ = MutableMapping.__ne__
-
- __marker = object()
-
- def pop(self, key, default=__marker):
- '''od.pop(k[,d]) -> v, remove specified key and return the corresponding
- value. If key is not found, d is returned if given, otherwise KeyError
- is raised.
-
- '''
- if key in self:
- result = self[key]
- del self[key]
- return result
- if default is self.__marker:
- raise KeyError(key)
- return default
-
- def setdefault(self, key, default=None):
- 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
- if key in self:
- return self[key]
- self[key] = default
- return default
-
- @_recursive_repr()
- def __repr__(self):
- 'od.__repr__() <==> repr(od)'
- if not self:
- return '%s()' % (self.__class__.__name__,)
- return '%s(%r)' % (self.__class__.__name__, list(self.items()))
-
- def __reduce__(self):
- 'Return state information for pickling'
- items = [[k, self[k]] for k in self]
- inst_dict = vars(self).copy()
- for k in vars(OrderedDict()):
- inst_dict.pop(k, None)
- if inst_dict:
- return (self.__class__, (items,), inst_dict)
- return self.__class__, (items,)
-
- def copy(self):
- 'od.copy() -> a shallow copy of od'
- return self.__class__(self)
-
- @classmethod
- def fromkeys(cls, iterable, value=None):
- '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S.
- If not specified, the value defaults to None.
-
- '''
- self = cls()
- for key in iterable:
- self[key] = value
- return self
-
- def __eq__(self, other):
- '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
- while comparison to a regular mapping is order-insensitive.
-
- '''
- if isinstance(other, OrderedDict):
- return len(self)==len(other) and \
- all(p==q for p, q in zip(self.items(), other.items()))
- return dict.__eq__(self, other)
-
-# update_wrapper() and wraps() are tools to help write
-# wrapper functions that can handle naive introspection
-
-WRAPPER_ASSIGNMENTS = ('__module__', '__name__', '__doc__')
-WRAPPER_UPDATES = ('__dict__',)
-def update_wrapper(wrapper,
- wrapped,
- assigned = WRAPPER_ASSIGNMENTS,
- updated = WRAPPER_UPDATES):
- """Update a wrapper function to look like the wrapped function
-
- wrapper is the function to be updated
- wrapped is the original function
- assigned is a tuple naming the attributes assigned directly
- from the wrapped function to the wrapper function (defaults to
- functools.WRAPPER_ASSIGNMENTS)
- updated is a tuple naming the attributes of the wrapper that
- are updated with the corresponding attribute from the wrapped
- function (defaults to functools.WRAPPER_UPDATES)
- """
- wrapper.__wrapped__ = wrapped
- for attr in assigned:
- try:
- value = getattr(wrapped, attr)
- except AttributeError:
- pass
- else:
- setattr(wrapper, attr, value)
- for attr in updated:
- getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
- # Return the wrapper so this can be used as a decorator via partial()
- return wrapper
-
-def wraps(wrapped,
- assigned = WRAPPER_ASSIGNMENTS,
- updated = WRAPPER_UPDATES):
- """Decorator factory to apply update_wrapper() to a wrapper function
-
- Returns a decorator that invokes update_wrapper() with the decorated
- function as the wrapper argument and the arguments to wraps() as the
- remaining arguments. Default arguments are as for update_wrapper().
- This is a convenience function to simplify applying partial() to
- update_wrapper().
- """
- return partial(update_wrapper, wrapped=wrapped,
- assigned=assigned, updated=updated)
-
-def total_ordering(cls):
- """Class decorator that fills in missing ordering methods"""
- convert = {
- '__lt__': [('__gt__', lambda self, other: not (self < other or self == other)),
- ('__le__', lambda self, other: self < other or self == other),
- ('__ge__', lambda self, other: not self < other)],
- '__le__': [('__ge__', lambda self, other: not self <= other or self == other),
- ('__lt__', lambda self, other: self <= other and not self == other),
- ('__gt__', lambda self, other: not self <= other)],
- '__gt__': [('__lt__', lambda self, other: not (self > other or self == other)),
- ('__ge__', lambda self, other: self > other or self == other),
- ('__le__', lambda self, other: not self > other)],
- '__ge__': [('__le__', lambda self, other: (not self >= other) or self == other),
- ('__gt__', lambda self, other: self >= other and not self == other),
- ('__lt__', lambda self, other: not self >= other)]
- }
- roots = set(dir(cls)) & set(convert)
- if not roots:
- raise ValueError('must define at least one ordering operation: < > <= >=')
- root = max(roots) # prefer __lt__ to __le__ to __gt__ to __ge__
- for opname, opfunc in convert[root]:
- if opname not in roots:
- opfunc.__name__ = opname
- opfunc.__doc__ = getattr(int, opname).__doc__
- setattr(cls, opname, opfunc)
- return cls
-
-def cmp_to_key(mycmp):
- """Convert a cmp= function into a key= function"""
- class K(object):
- __slots__ = ['obj']
- def __init__(self, obj):
- self.obj = obj
- def __lt__(self, other):
- return mycmp(self.obj, other.obj) < 0
- def __gt__(self, other):
- return mycmp(self.obj, other.obj) > 0
- def __eq__(self, other):
- return mycmp(self.obj, other.obj) == 0
- def __le__(self, other):
- return mycmp(self.obj, other.obj) <= 0
- def __ge__(self, other):
- return mycmp(self.obj, other.obj) >= 0
- def __ne__(self, other):
- return mycmp(self.obj, other.obj) != 0
- __hash__ = None
- return K
-
-_CacheInfo = namedtuple("CacheInfo", "hits misses maxsize currsize")
-
-def lru_cache(maxsize=100):
- """Least-recently-used cache decorator.
-
- If *maxsize* is set to None, the LRU features are disabled and the cache
- can grow without bound.
-
- Arguments to the cached function must be hashable.
-
- View the cache statistics named tuple (hits, misses, maxsize, currsize) with
- f.cache_info(). Clear the cache and statistics with f.cache_clear().
- Access the underlying function with f.__wrapped__.
-
- See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
-
- """
- # Users should only access the lru_cache through its public API:
- # cache_info, cache_clear, and f.__wrapped__
- # The internals of the lru_cache are encapsulated for thread safety and
- # to allow the implementation to change (including a possible C version).
-
- def decorating_function(user_function,
- tuple=tuple, sorted=sorted, len=len, KeyError=KeyError):
-
- hits, misses = [0], [0]
- kwd_mark = (object(),) # separates positional and keyword args
- lock = Lock() # needed because OrderedDict isn't threadsafe
-
- if maxsize is None:
- cache = dict() # simple cache without ordering or size limit
-
- @wraps(user_function)
- def wrapper(*args, **kwds):
- key = args
- if kwds:
- key += kwd_mark + tuple(sorted(kwds.items()))
- try:
- result = cache[key]
- hits[0] += 1
- return result
- except KeyError:
- pass
- result = user_function(*args, **kwds)
- cache[key] = result
- misses[0] += 1
- return result
- else:
- cache = OrderedDict() # ordered least recent to most recent
- cache_popitem = cache.popitem
- cache_renew = cache.move_to_end
-
- @wraps(user_function)
- def wrapper(*args, **kwds):
- key = args
- if kwds:
- key += kwd_mark + tuple(sorted(kwds.items()))
- with lock:
- try:
- result = cache[key]
- cache_renew(key) # record recent use of this key
- hits[0] += 1
- return result
- except KeyError:
- pass
- result = user_function(*args, **kwds)
- with lock:
- cache[key] = result # record recent use of this key
- misses[0] += 1
- if len(cache) > maxsize:
- cache_popitem(0) # purge least recently used cache entry
- return result
-
- def cache_info():
- """Report cache statistics"""
- with lock:
- return _CacheInfo(hits[0], misses[0], maxsize, len(cache))
-
- def cache_clear():
- """Clear the cache and cache statistics"""
- with lock:
- cache.clear()
- hits[0] = misses[0] = 0
-
- wrapper.cache_info = cache_info
- wrapper.cache_clear = cache_clear
- return wrapper
-
- return decorating_function
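The backport deleted above mirrors the standard-library functools API that Python 3 ships natively. A minimal usage sketch of the stdlib equivalents (the fib/compare names are illustrative, not Spack code):

    import functools

    @functools.lru_cache(maxsize=128)
    def fib(n):
        # results are memoized; repeated calls for the same n hit the cache
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    fib(30)
    print(fib.cache_info())  # CacheInfo(hits=..., misses=..., maxsize=128, currsize=...)
    fib.cache_clear()

    # cmp_to_key converts an old-style cmp function into a key= function
    ordered = sorted([3, 1, 2], key=functools.cmp_to_key(lambda a, b: a - b))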
diff --git a/lib/spack/external/py2/functools32/reprlib32.py b/lib/spack/external/py2/functools32/reprlib32.py
deleted file mode 100644
index af919758ca..0000000000
--- a/lib/spack/external/py2/functools32/reprlib32.py
+++ /dev/null
@@ -1,157 +0,0 @@
-"""Redo the builtin repr() (representation) but with limits on most sizes."""
-
-__all__ = ["Repr", "repr", "recursive_repr"]
-
-import __builtin__ as builtins
-from itertools import islice
-try:
- from thread import get_ident
-except ImportError:
- from _dummy_thread32 import get_ident
-
-def recursive_repr(fillvalue='...'):
- 'Decorator to make a repr function return fillvalue for a recursive call'
-
- def decorating_function(user_function):
- repr_running = set()
-
- def wrapper(self):
- key = id(self), get_ident()
- if key in repr_running:
- return fillvalue
- repr_running.add(key)
- try:
- result = user_function(self)
- finally:
- repr_running.discard(key)
- return result
-
- # Can't use functools.wraps() here because of bootstrap issues
- wrapper.__module__ = getattr(user_function, '__module__')
- wrapper.__doc__ = getattr(user_function, '__doc__')
- wrapper.__name__ = getattr(user_function, '__name__')
- wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
- return wrapper
-
- return decorating_function
-
-class Repr:
-
- def __init__(self):
- self.maxlevel = 6
- self.maxtuple = 6
- self.maxlist = 6
- self.maxarray = 5
- self.maxdict = 4
- self.maxset = 6
- self.maxfrozenset = 6
- self.maxdeque = 6
- self.maxstring = 30
- self.maxlong = 40
- self.maxother = 30
-
- def repr(self, x):
- return self.repr1(x, self.maxlevel)
-
- def repr1(self, x, level):
- typename = type(x).__name__
- if ' ' in typename:
- parts = typename.split()
- typename = '_'.join(parts)
- if hasattr(self, 'repr_' + typename):
- return getattr(self, 'repr_' + typename)(x, level)
- else:
- return self.repr_instance(x, level)
-
- def _repr_iterable(self, x, level, left, right, maxiter, trail=''):
- n = len(x)
- if level <= 0 and n:
- s = '...'
- else:
- newlevel = level - 1
- repr1 = self.repr1
- pieces = [repr1(elem, newlevel) for elem in islice(x, maxiter)]
- if n > maxiter: pieces.append('...')
- s = ', '.join(pieces)
- if n == 1 and trail: right = trail + right
- return '%s%s%s' % (left, s, right)
-
- def repr_tuple(self, x, level):
- return self._repr_iterable(x, level, '(', ')', self.maxtuple, ',')
-
- def repr_list(self, x, level):
- return self._repr_iterable(x, level, '[', ']', self.maxlist)
-
- def repr_array(self, x, level):
- header = "array('%s', [" % x.typecode
- return self._repr_iterable(x, level, header, '])', self.maxarray)
-
- def repr_set(self, x, level):
- x = _possibly_sorted(x)
- return self._repr_iterable(x, level, 'set([', '])', self.maxset)
-
- def repr_frozenset(self, x, level):
- x = _possibly_sorted(x)
- return self._repr_iterable(x, level, 'frozenset([', '])',
- self.maxfrozenset)
-
- def repr_deque(self, x, level):
- return self._repr_iterable(x, level, 'deque([', '])', self.maxdeque)
-
- def repr_dict(self, x, level):
- n = len(x)
- if n == 0: return '{}'
- if level <= 0: return '{...}'
- newlevel = level - 1
- repr1 = self.repr1
- pieces = []
- for key in islice(_possibly_sorted(x), self.maxdict):
- keyrepr = repr1(key, newlevel)
- valrepr = repr1(x[key], newlevel)
- pieces.append('%s: %s' % (keyrepr, valrepr))
- if n > self.maxdict: pieces.append('...')
- s = ', '.join(pieces)
- return '{%s}' % (s,)
-
- def repr_str(self, x, level):
- s = builtins.repr(x[:self.maxstring])
- if len(s) > self.maxstring:
- i = max(0, (self.maxstring-3)//2)
- j = max(0, self.maxstring-3-i)
- s = builtins.repr(x[:i] + x[len(x)-j:])
- s = s[:i] + '...' + s[len(s)-j:]
- return s
-
- def repr_int(self, x, level):
- s = builtins.repr(x) # XXX Hope this isn't too slow...
- if len(s) > self.maxlong:
- i = max(0, (self.maxlong-3)//2)
- j = max(0, self.maxlong-3-i)
- s = s[:i] + '...' + s[len(s)-j:]
- return s
-
- def repr_instance(self, x, level):
- try:
- s = builtins.repr(x)
- # Bugs in x.__repr__() can cause arbitrary
- # exceptions -- then make up something
- except Exception:
- return '<%s instance at %x>' % (x.__class__.__name__, id(x))
- if len(s) > self.maxother:
- i = max(0, (self.maxother-3)//2)
- j = max(0, self.maxother-3-i)
- s = s[:i] + '...' + s[len(s)-j:]
- return s
-
-
-def _possibly_sorted(x):
- # Since not all sequences of items can be sorted and comparison
- # functions may raise arbitrary exceptions, return an unsorted
- # sequence in that case.
- try:
- return sorted(x)
- except Exception:
- return list(x)
-
-aRepr = Repr()
-repr = aRepr.repr
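The removed reprlib32 shim duplicates the Python 3 reprlib module; a short sketch of the stdlib version (limits chosen arbitrarily for illustration):

    import reprlib

    r = reprlib.Repr()
    r.maxlist = 3      # show at most three list elements before eliding
    r.maxstring = 20   # truncate long strings
    print(r.repr(list(range(100))))  # e.g. '[0, 1, 2, ...]'
    print(reprlib.repr("x" * 100))   # module-level helper with default limits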
diff --git a/lib/spack/external/py2/typing.py b/lib/spack/external/py2/typing.py
deleted file mode 100644
index a74bd4a1ea..0000000000
--- a/lib/spack/external/py2/typing.py
+++ /dev/null
@@ -1,103 +0,0 @@
-# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-"""
-This is a fake set of symbols to allow spack to import typing in python
-versions where we do not support type checking (<3)
-"""
-from collections import defaultdict
-
-# (1) Unparameterized types.
-Annotated = object
-Any = object
-AnyStr = object
-ByteString = object
-Counter = object
-Final = object
-Hashable = object
-NoReturn = object
-Sized = object
-SupportsAbs = object
-SupportsBytes = object
-SupportsComplex = object
-SupportsFloat = object
-SupportsIndex = object
-SupportsInt = object
-SupportsRound = object
-
-# (2) Parameterized types.
-AbstractSet = defaultdict(lambda: object)
-AsyncContextManager = defaultdict(lambda: object)
-AsyncGenerator = defaultdict(lambda: object)
-AsyncIterable = defaultdict(lambda: object)
-AsyncIterator = defaultdict(lambda: object)
-Awaitable = defaultdict(lambda: object)
-Callable = defaultdict(lambda: object)
-ChainMap = defaultdict(lambda: object)
-ClassVar = defaultdict(lambda: object)
-Collection = defaultdict(lambda: object)
-Container = defaultdict(lambda: object)
-ContextManager = defaultdict(lambda: object)
-Coroutine = defaultdict(lambda: object)
-DefaultDict = defaultdict(lambda: object)
-Deque = defaultdict(lambda: object)
-Dict = defaultdict(lambda: object)
-ForwardRef = defaultdict(lambda: object)
-FrozenSet = defaultdict(lambda: object)
-Generator = defaultdict(lambda: object)
-Generic = defaultdict(lambda: object)
-ItemsView = defaultdict(lambda: object)
-Iterable = defaultdict(lambda: object)
-Iterator = defaultdict(lambda: object)
-KeysView = defaultdict(lambda: object)
-List = defaultdict(lambda: object)
-Literal = defaultdict(lambda: object)
-Mapping = defaultdict(lambda: object)
-MappingView = defaultdict(lambda: object)
-MutableMapping = defaultdict(lambda: object)
-MutableSequence = defaultdict(lambda: object)
-MutableSet = defaultdict(lambda: object)
-NamedTuple = defaultdict(lambda: object)
-Optional = defaultdict(lambda: object)
-OrderedDict = defaultdict(lambda: object)
-Reversible = defaultdict(lambda: object)
-Sequence = defaultdict(lambda: object)
-Set = defaultdict(lambda: object)
-Tuple = defaultdict(lambda: object)
-Type = defaultdict(lambda: object)
-TypedDict = defaultdict(lambda: object)
-Union = defaultdict(lambda: object)
-ValuesView = defaultdict(lambda: object)
-
-# (3) Type variable declarations.
-TypeVar = lambda *args, **kwargs: None
-
-# (4) Functions.
-cast = lambda _type, x: x
-get_args = None
-get_origin = None
-get_type_hints = None
-no_type_check = None
-no_type_check_decorator = None
-
-## typing_extensions
-# We get a ModuleNotFoundError when attempting to import anything from typing_extensions
-# if we separate this into a separate typing_extensions.py file for some reason.
-
-# (1) Unparameterized types.
-IntVar = object
-Literal = object
-NewType = object
-Text = object
-
-# (2) Parameterized types.
-Protocol = defaultdict(lambda: object)
-
-# (3) Macro for avoiding evaluation except during type checking.
-TYPE_CHECKING = False
-
-# (4) Decorators.
-final = lambda x: x
-overload = lambda x: x
-runtime_checkable = lambda x: x
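With the fake typing module gone, core code and packages can use the real typing module directly; a minimal sketch (the function below is hypothetical, not Spack code):

    from typing import Dict, List, Optional

    def group_by_first_letter(names: List[str]) -> Dict[str, List[str]]:
        groups: Dict[str, List[str]] = {}
        for name in names:
            groups.setdefault(name[:1], []).append(name)
        return groups

    maybe_name: Optional[str] = None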
diff --git a/lib/spack/llnl/util/compat.py b/lib/spack/llnl/util/compat.py
deleted file mode 100644
index ebe509f3a7..0000000000
--- a/lib/spack/llnl/util/compat.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-# isort: off
-
-import sys
-
-if sys.version_info < (3,):
- from itertools import ifilter as filter
- from itertools import imap as map
- from itertools import izip as zip
- from itertools import izip_longest as zip_longest # novm
- from urllib import urlencode as urlencode
- from urllib import urlopen as urlopen
-else:
- filter = filter
- map = map
- zip = zip
- from itertools import zip_longest as zip_longest # novm # noqa: F401
- from urllib.parse import urlencode as urlencode # novm # noqa: F401
- from urllib.request import urlopen as urlopen # novm # noqa: F401
-
-if sys.version_info >= (3, 3):
- from collections.abc import Hashable as Hashable # novm
- from collections.abc import Iterable as Iterable # novm
- from collections.abc import Mapping as Mapping # novm
- from collections.abc import MutableMapping as MutableMapping # novm
- from collections.abc import MutableSequence as MutableSequence # novm
- from collections.abc import MutableSet as MutableSet # novm
- from collections.abc import Sequence as Sequence # novm
-else:
- from collections import Hashable as Hashable # noqa: F401
- from collections import Iterable as Iterable # noqa: F401
- from collections import Mapping as Mapping # noqa: F401
- from collections import MutableMapping as MutableMapping # noqa: F401
- from collections import MutableSequence as MutableSequence # noqa: F401
- from collections import MutableSet as MutableSet # noqa: F401
- from collections import Sequence as Sequence # noqa: F401
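Every alias the deleted compat module provided has a direct Python 3 home; a sketch of the replacement imports used throughout the rest of this diff:

    import collections.abc
    import itertools
    from urllib.parse import urlencode
    from urllib.request import urlopen

    pairs = list(itertools.zip_longest([1, 2, 3], ["a", "b"], fillvalue=None))
    assert isinstance({}, collections.abc.MutableMapping)
    query = urlencode({"q": "spack"})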
diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py
index aece52f843..30ecc1ebc8 100644
--- a/lib/spack/llnl/util/filesystem.py
+++ b/lib/spack/llnl/util/filesystem.py
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
+import collections.abc
import errno
import glob
import hashlib
@@ -20,7 +21,6 @@ from sys import platform as _platform
import six
from llnl.util import tty
-from llnl.util.compat import Sequence
from llnl.util.lang import dedupe, memoized
from llnl.util.symlink import islink, symlink
@@ -290,9 +290,7 @@ def filter_file(regex, repl, *filenames, **kwargs):
shutil.copy(filename, tmp_filename)
try:
- extra_kwargs = {}
- if sys.version_info > (3, 0):
- extra_kwargs = {"errors": "surrogateescape"}
+ extra_kwargs = {"errors": "surrogateescape"}
# Open as a text file and filter until the end of the file is
# reached or we found a marker in the line if it was specified
@@ -1309,46 +1307,34 @@ def visit_directory_tree(root, visitor, rel_path="", depth=0):
depth (int): current depth from the root
"""
dir = os.path.join(root, rel_path)
-
- if sys.version_info >= (3, 5, 0):
- dir_entries = sorted(os.scandir(dir), key=lambda d: d.name) # novermin
- else:
- dir_entries = os.listdir(dir)
- dir_entries.sort()
+ dir_entries = sorted(os.scandir(dir), key=lambda d: d.name)
for f in dir_entries:
- if sys.version_info >= (3, 5, 0):
- rel_child = os.path.join(rel_path, f.name)
- islink = f.is_symlink()
- # On Windows, symlinks to directories are distinct from
- # symlinks to files, and it is possible to create a
- # broken symlink to a directory (e.g. using os.symlink
- # without `target_is_directory=True`), invoking `isdir`
- # on a symlink on Windows that is broken in this manner
- # will result in an error. In this case we can work around
- # the issue by reading the target and resolving the
- # directory ourselves
- try:
- isdir = f.is_dir()
- except OSError as e:
- if is_windows and hasattr(e, "winerror") and e.winerror == 5 and islink:
- # if path is a symlink, determine destination and
- # evaluate file vs directory
- link_target = resolve_link_target_relative_to_the_link(f)
- # link_target might be relative but
- # resolve_link_target_relative_to_the_link
- # will ensure that if so, that it is relative
- # to the CWD and therefore
- # makes sense
- isdir = os.path.isdir(link_target)
- else:
- raise e
-
- else:
- rel_child = os.path.join(rel_path, f)
- lexists, islink, isdir = lexists_islink_isdir(os.path.join(dir, f))
- if not lexists:
- continue
+ rel_child = os.path.join(rel_path, f.name)
+ islink = f.is_symlink()
+ # On Windows, symlinks to directories are distinct from
+ # symlinks to files, and it is possible to create a
+ # broken symlink to a directory (e.g. using os.symlink
+ # without `target_is_directory=True`), invoking `isdir`
+ # on a symlink on Windows that is broken in this manner
+ # will result in an error. In this case we can work around
+ # the issue by reading the target and resolving the
+ # directory ourselves
+ try:
+ isdir = f.is_dir()
+ except OSError as e:
+ if is_windows and hasattr(e, "winerror") and e.winerror == 5 and islink:
+ # if path is a symlink, determine destination and
+ # evaluate file vs directory
+ link_target = resolve_link_target_relative_to_the_link(f)
+ # link_target might be relative but
+ # resolve_link_target_relative_to_the_link
+ # will ensure that if so, that it is relative
+ # to the CWD and therefore
+ # makes sense
+ isdir = os.path.isdir(link_target)
+ else:
+ raise e
if not isdir and not islink:
# handle non-symlink files
@@ -1609,7 +1595,7 @@ def find(root, files, recursive=True):
Parameters:
root (str): The root directory to start searching from
- files (str or Sequence): Library name(s) to search for
+ files (str or collections.abc.Sequence): Library name(s) to search for
recursive (bool): if False search only root folder,
if True descends top-down from the root. Defaults to True.
@@ -1673,7 +1659,7 @@ def _find_non_recursive(root, search_files):
# Utilities for libraries and headers
-class FileList(Sequence):
+class FileList(collections.abc.Sequence):
"""Sequence of absolute paths to files.
Provides a few convenience methods to manipulate file paths.
@@ -1914,7 +1900,7 @@ def find_headers(headers, root, recursive=False):
"""
if isinstance(headers, six.string_types):
headers = [headers]
- elif not isinstance(headers, Sequence):
+ elif not isinstance(headers, collections.abc.Sequence):
message = "{0} expects a string or sequence of strings as the "
message += "first argument [got {1} instead]"
message = message.format(find_headers.__name__, type(headers))
@@ -2080,7 +2066,7 @@ def find_system_libraries(libraries, shared=True):
"""
if isinstance(libraries, six.string_types):
libraries = [libraries]
- elif not isinstance(libraries, Sequence):
+ elif not isinstance(libraries, collections.abc.Sequence):
message = "{0} expects a string or sequence of strings as the "
message += "first argument [got {1} instead]"
message = message.format(find_system_libraries.__name__, type(libraries))
@@ -2137,7 +2123,7 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
"""
if isinstance(libraries, six.string_types):
libraries = [libraries]
- elif not isinstance(libraries, Sequence):
+ elif not isinstance(libraries, collections.abc.Sequence):
message = "{0} expects a string or sequence of strings as the "
message += "first argument [got {1} instead]"
message = message.format(find_libraries.__name__, type(libraries))
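The visit_directory_tree change above drops the os.listdir fallback and always uses os.scandir; a simplified, stand-alone sketch of the same pattern, including the Windows broken-symlink workaround (function name is illustrative):

    import os

    def iter_entries(root):
        # deterministic ordering, mirroring the sorted os.scandir() call above
        for entry in sorted(os.scandir(root), key=lambda d: d.name):
            islink = entry.is_symlink()
            try:
                isdir = entry.is_dir()
            except OSError:
                # a broken symlink to a directory on Windows can raise here;
                # fall back to resolving the target ourselves
                isdir = os.path.isdir(os.path.realpath(entry.path))
            yield entry.path, islink, isdir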
diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py
index 51bd710ddb..d8f3cc9b9b 100644
--- a/lib/spack/llnl/util/lang.py
+++ b/lib/spack/llnl/util/lang.py
@@ -5,9 +5,11 @@
from __future__ import division
+import collections.abc
import contextlib
import functools
import inspect
+import itertools
import os
import re
import sys
@@ -18,8 +20,6 @@ from typing import Any, Callable, Iterable, List, Tuple
import six
from six import string_types
-from llnl.util.compat import MutableMapping, MutableSequence, zip_longest
-
# Ignore emacs backups when listing modules
ignore_modules = [r"^\.#", "~$"]
@@ -312,7 +312,7 @@ def lazy_eq(lseq, rseq):
# zip_longest is implemented in native code, so use it for speed.
# use zip_longest instead of zip because it allows us to tell
# which iterator was longer.
- for left, right in zip_longest(liter, riter, fillvalue=done):
+ for left, right in itertools.zip_longest(liter, riter, fillvalue=done):
if (left is done) or (right is done):
return False
@@ -332,7 +332,7 @@ def lazy_lt(lseq, rseq):
liter = lseq()
riter = rseq()
- for left, right in zip_longest(liter, riter, fillvalue=done):
+ for left, right in itertools.zip_longest(liter, riter, fillvalue=done):
if (left is done) or (right is done):
return left is done # left was shorter than right
@@ -482,7 +482,7 @@ def lazy_lexicographic_ordering(cls, set_hash=True):
@lazy_lexicographic_ordering
-class HashableMap(MutableMapping):
+class HashableMap(collections.abc.MutableMapping):
"""This is a hashable, comparable dictionary. Hash is performed on
a tuple of the values in the dictionary."""
@@ -887,32 +887,28 @@ def load_module_from_file(module_name, module_path):
ImportError: when the module can't be loaded
FileNotFoundError: when module_path doesn't exist
"""
+ import importlib.util
+
if module_name in sys.modules:
return sys.modules[module_name]
# This recipe is adapted from https://stackoverflow.com/a/67692/771663
- if sys.version_info[0] == 3 and sys.version_info[1] >= 5:
- import importlib.util
- spec = importlib.util.spec_from_file_location(module_name, module_path) # novm
- module = importlib.util.module_from_spec(spec) # novm
- # The module object needs to exist in sys.modules before the
- # loader executes the module code.
- #
- # See https://docs.python.org/3/reference/import.html#loading
- sys.modules[spec.name] = module
+ spec = importlib.util.spec_from_file_location(module_name, module_path) # novm
+ module = importlib.util.module_from_spec(spec) # novm
+ # The module object needs to exist in sys.modules before the
+ # loader executes the module code.
+ #
+ # See https://docs.python.org/3/reference/import.html#loading
+ sys.modules[spec.name] = module
+ try:
+ spec.loader.exec_module(module)
+ except BaseException:
try:
- spec.loader.exec_module(module)
- except BaseException:
- try:
- del sys.modules[spec.name]
- except KeyError:
- pass
- raise
- elif sys.version_info[0] == 2:
- import imp
-
- module = imp.load_source(module_name, module_path)
+ del sys.modules[spec.name]
+ except KeyError:
+ pass
+ raise
return module
@@ -1030,7 +1026,7 @@ def ensure_last(lst, *elements):
lst.append(lst.pop(lst.index(elt)))
-class TypedMutableSequence(MutableSequence):
+class TypedMutableSequence(collections.abc.MutableSequence):
"""Base class that behaves like a list, just with a different type.
Client code can inherit from this base class:
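load_module_from_file now uses importlib.util unconditionally; a minimal stand-alone sketch of that loading recipe (names are illustrative):

    import importlib.util
    import sys

    def load_from_path(module_name, module_path):
        spec = importlib.util.spec_from_file_location(module_name, module_path)
        module = importlib.util.module_from_spec(spec)
        # the module must be registered in sys.modules before exec_module runs its code
        sys.modules[spec.name] = module
        spec.loader.exec_module(module)
        return module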
diff --git a/lib/spack/llnl/util/tty/__init__.py b/lib/spack/llnl/util/tty/__init__.py
index ed847298ef..7c86411dbe 100644
--- a/lib/spack/llnl/util/tty/__init__.py
+++ b/lib/spack/llnl/util/tty/__init__.py
@@ -372,10 +372,5 @@ def terminal_size():
return int(rc[0]), int(rc[1])
else:
- if sys.version_info[0] < 3:
- raise RuntimeError(
- "Terminal size not obtainable on Windows with a\
-Python version older than 3"
- )
rc = (os.environ.get("LINES", 25), os.environ.get("COLUMNS", 80))
return int(rc[0]), int(rc[1])
diff --git a/lib/spack/llnl/util/tty/log.py b/lib/spack/llnl/util/tty/log.py
index e155fa1d26..2d6609c390 100644
--- a/lib/spack/llnl/util/tty/log.py
+++ b/lib/spack/llnl/util/tty/log.py
@@ -241,8 +241,7 @@ class keyboard_input(object):
"""If termios was available, restore old settings."""
if self.old_cfg:
self._restore_default_terminal_settings()
- if sys.version_info >= (3,):
- atexit.unregister(self._restore_default_terminal_settings)
+ atexit.unregister(self._restore_default_terminal_settings)
# restore SIGSTP and SIGCONT handlers
if self.old_handlers:
@@ -323,10 +322,7 @@ class FileWrapper(object):
def unwrap(self):
if self.open:
if self.file_like:
- if sys.version_info < (3,):
- self.file = open(self.file_like, "w")
- else:
- self.file = open(self.file_like, "w", encoding="utf-8") # novm
+ self.file = open(self.file_like, "w", encoding="utf-8")
else:
self.file = StringIO()
return self.file
@@ -699,13 +695,10 @@ class StreamWrapper:
self.sys_attr = sys_attr
self.saved_stream = None
if sys.platform.startswith("win32"):
- if sys.version_info < (3, 5):
- libc = ctypes.CDLL(ctypes.util.find_library("c"))
+ if hasattr(sys, "gettotalrefcount"): # debug build
+ libc = ctypes.CDLL("ucrtbased")
else:
- if hasattr(sys, "gettotalrefcount"): # debug build
- libc = ctypes.CDLL("ucrtbased")
- else:
- libc = ctypes.CDLL("api-ms-win-crt-stdio-l1-1-0")
+ libc = ctypes.CDLL("api-ms-win-crt-stdio-l1-1-0")
kernel32 = ctypes.WinDLL("kernel32")
@@ -927,13 +920,10 @@ def _writer_daemon(
if sys.version_info < (3, 8) or sys.platform != "darwin":
os.close(write_fd)
- # Use line buffering (3rd param = 1) since Python 3 has a bug
+ # 1. Use line buffering (3rd param = 1) since Python 3 has a bug
# that prevents unbuffered text I/O.
- if sys.version_info < (3,):
- in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1)
- else:
- # Python 3.x before 3.7 does not open with UTF-8 encoding by default
- in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8")
+ # 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
+ in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8")
if stdin_multiprocess_fd:
stdin = os.fdopen(stdin_multiprocess_fd.fd)
diff --git a/lib/spack/spack/audit.py b/lib/spack/spack/audit.py
index de9fc1a05b..5187961930 100644
--- a/lib/spack/spack/audit.py
+++ b/lib/spack/spack/audit.py
@@ -37,6 +37,7 @@ as input.
"""
import ast
import collections
+import collections.abc
import inspect
import itertools
import pickle
@@ -45,7 +46,6 @@ import re
from six.moves.urllib.request import urlopen
import llnl.util.lang
-from llnl.util.compat import Sequence
import spack.config
import spack.patch
@@ -81,7 +81,7 @@ class Error(object):
return hash(value)
-class AuditClass(Sequence):
+class AuditClass(collections.abc.Sequence):
def __init__(self, group, tag, description, kwargs):
"""Return an object that acts as a decorator to register functions
associated with a specific class of sanity checks.
diff --git a/lib/spack/spack/bootstrap.py b/lib/spack/spack/bootstrap.py
index 60f8153ae2..1715eeaafd 100644
--- a/lib/spack/spack/bootstrap.py
+++ b/lib/spack/spack/bootstrap.py
@@ -476,21 +476,14 @@ def source_is_enabled_or_raise(conf):
def spec_for_current_python():
"""For bootstrapping purposes we are just interested in the Python
- minor version (all patches are ABI compatible with the same minor)
- and on whether ucs4 support has been enabled for Python 2.7
+ minor version (all patches are ABI compatible with the same minor).
See:
https://www.python.org/dev/peps/pep-0513/
https://stackoverflow.com/a/35801395/771663
"""
version_str = ".".join(str(x) for x in sys.version_info[:2])
- variant_str = ""
- if sys.version_info[0] == 2 and sys.version_info[1] == 7:
- unicode_size = sysconfig.get_config_var("Py_UNICODE_SIZE")
- variant_str = "+ucs4" if unicode_size == 4 else "~ucs4"
-
- spec_fmt = "python@{0} {1}"
- return spec_fmt.format(version_str, variant_str)
+ return "python@{0}".format(version_str)
@contextlib.contextmanager
@@ -873,9 +866,7 @@ def ensure_mypy_in_path_or_raise():
def black_root_spec():
- # black v21 is the last version to support Python 2.7.
- # Upgrade when we no longer support Python 2.7
- return _root_spec("py-black@:21")
+ return _root_spec("py-black")
def ensure_black_in_path_or_raise():
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index 9247d9f150..c2a61280ef 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -353,10 +353,8 @@ def set_compiler_environment_variables(pkg, env):
if isinstance(pkg.flag_handler, types.FunctionType):
handler = pkg.flag_handler
else:
- if sys.version_info >= (3, 0):
- handler = pkg.flag_handler.__func__
- else:
- handler = pkg.flag_handler.im_func
+ handler = pkg.flag_handler.__func__
+
injf, envf, bsf = handler(pkg, flag, spec.compiler_flags[flag][:])
inject_flags[flag] = injf or []
env_flags[flag] = envf or []
diff --git a/lib/spack/spack/build_systems/cmake.py b/lib/spack/spack/build_systems/cmake.py
index 1d0c50ea07..9456726d3e 100644
--- a/lib/spack/spack/build_systems/cmake.py
+++ b/lib/spack/spack/build_systems/cmake.py
@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import collections.abc
import inspect
import os
import platform
@@ -12,7 +13,6 @@ from typing import List, Tuple
import six
import llnl.util.filesystem as fs
-from llnl.util.compat import Sequence
import spack.build_environment
import spack.builder
@@ -302,7 +302,9 @@ class CMakeBuilder(BaseBuilder):
value = "ON" if value else "OFF"
else:
kind = "STRING"
- if isinstance(value, Sequence) and not isinstance(value, six.string_types):
+ if isinstance(value, collections.abc.Sequence) and not isinstance(
+ value, six.string_types
+ ):
value = ";".join(str(v) for v in value)
else:
value = str(value)
diff --git a/lib/spack/spack/builder.py b/lib/spack/spack/builder.py
index 7ae36b6e0a..063a7f0611 100644
--- a/lib/spack/spack/builder.py
+++ b/lib/spack/spack/builder.py
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
+import collections.abc
import copy
import functools
import inspect
@@ -10,8 +11,6 @@ from typing import List, Optional, Tuple
import six
-import llnl.util.compat
-
import spack.build_environment
#: Builder classes, as registered by the "builder" decorator
@@ -280,7 +279,7 @@ class PhaseCallbacksMeta(type):
return _decorator
-class BuilderMeta(PhaseCallbacksMeta, type(llnl.util.compat.Sequence)): # type: ignore
+class BuilderMeta(PhaseCallbacksMeta, type(collections.abc.Sequence)): # type: ignore
pass
@@ -457,7 +456,7 @@ class InstallationPhase(object):
return copy.deepcopy(self)
-class Builder(six.with_metaclass(BuilderMeta, llnl.util.compat.Sequence)):
+class Builder(six.with_metaclass(BuilderMeta, collections.abc.Sequence)):
"""A builder is a class that, given a package object (i.e. associated with
concrete spec), knows how to install it.
diff --git a/lib/spack/spack/ci_needs_workaround.py b/lib/spack/spack/ci_needs_workaround.py
index 16f18db0a0..e1c7be3180 100644
--- a/lib/spack/spack/ci_needs_workaround.py
+++ b/lib/spack/spack/ci_needs_workaround.py
@@ -2,12 +2,11 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-from llnl.util.compat import Mapping
+import collections.abc
get_job_name = lambda needs_entry: (
needs_entry.get("job")
- if (isinstance(needs_entry, Mapping) and needs_entry.get("artifacts", True))
+ if (isinstance(needs_entry, collections.abc.Mapping) and needs_entry.get("artifacts", True))
else needs_entry
if isinstance(needs_entry, str)
else None
@@ -15,7 +14,7 @@ get_job_name = lambda needs_entry: (
def convert_job(job_entry):
- if not isinstance(job_entry, Mapping):
+ if not isinstance(job_entry, collections.abc.Mapping):
return job_entry
needs = job_entry.get("needs")
diff --git a/lib/spack/spack/ci_optimization.py b/lib/spack/spack/ci_optimization.py
index f4f05f0acb..1af575292d 100644
--- a/lib/spack/spack/ci_optimization.py
+++ b/lib/spack/spack/ci_optimization.py
@@ -2,23 +2,21 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
+import collections
+import collections.abc
import copy
import hashlib
-from collections import defaultdict
-
-from llnl.util.compat import Mapping, Sequence
import spack.util.spack_yaml as syaml
def sort_yaml_obj(obj):
- if isinstance(obj, Mapping):
+ if isinstance(obj, collections.abc.Mapping):
return syaml.syaml_dict(
(k, sort_yaml_obj(v)) for k, v in sorted(obj.items(), key=(lambda item: str(item[0])))
)
- if isinstance(obj, Sequence) and not isinstance(obj, str):
+ if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
return syaml.syaml_list(sort_yaml_obj(x) for x in obj)
return obj
@@ -38,15 +36,15 @@ def matches(obj, proto):
Precondition: proto must not have any reference cycles
"""
- if isinstance(obj, Mapping):
- if not isinstance(proto, Mapping):
+ if isinstance(obj, collections.abc.Mapping):
+ if not isinstance(proto, collections.abc.Mapping):
return False
return all((key in obj and matches(obj[key], val)) for key, val in proto.items())
- if isinstance(obj, Sequence) and not isinstance(obj, str):
+ if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
- if not (isinstance(proto, Sequence) and not isinstance(proto, str)):
+ if not (isinstance(proto, collections.abc.Sequence) and not isinstance(proto, str)):
return False
if len(obj) != len(proto):
@@ -76,7 +74,9 @@ def subkeys(obj, proto):
Otherwise, obj is returned.
"""
- if not (isinstance(obj, Mapping) and isinstance(proto, Mapping)):
+ if not (
+ isinstance(obj, collections.abc.Mapping) and isinstance(proto, collections.abc.Mapping)
+ ):
return obj
new_obj = {}
@@ -88,7 +88,7 @@ def subkeys(obj, proto):
if matches(value, proto[key]) and matches(proto[key], value):
continue
- if isinstance(value, Mapping):
+ if isinstance(value, collections.abc.Mapping):
new_obj[key] = subkeys(value, proto[key])
continue
@@ -116,7 +116,7 @@ def add_extends(yaml, key):
has_key = "extends" in yaml
extends = yaml.get("extends")
- if has_key and not isinstance(extends, (str, Sequence)):
+ if has_key and not isinstance(extends, (str, collections.abc.Sequence)):
return
if extends is None:
@@ -261,7 +261,7 @@ def build_histogram(iterator, key):
The list is sorted in descending order by count, yielding the most
frequently occurring hashes first.
"""
- buckets = defaultdict(int)
+ buckets = collections.defaultdict(int)
values = {}
num_objects = 0
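The Mapping/Sequence checks above now come straight from collections.abc; a tiny sketch of the idiom, including the explicit str exclusion that matches() and sort_yaml_obj() rely on:

    import collections.abc

    def is_listlike(obj):
        # str is itself a Sequence, so exclude it explicitly
        return isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str)

    assert is_listlike([1, 2]) and is_listlike((1, 2))
    assert not is_listlike("abc")
    assert isinstance({"a": 1}, collections.abc.Mapping)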
diff --git a/lib/spack/spack/cmd/list.py b/lib/spack/spack/cmd/list.py
index 6bb5da7196..fbb7458e7f 100644
--- a/lib/spack/spack/cmd/list.py
+++ b/lib/spack/spack/cmd/list.py
@@ -12,6 +12,7 @@ import math
import os
import re
import sys
+from html import escape
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
@@ -21,11 +22,6 @@ import spack.dependency
import spack.repo
from spack.version import VersionList
-if sys.version_info > (3, 1):
- from html import escape # novm
-else:
- from cgi import escape
-
description = "list and search available packages"
section = "basic"
level = "short"
diff --git a/lib/spack/spack/cmd/style.py b/lib/spack/spack/cmd/style.py
index dc4426c8b9..0172c9f6d4 100644
--- a/lib/spack/spack/cmd/style.py
+++ b/lib/spack/spack/cmd/style.py
@@ -9,6 +9,7 @@ import argparse
import os
import re
import sys
+from itertools import zip_longest
import llnl.util.tty as tty
import llnl.util.tty.color as color
@@ -18,14 +19,6 @@ import spack.bootstrap
import spack.paths
from spack.util.executable import which
-if sys.version_info < (3, 0):
- from itertools import izip_longest # novm
-
- zip_longest = izip_longest
-else:
- from itertools import zip_longest # novm
-
-
description = "runs source code style checks on spack"
section = "developer"
level = "long"
@@ -267,7 +260,7 @@ def run_flake8(flake8_cmd, file_list, args):
"--config=%s" % os.path.join(spack.paths.prefix, ".flake8"),
*chunk,
fail_on_error=False,
- output=str
+ output=str,
)
returncode |= flake8_cmd.returncode
@@ -375,14 +368,6 @@ def run_black(black_cmd, file_list, args):
packed_args = black_args + tuple(chunk)
output = black_cmd(*packed_args, fail_on_error=False, output=str, error=str)
returncode |= black_cmd.returncode
-
- # ignore Python 2.7 deprecation error because we already know it's deprecated.
- output = "\n".join(
- line
- for line in output.split("\n")
- if "DEPRECATION: Python 2 support will be removed" not in line
- )
-
rewrite_and_print_output(output, args, pat, replacement)
print_tool_result("black", returncode)
@@ -400,10 +385,6 @@ def validate_toolset(arg_value):
def style(parser, args):
- # ensure python version is new enough
- if sys.version_info < (3, 6):
- tty.die("spack style requires Python 3.6 or later.")
-
# save initial working directory for relativizing paths later
args.initial_working_dir = os.getcwd()
diff --git a/lib/spack/spack/compilers/msvc.py b/lib/spack/spack/compilers/msvc.py
index 5cf0b1356c..110ef8099e 100644
--- a/lib/spack/spack/compilers/msvc.py
+++ b/lib/spack/spack/compilers/msvc.py
@@ -6,7 +6,6 @@
import os
import re
import subprocess
-import sys
from distutils.version import StrictVersion
from typing import Dict, List, Set # novm
@@ -98,38 +97,33 @@ class Msvc(Compiler):
def setup_custom_environment(self, pkg, env):
"""Set environment variables for MSVC using the
Microsoft-provided script."""
- if sys.version_info[:2] > (2, 6):
- # Set the build environment variables for spack. Just using
- # subprocess.call() doesn't work since that operates in its own
- # environment which is destroyed (along with the adjusted variables)
- # once the process terminates. So go the long way around: examine
- # output, sort into dictionary, use that to make the build
- # environment.
- out = subprocess.check_output( # novermin
- 'cmd /u /c "{}" {} && set'.format(self.setvarsfile, "amd64"),
- stderr=subprocess.STDOUT,
- )
- if sys.version_info[0] >= 3:
- out = out.decode("utf-16le", errors="replace") # novermin
-
- int_env = dict(
- (key.lower(), value)
- for key, _, value in (line.partition("=") for line in out.splitlines())
- if key and value
- )
-
- if "path" in int_env:
- env.set_path("PATH", int_env["path"].split(";"))
- env.set_path("INCLUDE", int_env.get("include", "").split(";"))
- env.set_path("LIB", int_env.get("lib", "").split(";"))
-
- env.set("CC", self.cc)
- env.set("CXX", self.cxx)
- env.set("FC", self.fc)
- env.set("F77", self.f77)
- else:
- # Should not this be an exception?
- print("Cannot pull msvc compiler information in Python 2.6 or below")
+ # Set the build environment variables for spack. Just using
+ # subprocess.call() doesn't work since that operates in its own
+ # environment which is destroyed (along with the adjusted variables)
+ # once the process terminates. So go the long way around: examine
+ # output, sort into dictionary, use that to make the build
+ # environment.
+ out = subprocess.check_output( # novermin
+ 'cmd /u /c "{}" {} && set'.format(self.setvarsfile, "amd64"),
+ stderr=subprocess.STDOUT,
+ )
+ out = out.decode("utf-16le", errors="replace") # novermin
+
+ int_env = dict(
+ (key.lower(), value)
+ for key, _, value in (line.partition("=") for line in out.splitlines())
+ if key and value
+ )
+
+ if "path" in int_env:
+ env.set_path("PATH", int_env["path"].split(";"))
+ env.set_path("INCLUDE", int_env.get("include", "").split(";"))
+ env.set_path("LIB", int_env.get("lib", "").split(";"))
+
+ env.set("CC", self.cc)
+ env.set("CXX", self.cxx)
+ env.set("FC", self.fc)
+ env.set("F77", self.f77)
@classmethod
def fc_version(cls, fc):
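The simplified setup_custom_environment runs the MSVC vars batch file followed by set and parses the result; a hedged sketch of that parsing step (the batch file name is hypothetical, and this only works on Windows):

    import subprocess

    out = subprocess.check_output(
        'cmd /u /c "vcvars.bat" amd64 && set', stderr=subprocess.STDOUT
    )
    out = out.decode("utf-16le", errors="replace")  # /u makes cmd emit UTF-16 output
    env = {
        key.lower(): value
        for key, _, value in (line.partition("=") for line in out.splitlines())
        if key and value
    }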
diff --git a/lib/spack/spack/cray_manifest.py b/lib/spack/spack/cray_manifest.py
index d15c8450a3..631280c473 100644
--- a/lib/spack/spack/cray_manifest.py
+++ b/lib/spack/spack/cray_manifest.py
@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import json
-import sys
import jsonschema
import jsonschema.exceptions
@@ -163,11 +162,7 @@ def entries_to_specs(entries):
def read(path, apply_updates):
- if sys.version_info >= (3, 0):
- decode_exception_type = json.decoder.JSONDecodeError
- else:
- decode_exception_type = ValueError
-
+ decode_exception_type = json.decoder.JSONDecodeError
try:
with open(path, "r") as json_file:
json_data = json.load(json_file)
diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py
index a4c13c70b7..412e34bf05 100644
--- a/lib/spack/spack/directives.py
+++ b/lib/spack/spack/directives.py
@@ -28,6 +28,7 @@ The available directives are:
* ``version``
"""
+import collections.abc
import functools
import os.path
import re
@@ -37,7 +38,6 @@ import six
import llnl.util.lang
import llnl.util.tty.color
-from llnl.util.compat import Sequence
import spack.error
import spack.patch
@@ -237,7 +237,7 @@ class DirectiveMeta(type):
if isinstance(dicts, six.string_types):
dicts = (dicts,)
- if not isinstance(dicts, Sequence):
+ if not isinstance(dicts, collections.abc.Sequence):
message = "dicts arg must be list, tuple, or string. Found {0}"
raise TypeError(message.format(type(dicts)))
@@ -300,7 +300,7 @@ class DirectiveMeta(type):
# ...so if it is not a sequence make it so
values = result
- if not isinstance(values, Sequence):
+ if not isinstance(values, collections.abc.Sequence):
values = (values,)
DirectiveMeta._directives_to_be_executed.extend(values)
diff --git a/lib/spack/spack/filesystem_view.py b/lib/spack/spack/filesystem_view.py
index 553d0b4108..51bceeb906 100644
--- a/lib/spack/spack/filesystem_view.py
+++ b/lib/spack/spack/filesystem_view.py
@@ -12,7 +12,6 @@ import shutil
import sys
from llnl.util import tty
-from llnl.util.compat import filter, map, zip
from llnl.util.filesystem import (
mkdirp,
remove_dead_links,
diff --git a/lib/spack/spack/install_test.py b/lib/spack/spack/install_test.py
index 3c976febb1..130dca088a 100644
--- a/lib/spack/spack/install_test.py
+++ b/lib/spack/spack/install_test.py
@@ -7,7 +7,6 @@ import hashlib
import os
import re
import shutil
-import sys
import six
@@ -163,8 +162,7 @@ class TestSuite(object):
json_text = sjson.dump(self.to_dict())
sha = hashlib.sha1(json_text.encode("utf-8"))
b32_hash = base64.b32encode(sha.digest()).lower()
- if sys.version_info[0] >= 3:
- b32_hash = b32_hash.decode("utf-8")
+ b32_hash = b32_hash.decode("utf-8")
self._hash = b32_hash
return self._hash
diff --git a/lib/spack/spack/main.py b/lib/spack/spack/main.py
index a9a9d20df7..58785d8390 100644
--- a/lib/spack/spack/main.py
+++ b/lib/spack/spack/main.py
@@ -320,9 +320,9 @@ class SpackArgumentParser(argparse.ArgumentParser):
kwargs.setdefault("required", True)
sp = super(SpackArgumentParser, self).add_subparsers(**kwargs)
- # This monkey patching is needed for Python 3.5 and 3.6, which support
+ # This monkey patching is needed for Python 3.6, which supports
# having a required subparser but doesn't expose the API used above
- if sys.version_info[:2] == (3, 5) or sys.version_info[:2] == (3, 6):
+ if sys.version_info[:2] == (3, 6):
sp.required = True
old_add_parser = sp.add_parser
@@ -388,7 +388,7 @@ def make_argument_parser(**kwargs):
"A flexible package manager that supports multiple versions,\n"
"configurations, platforms, and compilers."
),
- **kwargs
+ **kwargs,
)
# stat names in groups of 7, for nice wrapping.
@@ -560,12 +560,6 @@ def setup_main_options(args):
# Assign a custom function to show warnings
warnings.showwarning = send_warning_to_tty
- if sys.version_info[:2] == (2, 7):
- warnings.warn(
- "Python 2.7 support is deprecated and will be removed in Spack v0.20.\n"
- " Please move to Python 3.6 or higher."
- )
-
# Set up environment based on args.
tty.set_verbose(args.verbose)
tty.set_debug(args.debug)
@@ -1015,10 +1009,7 @@ def main(argv=None):
raise
sys.stderr.write("\n")
tty.error("Keyboard interrupt.")
- if sys.version_info >= (3, 5):
- return signal.SIGINT.value
- else:
- return signal.SIGINT
+ return signal.SIGINT.value
except SystemExit as e:
if spack.config.get("config:debug") or SHOW_BACKTRACE:
diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py
index 1bd9c6cd28..41853c39b9 100644
--- a/lib/spack/spack/mirror.py
+++ b/lib/spack/spack/mirror.py
@@ -11,6 +11,7 @@ where spack is run is not connected to the internet, it allows spack
to download packages directly from a mirror (e.g., on an intranet).
"""
import collections
+import collections.abc
import operator
import os
import os.path
@@ -21,7 +22,6 @@ import ruamel.yaml.error as yaml_error
import six
import llnl.util.tty as tty
-from llnl.util.compat import Mapping
from llnl.util.filesystem import mkdirp
import spack.config
@@ -228,7 +228,7 @@ class Mirror(object):
self._push_url = None
-class MirrorCollection(Mapping):
+class MirrorCollection(collections.abc.Mapping):
"""A mapping of mirror names to mirrors."""
def __init__(self, mirrors=None, scope=None):
diff --git a/lib/spack/spack/mixins.py b/lib/spack/spack/mixins.py
index b43b85aa02..2f6a68a2c0 100644
--- a/lib/spack/spack/mixins.py
+++ b/lib/spack/spack/mixins.py
@@ -7,13 +7,6 @@
package.
"""
import os
-import sys
-from typing import Callable, DefaultDict, List # novm
-
-if sys.version_info >= (3, 5):
- CallbackDict = DefaultDict[str, List[Callable]]
-else:
- CallbackDict = None
import llnl.util.filesystem
diff --git a/lib/spack/spack/operating_systems/windows_os.py b/lib/spack/spack/operating_systems/windows_os.py
index 02d45fd8d2..ec563f5336 100755
--- a/lib/spack/spack/operating_systems/windows_os.py
+++ b/lib/spack/spack/operating_systems/windows_os.py
@@ -7,7 +7,6 @@ import glob
import os
import platform
import subprocess
-import sys
from spack.error import SpackError
from spack.version import Version
@@ -34,9 +33,7 @@ class WindowsOs(OperatingSystem):
root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
if root:
try:
- extra_args = {}
- if sys.version_info[:3] >= (3, 6, 0):
- extra_args = {"encoding": "mbcs", "errors": "strict"}
+ extra_args = {"encoding": "mbcs", "errors": "strict"}
paths = subprocess.check_output( # type: ignore[call-overload] # novermin
[
os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"),
@@ -48,10 +45,8 @@ class WindowsOs(OperatingSystem):
"-products",
"*",
],
- **extra_args
+ **extra_args,
).strip()
- if (3, 0) <= sys.version_info[:2] <= (3, 5):
- paths = paths.decode()
vs_install_paths = paths.split("\n")
msvc_paths = [os.path.join(path, "VC", "Tools", "MSVC") for path in vs_install_paths]
for p in msvc_paths:
diff --git a/lib/spack/spack/package_base.py b/lib/spack/spack/package_base.py
index 4e25cb6b04..957b947f79 100644
--- a/lib/spack/spack/package_base.py
+++ b/lib/spack/spack/package_base.py
@@ -66,13 +66,12 @@ from spack.util.prefix import Prefix
from spack.util.web import FetchError
from spack.version import GitVersion, Version, VersionBase
-if sys.version_info[0] >= 3:
- FLAG_HANDLER_RETURN_TYPE = Tuple[
- Optional[Iterable[str]],
- Optional[Iterable[str]],
- Optional[Iterable[str]],
- ]
- FLAG_HANDLER_TYPE = Callable[[str, Iterable[str]], FLAG_HANDLER_RETURN_TYPE]
+FLAG_HANDLER_RETURN_TYPE = Tuple[
+ Optional[Iterable[str]],
+ Optional[Iterable[str]],
+ Optional[Iterable[str]],
+]
+FLAG_HANDLER_TYPE = Callable[[str, Iterable[str]], FLAG_HANDLER_RETURN_TYPE]
"""Allowed URL schemes for spack packages."""
_ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"]
@@ -1661,10 +1660,7 @@ class PackageBase(six.with_metaclass(PackageMeta, WindowsRPathMeta, PackageViewM
b32_hash = base64.b32encode(
hashlib.sha256(bytes().join(sorted(hash_content))).digest()
).lower()
-
- # convert from bytes if running python 3
- if sys.version_info[0] >= 3:
- b32_hash = b32_hash.decode("utf-8")
+ b32_hash = b32_hash.decode("utf-8")
return b32_hash
diff --git a/lib/spack/spack/repo.py b/lib/spack/spack/repo.py
index 11370f7b56..e8c593f948 100644
--- a/lib/spack/spack/repo.py
+++ b/lib/spack/spack/repo.py
@@ -4,10 +4,13 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import abc
+import collections.abc
import contextlib
import errno
import functools
import importlib
+import importlib.machinery # novm
+import importlib.util
import inspect
import itertools
import os
@@ -18,7 +21,6 @@ import shutil
import stat
import string
import sys
-import tempfile
import traceback
import types
import uuid
@@ -30,7 +32,6 @@ import six
import llnl.util.filesystem as fs
import llnl.util.lang
import llnl.util.tty as tty
-from llnl.util.compat import Mapping
from llnl.util.filesystem import working_dir
import spack.caches
@@ -79,125 +80,23 @@ def namespace_from_fullname(fullname):
return namespace
-# The code below is needed to have a uniform Loader interface that could cover both
-# Python 2.7 and Python 3.X when we load Spack packages as Python modules, e.g. when
-# we do "import spack.pkg.builtin.mpich" in package recipes.
-if sys.version_info[0] == 2:
- import imp
+class _PrependFileLoader(importlib.machinery.SourceFileLoader): # novm
+ def __init__(self, fullname, path, prepend=None):
+ super(_PrependFileLoader, self).__init__(fullname, path)
+ self.prepend = prepend
- @contextlib.contextmanager
- def import_lock():
- try:
- imp.acquire_lock()
- yield
- finally:
- imp.release_lock()
-
- def load_source(fullname, path, prepend=None):
- """Import a Python module from source.
-
- Load the source file and add it to ``sys.modules``.
-
- Args:
- fullname (str): full name of the module to be loaded
- path (str): path to the file that should be loaded
- prepend (str or None): some optional code to prepend to the
- loaded module; e.g., can be used to inject import statements
-
- Returns:
- the loaded module
- """
- with import_lock():
- with prepend_open(path, text=prepend) as f:
- return imp.load_source(fullname, path, f)
-
- @contextlib.contextmanager
- def prepend_open(f, *args, **kwargs):
- """Open a file for reading, but prepend with some text prepended
-
- Arguments are same as for ``open()``, with one keyword argument,
- ``text``, specifying the text to prepend.
-
- We have to write and read a tempfile for the ``imp``-based importer,
- as the ``file`` argument to ``imp.load_source()`` requires a
- low-level file handle.
-
- See the ``importlib``-based importer for a faster way to do this in
- later versions of python.
- """
- text = kwargs.get("text", None)
-
- with open(f, *args) as f:
- with tempfile.NamedTemporaryFile(mode="w+") as tf:
- if text:
- tf.write(text + "\n")
- tf.write(f.read())
- tf.seek(0)
- yield tf.file
-
- class _PrependFileLoader(object):
- def __init__(self, fullname, path, prepend=None):
- # Done to have a compatible interface with Python 3
- #
- # All the object attributes used in this method must be defined
- # by a derived class
- pass
-
- def package_module(self):
- try:
- module = load_source(self.fullname, self.package_py, prepend=self._package_prepend)
- except SyntaxError as e:
- # SyntaxError strips the path from the filename, so we need to
- # manually construct the error message in order to give the
- # user the correct package.py where the syntax error is located
- msg = "invalid syntax in {0:}, line {1:}"
- raise SyntaxError(msg.format(self.package_py, e.lineno))
+ def path_stats(self, path):
+ stats = super(_PrependFileLoader, self).path_stats(path)
+ if self.prepend:
+ stats["size"] += len(self.prepend) + 1
+ return stats
- module.__package__ = self.repo.full_namespace
- module.__loader__ = self
- return module
-
- def load_module(self, fullname):
- # Compatibility method to support Python 2.7
- if fullname in sys.modules:
- return sys.modules[fullname]
-
- namespace, dot, module_name = fullname.rpartition(".")
-
- try:
- module = self.package_module()
- except Exception as e:
- raise ImportError(str(e))
-
- module.__loader__ = self
- sys.modules[fullname] = module
- if namespace != fullname:
- parent = sys.modules[namespace]
- if not hasattr(parent, module_name):
- setattr(parent, module_name, module)
-
- return module
-
-else:
- import importlib.machinery # novm
-
- class _PrependFileLoader(importlib.machinery.SourceFileLoader): # novm
- def __init__(self, fullname, path, prepend=None):
- super(_PrependFileLoader, self).__init__(fullname, path)
- self.prepend = prepend
-
- def path_stats(self, path):
- stats = super(_PrependFileLoader, self).path_stats(path)
- if self.prepend:
- stats["size"] += len(self.prepend) + 1
- return stats
-
- def get_data(self, path):
- data = super(_PrependFileLoader, self).get_data(path)
- if path != self.path or self.prepend is None:
- return data
- else:
- return self.prepend.encode() + b"\n" + data
+ def get_data(self, path):
+ data = super(_PrependFileLoader, self).get_data(path)
+ if path != self.path or self.prepend is None:
+ return data
+ else:
+ return self.prepend.encode() + b"\n" + data
class RepoLoader(_PrependFileLoader):
@@ -227,22 +126,6 @@ class SpackNamespaceLoader(object):
def exec_module(self, module):
module.__loader__ = self
- def load_module(self, fullname):
- # Compatibility method to support Python 2.7
- if fullname in sys.modules:
- return sys.modules[fullname]
- module = SpackNamespace(fullname)
- self.exec_module(module)
-
- namespace, dot, module_name = fullname.rpartition(".")
- sys.modules[fullname] = module
- if namespace != fullname:
- parent = sys.modules[namespace]
- if not hasattr(parent, module_name):
- setattr(parent, module_name, module)
-
- return module
-
class ReposFinder(object):
"""MetaPathFinder class that loads a Python module corresponding to a Spack package
@@ -251,9 +134,6 @@ class ReposFinder(object):
"""
def find_spec(self, fullname, python_path, target=None):
- # This function is Python 3 only and will not be called by Python 2.7
- import importlib.util
-
# "target" is not None only when calling importlib.reload()
if target is not None:
raise RuntimeError('cannot reload module "{0}"'.format(fullname))
@@ -292,12 +172,6 @@ class ReposFinder(object):
return None
- def find_module(self, fullname, python_path=None):
- # Compatibility method to support Python 2.7
- if not fullname.startswith(ROOT_PYTHON_NAMESPACE):
- return None
- return self.compute_loader(fullname)
-
#
# These names describe how repos should be laid out in the filesystem.
@@ -483,7 +357,7 @@ class SpackNamespace(types.ModuleType):
return getattr(self, name)
-class FastPackageChecker(Mapping):
+class FastPackageChecker(collections.abc.Mapping):
"""Cache that maps package names to the stats obtained on the
'package.py' files associated with them.
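The Python-3-only _PrependFileLoader kept above is a thin SourceFileLoader subclass; a stand-alone sketch of how such a loader is wired into importlib (module name, path, and prepend text are hypothetical):

    import importlib.machinery
    import importlib.util

    class PrependLoader(importlib.machinery.SourceFileLoader):
        def __init__(self, fullname, path, prepend=None):
            super().__init__(fullname, path)
            self.prepend = prepend

        def get_data(self, path):
            data = super().get_data(path)
            if path != self.path or self.prepend is None:
                return data
            # inject the prepend text ahead of the real module source
            return self.prepend.encode() + b"\n" + data

    loader = PrependLoader("demo_pkg", "/tmp/package.py", prepend="import os")
    spec = importlib.util.spec_from_file_location("demo_pkg", "/tmp/package.py", loader=loader)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)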
diff --git a/lib/spack/spack/schema/environment.py b/lib/spack/spack/schema/environment.py
index 2a295764a2..b192ad7206 100644
--- a/lib/spack/spack/schema/environment.py
+++ b/lib/spack/spack/schema/environment.py
@@ -5,6 +5,7 @@
"""Schema for environment modifications. Meant for inclusion in other
schemas.
"""
+import collections.abc
array_of_strings_or_num = {
"type": "array",
@@ -39,15 +40,13 @@ def parse(config_obj):
config_obj: a configuration dictionary conforming to the
schema definition for environment modifications
"""
- from llnl.util.compat import Sequence
-
import spack.util.environment as ev
env = ev.EnvironmentModifications()
for command, variable in config_obj.items():
# Distinguish between commands that take only a name as argument
# (e.g. unset) and commands that take a name and a value.
- if isinstance(variable, Sequence):
+ if isinstance(variable, collections.abc.Sequence):
for name in variable:
getattr(env, command)(name)
else:
diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py
index 5f387636cc..844252c97b 100644
--- a/lib/spack/spack/solver/asp.py
+++ b/lib/spack/spack/solver/asp.py
@@ -5,6 +5,7 @@
from __future__ import division, print_function
import collections
+import collections.abc
import copy
import itertools
import os
@@ -17,8 +18,6 @@ from six import string_types
import archspec.cpu
-from llnl.util.compat import Sequence
-
try:
import clingo # type: ignore[import]
@@ -216,7 +215,7 @@ def build_criteria_names(costs, tuples):
def issequence(obj):
if isinstance(obj, string_types):
return False
- return isinstance(obj, (Sequence, types.GeneratorType))
+ return isinstance(obj, (collections.abc.Sequence, types.GeneratorType))
def listify(args):
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index 96b137622a..015c5b8061 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -80,6 +80,7 @@ specs to avoid ambiguity. Both are provided because ~ can cause shell
expansion when it is the first character in an id typed on the command line.
"""
import collections
+import collections.abc
import itertools
import os
import re
@@ -93,7 +94,6 @@ import llnl.util.filesystem as fs
import llnl.util.lang as lang
import llnl.util.tty as tty
import llnl.util.tty.color as clr
-from llnl.util.compat import Mapping
import spack.compiler
import spack.compilers
@@ -894,7 +894,7 @@ EdgeDirection = lang.enum(parent=0, child=1)
@lang.lazy_lexicographic_ordering
-class _EdgeMap(Mapping):
+class _EdgeMap(collections.abc.Mapping):
"""Represent a collection of edges (DependencySpec objects) in the DAG.
Objects of this class are used in Specs to track edges that are
@@ -2409,8 +2409,54 @@ class Spec(object):
Args:
data: a nested dict/list data structure read from YAML or JSON.
"""
+ if isinstance(data["spec"], list): # Legacy specfile format
+ return _spec_from_old_dict(data)
+
+ # Current specfile format
+ nodes = data["spec"]["nodes"]
+ hash_type = None
+ any_deps = False
+
+ # Pass 0: Determine hash type
+ for node in nodes:
+ if "dependencies" in node.keys():
+ any_deps = True
+ for _, _, _, dhash_type in Spec.dependencies_from_node_dict(node):
+ if dhash_type:
+ hash_type = dhash_type
+ break
+
+ if not any_deps: # If we never see a dependency...
+ hash_type = ht.dag_hash.name
+ elif not hash_type: # Seen a dependency, still don't know hash_type
+ raise spack.error.SpecError(
+ "Spec dictionary contains malformed " "dependencies. Old format?"
+ )
- return _spec_from_dict(data)
+ hash_dict = {}
+ root_spec_hash = None
+
+ # Pass 1: Create a single lookup dictionary by hash
+ for i, node in enumerate(nodes):
+ node_hash = node[hash_type]
+ node_spec = Spec.from_node_dict(node)
+ hash_dict[node_hash] = node
+ hash_dict[node_hash]["node_spec"] = node_spec
+ if i == 0:
+ root_spec_hash = node_hash
+ if not root_spec_hash:
+ raise spack.error.SpecError("Spec dictionary contains no nodes.")
+
+ # Pass 2: Finish construction of all DAG edges (including build specs)
+ for node_hash, node in hash_dict.items():
+ node_spec = node["node_spec"]
+ for _, dhash, dtypes, _ in Spec.dependencies_from_node_dict(node):
+ node_spec._add_dependency(hash_dict[dhash]["node_spec"], dtypes)
+ if "build_spec" in node.keys():
+ _, bhash, _ = Spec.build_spec_from_node_dict(node, hash_type=hash_type)
+ node_spec._build_spec = hash_dict[bhash]["node_spec"]
+
+ return hash_dict[root_spec_hash]["node_spec"]
@staticmethod
def from_yaml(stream):
@@ -2496,7 +2542,7 @@ class Spec(object):
msg = 'cannot validate "{0}" since it was not created ' "using Spec.from_detection".format(
self
)
- assert isinstance(self.extra_attributes, Mapping), msg
+ assert isinstance(self.extra_attributes, collections.abc.Mapping), msg
# Validate the spec calling a package specific method
pkg_cls = spack.repo.path.get_pkg_class(self.name)
@@ -4854,7 +4900,7 @@ class Spec(object):
return hash(lang.tuplify(self._cmp_iter))
def __reduce__(self):
- return _spec_from_dict, (self.to_dict(hash=ht.process_hash),)
+ return Spec.from_dict, (self.to_dict(hash=ht.process_hash),)
def merge_abstract_anonymous_specs(*abstract_specs):
@@ -4914,66 +4960,6 @@ def _spec_from_old_dict(data):
return spec
-# Note: This function has been refactored from being a static method
-# of Spec to be a function at the module level. This was needed to
-# support its use in __reduce__ to pickle a Spec object in Python 2.
-# It can be moved back safely after we drop support for Python 2.7
-def _spec_from_dict(data):
- """Construct a spec from YAML.
-
- Parameters:
- data -- a nested dict/list data structure read from YAML or JSON.
- """
- if isinstance(data["spec"], list): # Legacy specfile format
- return _spec_from_old_dict(data)
-
- # Current specfile format
- nodes = data["spec"]["nodes"]
- hash_type = None
- any_deps = False
-
- # Pass 0: Determine hash type
- for node in nodes:
- if "dependencies" in node.keys():
- any_deps = True
- for _, _, _, dhash_type in Spec.dependencies_from_node_dict(node):
- if dhash_type:
- hash_type = dhash_type
- break
-
- if not any_deps: # If we never see a dependency...
- hash_type = ht.dag_hash.name
- elif not hash_type: # Seen a dependency, still don't know hash_type
- raise spack.error.SpecError(
- "Spec dictionary contains malformed " "dependencies. Old format?"
- )
-
- hash_dict = {}
- root_spec_hash = None
-
- # Pass 1: Create a single lookup dictionary by hash
- for i, node in enumerate(nodes):
- node_hash = node[hash_type]
- node_spec = Spec.from_node_dict(node)
- hash_dict[node_hash] = node
- hash_dict[node_hash]["node_spec"] = node_spec
- if i == 0:
- root_spec_hash = node_hash
- if not root_spec_hash:
- raise spack.error.SpecError("Spec dictionary contains no nodes.")
-
- # Pass 2: Finish construction of all DAG edges (including build specs)
- for node_hash, node in hash_dict.items():
- node_spec = node["node_spec"]
- for _, dhash, dtypes, _ in Spec.dependencies_from_node_dict(node):
- node_spec._add_dependency(hash_dict[dhash]["node_spec"], dtypes)
- if "build_spec" in node.keys():
- _, bhash, _ = Spec.build_spec_from_node_dict(node, hash_type=hash_type)
- node_spec._build_spec = hash_dict[bhash]["node_spec"]
-
- return hash_dict[root_spec_hash]["node_spec"]
-
-
class LazySpecCache(collections.defaultdict):
"""Cache for Specs that uses a spec_like as key, and computes lazily
the corresponding value ``Spec(spec_like``.
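
The inlined body of `Spec.from_dict` above (formerly the module-level `_spec_from_dict`, kept at module scope only so Python 2 could pickle specs) builds the DAG in two passes: pass 1 keys every node by its hash, pass 2 resolves dependency and build-spec hashes against that table. A minimal sketch of the same two-pass pattern with plain dicts and made-up node data, not Spack's real specfile schema:

    nodes = [
        {"hash": "aaa", "name": "root", "dependencies": [("bbb", ("build", "link"))]},
        {"hash": "bbb", "name": "leaf", "dependencies": []},
    ]

    # Pass 1: a single lookup table keyed by hash; the first node is the root.
    by_hash = {n["hash"]: dict(n, edges=[]) for n in nodes}
    root_hash = nodes[0]["hash"]

    # Pass 2: resolve every dependency hash against the table to create an edge.
    for node in by_hash.values():
        for dep_hash, deptypes in node["dependencies"]:
            node["edges"].append((by_hash[dep_hash], deptypes))

    assert by_hash[root_hash]["edges"][0][0]["name"] == "leaf"
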
diff --git a/lib/spack/spack/tag.py b/lib/spack/spack/tag.py
index fb019b9c45..fd08883093 100644
--- a/lib/spack/spack/tag.py
+++ b/lib/spack/spack/tag.py
@@ -5,12 +5,7 @@
"""Classes and functions to manage package tags"""
import collections
import copy
-import sys
-
-if sys.version_info >= (3, 5):
- from collections.abc import Mapping # novm
-else:
- from collections import Mapping
+from collections.abc import Mapping
import spack.error
import spack.util.spack_json as sjson
diff --git a/lib/spack/spack/test/cmd/commands.py b/lib/spack/spack/test/cmd/commands.py
index da20927589..d972f86d7f 100644
--- a/lib/spack/spack/test/cmd/commands.py
+++ b/lib/spack/spack/test/cmd/commands.py
@@ -22,9 +22,6 @@ parser = spack.main.make_argument_parser()
spack.main.add_all_commands(parser)
-@pytest.mark.skipif(
- sys.version_info[:2] == (2, 7), reason="Fails as the output contains a warning on Python 2.7"
-)
def test_names():
"""Test default output of spack commands."""
out1 = commands().strip().split("\n")
diff --git a/lib/spack/spack/test/cmd/style.py b/lib/spack/spack/test/cmd/style.py
index fd727d088e..6738f90cf8 100644
--- a/lib/spack/spack/test/cmd/style.py
+++ b/lib/spack/spack/test/cmd/style.py
@@ -6,7 +6,6 @@
import filecmp
import os
import shutil
-import sys
import pytest
@@ -38,12 +37,6 @@ pytestmark = pytest.mark.skipif(
not has_develop_branch(), reason="requires git with develop branch"
)
-# The style tools have requirements to use newer Python versions. We simplify by
-# requiring Python 3.6 or higher to run spack style.
-skip_old_python = pytest.mark.skipif(
- sys.version_info < (3, 6), reason="requires Python 3.6 or higher"
-)
-
@pytest.fixture(scope="function")
def flake8_package(tmpdir):
@@ -156,14 +149,6 @@ def test_changed_files_all_files():
assert not any(f.startswith(spack.paths.external_path) for f in files)
-@pytest.mark.skipif(sys.version_info >= (3, 6), reason="doesn't apply to newer python")
-def test_fail_on_old_python():
- """Ensure that `spack style` runs but fails with older python."""
- output = style(fail_on_error=False)
- assert "spack style requires Python 3.6" in output
-
-
-@skip_old_python
def test_bad_root(tmpdir):
"""Ensure that `spack style` doesn't run on non-spack directories."""
output = style("--root", str(tmpdir), fail_on_error=False)
@@ -215,7 +200,6 @@ def external_style_root(flake8_package_with_errors, tmpdir):
yield tmpdir, py_file
-@skip_old_python
@pytest.mark.skipif(not which("isort"), reason="isort is not installed.")
@pytest.mark.skipif(not which("black"), reason="black is not installed.")
def test_fix_style(external_style_root):
@@ -235,7 +219,6 @@ def test_fix_style(external_style_root):
assert filecmp.cmp(broken_py, fixed_py)
-@skip_old_python
@pytest.mark.skipif(not which("flake8"), reason="flake8 is not installed.")
@pytest.mark.skipif(not which("isort"), reason="isort is not installed.")
@pytest.mark.skipif(not which("mypy"), reason="mypy is not installed.")
@@ -265,7 +248,6 @@ def test_external_root(external_style_root):
assert "lib/spack/spack/dummy.py:7: [F401] 'os' imported but unused" in output
-@skip_old_python
@pytest.mark.skipif(not which("flake8"), reason="flake8 is not installed.")
def test_style(flake8_package, tmpdir):
root_relative = os.path.relpath(flake8_package, spack.paths.prefix)
@@ -292,7 +274,6 @@ def test_style(flake8_package, tmpdir):
assert "spack style checks were clean" in output
-@skip_old_python
@pytest.mark.skipif(not which("flake8"), reason="flake8 is not installed.")
def test_style_with_errors(flake8_package_with_errors):
root_relative = os.path.relpath(flake8_package_with_errors, spack.paths.prefix)
@@ -304,7 +285,6 @@ def test_style_with_errors(flake8_package_with_errors):
assert "spack style found errors" in output
-@skip_old_python
@pytest.mark.skipif(not which("black"), reason="black is not installed.")
@pytest.mark.skipif(not which("flake8"), reason="flake8 is not installed.")
def test_style_with_black(flake8_package_with_errors):
@@ -314,7 +294,6 @@ def test_style_with_black(flake8_package_with_errors):
assert "spack style found errors" in output
-@skip_old_python
def test_skip_tools():
output = style("--skip", "isort,mypy,black,flake8")
assert "Nothing to run" in output
diff --git a/lib/spack/spack/test/compilers/basics.py b/lib/spack/spack/test/compilers/basics.py
index 6405acc7d1..4e03c9a288 100644
--- a/lib/spack/spack/test/compilers/basics.py
+++ b/lib/spack/spack/test/compilers/basics.py
@@ -84,7 +84,6 @@ def test_all_compilers(config):
assert len(filtered) == 1
-@pytest.mark.skipif(sys.version_info[0] == 2, reason="make_args_for_version requires python 3")
@pytest.mark.parametrize(
"input_version,expected_version,expected_error",
[(None, None, "Couldn't get version for compiler /usr/bin/gcc"), ("4.9", "4.9", None)],
diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py
index f2d7edf126..9afbf8f45b 100644
--- a/lib/spack/spack/test/concretize.py
+++ b/lib/spack/spack/test/concretize.py
@@ -840,7 +840,6 @@ class TestConcretize(object):
("py-extension3@1.0 ^python@3.5.1", ["patchelf@0.10"], []),
],
)
- @pytest.mark.skipif(sys.version_info[:2] == (3, 5), reason="Known failure with Python3.5")
def test_conditional_dependencies(self, spec_str, expected, unexpected):
s = Spec(spec_str).concretized()
@@ -955,7 +954,6 @@ class TestConcretize(object):
assert s.satisfies("^cumulative-vrange-bottom@2.2")
@pytest.mark.regression("9937")
- @pytest.mark.skipif(sys.version_info[:2] == (3, 5), reason="Known failure with Python3.5")
def test_dependency_conditional_on_another_dependency_state(self):
root_str = "variant-on-dependency-condition-root"
dep_str = "variant-on-dependency-condition-a"
@@ -1225,9 +1223,6 @@ class TestConcretize(object):
second_spec.concretize()
assert first_spec.dag_hash() != second_spec.dag_hash()
- @pytest.mark.skipif(
- sys.version_info[:2] == (2, 7), reason="Fixture fails intermittently with Python 2.7"
- )
@pytest.mark.regression("20292")
@pytest.mark.parametrize(
"context",
@@ -1552,9 +1547,6 @@ class TestConcretize(object):
s = Spec("python target=k10").concretized()
assert s.satisfies("target=k10")
- @pytest.mark.skipif(
- sys.version_info[:2] == (2, 7), reason="Fixture fails intermittently with Python 2.7"
- )
@pytest.mark.regression("29201")
def test_delete_version_and_reuse(self, mutable_database, repo_with_changing_recipe):
"""Test that we can reuse installed specs with versions not
@@ -1573,9 +1565,6 @@ class TestConcretize(object):
assert root.dag_hash() == new_root.dag_hash()
@pytest.mark.regression("29201")
- @pytest.mark.skipif(
- sys.version_info[:2] == (2, 7), reason="Fixture fails intermittently with Python 2.7"
- )
def test_installed_version_is_selected_only_for_reuse(
self, mutable_database, repo_with_changing_recipe
):
@@ -1841,9 +1830,6 @@ class TestConcretize(object):
s.concretized()
@pytest.mark.regression("31484")
- @pytest.mark.skipif(
- sys.version_info[:2] == (2, 7), reason="Fixture fails intermittently with Python 2.7"
- )
def test_installed_externals_are_reused(self, mutable_database, repo_with_changing_recipe):
"""Test that external specs that are in the DB can be reused."""
if spack.config.get("config:concretizer") == "original":
diff --git a/lib/spack/spack/test/graph.py b/lib/spack/spack/test/graph.py
index 60d041d60b..dc7181538e 100644
--- a/lib/spack/spack/test/graph.py
+++ b/lib/spack/spack/test/graph.py
@@ -79,7 +79,6 @@ def test_dynamic_dot_graph_mpileaks(mock_packages, config):
assert ' "{0}" -> "{1}"\n'.format(hashes[parent], hashes[child]) in dot
-@pytest.mark.skipif(sys.version_info < (3, 6), reason="Ordering might not be consistent")
def test_ascii_graph_mpileaks(config, mock_packages, monkeypatch):
monkeypatch.setattr(spack.graph.AsciiGraph, "_node_label", lambda self, node: node.name)
s = spack.spec.Spec("mpileaks").concretized()
diff --git a/lib/spack/spack/test/llnl/util/filesystem.py b/lib/spack/spack/test/llnl/util/filesystem.py
index 4950558db6..7999726dd2 100644
--- a/lib/spack/spack/test/llnl/util/filesystem.py
+++ b/lib/spack/spack/test/llnl/util/filesystem.py
@@ -498,9 +498,7 @@ def test_filter_files_with_different_encodings(regex, replacement, filename, tmp
# This should not raise exceptions
fs.filter_file(regex, replacement, target_file, **keyword_args)
# Check the strings have been replaced
- extra_kwargs = {}
- if sys.version_info > (3, 0):
- extra_kwargs = {"errors": "surrogateescape"}
+ extra_kwargs = {"errors": "surrogateescape"}
with open(target_file, mode="r", **extra_kwargs) as f:
assert replacement in f.read()
@@ -518,9 +516,7 @@ def test_filter_files_multiple(tmpdir):
fs.filter_file(r"\<string.h\>", "<unistd.h>", target_file)
fs.filter_file(r"\<stdio.h\>", "<unistd.h>", target_file)
# Check the strings have been replaced
- extra_kwargs = {}
- if sys.version_info > (3, 0):
- extra_kwargs = {"errors": "surrogateescape"}
+ extra_kwargs = {"errors": "surrogateescape"}
with open(target_file, mode="r", **extra_kwargs) as f:
assert "<malloc.h>" not in f.read()
diff --git a/lib/spack/spack/test/llnl/util/tty/log.py b/lib/spack/spack/test/llnl/util/tty/log.py
index 333088b6af..24171ecc11 100644
--- a/lib/spack/spack/test/llnl/util/tty/log.py
+++ b/lib/spack/spack/test/llnl/util/tty/log.py
@@ -72,11 +72,7 @@ def test_log_python_output_with_invalid_utf8(capfd, tmpdir):
with log.log_output("foo.txt"):
sys.stdout.buffer.write(b"\xc3\x28\n")
- # python2 and 3 treat invalid UTF-8 differently
- if sys.version_info.major == 2:
- expected = b"\xc3(\n"
- else:
- expected = b"<line lost: output was not encoded as UTF-8>\n"
+ expected = b"<line lost: output was not encoded as UTF-8>\n"
with open("foo.txt", "rb") as f:
written = f.read()
assert written == expected
@@ -465,7 +461,6 @@ def mock_shell_v_v_no_termios(proc, ctl, **kwargs):
def test_foreground_background_output(test_fn, capfd, termios_on_or_off, tmpdir):
"""Tests hitting 'v' toggles output, and that force_echo works."""
if sys.version_info >= (3, 8) and sys.platform == "darwin" and termios_on_or_off == no_termios:
-
return
shell = pty.PseudoShell(test_fn, synchronized_logger)
diff --git a/lib/spack/spack/test/repo.py b/lib/spack/spack/test/repo.py
index 402723d226..a22a431173 100644
--- a/lib/spack/spack/test/repo.py
+++ b/lib/spack/spack/test/repo.py
@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
-import sys
import pytest
@@ -56,9 +55,6 @@ def test_repo_unknown_pkg(mutable_mock_repo):
@pytest.mark.maybeslow
-@pytest.mark.skipif(
- sys.version_info < (3, 5), reason="Test started failing spuriously on Python 2.7"
-)
def test_repo_last_mtime():
latest_mtime = max(
os.path.getmtime(p.module.__file__) for p in spack.repo.path.all_package_classes()
diff --git a/lib/spack/spack/test/schema.py b/lib/spack/spack/test/schema.py
index 214a2e52fd..4623823609 100644
--- a/lib/spack/spack/test/schema.py
+++ b/lib/spack/spack/test/schema.py
@@ -5,7 +5,6 @@
import json
import os.path
-import sys
import jsonschema
import pytest
@@ -87,9 +86,6 @@ def test_module_suffixes(module_suffixes_schema):
@pytest.mark.regression("10246")
-@pytest.mark.skipif(
- sys.version_info < (2, 7), reason="requires python2.7 or higher because of importlib"
-)
@pytest.mark.parametrize(
"config_name",
["compilers", "config", "env", "merged", "mirrors", "modules", "packages", "repos"],
diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py
index e2cbf706da..811f35b1d7 100644
--- a/lib/spack/spack/test/spec_semantics.py
+++ b/lib/spack/spack/test/spec_semantics.py
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import sys
-
import pytest
import spack.directives
@@ -894,7 +892,6 @@ class TestSpecSematics(object):
with pytest.raises(spack.variant.InvalidVariantValueCombinationError):
Spec("multivalue-variant foo=*,bar")
- @pytest.mark.skipif(sys.version_info[0] == 2, reason="__wrapped__ requires python 3")
def test_errors_in_variant_directive(self):
variant = spack.directives.variant.__wrapped__
diff --git a/lib/spack/spack/test/spec_yaml.py b/lib/spack/spack/test/spec_yaml.py
index b69c0a1d9a..b939674550 100644
--- a/lib/spack/spack/test/spec_yaml.py
+++ b/lib/spack/spack/test/spec_yaml.py
@@ -12,13 +12,12 @@ from __future__ import print_function
import ast
import collections
+import collections.abc
import inspect
import os
import pytest
-from llnl.util.compat import Iterable, Mapping
-
import spack.hash_types as ht
import spack.paths
import spack.repo
@@ -148,12 +147,12 @@ def test_using_ordered_dict(mock_packages):
"""
def descend_and_check(iterable, level=0):
- if isinstance(iterable, Mapping):
+ if isinstance(iterable, collections.abc.Mapping):
assert isinstance(iterable, syaml_dict)
return descend_and_check(iterable.values(), level=level + 1)
max_level = level
for value in iterable:
- if isinstance(value, Iterable) and not isinstance(value, str):
+ if isinstance(value, collections.abc.Iterable) and not isinstance(value, str):
nlevel = descend_and_check(value, level=level + 1)
if nlevel > max_level:
max_level = nlevel
diff --git a/lib/spack/spack/test/util/executable.py b/lib/spack/spack/test/util/executable.py
index 8d360ea4e9..ea254f5634 100644
--- a/lib/spack/spack/test/util/executable.py
+++ b/lib/spack/spack/test/util/executable.py
@@ -31,10 +31,6 @@ def test_read_unicode(tmpdir, working_env):
f.write(
"""#!{0}
from __future__ import print_function
-import sys
-if sys.version_info < (3, 0, 0):
- reload(sys)
- sys.setdefaultencoding('utf8')
print(u'\\xc3')
""".format(
sys.executable
@@ -45,7 +41,7 @@ print(u'\\xc3')
fs.set_executable(script_name)
filter_shebangs_in_directory(".", [script_name])
- assert u"\xc3" == script(output=str).strip()
+ assert "\xc3" == script(output=str).strip()
def test_which_relative_path_with_slash(tmpdir, working_env):
diff --git a/lib/spack/spack/util/crypto.py b/lib/spack/spack/util/crypto.py
index 5595d15cd3..69004ae8a2 100644
--- a/lib/spack/spack/util/crypto.py
+++ b/lib/spack/spack/util/crypto.py
@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import hashlib
-import sys
from typing import Any, Callable, Dict # novm
import llnl.util.tty as tty
@@ -82,7 +81,7 @@ def checksum(hashlib_algo, filename, **kwargs):
"""Returns a hex digest of the filename generated using an
algorithm from hashlib.
"""
- block_size = kwargs.get("block_size", 2 ** 20)
+ block_size = kwargs.get("block_size", 2**20)
hasher = hashlib_algo()
with open(filename, "rb") as file:
while True:
@@ -116,7 +115,7 @@ class Checker(object):
"""
def __init__(self, hexdigest, **kwargs):
- self.block_size = kwargs.get("block_size", 2 ** 20)
+ self.block_size = kwargs.get("block_size", 2**20)
self.hexdigest = hexdigest
self.sum = None
self.hash_fun = hash_fun_for_digest(hexdigest)
@@ -137,11 +136,7 @@ class Checker(object):
def prefix_bits(byte_array, bits):
"""Return the first <bits> bits of a byte array as an integer."""
- if sys.version_info < (3,):
- b2i = ord # In Python 2, indexing byte_array gives str
- else:
- b2i = lambda b: b # In Python 3, indexing byte_array gives int
-
+ b2i = lambda b: b # In Python 3, indexing byte_array gives int
result = 0
n = 0
for i, b in enumerate(byte_array):
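
`prefix_bits` no longer needs the `ord` shim because indexing a `bytes` object yields `int` on Python 3. A quick sketch of what the helper computes, on hypothetical input:

    data = b"\xab\xcd"               # bit pattern 1010 1011 1100 1101
    assert isinstance(data[0], int)  # 0xAB == 171, no ord() required

    # First 12 bits of the byte array as an integer, mirroring prefix_bits():
    result = 0
    for b in data:
        result = (result << 8) | b
    result >>= len(data) * 8 - 12
    assert result == 0xABC
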
diff --git a/lib/spack/spack/util/elf.py b/lib/spack/spack/util/elf.py
index 0b2e5a4e71..e93a107d67 100644
--- a/lib/spack/spack/util/elf.py
+++ b/lib/spack/spack/util/elf.py
@@ -6,7 +6,6 @@
import bisect
import re
import struct
-import sys
from collections import namedtuple
from struct import calcsize, unpack, unpack_from
@@ -94,12 +93,6 @@ class ELF_CONSTANTS:
SHT_STRTAB = 3
-def get_byte_at(byte_array, idx):
- if sys.version_info[0] < 3:
- return ord(byte_array[idx])
- return byte_array[idx]
-
-
class ElfFile(object):
"""Parsed ELF file."""
@@ -381,7 +374,7 @@ def parse_header(f, elf):
raise ElfParsingError("Not an ELF file")
# Defensively require a valid class and data.
- e_ident_class, e_ident_data = get_byte_at(e_ident, 4), get_byte_at(e_ident, 5)
+ e_ident_class, e_ident_data = e_ident[4], e_ident[5]
if e_ident_class not in (ELF_CONSTANTS.CLASS32, ELF_CONSTANTS.CLASS64):
raise ElfParsingError("Invalid class found")
@@ -453,8 +446,7 @@ def get_rpaths(path):
# If it does, split the string in components
rpath = elf.dt_rpath_str
- if sys.version_info[0] >= 3:
- rpath = rpath.decode("utf-8")
+ rpath = rpath.decode("utf-8")
return rpath.split(":")
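
The same bytes-indexing guarantee lets the ELF parser compare `e_ident` fields against integer constants directly. A sketch with a hand-rolled ident prefix (constant names and values follow the ELF specification, not Spack's `ELF_CONSTANTS` class):

    ELFCLASS64, ELFDATA2LSB = 2, 1

    e_ident = b"\x7fELF\x02\x01"      # illustrative 64-bit little-endian prefix
    assert e_ident[:4] == b"\x7fELF"  # magic bytes
    assert e_ident[4] == ELFCLASS64 and e_ident[5] == ELFDATA2LSB
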
diff --git a/lib/spack/spack/util/hash.py b/lib/spack/spack/util/hash.py
index 929c97977c..b8a55524ea 100644
--- a/lib/spack/spack/util/hash.py
+++ b/lib/spack/spack/util/hash.py
@@ -5,7 +5,6 @@
import base64
import hashlib
-import sys
import spack.util.crypto
@@ -14,10 +13,7 @@ def b32_hash(content):
"""Return the b32 encoded sha1 hash of the input string as a string."""
sha = hashlib.sha1(content.encode("utf-8"))
b32_hash = base64.b32encode(sha.digest()).lower()
-
- if sys.version_info[0] >= 3:
- b32_hash = b32_hash.decode("utf-8")
-
+ b32_hash = b32_hash.decode("utf-8")
return b32_hash
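
`base64.b32encode` always returns `bytes` on Python 3, so the decode is now unconditional. For example:

    import base64
    import hashlib

    sha = hashlib.sha1("some content".encode("utf-8"))
    b32 = base64.b32encode(sha.digest()).lower().decode("utf-8")
    # A 20-byte SHA-1 digest base32-encodes to 32 characters with no padding.
    assert isinstance(b32, str) and len(b32) == 32
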
diff --git a/lib/spack/spack/util/module_cmd.py b/lib/spack/spack/util/module_cmd.py
index 007861085a..f83af678a3 100644
--- a/lib/spack/spack/util/module_cmd.py
+++ b/lib/spack/spack/util/module_cmd.py
@@ -10,7 +10,6 @@ parsing environment modules.
import os
import re
import subprocess
-import sys
import llnl.util.tty as tty
@@ -50,10 +49,7 @@ def module(*args, **kwargs):
# Update os.environ with new dict
os.environ.clear()
- if sys.version_info >= (3, 2):
- os.environb.update(environ) # novermin
- else:
- os.environ.update(environ)
+ os.environb.update(environ) # novermin
else:
# Simply execute commands that don't change state and return output
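
`os.environb` (available on POSIX since Python 3.2, hence the retained `# novermin` marker) accepts byte keys and values and stays in sync with `os.environ`. A minimal sketch, without the environment clearing done by `module()`:

    import os

    # Apply an environment parsed as bytes; os.environ reflects the change.
    os.environb.update({b"MY_MODULE_ROOT": b"/opt/modules"})
    assert os.environ["MY_MODULE_ROOT"] == "/opt/modules"
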
diff --git a/lib/spack/spack/util/pattern.py b/lib/spack/spack/util/pattern.py
index 6732fbf945..7d689d3c50 100644
--- a/lib/spack/spack/util/pattern.py
+++ b/lib/spack/spack/util/pattern.py
@@ -2,12 +2,10 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
+import collections.abc
import functools
import inspect
-from llnl.util.compat import MutableSequence
-
class Delegate(object):
def __init__(self, name, container):
@@ -38,7 +36,7 @@ def composite(interface=None, method_list=None, container=list):
non-special methods will be taken into account
method_list (list): names of methods that should be part
of the composite
- container (MutableSequence): container for the composite object
+ container (collections.abc.MutableSequence): container for the composite object
(default = list). Must fulfill the MutableSequence
contract. The composite class will expose the container API
to manage object composition
@@ -52,7 +50,7 @@ def composite(interface=None, method_list=None, container=list):
# exception if it doesn't. The patched class returned by the decorator will
# inherit from the container class to expose the interface needed to manage
# objects composition
- if not issubclass(container, MutableSequence):
+ if not issubclass(container, collections.abc.MutableSequence):
raise TypeError("Container must fulfill the MutableSequence contract")
# Check if at least one of the 'interface' or the 'method_list' arguments
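
The `issubclass` guard above only needs the standard ABC now. For instance, a `list` container passes the check while a `dict` does not:

    import collections.abc

    assert issubclass(list, collections.abc.MutableSequence)
    assert not issubclass(dict, collections.abc.MutableSequence)
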
diff --git a/lib/spack/spack/util/spack_yaml.py b/lib/spack/spack/util/spack_yaml.py
index a6c2a5660c..88bd4b45a6 100644
--- a/lib/spack/spack/util/spack_yaml.py
+++ b/lib/spack/spack/util/spack_yaml.py
@@ -13,6 +13,7 @@
"""
import collections
+import collections.abc
import ctypes
import re
from typing import List # novm
@@ -21,7 +22,6 @@ import ruamel.yaml as yaml
from ruamel.yaml import RoundTripDumper, RoundTripLoader
from six import StringIO, string_types
-from llnl.util.compat import Mapping
from llnl.util.tty.color import cextra, clen, colorize
import spack.error
@@ -352,7 +352,7 @@ def sorted_dict(dict_like):
"""
result = syaml_dict(sorted(dict_like.items()))
for key, value in result.items():
- if isinstance(value, Mapping):
+ if isinstance(value, collections.abc.Mapping):
result[key] = sorted_dict(value)
return result
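
`sorted_dict` recurses whenever a value is itself a mapping. A plain-dict sketch of the same behaviour (the real function returns `syaml_dict` instances):

    import collections.abc

    def sorted_dict(dict_like):
        # Sort this level, then recurse into nested mappings.
        result = dict(sorted(dict_like.items()))
        for key, value in result.items():
            if isinstance(value, collections.abc.Mapping):
                result[key] = sorted_dict(value)
        return result

    out = sorted_dict({"b": 1, "a": {"d": 2, "c": 3}})
    assert list(out) == ["a", "b"] and list(out["a"]) == ["c", "d"]
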
diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index 939ec669c0..9d8588c2b9 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -15,6 +15,7 @@ import shutil
import ssl
import sys
import traceback
+from html.parser import HTMLParser
import six
from six.moves.urllib.error import URLError
@@ -39,16 +40,10 @@ from spack.util.path import convert_to_posix_path
#: User-Agent used in Request objects
SPACK_USER_AGENT = "Spackbot/{0}".format(spack.spack_version)
-if sys.version_info < (3, 0):
- # Python 2 had these in the HTMLParser package.
- from HTMLParser import HTMLParseError, HTMLParser # novm
-else:
- # In Python 3, things moved to html.parser
- from html.parser import HTMLParser
- # Also, HTMLParseError is deprecated and never raised.
- class HTMLParseError(Exception):
- pass
+# Also, HTMLParseError is deprecated and never raised.
+class HTMLParseError(Exception):
+ pass
class LinkParser(HTMLParser):
@@ -676,11 +671,6 @@ def spider(root_urls, depth=0, concurrency=32):
except HTMLParseError as e:
# This error indicates that Python's HTML parser sucks.
msg = "Got an error parsing HTML."
-
- # Pre-2.7.3 Pythons in particular have rather prickly HTML parsing.
- if sys.version_info[:3] < (2, 7, 3):
- msg += " Use Python 2.7.3 or newer for better HTML parsing."
-
tty.warn(msg, url, "HTMLParseError: " + str(e))
except Exception as e:
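
With the Python 2 branch gone, `HTMLParser` comes straight from `html.parser` and `HTMLParseError` survives only as a local placeholder. A minimal stand-alone parser in the same spirit as `LinkParser` (not Spack's actual implementation):

    from html.parser import HTMLParser

    class TinyLinkParser(HTMLParser):
        def __init__(self):
            super().__init__()
            self.links = []

        def handle_starttag(self, tag, attrs):
            # Record href attributes of anchor tags.
            if tag == "a":
                self.links.extend(value for name, value in attrs if name == "href")

    parser = TinyLinkParser()
    parser.feed('<a href="spack-0.19.0.tar.gz">download</a>')
    assert parser.links == ["spack-0.19.0.tar.gz"]
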
diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py
index cfb43cfc70..b006ae985e 100644
--- a/lib/spack/spack/variant.py
+++ b/lib/spack/spack/variant.py
@@ -6,7 +6,7 @@
"""The variant module contains data structures that are needed to manage
variants both in packages and in specs.
"""
-
+import collections.abc
import functools
import inspect
import itertools
@@ -17,7 +17,6 @@ from six import StringIO
import llnl.util.lang as lang
import llnl.util.tty.color
-from llnl.util.compat import Sequence
import spack.directives
import spack.error as error
@@ -712,7 +711,7 @@ def substitute_abstract_variants(spec):
# The class below inherit from Sequence to disguise as a tuple and comply
# with the semantic expected by the 'values' argument of the variant directive
-class DisjointSetsOfValues(Sequence):
+class DisjointSetsOfValues(collections.abc.Sequence):
"""Allows combinations from one of many mutually exclusive sets.
The value ``('none',)`` is reserved to denote the empty set
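
Inheriting from `collections.abc.Sequence` only requires `__getitem__` and `__len__`; the ABC then supplies iteration, containment checks and indexing, which is what lets `DisjointSetsOfValues` pose as a tuple. An illustrative stand-in, not the real class:

    import collections.abc
    import itertools

    class JoinedSets(collections.abc.Sequence):
        def __init__(self, *sets):
            self._items = tuple(itertools.chain.from_iterable(sets))

        def __getitem__(self, idx):
            return self._items[idx]

        def __len__(self):
            return len(self._items)

    values = JoinedSets({"none"}, {"foo", "bar"})
    assert "foo" in values and len(values) == 3
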
diff --git a/lib/spack/spack_installable/main.py b/lib/spack/spack_installable/main.py
index 4a4001b999..7b4c40b8d9 100644
--- a/lib/spack/spack_installable/main.py
+++ b/lib/spack/spack_installable/main.py
@@ -1,3 +1,7 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import sys
from os.path import dirname as dn
@@ -14,10 +18,6 @@ def main(argv=None):
# Add external libs
spack_external_libs = os.path.join(spack_lib_path, "external")
-
- if sys.version_info[:2] <= (2, 7):
- sys.path.insert(0, os.path.join(spack_external_libs, "py2"))
-
sys.path.insert(0, spack_external_libs)
# Here we delete ruamel.yaml in case it has been already imported from site
# (see #9206 for a broader description of the issue).
@@ -31,29 +31,6 @@ def main(argv=None):
if "ruamel" in sys.modules:
del sys.modules["ruamel"]
- # The following code is here to avoid failures when updating
- # the develop version, due to spurious argparse.pyc files remaining
- # in the libs/spack/external directory, see:
- # https://github.com/spack/spack/pull/25376
- # TODO: Remove in v0.18.0 or later
- try:
- import argparse # noqa: F401
- except ImportError:
- argparse_pyc = os.path.join(spack_external_libs, "argparse.pyc")
- if not os.path.exists(argparse_pyc):
- raise
- try:
- os.remove(argparse_pyc)
- import argparse # noqa: F401
- except Exception:
- msg = (
- "The file\n\n\t{0}\n\nis corrupted and cannot be deleted by Spack. "
- "Either delete it manually or ask some administrator to "
- "delete it for you."
- )
- print(msg.format(argparse_pyc))
- sys.exit(1)
-
import spack.main # noqa: E402
sys.exit(spack.main.main(argv))
diff --git a/pyproject.toml b/pyproject.toml
index 30b621dec4..512d48546e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -71,7 +71,7 @@ features = [
[tool.black]
line-length = 99
-target-version = ['py27', 'py35', 'py36', 'py37', 'py38', 'py39', 'py310']
+target-version = ['py36', 'py37', 'py38', 'py39', 'py310']
include = '''
\.pyi?$
'''
diff --git a/var/spack/repos/builtin/packages/libpng/package.py b/var/spack/repos/builtin/packages/libpng/package.py
index eb3b10a2d9..6dd83d732f 100644
--- a/var/spack/repos/builtin/packages/libpng/package.py
+++ b/var/spack/repos/builtin/packages/libpng/package.py
@@ -31,8 +31,8 @@ class Libpng(AutotoolsPackage):
# not honored, see
# https://sourceforge.net/p/libpng/bugs/210/#33f1
# '--with-zlib=' + self.spec['zlib'].prefix,
- "CPPFLAGS={0}".format(self.spec["zlib"].headers.include_flags),
- "LDFLAGS={0}".format(self.spec["zlib"].libs.search_flags),
+ f"CPPFLAGS={self.spec['zlib'].headers.include_flags}",
+ f"LDFLAGS={self.spec['zlib'].libs.search_flags}",
]
return args
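
The `configure_args` entries switch from `str.format` to f-strings, which every supported interpreter (3.6+) understands; quotes inside the braces only have to differ from the outer quotes on interpreters older than 3.12. A rough stand-alone equivalent, with a made-up flag value:

    include_flags = "-I/opt/zlib/include"  # stand-in for spec["zlib"].headers.include_flags
    assert f"CPPFLAGS={include_flags}" == "CPPFLAGS={0}".format(include_flags)
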